diff --git a/.github/actions/setup-actionlint/action.yml b/.github/actions/setup-actionlint/action.yml
index 539d34bf100..6605d5db1bc 100644
--- a/.github/actions/setup-actionlint/action.yml
+++ b/.github/actions/setup-actionlint/action.yml
@@ -4,7 +4,7 @@ description: Setup actionlint
 inputs:
   version:
     description: The version of actionlint
-    default: 1.6.23
+    default: 1.6.24
   cache-seed:
     required: true
     type: string
diff --git a/.github/actions/setup-relenv/action.yml b/.github/actions/setup-relenv/action.yml
index 745eb293932..1f228fd1822 100644
--- a/.github/actions/setup-relenv/action.yml
+++ b/.github/actions/setup-relenv/action.yml
@@ -22,7 +22,7 @@ inputs:
     required: false
     type: string
     description: The version of relenv to use
-    default: 0.10.2
+    default: 0.12.3

 outputs:
   version:
diff --git a/.github/workflows/build-deb-packages.yml b/.github/workflows/build-deb-packages.yml
index f9aa6af3ae3..3823a620ed1 100644
--- a/.github/workflows/build-deb-packages.yml
+++ b/.github/workflows/build-deb-packages.yml
@@ -22,9 +22,9 @@ jobs:
         arch:
           - x86_64
           - aarch64
-        src:
-          - true
-          - false
+        source:
+          - onedir
+          - src

     container:
       image: ghcr.io/saltstack/salt-ci-containers/packaging:debian-11
@@ -79,34 +79,31 @@ jobs:

       - name: Build Deb
         working-directory: pkgs/checkout/
-        if: ${{ matrix.src != true}}
         run: |
-          tools pkg build deb --onedir salt-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz
-
-      - name: Build Deb
-        working-directory: pkgs/checkout/
-        if: ${{ matrix.src == true}}
-        run: |
-          tools pkg build deb --arch ${{ matrix.arch }}
+          tools pkg build deb ${{
+            matrix.source == 'onedir' &&
+            format('--onedir=salt-{0}-onedir-linux-{1}.tar.xz', inputs.salt-version, matrix.arch)
+            ||
+            format('--arch={0}', matrix.arch)
+          }}

       - name: Cleanup
         run: |
           rm -rf pkgs/checkout/

-      - name: Upload DEBs
-        uses: actions/upload-artifact@v3
-        if: ${{ matrix.src == false}}
-        with:
-          name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb
-          path: ${{ github.workspace }}/pkgs/*
-          retention-days: 7
-          if-no-files-found: error
+      - name: Set Artifact Name
+        id: set-artifact-name
+        run: |
+          if [ "${{ matrix.source }}" != "src" ]; then
+            echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb" >> "$GITHUB_OUTPUT"
+          else
+            echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb-from-src" >> "$GITHUB_OUTPUT"
+          fi

       - name: Upload DEBs
         uses: actions/upload-artifact@v3
-        if: ${{ matrix.src == true}}
         with:
-          name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb-from-src
+          name: ${{ steps.set-artifact-name.outputs.artifact-name }}
           path: ${{ github.workspace }}/pkgs/*
           retention-days: 7
           if-no-files-found: error
diff --git a/.github/workflows/build-deps-onedir.yml b/.github/workflows/build-deps-onedir.yml
index 8a703373f8f..ad788929cf9 100644
--- a/.github/workflows/build-deps-onedir.yml
+++ b/.github/workflows/build-deps-onedir.yml
@@ -21,7 +21,7 @@ on:
       relenv-version:
         required: false
         type: string
-        default: 0.10.2
+        default: 0.12.3
         description: The version of relenv to use
       python-version-linux:
         required: false
diff --git a/.github/workflows/build-macos-packages.yml b/.github/workflows/build-macos-packages.yml
index 54a5af28bb8..9e07834fea0 100644
--- a/.github/workflows/build-macos-packages.yml
+++ b/.github/workflows/build-macos-packages.yml
@@ -8,6 +8,10 @@ on:
         type: string
         required: true
         description: The Salt version to set prior to building packages.
+      sign-packages:
+        type: boolean
+        default: false
+        description: Sign Packages
       environment:
         type: string
         description: The GitHub Environment where this workflow should run
@@ -31,12 +35,23 @@ jobs:
         shell: bash
         id: check-pkg-sign
         run: |
-          if [ "${{ (secrets.MAC_SIGN_APPLE_ACCT != '' && contains(fromJSON('["nightly", "staging"]'), inputs.environment)) && 'true' || 'false' }}" != "true" ]; then
-            echo "The packages created will NOT be signed"
-            echo "sign-pkgs=false" >> "$GITHUB_OUTPUT"
+          if [ "${{ inputs.sign-packages }}" == "true" ]; then
+            if [ "${{ (secrets.MAC_SIGN_APPLE_ACCT != '' && contains(fromJSON('["nightly", "staging"]'), inputs.environment)) && 'true' || 'false' }}" != "true" ]; then
+              MSG="Secrets for signing packages are not available. The packages created will NOT be signed."
+              echo "${MSG}"
+              echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}"
+              echo "sign-pkgs=false" >> "$GITHUB_OUTPUT"
+            else
+              MSG="The packages created WILL be signed."
+              echo "${MSG}"
+              echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}"
+              echo "sign-pkgs=true" >> "$GITHUB_OUTPUT"
+            fi
           else
-            echo "The packages created WILL be signed"
-            echo "sign-pkgs=true" >> "$GITHUB_OUTPUT"
+            MSG="The sign-packages input is false. The packages created will NOT be signed."
+            echo "${MSG}"
+            echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}"
+            echo "sign-pkgs=false" >> "$GITHUB_OUTPUT"
           fi

       - uses: actions/checkout@v3
diff --git a/.github/workflows/build-rpm-packages.yml b/.github/workflows/build-rpm-packages.yml
index ef032568063..72464818307 100644
--- a/.github/workflows/build-rpm-packages.yml
+++ b/.github/workflows/build-rpm-packages.yml
@@ -25,9 +25,9 @@ jobs:
         arch:
           - x86_64
           - aarch64
-        src:
-          - true
-          - false
+        source:
+          - onedir
+          - src

     container:
       image: ghcr.io/saltstack/salt-ci-containers/packaging:centosstream-9
@@ -67,29 +67,27 @@ jobs:
           tools pkg apply-release-patch salt-${{ inputs.salt-version }}.patch --delete

       - name: Build RPM
-        if: ${{ matrix.src != true}}
         run: |
-          tools pkg build rpm --onedir salt-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz
+          tools pkg build rpm ${{
+            matrix.source == 'onedir' &&
+            format('--onedir=salt-{0}-onedir-linux-{1}.tar.xz', inputs.salt-version, matrix.arch)
+            ||
+            format('--arch={0}', matrix.arch)
+          }}

-      - name: Build RPM
-        if: ${{ matrix.src == true}}
+      - name: Set Artifact Name
+        id: set-artifact-name
         run: |
-          tools pkg build rpm --arch ${{ matrix.arch }}
+          if [ "${{ matrix.source }}" != "src" ]; then
+            echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm" >> "$GITHUB_OUTPUT"
+          else
+            echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm-from-src" >> "$GITHUB_OUTPUT"
+          fi

       - name: Upload RPMs
         uses: actions/upload-artifact@v3
-        if: ${{ matrix.src != true}}
         with:
-          name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm
-          path: ~/rpmbuild/RPMS/${{ matrix.arch }}/*.rpm
-          retention-days: 7
-          if-no-files-found: error
-
-      - name: Upload RPMs
-        uses: actions/upload-artifact@v3
-        if: ${{ matrix.src == true}}
-        with:
-          name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm-from-src
+          name: ${{ steps.set-artifact-name.outputs.artifact-name }}
           path: ~/rpmbuild/RPMS/${{ matrix.arch }}/*.rpm
           retention-days: 7
           if-no-files-found: error
diff --git a/.github/workflows/build-salt-onedir.yml b/.github/workflows/build-salt-onedir.yml
index 837c6cf30b8..2b1b758b42f 100644
--- a/.github/workflows/build-salt-onedir.yml
+++ b/.github/workflows/build-salt-onedir.yml
@@ -21,7 +21,7 @@ on:
       relenv-version:
         required: false
         type: string
-        default: 0.10.2
+        default: 0.12.3
         description: The version of relenv to use
       python-version-linux:
         required: false
diff --git a/.github/workflows/build-windows-packages.yml b/.github/workflows/build-windows-packages.yml
index 4831a59e718..b50d7cdc618 100644
--- a/.github/workflows/build-windows-packages.yml
+++ b/.github/workflows/build-windows-packages.yml
@@ -8,6 +8,10 @@ on:
         type: string
         required: true
         description: The Salt version to set prior to building packages
+      sign-packages:
+        type: boolean
+        default: false
+        description: Sign Packages
       environment:
         type: string
         description: The GitHub Environment where this workflow should run
@@ -40,12 +44,23 @@ jobs:
         shell: bash
         id: check-pkg-sign
         run: |
-          if [ "${{ (secrets.WIN_SIGN_API_KEY != '' && env.SM_HOST != '' && inputs.environment == 'staging') && 'true' || 'false' }}" != "true" ]; then
-            echo "The packages created will NOT be signed"
-            echo "sign-pkgs=false" >> "$GITHUB_OUTPUT"
+          if [ "${{ inputs.sign-packages }}" == "true" ]; then
+            if [ "${{ (secrets.WIN_SIGN_API_KEY != '' && env.SM_HOST != '' && inputs.environment == 'staging') && 'true' || 'false' }}" != "true" ]; then
+              MSG="Secrets for signing packages are not available. The packages created will NOT be signed."
+              echo "${MSG}"
+              echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}"
+              echo "sign-pkgs=false" >> "$GITHUB_OUTPUT"
+            else
+              MSG="The packages created WILL be signed."
+              echo "${MSG}"
+              echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}"
+              echo "sign-pkgs=true" >> "$GITHUB_OUTPUT"
+            fi
           else
-            echo "The packages created WILL be signed"
-            echo "sign-pkgs=true" >> "$GITHUB_OUTPUT"
+            MSG="The sign-packages input is false. The packages created will NOT be signed."
+            echo "${MSG}"
+            echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}"
+            echo "sign-pkgs=false" >> "$GITHUB_OUTPUT"
           fi

       - uses: actions/checkout@v3
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 49f7240dcc5..a18e21fcc5d 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -37,6 +37,8 @@ jobs:
       testrun: ${{ steps.define-testrun.outputs.testrun }}
       salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
       cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
+      latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
+      releases: ${{ steps.get-salt-releases.outputs.releases }}
     steps:
       - uses: actions/checkout@v3
         with:
@@ -204,6 +206,18 @@ jobs:
         run: |
           echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.'

+      - name: Get Salt Releases
+        id: get-salt-releases
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          tools ci get-releases
+
+      - name: Check Salt Releases
+        run: |
+          echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.'
+          echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.'
+
       - name: Define Testrun
         id: define-testrun
         run: |
@@ -434,7 +448,7 @@ jobs:
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
       github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-      relenv-version: "0.11.2"
+      relenv-version: "0.12.3"
       python-version-linux: "3.10.11"
       python-version-macos: "3.10.11"
       python-version-windows: "3.10.11"
@@ -452,7 +466,7 @@ jobs:
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
       github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-      relenv-version: "0.11.2"
+      relenv-version: "0.12.3"
       python-version-linux: "3.10.11"
       python-version-macos: "3.10.11"
       python-version-windows: "3.10.11"
diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml
index 1cc7062c94d..8291efe30fa 100644
--- a/.github/workflows/nightly.yml
+++ b/.github/workflows/nightly.yml
@@ -6,7 +6,16 @@ name: Nightly
 run-name: "Nightly (branch: ${{ github.ref_name }})"

 on:
-  workflow_dispatch: {}
+  workflow_dispatch:
+    inputs:
+      skip-salt-test-suite:
+        type: boolean
+        default: false
+        description: Skip running the Salt test suite.
+      skip-salt-pkg-test-suite:
+        type: boolean
+        default: false
+        description: Skip running the Salt packages test suite.
   schedule:
     # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onschedule
     - cron: '0 1 * * *'  # Every day at 1AM
@@ -81,6 +90,8 @@ jobs:
       testrun: ${{ steps.define-testrun.outputs.testrun }}
       salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
       cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
+      latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
+      releases: ${{ steps.get-salt-releases.outputs.releases }}
     steps:
       - uses: actions/checkout@v3
         with:
@@ -242,12 +253,24 @@ jobs:
       - name: Define Jobs
         id: define-jobs
         run: |
-          tools ci define-jobs ${{ github.event_name }} changed-files.json
+          tools ci define-jobs${{ inputs.skip-salt-test-suite && ' --skip-tests' || '' }}${{ inputs.skip-salt-pkg-test-suite && ' --skip-pkg-tests' || '' }} ${{ github.event_name }} changed-files.json

       - name: Check Defined Jobs
         run: |
           echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.'

+      - name: Get Salt Releases
+        id: get-salt-releases
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          tools ci get-releases
+
+      - name: Check Salt Releases
+        run: |
+          echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.'
+          echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.'
+
       - name: Define Testrun
         id: define-testrun
         run: |
@@ -483,7 +506,7 @@ jobs:
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
       github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-      relenv-version: "0.11.2"
+      relenv-version: "0.12.3"
       python-version-linux: "3.10.11"
       python-version-macos: "3.10.11"
       python-version-windows: "3.10.11"
@@ -501,7 +524,7 @@ jobs:
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
       github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-      relenv-version: "0.11.2"
+      relenv-version: "0.12.3"
       python-version-linux: "3.10.11"
       python-version-macos: "3.10.11"
       python-version-windows: "3.10.11"
@@ -536,6 +559,7 @@ jobs:
     with:
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       environment: nightly
+      sign-packages: false
     secrets: inherit

   build-macos-pkgs:
@@ -548,6 +572,7 @@ jobs:
     with:
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       environment: nightly
+      sign-packages: true
     secrets: inherit

   amazonlinux-2-pkg-tests:
@@ -1409,18 +1434,23 @@ jobs:

       - name: Create Repository
         run: |
-          tools pkg repo create src --key-id=64CBBC8173D76B3F --nightly-build \
+          tools pkg repo create src --key-id=64CBBC8173D76B3F --nightly-build-from=${{ github.ref_name }} \
             --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
             --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo

+      - name: Copy Files For Source Only Artifact Uploads
+        run: |
+          mkdir artifacts/src
+          find artifacts/pkgs/repo -type f -print -exec cp {} artifacts/src \;
+
       - name: Upload Standalone Repository As An Artifact
         uses: actions/upload-artifact@v3
         with:
           name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-nightly-src-repo
           path: |
-            artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
-            artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.*
-            artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/*-GPG-*
+            artifacts/src/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
+            artifacts/src/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.*
+            artifacts/src/*-GPG-*
           retention-days: 7
           if-no-files-found: error

@@ -1530,7 +1560,7 @@ jobs:

       - name: Create Repository
         run: |
-          tools pkg repo create deb --key-id=64CBBC8173D76B3F --distro-arch=${{ matrix.arch }} --nightly-build \
+          tools pkg repo create deb --key-id=64CBBC8173D76B3F --distro-arch=${{ matrix.arch }} --nightly-build-from=${{ github.ref_name }} \
             --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
             --distro=${{ matrix.distro }} --distro-version=${{ matrix.version }} \
             --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
@@ -1674,7 +1704,7 @@ jobs:
           SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
           SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
         run: |
-          tools pkg repo create rpm --key-id=64CBBC8173D76B3F --distro-arch=${{ matrix.arch }} --nightly-build \
+          tools pkg repo create rpm --key-id=64CBBC8173D76B3F --distro-arch=${{ matrix.arch }} --nightly-build-from=${{ github.ref_name }} \
            --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
            --distro=${{ matrix.distro }} --distro-version=${{ matrix.version }} \
            --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo
@@ -1769,7 +1799,7 @@ jobs:

       - name: Create Repository
         run: |
-          tools pkg repo create windows --key-id=64CBBC8173D76B3F --nightly-build \
+          tools pkg repo create windows --key-id=64CBBC8173D76B3F --nightly-build-from=${{ github.ref_name }} \
             --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
             --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo

@@ -1845,7 +1875,7 @@ jobs:

       - name: Create Repository
         run: |
-          tools pkg repo create macos --key-id=64CBBC8173D76B3F --nightly-build \
+          tools pkg repo create macos --key-id=64CBBC8173D76B3F --nightly-build-from=${{ github.ref_name }} \
             --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
             --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo

@@ -1957,7 +1987,7 @@ jobs:

       - name: Create Repository
         run: |
-          tools pkg repo create onedir --key-id=64CBBC8173D76B3F --nightly-build \
+          tools pkg repo create onedir --key-id=64CBBC8173D76B3F --nightly-build-from=${{ github.ref_name }} \
             --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
             --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo

@@ -1972,6 +2002,7 @@ jobs:

   publish-repositories:
     name: Publish Repositories
+    if: ${{ always() && ! failure() && ! cancelled() }}
     runs-on:
       - self-hosted
       - linux
diff --git a/.github/workflows/update_winrepo.yml b/.github/workflows/release-update-winrepo.yml
similarity index 100%
rename from .github/workflows/update_winrepo.yml
rename to .github/workflows/release-update-winrepo.yml
diff --git a/.github/workflows/upload-virustotal.yml b/.github/workflows/release-upload-virustotal.yml
similarity index 100%
rename from .github/workflows/upload-virustotal.yml
rename to .github/workflows/release-upload-virustotal.yml
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 3c44a807a99..61fc1f5783e 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -44,25 +44,20 @@ jobs:
           require: admin
           username: ${{ github.triggering_actor }}

-      - name: Check Branch
-        run: |
-          echo "Trying to run the staging workflow from branch ${{ github.ref_name }}"
-          if [ "${{ contains(fromJSON('["master", "3006.x"]'), github.ref_name) }}" != "true" ]; then
-            echo "Running the staging workflow from the ${{ github.ref_name }} branch is not allowed"
-            echo "Allowed branches: master, 3006.x"
-            exit 1
-          else
-            echo "Allowed to release from branch ${{ github.ref_name }}"
-          fi
-
   prepare-workflow:
     name: Prepare Workflow Run
-    runs-on: ubuntu-latest
+    runs-on:
+      - self-hosted
+      - linux
+      - repo-release
+    environment: release
     needs:
       - check-requirements
     outputs:
       salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
       cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
+      latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
+      releases: ${{ steps.get-salt-releases.outputs.releases }}
     steps:
       - uses: actions/checkout@v3
         with:
@@ -93,11 +88,77 @@ jobs:
         run: |
           tools pkg repo confirm-unreleased --repository ${{ github.repository }} ${{ steps.setup-salt-version.outputs.salt-version }}

+      - name: Check Release Staged
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          tools pkg repo confirm-staged --repository ${{ github.repository }} ${{ steps.setup-salt-version.outputs.salt-version }}
+
+      - name: Get Salt Releases
+        id: get-salt-releases
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          tools ci get-releases
+
+      - name: Check Salt Releases
+        run: |
+          echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.'
+          echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.'
+
       - name: Set Cache Seed Output
         id: set-cache-seed
         run: |
           echo "cache-seed=${{ env.CACHE_SEED }}" >> "$GITHUB_OUTPUT"

+  download-onedir-artifact:
+    name: Download Staging Onedir Artifact
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    runs-on:
+      - self-hosted
+      - linux
+      - repo-release
+    environment: release
+    needs:
+      - prepare-workflow
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - platform: linux
+            arch: x86_64
+          - platform: linux
+            arch: aarch64
+          - platform: windows
+            arch: amd64
+          - platform: windows
+            arch: x86
+          - platform: darwin
+            arch: x86_64
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Setup Python Tools Scripts
+        uses: ./.github/actions/setup-python-tools-scripts
+
+      - name: Get Salt Project GitHub Actions Bot Environment
+        run: |
+          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
+          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
+          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
+
+      - name: Download Onedir Tarball Artifact
+        run: |
+          tools release download-onedir-artifact --platform=${{ matrix.platform }} --arch=${{ matrix.arch }} ${{ inputs.salt-version }}
+
+      - name: Upload Onedir Tarball as an Artifact
+        uses: actions/upload-artifact@v3
+        with:
+          name: salt-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
+          path: artifacts/salt-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz*
+          retention-days: 7
+          if-no-files-found: error
+
   backup:
     name: Backup
     runs-on:
@@ -107,6 +168,8 @@ jobs:
     needs:
       - prepare-workflow
     environment: release
+    outputs:
+      backup-complete: ${{ steps.backup.outputs.backup-complete }}

     steps:
       - name: Clone The Salt Repository
@@ -121,6 +184,7 @@ jobs:
         uses: ./.github/actions/setup-python-tools-scripts

       - name: Backup Previous Releases
+        id: backup
         run: |
           tools pkg repo backup-previous-releases

@@ -133,6 +197,7 @@ jobs:
     needs:
       - prepare-workflow
      - backup
+      - download-onedir-artifact
     environment: release

     steps:
@@ -155,8 +220,581 @@ jobs:
         run: |
           tools pkg repo publish release ${{ needs.prepare-workflow.outputs.salt-version }}

+  almalinux-8-pkg-download-tests:
+    name: Test Alma Linux 8 Package Downloads
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: almalinux-8
+      platform: linux
+      arch: x86_64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  almalinux-8-arm64-pkg-download-tests:
+    name: Test Alma Linux 8 Arm64 Package Downloads
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: almalinux-8-arm64
+      platform: linux
+      arch: aarch64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  almalinux-9-pkg-download-tests:
+    name: Test Alma Linux 9 Package Downloads
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: almalinux-9
+      platform: linux
+      arch: x86_64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  almalinux-9-arm64-pkg-download-tests:
+    name: Test Alma Linux 9 Arm64 Package Downloads
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: almalinux-9-arm64
+      platform: linux
+      arch: aarch64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  amazonlinux-2-pkg-download-tests:
+    name: Test Amazon Linux 2 Package Downloads
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: amazonlinux-2
+      platform: linux
+      arch: x86_64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  amazonlinux-2-arm64-pkg-download-tests:
+    name: Test Amazon Linux 2 Arm64 Package Downloads
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: amazonlinux-2-arm64
+      platform: linux
+      arch: aarch64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  centos-7-pkg-download-tests:
+    name: Test CentOS 7 Package Downloads
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: centos-7
+      platform: linux
+      arch: x86_64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  centos-7-arm64-pkg-download-tests:
+    name: Test CentOS 7 Arm64 Package Downloads
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: centos-7-arm64
+      platform: linux
+      arch: aarch64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  centosstream-8-pkg-download-tests:
+    name: Test CentOS Stream 8 Package Downloads
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: centosstream-8
+      platform: linux
+      arch: x86_64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  centosstream-8-arm64-pkg-download-tests:
+    name: Test CentOS Stream 8 Arm64 Package Downloads
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: centosstream-8-arm64
+      platform: linux
+      arch: aarch64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  centosstream-9-pkg-download-tests:
+    name: Test CentOS Stream 9 Package Downloads
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: centosstream-9
+      platform: linux
+      arch: x86_64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  centosstream-9-arm64-pkg-download-tests:
+    name: Test CentOS Stream 9 Arm64 Package Downloads
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: centosstream-9-arm64
+      platform: linux
+      arch: aarch64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  debian-10-pkg-download-tests:
+    name: Test Debian 10 Package Downloads
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: debian-10
+      platform: linux
+      arch: x86_64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  debian-11-pkg-download-tests:
+    name: Test Debian 11 Package Downloads
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: debian-11
+      platform: linux
+      arch: x86_64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  debian-11-arm64-pkg-download-tests:
+    name: Test Debian 11 Arm64 Package Downloads
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: debian-11-arm64
+      platform: linux
+      arch: aarch64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  fedora-36-pkg-download-tests:
+    name: Test Fedora 36 Package Downloads
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: fedora-36
+      platform: linux
+      arch: x86_64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  fedora-36-arm64-pkg-download-tests:
+    name: Test Fedora 36 Arm64 Package Downloads
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: fedora-36-arm64
+      platform: linux
+      arch: aarch64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  fedora-37-pkg-download-tests:
+    name: Test Fedora 37 Package Downloads
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: fedora-37
+      platform: linux
+      arch: x86_64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  fedora-37-arm64-pkg-download-tests:
+    name: Test Fedora 37 Arm64 Package Downloads
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: fedora-37-arm64
+      platform: linux
+      arch: aarch64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  fedora-38-pkg-download-tests:
+    name: Test Fedora 38 Package Downloads
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: fedora-38
+      platform: linux
+      arch: x86_64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  fedora-38-arm64-pkg-download-tests:
+    name: Test Fedora 38 Arm64 Package Downloads
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: fedora-38-arm64
+      platform: linux
+      arch: aarch64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  photonos-3-pkg-download-tests:
+    name: Test Photon OS 3 Package Downloads
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: photonos-3
+      platform: linux
+      arch: x86_64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  photonos-4-pkg-download-tests:
+    name: Test Photon OS 4 Package Downloads
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: photonos-4
+      platform: linux
+      arch: x86_64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  ubuntu-2004-pkg-download-tests:
+    name: Test Ubuntu 20.04 Package Downloads
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: ubuntu-20.04
+      platform: linux
+      arch: x86_64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  ubuntu-2004-arm64-pkg-download-tests:
+    name: Test Ubuntu 20.04 Arm64 Package Downloads
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: ubuntu-20.04-arm64
+      platform: linux
+      arch: aarch64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  ubuntu-2204-pkg-download-tests:
+    name: Test Ubuntu 22.04 Package Downloads
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: ubuntu-22.04
+      platform: linux
+      arch: x86_64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  ubuntu-2204-arm64-pkg-download-tests:
+    name: Test Ubuntu 22.04 Arm64 Package Downloads
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: ubuntu-22.04-arm64
+      platform: linux
+      arch: aarch64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  macos-12-pkg-download-tests:
+    name: Test macOS 12 Package Downloads
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-macos.yml
+    with:
+      distro-slug: macos-12
+      platform: darwin
+      arch: x86_64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  windows-2022-nsis-amd64-pkg-download-tests:
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    name: Test Windows 2022 amd64 NSIS Package Downloads
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-windows.yml
+    with:
+      distro-slug: windows-2022
+      platform: windows
+      arch: amd64
+      pkg-type: NSIS
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  windows-2022-msi-amd64-pkg-download-tests:
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    name: Test Windows 2022 amd64 MSI Package Downloads
+    needs:
+      - prepare-workflow
+      - publish-repositories
+      - download-onedir-artifact
+    uses: ./.github/workflows/test-package-downloads-action-windows.yml
+    with:
+      distro-slug: windows-2022
+      platform: windows
+      arch: amd64
+      pkg-type: MSI
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: release
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
   release:
     name: Release v${{ needs.prepare-workflow.outputs.salt-version }}
+    if: ${{ always() && ! failure() && ! cancelled() }}
     runs-on:
       - self-hosted
       - linux
@@ -165,6 +803,36 @@ jobs:
       - prepare-workflow
       - backup
      - publish-repositories
+      - almalinux-8-pkg-download-tests
+      - almalinux-8-arm64-pkg-download-tests
+      - almalinux-9-pkg-download-tests
+      - almalinux-9-arm64-pkg-download-tests
+      - amazonlinux-2-pkg-download-tests
+      - amazonlinux-2-arm64-pkg-download-tests
+      - centos-7-pkg-download-tests
+      - centos-7-arm64-pkg-download-tests
+      - centosstream-8-pkg-download-tests
+      - centosstream-8-arm64-pkg-download-tests
+      - centosstream-9-pkg-download-tests
+      - centosstream-9-arm64-pkg-download-tests
+      - debian-10-pkg-download-tests
+      - debian-11-pkg-download-tests
+      - debian-11-arm64-pkg-download-tests
+      - fedora-36-pkg-download-tests
+      - fedora-36-arm64-pkg-download-tests
+      - fedora-37-pkg-download-tests
+      - fedora-37-arm64-pkg-download-tests
+      - fedora-38-pkg-download-tests
+      - fedora-38-arm64-pkg-download-tests
+      - photonos-3-pkg-download-tests
+      - photonos-4-pkg-download-tests
+      - ubuntu-2004-pkg-download-tests
+      - ubuntu-2004-arm64-pkg-download-tests
+      - ubuntu-2204-pkg-download-tests
+      - ubuntu-2204-arm64-pkg-download-tests
+      - macos-12-pkg-download-tests
+      - windows-2022-nsis-amd64-pkg-download-tests
+      - windows-2022-msi-amd64-pkg-download-tests
     environment: release
     steps:
       - name: Clone The Salt Repository
@@ -261,45 +929,12 @@ jobs:
           retention-days: 7
           if-no-files-found: error

-  restore:
-    name: Restore Release Bucket From Backup
-    if: ${{ always() }}
-    runs-on:
-      - self-hosted
-      - linux
-      - repo-release
-    needs:
-      - release
-    environment: release
-    steps:
-      - name: Clone The Salt Repository
-        if: ${{ failure() || cancelled() }}
-        uses: actions/checkout@v3
-        with:
-          ssh-key: ${{ secrets.GHA_SSH_KEY }}
-
-      - name: Setup Rclone
-        if: ${{ failure() || cancelled() }}
-        uses: AnimMouse/setup-rclone@v1
-        with:
-          version: v1.61.1
-
-      - name: Setup Python Tools Scripts
-        if: ${{ failure() || cancelled() }}
-        uses: ./.github/actions/setup-python-tools-scripts
-
-      - name: Restore Release Bucket
-        if: ${{ failure() || cancelled() }}
-        run: |
-          tools pkg repo restore-previous-releases
-
   publish-pypi:
     name: Publish to PyPi
-    if: ${{ github.event.repository.fork != true }}
+    if: ${{ always() && ! failure() && ! cancelled() && github.event.repository.fork != true }}
     needs:
       - prepare-workflow
       - release
-      - restore
     environment: release
     runs-on:
       - self-hosted
@@ -358,13 +993,21 @@ jobs:
      - prepare-workflow
      - publish-repositories
      - release
-      - restore
      - publish-pypi
     steps:
       - name: Get workflow information
         id: get-workflow-info
         uses: technote-space/workflow-conclusion-action@v3

+      - run: |
+          # shellcheck disable=SC2129
+          if [ "${{ steps.get-workflow-info.outputs.conclusion }}" != "success" ]; then
+            echo 'To restore the release bucket run:' >> "${GITHUB_STEP_SUMMARY}"
+            echo '```' >> "${GITHUB_STEP_SUMMARY}"
+            echo 'tools pkg repo restore-previous-releases' >> "${GITHUB_STEP_SUMMARY}"
+            echo '```' >> "${GITHUB_STEP_SUMMARY}"
+          fi
+
       - name: Set Pipeline Exit Status
         shell: bash
         run: |
diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml
index b00b7e8d1e6..fda566fbb3e 100644
--- a/.github/workflows/scheduled.yml
+++ b/.github/workflows/scheduled.yml
@@ -80,6 +80,8 @@ jobs:
       testrun: ${{ steps.define-testrun.outputs.testrun }}
       salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
       cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
+      latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
+      releases: ${{ steps.get-salt-releases.outputs.releases }}
     steps:
       - uses: actions/checkout@v3
         with:
@@ -247,6 +249,18 @@ jobs:
         run: |
           echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.'

+      - name: Get Salt Releases
+        id: get-salt-releases
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          tools ci get-releases
+
+      - name: Check Salt Releases
+        run: |
+          echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.'
+          echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.'
+
       - name: Define Testrun
         id: define-testrun
         run: |
@@ -477,7 +491,7 @@ jobs:
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
       github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-      relenv-version: "0.11.2"
+      relenv-version: "0.12.3"
       python-version-linux: "3.10.11"
       python-version-macos: "3.10.11"
       python-version-windows: "3.10.11"
@@ -495,7 +509,7 @@ jobs:
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
       github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-      relenv-version: "0.11.2"
+      relenv-version: "0.12.3"
       python-version-linux: "3.10.11"
       python-version-macos: "3.10.11"
       python-version-windows: "3.10.11"
diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml
index 0c0f5c5f88a..2088976ec31 100644
--- a/.github/workflows/staging.yml
+++ b/.github/workflows/staging.yml
@@ -14,6 +14,10 @@ on:
       description: >
         The Salt version to set prior to building packages and staging the release.
         (DO NOT prefix the version with a v, ie, 3006.0 NOT v3006.0).
+    sign-windows-packages:
+      type: boolean
+      default: false
+      description: Sign Windows Packages
     skip-salt-test-suite:
       type: boolean
       default: false
@@ -53,17 +57,6 @@ jobs:
           require: admin
           username: ${{ github.triggering_actor }}

-      - name: Check Branch
-        run: |
-          echo "Trying to run the staging workflow from branch ${{ github.ref_name }}"
-          if [ "${{ contains(fromJSON('["master", "3006.x"]'), github.ref_name) }}" != "true" ]; then
-            echo "Running the staging workflow from the ${{ github.ref_name }} branch is not allowed"
-            echo "Allowed branches: master, 3006.x"
-            exit 1
-          else
-            echo "Allowed to release from branch ${{ github.ref_name }}"
-          fi
-
   prepare-workflow:
     name: Prepare Workflow Run
     runs-on: ubuntu-latest
@@ -77,6 +70,8 @@ jobs:
       testrun: ${{ steps.define-testrun.outputs.testrun }}
       salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
       cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
+      latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
+      releases: ${{ steps.get-salt-releases.outputs.releases }}
     steps:
       - uses: actions/checkout@v3
         with:
@@ -250,6 +245,18 @@ jobs:
         run: |
           echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.'

+      - name: Get Salt Releases
+        id: get-salt-releases
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          tools ci get-releases
+
+      - name: Check Salt Releases
+        run: |
+          echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.'
+          echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.'
+
       - name: Define Testrun
         id: define-testrun
         run: |
@@ -484,7 +491,7 @@ jobs:
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
       github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-      relenv-version: "0.11.2"
+      relenv-version: "0.12.3"
       python-version-linux: "3.10.11"
       python-version-macos: "3.10.11"
       python-version-windows: "3.10.11"
@@ -502,7 +509,7 @@ jobs:
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
      self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
      github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
-      relenv-version: "0.11.2"
+      relenv-version: "0.12.3"
       python-version-linux: "3.10.11"
       python-version-macos: "3.10.11"
       python-version-windows: "3.10.11"
@@ -537,6 +544,7 @@ jobs:
     with:
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       environment: staging
+      sign-packages: ${{ inputs.sign-windows-packages }}
     secrets: inherit

   build-macos-pkgs:
@@ -549,6 +557,7 @@ jobs:
     with:
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       environment: staging
+      sign-packages: true
     secrets: inherit

   amazonlinux-2-pkg-tests:
@@ -1414,14 +1423,19 @@ jobs:
             --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \
             --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo

+      - name: Copy Files For Source Only Artifact Uploads
+        run: |
+          mkdir artifacts/src
+          find artifacts/pkgs/repo -type f -print -exec cp {} artifacts/src \;
+
       - name: Upload Standalone Repository As An Artifact
         uses: actions/upload-artifact@v3
         with:
           name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-staging-src-repo
           path: |
-            artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
-            artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.*
-            artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/*-GPG-*
+            artifacts/src/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
+            artifacts/src/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.*
+            artifacts/src/*-GPG-*
           retention-days: 7
           if-no-files-found: error

@@ -1975,6 +1989,7 @@ jobs:

   publish-repositories:
     name: Publish Repositories
+    if: ${{ always() && ! failure() && ! cancelled() }}
     runs-on:
       - self-hosted
       - linux
@@ -2070,6 +2085,10 @@ jobs:
           name: Salt-${{ needs.prepare-workflow.outputs.salt-version }}.epub
           path: artifacts/release

+      - name: Show Release Artifacts
+        run: |
+          tree -a artifacts/release
+
       - name: Upload Release Artifacts
         run: |
           tools release upload-artifacts ${{ needs.prepare-workflow.outputs.salt-version }} artifacts/release
@@ -2084,24 +2103,494 @@ jobs:
           retention-days: 7
           if-no-files-found: error

-  test-linux-pkg-downloads:
-    name: Test Linux Package Downloads
+  almalinux-8-pkg-download-tests:
+    name: Test Alma Linux 8 Package Downloads
     if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
     needs:
       - prepare-workflow
       - publish-repositories
     uses: ./.github/workflows/test-package-downloads-action-linux.yml
     with:
-      distro-slug: ubuntu-latest
+      distro-slug: almalinux-8
       platform: linux
+      arch: x86_64
       cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       environment: staging
       skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
     secrets: inherit

-  test-macos-pkg-downloads:
-    name: Test macOS Package Downloads
+  almalinux-8-arm64-pkg-download-tests:
+    name: Test Alma Linux 8 Arm64 Package Downloads
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: almalinux-8-arm64
+      platform: linux
+      arch: aarch64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: staging
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  almalinux-9-pkg-download-tests:
+    name: Test Alma Linux 9 Package Downloads
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: almalinux-9
+      platform: linux
+      arch: x86_64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: staging
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  almalinux-9-arm64-pkg-download-tests:
+    name: Test Alma Linux 9 Arm64 Package Downloads
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: almalinux-9-arm64
+      platform: linux
+      arch: aarch64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: staging
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  amazonlinux-2-pkg-download-tests:
+    name: Test Amazon Linux 2 Package Downloads
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: amazonlinux-2
+      platform: linux
+      arch: x86_64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: staging
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  amazonlinux-2-arm64-pkg-download-tests:
+    name: Test Amazon Linux 2 Arm64 Package Downloads
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: amazonlinux-2-arm64
+      platform: linux
+      arch: aarch64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: staging
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  centos-7-pkg-download-tests:
+    name: Test CentOS 7 Package Downloads
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: centos-7
+      platform: linux
+      arch: x86_64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: staging
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  centos-7-arm64-pkg-download-tests:
+    name: Test CentOS 7 Arm64 Package Downloads
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: centos-7-arm64
+      platform: linux
+      arch: aarch64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: staging
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  centosstream-8-pkg-download-tests:
+    name: Test CentOS Stream 8 Package Downloads
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: centosstream-8
+      platform: linux
+      arch: x86_64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: staging
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  centosstream-8-arm64-pkg-download-tests:
+    name: Test CentOS Stream 8 Arm64 Package Downloads
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: centosstream-8-arm64
+      platform: linux
+      arch: aarch64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: staging
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  centosstream-9-pkg-download-tests:
+    name: Test CentOS Stream 9 Package Downloads
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: centosstream-9
+      platform: linux
+      arch: x86_64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: staging
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  centosstream-9-arm64-pkg-download-tests:
+    name: Test CentOS Stream 9 Arm64 Package Downloads
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: centosstream-9-arm64
+      platform: linux
+      arch: aarch64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: staging
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  debian-10-pkg-download-tests:
+    name: Test Debian 10 Package Downloads
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: debian-10
+      platform: linux
+      arch: x86_64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: staging
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  debian-11-pkg-download-tests:
+    name: Test Debian 11 Package Downloads
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: debian-11
+      platform: linux
+      arch: x86_64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: staging
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  debian-11-arm64-pkg-download-tests:
+    name: Test Debian 11 Arm64 Package Downloads
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: debian-11-arm64
+      platform: linux
+      arch: aarch64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: staging
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  fedora-36-pkg-download-tests:
+    name: Test Fedora 36 Package Downloads
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: fedora-36
+      platform: linux
+      arch: x86_64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: staging
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  fedora-36-arm64-pkg-download-tests:
+    name: Test Fedora 36 Arm64 Package Downloads
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: fedora-36-arm64
+      platform: linux
+      arch: aarch64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: staging
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  fedora-37-pkg-download-tests:
+    name: Test Fedora 37 Package Downloads
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+    uses: ./.github/workflows/test-package-downloads-action-linux.yml
+    with:
+      distro-slug: fedora-37
+      platform: linux
+      arch: x86_64
+      cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11
+      salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
+      environment: staging
+      skip-code-coverage: true
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
+    secrets: inherit
+
+  fedora-37-arm64-pkg-download-tests:
+    name: Test Fedora 37 Arm64 Package Downloads
+    if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
+    needs:
+      - prepare-workflow
+      - publish-repositories
+    uses:
./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: fedora-37-arm64 + platform: linux + arch: aarch64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + secrets: inherit + + fedora-38-pkg-download-tests: + name: Test Fedora 38 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: fedora-38 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + secrets: inherit + + fedora-38-arm64-pkg-download-tests: + name: Test Fedora 38 Arm64 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: fedora-38-arm64 + platform: linux + arch: aarch64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + secrets: inherit + + photonos-3-pkg-download-tests: + name: Test Photon OS 3 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: photonos-3 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + secrets: inherit + + photonos-4-pkg-download-tests: + name: Test Photon OS 4 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: photonos-4 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + secrets: inherit + + ubuntu-2004-pkg-download-tests: + name: Test Ubuntu 20.04 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: 
./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: ubuntu-20.04 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + secrets: inherit + + ubuntu-2004-arm64-pkg-download-tests: + name: Test Ubuntu 20.04 Arm64 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: ubuntu-20.04-arm64 + platform: linux + arch: aarch64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + secrets: inherit + + ubuntu-2204-pkg-download-tests: + name: Test Ubuntu 22.04 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: ubuntu-22.04 + platform: linux + arch: x86_64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + secrets: inherit + + ubuntu-2204-arm64-pkg-download-tests: + name: Test Ubuntu 22.04 Arm64 Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-linux.yml + with: + distro-slug: ubuntu-22.04-arm64 + platform: linux + arch: aarch64 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + secrets: inherit + + macos-12-pkg-download-tests: + name: Test macOS 12 Package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -2110,15 +2599,17 @@ jobs: with: distro-slug: macos-12 platform: darwin + arch: x86_64 cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit - test-windows-pkg-downloads: + windows-2022-nsis-amd64-pkg-download-tests: if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - name: Test Windows Package Downloads + name: Test Windows 2022 amd64 NSIS Package Downloads needs: - prepare-workflow - publish-repositories @@ -2126,10 +2617,32 
@@ jobs: with: distro-slug: windows-2022 platform: windows + arch: amd64 + pkg-type: NSIS cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" environment: staging skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + secrets: inherit + + windows-2022-msi-amd64-pkg-download-tests: + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + name: Test Windows 2022 amd64 MSI Package Downloads + needs: + - prepare-workflow + - publish-repositories + uses: ./.github/workflows/test-package-downloads-action-windows.yml + with: + distro-slug: windows-2022 + platform: windows + arch: amd64 + pkg-type: MSI + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit publish-pypi: @@ -2182,9 +2695,36 @@ jobs: - windows-2019-msi-pkg-tests - windows-2022-nsis-pkg-tests - windows-2022-msi-pkg-tests - - test-linux-pkg-downloads - - test-macos-pkg-downloads - - test-windows-pkg-downloads + - almalinux-8-pkg-download-tests + - almalinux-8-arm64-pkg-download-tests + - almalinux-9-pkg-download-tests + - almalinux-9-arm64-pkg-download-tests + - amazonlinux-2-pkg-download-tests + - amazonlinux-2-arm64-pkg-download-tests + - centos-7-pkg-download-tests + - centos-7-arm64-pkg-download-tests + - centosstream-8-pkg-download-tests + - centosstream-8-arm64-pkg-download-tests + - centosstream-9-pkg-download-tests + - centosstream-9-arm64-pkg-download-tests + - debian-10-pkg-download-tests + - debian-11-pkg-download-tests + - debian-11-arm64-pkg-download-tests + - fedora-36-pkg-download-tests + - fedora-36-arm64-pkg-download-tests + - fedora-37-pkg-download-tests + - fedora-37-arm64-pkg-download-tests + - fedora-38-pkg-download-tests + - fedora-38-arm64-pkg-download-tests + - photonos-3-pkg-download-tests + - photonos-4-pkg-download-tests + - ubuntu-2004-pkg-download-tests + - ubuntu-2004-arm64-pkg-download-tests + - ubuntu-2204-pkg-download-tests + - ubuntu-2204-arm64-pkg-download-tests + - macos-12-pkg-download-tests + - windows-2022-nsis-amd64-pkg-download-tests + - windows-2022-msi-amd64-pkg-download-tests environment: staging runs-on: - self-hosted diff --git a/.github/workflows/templates/build-deb-repo.yml.jinja b/.github/workflows/templates/build-deb-repo.yml.jinja index e44e9837a3a..4f88e516a3d 100644 --- a/.github/workflows/templates/build-deb-repo.yml.jinja +++ b/.github/workflows/templates/build-deb-repo.yml.jinja @@ -76,7 +76,7 @@ - name: Create Repository run: | - tools pkg repo create deb --key-id=<{ gpg_key_id }> --distro-arch=${{ matrix.arch }} <% if gh_environment == 'nightly' -%> --nightly-build <%- endif %> \ + tools pkg repo create deb --key-id=<{ gpg_key_id }> --distro-arch=${{ matrix.arch }} <% if gh_environment == 'nightly' -%> --nightly-build-from=${{ github.ref_name }} <%- endif %> \ --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --distro=${{ matrix.distro }} --distro-version=${{ matrix.version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo diff --git a/.github/workflows/templates/build-macos-repo.yml.jinja b/.github/workflows/templates/build-macos-repo.yml.jinja index 409f6e12246..f4494b24d74 100644 --- 
a/.github/workflows/templates/build-macos-repo.yml.jinja +++ b/.github/workflows/templates/build-macos-repo.yml.jinja @@ -52,7 +52,7 @@ - name: Create Repository run: | - tools pkg repo create macos --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build <%- endif %> \ + tools pkg repo create macos --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build-from=${{ github.ref_name }} <%- endif %> \ --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo diff --git a/.github/workflows/templates/build-onedir-repo.yml.jinja b/.github/workflows/templates/build-onedir-repo.yml.jinja index 29c555a82d7..b68049c9a5c 100644 --- a/.github/workflows/templates/build-onedir-repo.yml.jinja +++ b/.github/workflows/templates/build-onedir-repo.yml.jinja @@ -88,7 +88,7 @@ - name: Create Repository run: | - tools pkg repo create onedir --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build <%- endif %> \ + tools pkg repo create onedir --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build-from=${{ github.ref_name }} <%- endif %> \ --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo diff --git a/.github/workflows/templates/build-packages.yml.jinja b/.github/workflows/templates/build-packages.yml.jinja index 87a87d69b28..7646dd6e3e5 100644 --- a/.github/workflows/templates/build-packages.yml.jinja +++ b/.github/workflows/templates/build-packages.yml.jinja @@ -18,6 +18,11 @@ salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" <%- if pkg_type in ("macos", "windows") and gh_environment %> environment: <{ gh_environment }> + <%- if pkg_type == "macos" %> + sign-packages: true + <%- else %> + sign-packages: <% if gh_environment == 'nightly' -%> false <%- else -%> ${{ inputs.sign-windows-packages }} <%- endif %> + <%- endif %> secrets: inherit <%- endif %> diff --git a/.github/workflows/templates/build-rpm-repo.yml.jinja b/.github/workflows/templates/build-rpm-repo.yml.jinja index 49316861d47..6f1b27c6eca 100644 --- a/.github/workflows/templates/build-rpm-repo.yml.jinja +++ b/.github/workflows/templates/build-rpm-repo.yml.jinja @@ -93,7 +93,7 @@ SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} run: | - tools pkg repo create rpm --key-id=<{ gpg_key_id }> --distro-arch=${{ matrix.arch }} <% if gh_environment == 'nightly' -%> --nightly-build <%- endif %> \ + tools pkg repo create rpm --key-id=<{ gpg_key_id }> --distro-arch=${{ matrix.arch }} <% if gh_environment == 'nightly' -%> --nightly-build-from=${{ github.ref_name }} <%- endif %> \ --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --distro=${{ matrix.distro }} --distro-version=${{ matrix.version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo diff --git a/.github/workflows/templates/build-src-repo.yml.jinja b/.github/workflows/templates/build-src-repo.yml.jinja index 0fcf4eed9ef..6a9c2634e96 100644 --- a/.github/workflows/templates/build-src-repo.yml.jinja +++ b/.github/workflows/templates/build-src-repo.yml.jinja @@ -52,18 +52,23 @@ - name: Create Repository run: | - tools pkg repo create src --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build <%- endif %> \ + tools pkg repo create src 
--key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build-from=${{ github.ref_name }} <%- endif %> \ --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo + - name: Copy Files For Source Only Artifact Uploads + run: | + mkdir artifacts/src + find artifacts/pkgs/repo -type f -print -exec cp {} artifacts/src \; + - name: Upload Standalone Repository As An Artifact uses: actions/upload-artifact@v3 with: name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-<{ gh_environment }>-src-repo path: | - artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz - artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.* - artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/*-GPG-* + artifacts/src/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz + artifacts/src/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.* + artifacts/src/*-GPG-* retention-days: 7 if-no-files-found: error diff --git a/.github/workflows/templates/build-windows-repo.yml.jinja b/.github/workflows/templates/build-windows-repo.yml.jinja index 2ffbfad6885..0142e5cc09c 100644 --- a/.github/workflows/templates/build-windows-repo.yml.jinja +++ b/.github/workflows/templates/build-windows-repo.yml.jinja @@ -70,7 +70,7 @@ - name: Create Repository run: | - tools pkg repo create windows --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build <%- endif %> \ + tools pkg repo create windows --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build-from=${{ github.ref_name }} <%- endif %> \ --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo diff --git a/.github/workflows/templates/layout.yml.jinja b/.github/workflows/templates/layout.yml.jinja index bbbae394ff3..59c2493b485 100644 --- a/.github/workflows/templates/layout.yml.jinja +++ b/.github/workflows/templates/layout.yml.jinja @@ -9,7 +9,6 @@ <%- set skip_junit_reports_check = skip_junit_reports_check|default("${{ github.event_name == 'pull_request' }}") %> <%- set gpg_key_id = "64CBBC8173D76B3F" %> <%- set prepare_actual_release = prepare_actual_release | default(False) %> -<%- set release_branches = ["master", "3006.x"] %> --- <%- block name %> name: <{ workflow_name }> @@ -90,6 +89,8 @@ jobs: testrun: ${{ steps.define-testrun.outputs.testrun }} salt-version: ${{ steps.setup-salt-version.outputs.salt-version }} cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} + latest-release: ${{ steps.get-salt-releases.outputs.latest-release }} + releases: ${{ steps.get-salt-releases.outputs.releases }} steps: - uses: actions/checkout@v3 with: @@ -267,6 +268,18 @@ jobs: run: | echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.' + - name: Get Salt Releases + id: get-salt-releases + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-releases + + - name: Check Salt Releases + run: | + echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.' + echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.' 
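[Editor's note: illustration only, not part of the patch. The `get-salt-releases` wiring added above follows the standard GitHub Actions output-passing pattern: a step writes `key=value` lines to `$GITHUB_OUTPUT` (it is assumed here that `tools ci get-releases` emits `latest-release=` and `releases=` that way), the job re-exports those values under `outputs:`, and downstream jobs read them via `needs.<job>.outputs.<key>`. A minimal sketch, using a hypothetical `my-tool` command:

    jobs:
      prepare:
        runs-on: ubuntu-latest
        outputs:
          releases: ${{ steps.get.outputs.releases }}
        steps:
          - id: get
            run: |
              # Hypothetical command; anything printing single-line JSON works.
              echo "releases=$(my-tool list --json)" >> "$GITHUB_OUTPUT"
      consume:
        needs: prepare
        runs-on: ubuntu-latest
        steps:
          - run: |
              # Pretty-print the JSON carried across the job boundary.
              echo '${{ needs.prepare.outputs.releases }}' | jq -C '.'

End of note.]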
+
       - name: Define Testrun
         id: define-testrun
         run: |
@@ -324,6 +337,9 @@ jobs:
         id: get-workflow-info
         uses: technote-space/workflow-conclusion-action@v3
 
+  <%- block set_pipeline_exit_status_extra_steps %>
+  <%- endblock set_pipeline_exit_status_extra_steps %>
+
       - name: Set Pipeline Exit Status
         shell: bash
         run: |
diff --git a/.github/workflows/templates/nightly.yml.jinja b/.github/workflows/templates/nightly.yml.jinja
index 292e84fd77c..d78faa49c0a 100644
--- a/.github/workflows/templates/nightly.yml.jinja
+++ b/.github/workflows/templates/nightly.yml.jinja
@@ -1,6 +1,8 @@
 <%- set gh_environment = gh_environment|default("nightly") %>
 <%- set skip_test_coverage_check = skip_test_coverage_check|default("false") %>
 <%- set skip_junit_reports_check = skip_junit_reports_check|default("false") %>
+<%- set prepare_workflow_skip_test_suite = "${{ inputs.skip-salt-test-suite && ' --skip-tests' || '' }}" %>
+<%- set prepare_workflow_skip_pkg_test_suite = "${{ inputs.skip-salt-pkg-test-suite && ' --skip-pkg-tests' || '' }}" %>
 <%- set prepare_workflow_if_check = prepare_workflow_if_check|default("${{ fromJSON(needs.workflow-requirements.outputs.requirements-met) }}") %>
 
 <%- extends 'ci.yml.jinja' %>
@@ -14,7 +16,16 @@ run-name: "<{ workflow_name }> (branch: ${{ github.ref_name }})"
 
 <%- block on %>
 on:
-  workflow_dispatch: {}
+  workflow_dispatch:
+    inputs:
+      skip-salt-test-suite:
+        type: boolean
+        default: false
+        description: Skip running the Salt test suite.
+      skip-salt-pkg-test-suite:
+        type: boolean
+        default: false
+        description: Skip running the Salt packages test suite.
   schedule:
     # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onschedule
     - cron: '0 1 * * *'  # Every day at 1AM
@@ -53,6 +64,7 @@ concurrency:
   publish-repositories:
     <%- do conclusion_needs.append('publish-repositories') %>
     name: Publish Repositories
+    if: ${{ always() && ! failure() && ! cancelled() }}
     runs-on:
       - self-hosted
       - linux
diff --git a/.github/workflows/templates/release.yml.jinja b/.github/workflows/templates/release.yml.jinja
index 73852ffefba..f5b3a456963 100644
--- a/.github/workflows/templates/release.yml.jinja
+++ b/.github/workflows/templates/release.yml.jinja
@@ -61,17 +61,6 @@ permissions:
         require: admin
         username: ${{ github.triggering_actor }}
 
-      - name: Check Branch
-        run: |
-          echo "Trying to run the staging workflow from branch ${{ github.ref_name }}"
-          if [ "${{ contains(fromJSON('<{ release_branches|tojson }>'), github.ref_name) }}" != "true" ]; then
-            echo "Running the staging workflow from the ${{ github.ref_name }} branch is not allowed"
-            echo "Allowed branches: <{ release_branches|join(', ') }>"
-            exit 1
-          else
-            echo "Allowed to release from branch ${{ github.ref_name }}"
-          fi
-
   <%- endblock pre_jobs %>
 
 
@@ -81,7 +70,11 @@ permissions:
 
   prepare-workflow:
     name: Prepare Workflow Run
-    runs-on: ubuntu-latest
+    runs-on:
+      - self-hosted
+      - linux
+      - repo-<{ gh_environment }>
+    environment: <{ gh_environment }>
     <%- if prepare_workflow_needs %>
     needs:
       <%- for need in prepare_workflow_needs.iter(consume=False) %>
@@ -91,6 +84,8 @@ permissions:
     outputs:
       salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
       cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
+      latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
+      releases: ${{ steps.get-salt-releases.outputs.releases }}
     steps:
      - uses: actions/checkout@v3
         with:
@@ -121,6 +116,24 @@ permissions:
         run: |
           tools pkg repo confirm-unreleased --repository ${{ github.repository }} ${{ steps.setup-salt-version.outputs.salt-version }}
 
+      - name: Check Release Staged
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          tools pkg repo confirm-staged --repository ${{ github.repository }} ${{ steps.setup-salt-version.outputs.salt-version }}
+
+      - name: Get Salt Releases
+        id: get-salt-releases
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: |
+          tools ci get-releases
+
+      - name: Check Salt Releases
+        run: |
+          echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.'
+          echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.'
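[Editor's note: illustration only, not part of the patch. Several jobs in this change are gated with `if: ${{ always() && ! failure() && ! cancelled() }}`. The implicit default, `success()`, skips a job whenever anything in its `needs` chain was itself skipped, which would wrongly skip publishing when an optional upstream job is conditionally disabled. `always()` forces the condition to be evaluated regardless of upstream status, and the `! failure() && ! cancelled()` terms restore the stop-on-real-failure behaviour, so the net effect is: run when every dependency either succeeded or was skipped. A minimal sketch with hypothetical job names:

    jobs:
      optional-tests:
        if: ${{ inputs.run-tests }}  # may be skipped entirely
        runs-on: ubuntu-latest
        steps:
          - run: echo "testing"
      publish:
        # Runs when optional-tests succeeded or was skipped,
        # but not when it failed or the run was cancelled.
        if: ${{ always() && ! failure() && ! cancelled() }}
        needs:
          - optional-tests
        runs-on: ubuntu-latest
        steps:
          - run: echo "publishing"

End of note.]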
+
       - name: Set Cache Seed Output
         id: set-cache-seed
         run: |
@@ -132,6 +145,54 @@ permissions:
 
 <%- block jobs %>
   <{- super() }>
 
+  download-onedir-artifact:
+    name: Download Staging Onedir Artifact
+    if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
+    runs-on:
+      - self-hosted
+      - linux
+      - repo-<{ gh_environment }>
+    environment: <{ gh_environment }>
+    needs:
+      - prepare-workflow
+    strategy:
+      fail-fast: false
+      matrix:
+        include:
+          - platform: linux
+            arch: x86_64
+          - platform: linux
+            arch: aarch64
+          - platform: windows
+            arch: amd64
+          - platform: windows
+            arch: x86
+          - platform: darwin
+            arch: x86_64
+    steps:
+      - uses: actions/checkout@v3
+
+      - name: Setup Python Tools Scripts
+        uses: ./.github/actions/setup-python-tools-scripts
+
+      - name: Get Salt Project GitHub Actions Bot Environment
+        run: |
+          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
+          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
+          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
+
+      - name: Download Onedir Tarball Artifact
+        run: |
+          tools release download-onedir-artifact --platform=${{ matrix.platform }} --arch=${{ matrix.arch }} ${{ inputs.salt-version }}
+
+      - name: Upload Onedir Tarball as an Artifact
+        uses: actions/upload-artifact@v3
+        with:
+          name: salt-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
+          path: artifacts/salt-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz*
+          retention-days: 7
+          if-no-files-found: error
+
   backup:
     name: Backup
     runs-on:
@@ -141,6 +202,8 @@ permissions:
     needs:
       - prepare-workflow
     environment: <{ gh_environment }>
+    outputs:
+      backup-complete: ${{ steps.backup.outputs.backup-complete }}
 
     steps:
       - name: Clone The Salt Repository
@@ -155,6 +218,7 @@ permissions:
         uses: ./.github/actions/setup-python-tools-scripts
 
       - name: Backup Previous Releases
+        id: backup
         run: |
           tools pkg repo backup-previous-releases
 
@@ -168,6 +232,7 @@ permissions:
     needs:
       - prepare-workflow
       - backup
+      - download-onedir-artifact
     environment: <{ gh_environment }>
 
     steps:
@@ -197,6 +262,7 @@ permissions:
   release:
     <%- do conclusion_needs.append('release') %>
     name: Release v${{ needs.prepare-workflow.outputs.salt-version }}
+    if: ${{ always() && ! failure() && ! cancelled() }}
     runs-on:
      - self-hosted
       - linux
@@ -304,15 +370,17 @@ permissions:
           retention-days: 7
           if-no-files-found: error
 
+  {#- Disable automatic backup restore
   restore:
     <%- do conclusion_needs.append('restore') %>
     name: Restore Release Bucket From Backup
-    if: ${{ always() }}
+    if: ${{ always() && needs.backup.outputs.backup-complete == 'true' && (failure() || cancelled()) }}
     runs-on:
       - self-hosted
       - linux
       - repo-<{ gh_environment }>
     needs:
+      - backup
       - release
     <%- for need in test_repo_needs.iter(consume=True) %>
       - <{ need }>
@@ -320,34 +388,31 @@
     environment: <{ gh_environment }>
     steps:
       - name: Clone The Salt Repository
-        if: ${{ failure() || cancelled() }}
         uses: actions/checkout@v3
         with:
           ssh-key: ${{ secrets.GHA_SSH_KEY }}
 
       - name: Setup Rclone
-        if: ${{ failure() || cancelled() }}
         uses: AnimMouse/setup-rclone@v1
         with:
           version: v1.61.1
 
       - name: Setup Python Tools Scripts
-        if: ${{ failure() || cancelled() }}
        uses: ./.github/actions/setup-python-tools-scripts
 
       - name: Restore Release Bucket
-        if: ${{ failure() || cancelled() }}
         run: |
           tools pkg repo restore-previous-releases
+  #}
 
   publish-pypi:
     <%- do conclusion_needs.append('publish-pypi') %>
     name: Publish to PyPi
-    if: ${{ github.event.repository.fork != true }}
+    if: ${{ always() && ! failure() && ! cancelled() && github.event.repository.fork != true }}
     needs:
       - prepare-workflow
-      - release
-      - restore
+      - release {#- Disable automatic backup restore
+      - restore #}
     environment: <{ gh_environment }>
     runs-on:
       - self-hosted
@@ -396,3 +461,15 @@ permissions:
           tools pkg pypi-upload artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
 
 <%- endblock jobs %>
+
+<%- block set_pipeline_exit_status_extra_steps %>
+
+      - run: |
+          # shellcheck disable=SC2129
+          if [ "${{ steps.get-workflow-info.outputs.conclusion }}" != "success" ]; then
+            echo 'To restore the release bucket run:' >> "${GITHUB_STEP_SUMMARY}"
+            echo '```' >> "${GITHUB_STEP_SUMMARY}"
+            echo 'tools pkg repo restore-previous-releases' >> "${GITHUB_STEP_SUMMARY}"
+            echo '```' >> "${GITHUB_STEP_SUMMARY}"
+          fi
+<%- endblock set_pipeline_exit_status_extra_steps %>
diff --git a/.github/workflows/templates/staging.yml.jinja b/.github/workflows/templates/staging.yml.jinja
index c34607eeeca..07c212a9d6c 100644
--- a/.github/workflows/templates/staging.yml.jinja
+++ b/.github/workflows/templates/staging.yml.jinja
@@ -27,6 +27,10 @@ on:
         description: >
          The Salt version to set prior to building packages and staging the release.
           (DO NOT prefix the version with a v, ie, 3006.0 NOT v3006.0).
+      sign-windows-packages:
+        type: boolean
+        default: false
+        description: Sign Windows Packages
       skip-salt-test-suite:
         type: boolean
         default: false
@@ -67,17 +71,6 @@ concurrency:
         require: admin
         username: ${{ github.triggering_actor }}
 
-      - name: Check Branch
-        run: |
-          echo "Trying to run the staging workflow from branch ${{ github.ref_name }}"
-          if [ "${{ contains(fromJSON('<{ release_branches|tojson }>'), github.ref_name) }}" != "true" ]; then
-            echo "Running the staging workflow from the ${{ github.ref_name }} branch is not allowed"
-            echo "Allowed branches: <{ release_branches|join(', ') }>"
-            exit 1
-          else
-            echo "Allowed to release from branch ${{ github.ref_name }}"
-          fi
-
   <%- endblock pre_jobs %>
 
@@ -132,6 +125,10 @@ concurrency:
           name: Salt-${{ needs.prepare-workflow.outputs.salt-version }}.epub
           path: artifacts/release
 
+      - name: Show Release Artifacts
+        run: |
+          tree -a artifacts/release
+
     {#-
 
     - name: Download Release Documentation (PDF)
diff --git a/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja b/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja
index 6c2c956c9b1..ac826f6e9fe 100644
--- a/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja
+++ b/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja
@@ -1,8 +1,41 @@
-  <%- set job_name = "test-linux-pkg-downloads" %>
+
+  <%- set linux_pkg_tests = (
+    ("almalinux-8", "Alma Linux 8", "x86_64"),
+    ("almalinux-8-arm64", "Alma Linux 8 Arm64", "aarch64"),
+    ("almalinux-9", "Alma Linux 9", "x86_64"),
+    ("almalinux-9-arm64", "Alma Linux 9 Arm64", "aarch64"),
+    ("amazonlinux-2", "Amazon Linux 2", "x86_64"),
+    ("amazonlinux-2-arm64", "Amazon Linux 2 Arm64", "aarch64"),
+    ("centos-7", "CentOS 7", "x86_64"),
+    ("centos-7-arm64", "CentOS 7 Arm64", "aarch64"),
+    ("centosstream-8", "CentOS Stream 8", "x86_64"),
+    ("centosstream-8-arm64", "CentOS Stream 8 Arm64", "aarch64"),
+    ("centosstream-9", "CentOS Stream 9", "x86_64"),
+    ("centosstream-9-arm64", "CentOS Stream 9 Arm64", "aarch64"),
+    ("debian-10", "Debian 10", "x86_64"),
+    ("debian-11", "Debian 11", "x86_64"),
+    ("debian-11-arm64", "Debian 11 Arm64", "aarch64"),
+    ("fedora-36", "Fedora 36", "x86_64"),
+    ("fedora-36-arm64", "Fedora 36 Arm64", "aarch64"),
+    ("fedora-37", "Fedora 37", "x86_64"),
+    ("fedora-37-arm64", "Fedora 37 Arm64", "aarch64"),
+    ("fedora-38", "Fedora 38", "x86_64"),
+    ("fedora-38-arm64", "Fedora 38 Arm64", "aarch64"),
+    ("photonos-3", "Photon OS 3", "x86_64"),
+    ("photonos-4", "Photon OS 4", "x86_64"),
+    ("ubuntu-20.04", "Ubuntu 20.04", "x86_64"),
+    ("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64"),
+    ("ubuntu-22.04", "Ubuntu 22.04", "x86_64"),
+    ("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64")
+  ) %>
+
+
+  <%- for slug, display_name, arch in linux_pkg_tests %>
+  <%- set job_name = "{}-pkg-download-tests".format(slug.replace(".", "")) %>
 
   <{ job_name }>:
     <%- do test_repo_needs.append(job_name) %>
-    name: Test Linux Package Downloads
+    name: Test <{ display_name }> Package Downloads
     <%- if gh_environment == "staging" %>
     if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
     <%- else %>
@@ -11,25 +44,32 @@
     needs:
       - prepare-workflow
       - publish-repositories
+    <%- if gh_environment == "release" %>
+      - download-onedir-artifact
+    <%- endif %>
     uses: ./.github/workflows/test-package-downloads-action-linux.yml
     with:
-      distro-slug: ubuntu-latest
+      distro-slug: <{ slug }>
       platform: linux
+      arch: <{ arch }>
       cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version_linux }>
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       environment: <{ gh_environment }>
       skip-code-coverage: true
-      <%- if gh_environment == "release" %>
-      artifacts-from-workflow: staging.yml
-      <%- endif %>
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
     secrets: inherit
+  <%- endfor %>
 
-  <%- set job_name = "test-macos-pkg-downloads" %>
+
+  <%- for slug, display_name, arch in (
+    ("macos-12", "macOS 12", "x86_64"),
+  ) %>
+  <%- set job_name = "{}-pkg-download-tests".format(slug.replace(".", "")) %>
 
   <{ job_name }>:
     <%- do test_repo_needs.append(job_name) %>
-    name: Test macOS Package Downloads
+    name: Test <{ display_name }> Package Downloads
     <%- if gh_environment == "staging" %>
     if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
     <%- else %>
@@ -38,21 +78,28 @@
     needs:
       - prepare-workflow
       - publish-repositories
+    <%- if gh_environment == "release" %>
+      - download-onedir-artifact
+    <%- endif %>
     uses: ./.github/workflows/test-package-downloads-action-macos.yml
     with:
-      distro-slug: macos-12
+      distro-slug: <{ slug }>
       platform: darwin
+      arch: <{ arch }>
       cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version_macos }>
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       environment: <{ gh_environment }>
       skip-code-coverage: true
-      <%- if gh_environment == "release" %>
-      artifacts-from-workflow: staging.yml
-      <%- endif %>
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
     secrets: inherit
+  <%- endfor %>
 
-  <%- set job_name = "test-windows-pkg-downloads" %>
+  <%- for slug, display_name, arch in (
+    ("windows-2022", "Windows 2022", "amd64"),
+  ) %>
+  <%- for pkg_type in ("NSIS", "MSI") %>
+  <%- set job_name = "{}-{}-{}-pkg-download-tests".format(slug.replace(".", ""), pkg_type.lower(), arch.lower()) %>
 
   <{ job_name }>:
     <%- do test_repo_needs.append(job_name) %>
@@ -61,19 +108,24 @@
     <%- else %>
     if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
     <%- endif %>
-    name: Test Windows Package Downloads
+    name: Test <{ display_name }> <{ arch }> <{ pkg_type }> Package Downloads
     needs:
       - prepare-workflow
       - publish-repositories
+    <%- if gh_environment == "release" %>
+      - download-onedir-artifact
+    <%- endif %>
     uses: ./.github/workflows/test-package-downloads-action-windows.yml
     with:
-      distro-slug: windows-2022
+      distro-slug: <{ slug }>
      platform: windows
+      arch: <{ arch }>
+      pkg-type: <{ pkg_type }>
       cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version_windows }>
       salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
       environment: <{ gh_environment }>
       skip-code-coverage: true
-      <%- if gh_environment == "release" %>
-      artifacts-from-workflow: staging.yml
-      <%- endif %>
+      latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
     secrets: inherit
+  <%- endfor %>
+  <%- endfor %>
diff --git a/.github/workflows/test-action-macos.yml b/.github/workflows/test-action-macos.yml
index 81dcc99ba20..b8088c64522 100644
--- a/.github/workflows/test-action-macos.yml
+++ b/.github/workflows/test-action-macos.yml
@@ -227,6 +227,8 @@ jobs:
         with:
           path: nox.${{ inputs.distro-slug }}.tar.*
           key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ inputs.python-version }}|${{ hashFiles('requirements/**/*.txt') }}
+          # If we get a cache miss here it means the dependencies step failed to save the cache
+          fail-on-cache-miss: true
 
       - name: Set up Python ${{ inputs.python-version }}
         uses: actions/setup-python@v4
@@ -515,10 +517,10 @@ jobs:
         run: |
           tree -a artifacts
 
-      - name: Set up Python 3.9
+      - name: Set up Python ${{ inputs.python-version }}
         uses: actions/setup-python@v4
         with:
-          python-version: "3.9"
+          python-version: "${{ inputs.python-version }}"
 
       - name: Install Nox
         run: |
diff --git a/.github/workflows/test-action.yml b/.github/workflows/test-action.yml
index f7220b21d6e..0982e7446eb 100644
--- a/.github/workflows/test-action.yml
+++ b/.github/workflows/test-action.yml
@@ -248,6 +248,8 @@ jobs:
         with:
           path: nox.${{ inputs.distro-slug }}.tar.*
           key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
+          # If we get a cache miss here it means the dependencies step failed to save the cache
+          fail-on-cache-miss: true
 
       - name: PyPi Proxy
         run: |
@@ -490,11 +492,6 @@ jobs:
         run: |
           tree -a artifacts
 
-      - name: Set up Python 3.9
-        uses: actions/setup-python@v4
-        with:
-          python-version: "3.9"
-
       - name: Install Nox
         run: |
           python3 -m pip install 'nox==${{ env.NOX_VERSION }}'
diff --git a/.github/workflows/test-package-downloads-action-linux.yml b/.github/workflows/test-package-downloads-action-linux.yml
index 65c9b60e18e..ee67c4d4020 100644
--- a/.github/workflows/test-package-downloads-action-linux.yml
+++ b/.github/workflows/test-package-downloads-action-linux.yml
@@ -11,6 +11,10 @@ on:
       required: true
       type: string
       description: The platform being tested
+    arch:
+      required: true
+      type: string
+      description: The platform arch being tested
     salt-version:
       type: string
       required: true
@@ -23,11 +27,10 @@ on:
       required: true
       type: string
       description: The environment to run tests against
-    python-version:
-      required: false
+    latest-release:
+      required: true
       type: string
-      description: The python version to run tests with
-      default: "3.10"
+      description: The latest salt release
     package-name:
       required: false
       type: string
@@ -43,13 +46,6 @@ on:
       type: string
       description: The nox session to run
       default: test-pkgs-onedir
-    artifacts-from-workflow:
-      required: false
-      type: string
-      description: >
-        Which workflow to download artifacts from. An empty string means the
-        current workflow run.
-      default: ""
 
 env:
@@ -62,37 +58,13 @@
 
 jobs:
 
-  generate-matrix:
-    name: Generate Package Test Matrix
-    runs-on: ubuntu-latest
-    outputs:
-      arch-matrix-include: ${{ steps.generate-pkg-matrix.outputs.arch }}
-      test-matrix-include: ${{ steps.generate-pkg-matrix.outputs.tests }}
-    steps:
-      - name: Checkout Source Code
-        uses: actions/checkout@v3
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-
-      - name: Generate Package Test Matrix
-        id: generate-pkg-matrix
-        run: |
-          tools ci pkg-download-matrix linux
-
   dependencies:
     name: Setup Test Dependencies
-    needs:
-      - generate-matrix
     runs-on:
       - self-hosted
       - linux
-      - ${{ matrix.arch }}
+      - bastion
     timeout-minutes: 90
-    strategy:
-      fail-fast: false
-      matrix:
-        include: ${{ fromJSON(needs.generate-matrix.outputs.arch-matrix-include) }}
     steps:
       - name: Checkout Source Code
         uses: actions/checkout@v3
@@ -102,24 +74,12 @@ jobs:
         uses: actions/cache@v3
         with:
           path: nox.${{ inputs.distro-slug }}.tar.*
-          key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ matrix.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
+          key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
 
       - name: Download Onedir Tarball as an Artifact
-        if: inputs.artifacts-from-workflow == ''
         uses: actions/download-artifact@v3
         with:
-          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
-          path: artifacts/
-
-      - name: Download Onedir Tarball as an Artifact(from a different workflow)
-        if: inputs.artifacts-from-workflow != ''
-        uses: dawidd6/action-download-artifact@v2
-        with:
-          workflow: ${{ inputs.artifacts-from-workflow }}
-          workflow_conclusion: ""
-          branch: ${{ github.event.ref }}
-          if_no_artifact_found: fail
-          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
+          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
           path: artifacts/
 
       - name: Decompress Onedir Tarball
@@ -128,43 +88,64 @@ jobs:
         run: |
           python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
           cd artifacts
-          tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
+          tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
 
-      - name: Install Nox
+      - name: Setup Python Tools Scripts
+        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
+        uses: ./.github/actions/setup-python-tools-scripts
+
+      - name: Get Salt Project GitHub Actions Bot Environment
         if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
         run: |
-          python3 -m pip install 'nox==${{ env.NOX_VERSION }}'
+          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
+          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
+          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
+
+      - name: Start VM
+        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
+        id: spin-up-vm
+        run: |
+          tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }}
+
+      - name: List Free Space
+        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
+        run: |
+          tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true
+
+      - name: Upload Checkout To VM
+        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
+        run: |
+          tools --timestamps vm rsync ${{ inputs.distro-slug }}
 
       - name: Install Dependencies
         if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
-        env:
-          PRINT_TEST_SELECTION: "0"
-          PRINT_SYSTEM_INFO: "0"
         run: |
-          nox --force-color --install-only -e ${{ inputs.nox-session }}
+          tools --timestamps vm install-dependencies --nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }}
 
       - name: Cleanup .nox Directory
         if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
         run: |
-          nox --force-color -e "pre-archive-cleanup(pkg=False)"
+          tools --timestamps vm pre-archive-cleanup ${{ inputs.distro-slug }}
 
       - name: Compress .nox Directory
         if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
         run: |
-          nox --force-color -e compress-dependencies -- ${{ inputs.distro-slug }}
+          tools --timestamps vm compress-dependencies ${{ inputs.distro-slug }}
 
-      - name: Upload Onedir Tarball as an Artifact
-        uses: actions/upload-artifact@v3
-        with:
-          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
-          path: artifacts/${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz*
-          retention-days: 7
-          if-no-files-found: error
+      - name: Download Compressed .nox Directory
+        if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
+        run: |
+          tools --timestamps vm download-dependencies ${{ inputs.distro-slug }}
+
+      - name: Destroy VM
+        if: always() && steps.nox-dependencies-cache.outputs.cache-hit != 'true'
+        run: |
+          tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }}
 
       - name: Upload Nox Requirements Tarball
         uses: actions/upload-artifact@v3
         with:
-          name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
+          name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-download-${{ inputs.arch }}
           path: nox.${{ inputs.distro-slug }}.tar.*
 
   test:
@@ -172,16 +153,11 @@ jobs:
     runs-on:
       - self-hosted
       - linux
-      - ${{ matrix.arch }}
+      - bastion
     environment: ${{ inputs.environment }}
     timeout-minutes: 120  # 2 Hours - More than this and something is wrong
     needs:
       - dependencies
-      - generate-matrix
-    strategy:
-      fail-fast: false
-      matrix:
-        include: ${{ fromJSON(needs.generate-matrix.outputs.test-matrix-include) }}
 
     steps:
       - name: Checkout Source Code
@@ -190,102 +166,128 @@
       - name: Download Onedir Tarball as an Artifact
         uses: actions/download-artifact@v3
         with:
-          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
-          path: artifacts
+          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
+          path: artifacts/
 
       - name: Decompress Onedir Tarball
         shell: bash
         run: |
           python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
           cd artifacts
-          tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
-
-      - name: Install Nox
-        run: |
-          python3 -m pip install 'nox==${{ env.NOX_VERSION }}'
+          tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
 
       - name: Download cached nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }}
         uses: actions/cache@v3
         with:
           path: nox.${{ inputs.distro-slug }}.tar.*
-          key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ matrix.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
+          key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
+          # If we get a cache miss here it means the dependencies step failed to save the cache
+          fail-on-cache-miss: true
+
+      - name: Setup Python Tools Scripts
+        uses: ./.github/actions/setup-python-tools-scripts
+
+      - name: Get Salt Project GitHub Actions Bot Environment
+        run: |
+          TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
+          SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
+          echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
+
+      - name: Start VM
+        id: spin-up-vm
+        run: |
+          tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }}
+
+      - name: List Free Space
+        run: |
+          tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true
+
+      - name: Upload Checkout To VM
+        run: |
+          tools --timestamps vm rsync ${{ inputs.distro-slug }}
 
       - name: Decompress .nox Directory
         run: |
-          nox --force-color -e decompress-dependencies -- ${{ inputs.distro-slug }}
+          tools --timestamps vm decompress-dependencies ${{ inputs.distro-slug }}
 
       - name: Show System Info & Test Plan
         env:
-          SKIP_REQUIREMENTS_INSTALL: "1"
-          PRINT_TEST_SELECTION: "1"
-          PRINT_TEST_PLAN_ONLY: "1"
-          PRINT_SYSTEM_INFO: "1"
-          GITHUB_ACTIONS_PIPELINE: "1"
-          SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
-          SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
-        run: |
-          nox --force-color -e ${{ inputs.nox-session }} -- download-pkgs
-
-      - name: Run Package Download Tests
-        env:
-          SKIP_REQUIREMENTS_INSTALL: "1"
-          PRINT_TEST_SELECTION: "0"
-          PRINT_TEST_PLAN_ONLY: "0"
-          PRINT_SYSTEM_INFO: "0"
-          RERUN_FAILURES: "1"
-          GITHUB_ACTIONS_PIPELINE: "1"
-          SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
-          SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
           SALT_RELEASE: "${{ inputs.salt-version }}"
-          SALT_REPO_ARCH: ${{ matrix.arch }}
+          SALT_REPO_ARCH: ${{ inputs.arch }}
           SALT_REPO_TYPE: ${{ inputs.environment }}
           SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
           SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
           SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
           SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
-
+          SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
+          LATEST_SALT_RELEASE: "${{ inputs.latest-release }}"
         run: |
-          nox --force-color -e ${{ inputs.nox-session }} -- download-pkgs
+          tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \
+            -E INSTALL_TYPE -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \
+            -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING \
+            --nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }} -- download-pkgs
 
-      - name: Fix file ownership
+      - name: Run Package Download Tests
+        env:
+          SALT_RELEASE: "${{ inputs.salt-version }}"
+          SALT_REPO_ARCH: ${{ inputs.arch }}
+          SALT_REPO_TYPE: ${{ inputs.environment }}
+          SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }}
+          SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }}
+          SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
+          SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
+          SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
+          LATEST_SALT_RELEASE: "${{ inputs.latest-release }}"
         run: |
-          sudo chown -R "$(id -un)" .
+          tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
+            -E INSTALL_TYPE -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \
+            -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING \
+            --nox-session=${{ inputs.nox-session }} --rerun-failures ${{ inputs.distro-slug }} -- download-pkgs
 
       - name: Combine Coverage Reports
-        if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled'
+        if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success' && job.status != 'cancelled'
         run: |
-          nox --force-color -e combine-coverage
+          tools --timestamps vm combine-coverage ${{ inputs.distro-slug }}
 
-      - name: Prepare Test Run Artifacts
+      - name: Download Test Run Artifacts
         id: download-artifacts-from-vm
-        if: always() && job.status != 'cancelled'
+        if: always() && steps.spin-up-vm.outcome == 'success'
         run: |
+          tools --timestamps vm download-artifacts ${{ inputs.distro-slug }}
           # Delete the salt onedir, we won't need it anymore and it will prevent
           # from it showing in the tree command below
           rm -rf artifacts/salt*
           tree -a artifacts
 
+      - name: Destroy VM
+        if: always()
+        run: |
+          tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }} || true
+
+      - name: Fix file ownership
+        run: |
+          sudo chown -R "$(id -un)" .
+
       - name: Upload Test Run Artifacts
-        if: always() && job.status != 'cancelled'
+        if: always() && steps.download-artifacts-from-vm.outcome == 'success'
        uses: actions/upload-artifact@v3
         with:
-          name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.arch }}
+          name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.arch }}
           path: |
             artifacts
             !artifacts/salt/*
             !artifacts/salt-*.tar.*
 
   report:
-    name: Reports for ${{ inputs.distro-slug }}(${{ matrix.arch }})
-    runs-on: ubuntu-latest
+    name: Reports for ${{ inputs.distro-slug }}(${{ inputs.arch }})
+    runs-on:
+      - self-hosted
+      - linux
+      - x86_64
+    environment: ${{ inputs.environment }}
     if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
     needs:
       - test
-      - generate-matrix
-    strategy:
-      fail-fast: false
-      matrix:
-        include: ${{ fromJSON(needs.generate-matrix.outputs.test-matrix-include) }}
 
     steps:
       - name: Checkout Source Code
@@ -295,7 +297,7 @@ jobs:
         id: download-test-run-artifacts
         uses: actions/download-artifact@v3
         with:
-          name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.arch }}
+          name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.arch }}
           path: artifacts
 
       - name: Show Test Run Artifacts
@@ -303,12 +305,6 @@ jobs:
         run: |
           tree -a artifacts
 
-      - name: Set up Python ${{ inputs.python-version }}
-        uses: actions/setup-python@v4
-        with:
-          python-version: "${{ inputs.python-version }}"
-          update-environment: true
-
       - name: Install Nox
         run: |
           python3 -m pip install 'nox==${{ env.NOX_VERSION }}'
@@ -318,6 +314,6 @@ jobs:
         # always run even if the previous steps fails
         if: always() && github.event_name == 'push' && steps.download-test-run-artifacts.outcome == 'success'
         with:
-          check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ matrix.arch }})
+          check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ inputs.arch }})
           report_paths: 'artifacts/xml-unittests-output/*.xml'
           annotate_only: true
diff --git a/.github/workflows/test-package-downloads-action-macos.yml b/.github/workflows/test-package-downloads-action-macos.yml
index b68542be268..ec985efbcee 100644
--- a/.github/workflows/test-package-downloads-action-macos.yml
+++ b/.github/workflows/test-package-downloads-action-macos.yml
@@ -11,6 +11,10 @@ on:
       required: true
       type: string
       description: The platform being tested
+    arch:
+      required: true
+      type: string
+      description: The platform arch being tested
     salt-version:
       type: string
       required: true
@@ -23,6 +27,10 @@ on:
       required: true
       type: string
       description: The environment to run tests against
+    latest-release:
+      required: true
+      type: string
+      description: The latest salt release
     python-version:
       required: false
       type: string
@@ -43,13 +51,6 @@ on:
       type: string
       description: The nox session to run
       default: test-pkgs-onedir
-    artifacts-from-workflow:
-      required: false
-      type: string
-      description: >
-        Which workflow to download artifacts from. An empty string means the
-        current workflow run.
-      default: ""
 
 env:
@@ -62,34 +63,10 @@ env:
 
 jobs:
 
-  generate-matrix:
-    name: Generate Package Test Matrix
-    runs-on: ubuntu-latest
-    outputs:
-      arch-matrix-include: ${{ steps.generate-pkg-matrix.outputs.arch }}
-      test-matrix-include: ${{ steps.generate-pkg-matrix.outputs.tests }}
-    steps:
-      - name: Checkout Source Code
-        uses: actions/checkout@v3
-
-      - name: Setup Python Tools Scripts
-        uses: ./.github/actions/setup-python-tools-scripts
-
-      - name: Generate Package Test Matrix
-        id: generate-pkg-matrix
-        run: |
-          tools ci pkg-download-matrix macos
-
   dependencies:
     name: Setup Test Dependencies
-    needs:
-      - generate-matrix
     runs-on: ${{ inputs.distro-slug }}
     timeout-minutes: 90
-    strategy:
-      fail-fast: false
-      matrix:
-        include: ${{ fromJSON(needs.generate-matrix.outputs.arch-matrix-include) }}
     steps:
       - name: Checkout Source Code
         uses: actions/checkout@v3
@@ -99,24 +76,12 @@ jobs:
         uses: actions/cache@v3
         with:
           path: nox.${{ inputs.distro-slug }}.tar.*
-          key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ matrix.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
+          key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }}
 
       - name: Download Onedir Tarball as an Artifact
-        if: inputs.artifacts-from-workflow == ''
         uses: actions/download-artifact@v3
         with:
-          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
-          path: artifacts/
-
-      - name: Download Onedir Tarball as an Artifact(from a different workflow)
-        if: inputs.artifacts-from-workflow != ''
-        uses: dawidd6/action-download-artifact@v2
-        with:
-          workflow: ${{ inputs.artifacts-from-workflow }}
-          workflow_conclusion: ""
-          branch: ${{ github.event.ref }}
-          if_no_artifact_found: fail
-          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
+          name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
         path: artifacts/
 
       - name: Decompress Onedir Tarball
@@ -125,7 +90,7 @@ jobs:
         run: |
           python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
           cd artifacts
-          tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz
+          tar xvf 
${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz - name: Set up Python ${{ inputs.python-version }} if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' @@ -166,18 +131,10 @@ jobs: run: | nox --force-color -e compress-dependencies -- ${{ inputs.distro-slug }} - - name: Upload Onedir Tarball as an Artifact - uses: actions/upload-artifact@v3 - with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz - path: artifacts/${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz* - retention-days: 7 - if-no-files-found: error - - name: Upload Nox Requirements Tarball uses: actions/upload-artifact@v3 with: - name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }} + name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-download-${{ inputs.arch }} path: nox.${{ inputs.distro-slug }}.tar.* test: @@ -187,11 +144,6 @@ jobs: timeout-minutes: 120 # 2 Hours - More than this and something is wrong needs: - dependencies - - generate-matrix - strategy: - fail-fast: false - matrix: - include: ${{ fromJSON(needs.generate-matrix.outputs.test-matrix-include) }} steps: - name: Checkout Source Code @@ -200,8 +152,8 @@ jobs: - name: Download Onedir Tarball as an Artifact uses: actions/download-artifact@v3 with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz - path: artifacts + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz + path: artifacts/ - name: Install System Dependencies run: | @@ -212,7 +164,7 @@ jobs: run: | python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" cd artifacts - tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz + tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz - name: Set up Python ${{ inputs.python-version }} uses: actions/setup-python@v4 @@ -228,7 +180,9 @@ jobs: uses: actions/cache@v3 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ matrix.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + # If we get a cache miss here it means the dependencies step failed to save the cache + fail-on-cache-miss: true - name: Decompress .nox Directory run: | @@ -236,6 +190,7 @@ jobs: - name: Show System Info & Test Plan env: + SALT_RELEASE: "${{ inputs.salt-version }}" SKIP_REQUIREMENTS_INSTALL: "1" PRINT_TEST_SELECTION: "1" PRINT_TEST_PLAN_ONLY: "1" @@ -243,6 +198,7 @@ jobs: GITHUB_ACTIONS_PIPELINE: "1" SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" + LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" run: | sudo -E nox --force-color -e ${{ inputs.nox-session }} -- download-pkgs @@ -257,7 +213,8 @@ jobs: SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" SALT_RELEASE: "${{ inputs.salt-version }}" - SALT_REPO_ARCH: ${{ matrix.arch }} + SALT_REPO_ARCH: ${{ inputs.arch }} + 
LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" SALT_REPO_TYPE: ${{ inputs.environment }} SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }} SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }} @@ -288,24 +245,19 @@ jobs: if: always() && job.status != 'cancelled' uses: actions/upload-artifact@v3 with: - name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.arch }} + name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.arch }} path: | artifacts !artifacts/salt/* !artifacts/salt-*.tar.* report: - name: Reports for ${{ inputs.distro-slug }}(${{ matrix.arch }}) + name: Reports for ${{ inputs.distro-slug }}(${{ inputs.arch }}) runs-on: ubuntu-latest environment: ${{ inputs.environment }} if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped' needs: - test - - generate-matrix - strategy: - fail-fast: false - matrix: - include: ${{ fromJSON(needs.generate-matrix.outputs.test-matrix-include) }} steps: - name: Checkout Source Code @@ -315,7 +267,7 @@ id: download-test-run-artifacts uses: actions/download-artifact@v3 with: - name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.arch }} + name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.arch }} path: artifacts - name: Show Test Run Artifacts @@ -337,6 +289,6 @@ # always run even if the previous steps fails if: always() && github.event_name == 'push' && steps.download-test-run-artifacts.outcome == 'success' with: - check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ matrix.arch }}) + check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ inputs.arch }}) report_paths: 'artifacts/xml-unittests-output/*.xml' annotate_only: true diff --git a/.github/workflows/test-package-downloads-action-windows.yml b/.github/workflows/test-package-downloads-action-windows.yml index c13ef47d62c..10d4462e451 100644 --- a/.github/workflows/test-package-downloads-action-windows.yml +++ b/.github/workflows/test-package-downloads-action-windows.yml @@ -11,6 +11,14 @@ on: required: true type: string description: The platform being tested + arch: + required: true + type: string + description: The platform arch being tested + pkg-type: + required: true + type: string + description: The type of package being tested salt-version: type: string required: true @@ -23,11 +31,10 @@ on: required: true type: string description: The environment to run tests against - python-version: - required: false + latest-release: + required: true type: string - description: The python version to run tests with - default: "3.10" + description: The latest salt release package-name: required: false type: string @@ -48,13 +55,6 @@ on: type: boolean description: Skip Publishing JUnit Reports default: false - artifacts-from-workflow: - required: false - type: string - description: > - Which workflow to download artifacts from. An empty string means the - current workflow run.
- default: "" env: @@ -67,40 +67,13 @@ env: jobs: - generate-matrix: - name: Generate Package Test Matrix - runs-on: - - self-hosted - - linux - - x86_64 - outputs: - arch-matrix-include: ${{ steps.generate-pkg-matrix.outputs.arch }} - test-matrix-include: ${{ steps.generate-pkg-matrix.outputs.tests }} - steps: - - name: Checkout Source Code - uses: actions/checkout@v3 - - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - - - name: Generate Package Test Matrix - id: generate-pkg-matrix - run: | - tools ci pkg-download-matrix windows - dependencies: name: Setup Test Dependencies - needs: - - generate-matrix runs-on: - self-hosted - linux - bastion timeout-minutes: 90 - strategy: - fail-fast: false - matrix: - include: ${{ fromJSON(needs.generate-matrix.outputs.arch-matrix-include) }} steps: - name: Checkout Source Code uses: actions/checkout@v3 @@ -110,24 +83,12 @@ jobs: uses: actions/cache@v3 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ matrix.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} - name: Download Onedir Tarball as an Artifact - if: inputs.artifacts-from-workflow == '' uses: actions/download-artifact@v3 with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz - path: artifacts/ - - - name: Download Onedir Tarball as an Artifact(from a different workflow) - if: inputs.artifacts-from-workflow != '' - uses: dawidd6/action-download-artifact@v2 - with: - workflow: ${{ inputs.artifacts-from-workflow }} - workflow_conclusion: "" - branch: ${{ github.event.ref }} - if_no_artifact_found: fail - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz path: artifacts/ - name: Decompress Onedir Tarball @@ -136,7 +97,7 @@ jobs: run: | python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" cd artifacts - tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz + tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz - name: Setup Python Tools Scripts if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' @@ -190,18 +151,10 @@ jobs: run: | tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }} - - name: Upload Onedir Tarball as an Artifact - uses: actions/upload-artifact@v3 - with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz - path: artifacts/${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz* - retention-days: 7 - if-no-files-found: error - - name: Upload Nox Requirements Tarball uses: actions/upload-artifact@v3 with: - name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }} + name: nox-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}-${{ inputs.nox-session }}-download-${{ inputs.arch }} path: nox.${{ inputs.distro-slug }}.tar.* test: @@ -213,12 +166,7 @@ jobs: environment: ${{ 
inputs.environment }} timeout-minutes: 120 # 2 Hours - More than this and something is wrong needs: - - generate-matrix - dependencies - strategy: - fail-fast: false - matrix: - include: ${{ fromJSON(needs.generate-matrix.outputs.test-matrix-include) }} steps: - name: Checkout Source Code @@ -227,7 +175,7 @@ jobs: - name: Download Onedir Tarball as an Artifact uses: actions/download-artifact@v3 with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz path: artifacts/ - name: Decompress Onedir Tarball @@ -235,13 +183,15 @@ jobs: run: | python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" cd artifacts - tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ matrix.arch }}.tar.xz + tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz - name: Download cached nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }} uses: actions/cache@v3 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ matrix.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + key: ${{ inputs.cache-prefix }}|test-pkg-download-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + # If we get a cache miss here it means the dependencies step failed to save the cache + fail-on-cache-miss: true - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts @@ -271,9 +221,10 @@ jobs: - name: Show System Info & Test Plan env: - INSTALL_TYPE: ${{ matrix.install_type }} + INSTALL_TYPE: ${{ inputs.pkg-type }} SALT_RELEASE: "${{ inputs.salt-version }}" - SALT_REPO_ARCH: ${{ matrix.install_arch }} + SALT_REPO_ARCH: ${{ inputs.arch }} + LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" SALT_REPO_TYPE: ${{ inputs.environment }} SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }} SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }} @@ -288,9 +239,10 @@ jobs: - name: Run Package Download Tests env: - INSTALL_TYPE: ${{ matrix.install_type }} + INSTALL_TYPE: ${{ inputs.pkg-type }} SALT_RELEASE: "${{ inputs.salt-version }}" - SALT_REPO_ARCH: ${{ matrix.install_arch }} + SALT_REPO_ARCH: ${{ inputs.arch }} + LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" SALT_REPO_TYPE: ${{ inputs.environment }} SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }} SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }} @@ -331,24 +283,22 @@ jobs: if: always() && steps.download-artifacts-from-vm.outcome == 'success' uses: actions/upload-artifact@v3 with: - name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.arch }} + name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.arch }} path: | artifacts !artifacts/salt/* !artifacts/salt-*.tar.* report: - name: Reports for ${{ inputs.distro-slug }}(${{ matrix.arch }}) - runs-on: ubuntu-latest + name: Reports for ${{ inputs.distro-slug }}(${{ inputs.arch }}) + runs-on: + - self-hosted + - linux + - x86_64 environment: ${{ inputs.environment }} if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped' needs: - test - - generate-matrix - strategy: - fail-fast: false - matrix: - include: ${{ 
fromJSON(needs.generate-matrix.outputs.test-matrix-include) }} steps: - name: Checkout Source Code @@ -358,7 +308,7 @@ jobs: id: download-test-run-artifacts uses: actions/download-artifact@v3 with: - name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.arch }} + name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.arch }} path: artifacts - name: Show Test Run Artifacts @@ -366,11 +316,6 @@ jobs: run: | tree -a artifacts - - name: Set up Python ${{ inputs.python-version }} - uses: actions/setup-python@v4 - with: - python-version: "${{ inputs.python-version }}" - - name: Install Nox run: | python3 -m pip install 'nox==${{ env.NOX_VERSION }}' @@ -380,6 +325,6 @@ jobs: # always run even if the previous steps fails if: always() && github.event_name == 'push' && steps.download-test-run-artifacts.outcome == 'success' with: - check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ matrix.arch }}) + check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ inputs.arch }} ${{ inputs.pkg-type }} ) report_paths: 'artifacts/xml-unittests-output/*.xml' annotate_only: true diff --git a/.github/workflows/test-packages-action-macos.yml b/.github/workflows/test-packages-action-macos.yml index d2cda1f4d48..b7de16fb5ac 100644 --- a/.github/workflows/test-packages-action-macos.yml +++ b/.github/workflows/test-packages-action-macos.yml @@ -154,7 +154,7 @@ jobs: - name: Upload Nox Requirements Tarball uses: actions/upload-artifact@v3 with: - name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }} + name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ inputs.arch }} path: nox.${{ inputs.distro-slug }}.tar.* test: @@ -214,6 +214,8 @@ jobs: with: path: nox.${{ inputs.distro-slug }}.tar.* key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + # If we get a cache miss here it means the dependencies step failed to save the cache + fail-on-cache-miss: true - name: Decompress .nox Directory run: | diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index d167eda634e..71affc00877 100644 --- a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -166,10 +166,19 @@ jobs: run: | tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }} + - name: Define Nox Upload Artifact Name + id: nox-artifact-name + run: | + if [ "${{ contains(inputs.distro-slug, 'windows') }}" != "true" ]; then + echo "name=nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ inputs.arch }}" >> "${GITHUB_OUTPUT}" + else + echo "name=nox-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}-${{ inputs.nox-session }}-${{ inputs.arch }}" >> "${GITHUB_OUTPUT}" + fi + - name: Upload Nox Requirements Tarball uses: actions/upload-artifact@v3 with: - name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }} + name: ${{ steps.nox-artifact-name.outputs.name }} path: nox.${{ inputs.distro-slug }}.tar.* test: @@ -219,6 +228,8 @@ jobs: with: path: nox.${{ inputs.distro-slug }}.tar.* key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + # If we get a cache miss here it means the dependencies step failed to save the cache + fail-on-cache-miss: true - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts diff --git a/.pre-commit-config.yaml 
b/.pre-commit-config.yaml index 2ccd0bd7d2b..9a2a0b98f27 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -63,7 +63,7 @@ repos: - id: tools alias: generate-workflows name: Generate GitHub Workflow Templates - files: ^(tools/pre_commit\.py|.github/workflows/templates/.*)$ + files: ^(cicd/shared-gh-workflows-context\.yml|tools/pre_commit\.py|.github/workflows/templates/.*)$ pass_filenames: false args: - pre-commit diff --git a/CHANGELOG.md b/CHANGELOG.md index 834323eec49..1132b94882b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,42 @@ Versions are `MAJOR.PATCH`. # Changelog +## 3006.1 (2023-05-05) + + +### Fixed + +- Check that the return data from the cloud create function is a dictionary before attempting to pull values out. [#61236](https://github.com/saltstack/salt/issues/61236) +- Ensure NamedLoaderContext's have their value() used if passing to other modules [#62477](https://github.com/saltstack/salt/issues/62477) +- add documentation note about reactor state ids. [#63589](https://github.com/saltstack/salt/issues/63589) +- Added support for ``test=True`` to the ``file.cached`` state module [#63785](https://github.com/saltstack/salt/issues/63785) +- Updated `source_hash` documentation and added a log warning when `source_hash` is used with a source other than `http`, `https` and `ftp`. [#63810](https://github.com/saltstack/salt/issues/63810) +- Fixed clear pillar cache on every highstate and added clean_pillar_cache=False to saltutil functions. [#64081](https://github.com/saltstack/salt/issues/64081) +- Fix dmsetup device names with hyphen being picked up. [#64082](https://github.com/saltstack/salt/issues/64082) +- Update all the scheduler functions to include a fire_event argument which will determine whether to fire the completion event onto the event bus. + This event is only used when these functions are called via the schedule execution modules. + Update all the calls to the schedule related functions in the deltaproxy proxy minion to include fire_event=False, as the event bus is not available when these functions are called. [#64102](https://github.com/saltstack/salt/issues/64102), [#64103](https://github.com/saltstack/salt/issues/64103) +- Default to a 0 timeout if none is given for the terraform roster to avoid `-o ConnectTimeout=None` when using `salt-ssh` [#64109](https://github.com/saltstack/salt/issues/64109) +- Disable class level caching of the file client on `SaltCacheLoader` and properly use context managers to take care of initialization and termination of the file client. [#64111](https://github.com/saltstack/salt/issues/64111) +- Fixed several file client uses which were not properly terminating it by switching to using it as a context manager + whenever possible or making sure `.destroy()` was called when using a context manager was not possible. [#64113](https://github.com/saltstack/salt/issues/64113) +- Fix running setup.py when passing in --salt-config-dir and --salt-cache-dir arguments. 
[#64114](https://github.com/saltstack/salt/issues/64114) +- Moved /etc/salt/proxy and /lib/systemd/system/salt-proxy@.service to the salt-minion DEB package [#64117](https://github.com/saltstack/salt/issues/64117) +- Stop passing `**kwargs` and be explicit about the keyword arguments to pass, namely, to `cp.cache_file` call in `salt.states.pkg` [#64118](https://github.com/saltstack/salt/issues/64118) +- lgpo_reg.set_value now returns ``True`` on success instead of ``None`` [#64126](https://github.com/saltstack/salt/issues/64126) +- Make salt user's home /opt/saltstack/salt [#64141](https://github.com/saltstack/salt/issues/64141) +- Fix cmd.run doesn't output changes in test mode [#64150](https://github.com/saltstack/salt/issues/64150) +- Move salt user and group creation to common package [#64158](https://github.com/saltstack/salt/issues/64158) +- Fixed issue in salt-cloud so that multiple masters specified in the cloud + are written to the minion config properly [#64170](https://github.com/saltstack/salt/issues/64170) +- Make sure the `salt-ssh` CLI calls it's `fsclient.destroy()` method when done. [#64184](https://github.com/saltstack/salt/issues/64184) +- Stop using the deprecated `salt.transport.client` imports. [#64186](https://github.com/saltstack/salt/issues/64186) +- Add a `.pth` to the Salt onedir env to ensure packages in extras are importable. Bump relenv to 0.12.3. [#64192](https://github.com/saltstack/salt/issues/64192) +- Fix ``lgpo_reg`` state to work with User policy [#64200](https://github.com/saltstack/salt/issues/64200) +- Cloud deployment directories are owned by salt user and group [#64204](https://github.com/saltstack/salt/issues/64204) +- ``lgpo_reg`` state now enforces and reports changes to the registry [#64222](https://github.com/saltstack/salt/issues/64222) + + ## 3006.0 (2023-04-18) diff --git a/changelog/62477.fixed.md b/changelog/62477.fixed.md deleted file mode 100644 index 88f47bdb4bd..00000000000 --- a/changelog/62477.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Ensure NamedLoaderContext's have their value() used if passing to other modules diff --git a/changelog/63589.fixed.md b/changelog/63589.fixed.md deleted file mode 100644 index 1f63f9ee993..00000000000 --- a/changelog/63589.fixed.md +++ /dev/null @@ -1 +0,0 @@ -add documentation note about reactor state ids. diff --git a/changelog/64082.fixed.md b/changelog/64082.fixed.md deleted file mode 100644 index c5bbc5a0ccb..00000000000 --- a/changelog/64082.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix dmsetup device names with hyphen being picked up. diff --git a/changelog/64114.fixed.md b/changelog/64114.fixed.md deleted file mode 100644 index f01c5ea9127..00000000000 --- a/changelog/64114.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix running setup.py when passing in --salt-config-dir and --salt-cache-dir arguments. 
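The `changelog/64114.fixed.md` deletion above, like the other `changelog/<issue>.fixed.md` deletions on either side of it, removes a towncrier news fragment: a one-line Markdown file named for the issue it resolves, as described by the contributing-guide excerpts further down in this patch. At release time towncrier folds every fragment into the new CHANGELOG.md section (the 3006.1 block added above), and the fragment files are then deleted, which is what these removals record. As a minimal sketch of the other end of that workflow (the issue number and message here are hypothetical, not taken from this patch), a contributor fixing issue 12345 would have created the fragment with something like:

    echo "Fixed the widget cache not being invalidated on minion restart." > changelog/12345.fixed.md

The `<type>` part of the filename (here `fixed`) selects which section of the generated changelog the entry lands in.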
diff --git a/changelog/64117.fixed.md b/changelog/64117.fixed.md deleted file mode 100644 index 0bca97e167d..00000000000 --- a/changelog/64117.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Moved /etc/salt/proxy and /lib/systemd/system/salt-proxy@.service to the salt-minion DEB package diff --git a/changelog/64118.fixed.md b/changelog/64118.fixed.md deleted file mode 100644 index e7251827e97..00000000000 --- a/changelog/64118.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Stop passing `**kwargs` and be explicit about the keyword arguments to pass, namely, to `cp.cache_file` call in `salt.states.pkg` diff --git a/changelog/64126.fixed.md b/changelog/64126.fixed.md deleted file mode 100644 index fb6cf7c46b4..00000000000 --- a/changelog/64126.fixed.md +++ /dev/null @@ -1 +0,0 @@ -lgpo_reg.set_value now returns ``True`` on success instead of ``None`` diff --git a/changelog/64150.fixed.md b/changelog/64150.fixed.md deleted file mode 100644 index a767e10bf8d..00000000000 --- a/changelog/64150.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix cmd.run doesn't output changes in test mode diff --git a/changelog/64170.fixed.md b/changelog/64170.fixed.md deleted file mode 100644 index 1d20355bf1e..00000000000 --- a/changelog/64170.fixed.md +++ /dev/null @@ -1,2 +0,0 @@ -Fixed issue in salt-cloud so that multiple masters specified in the cloud -are written to the minion config properly diff --git a/cicd/golden-images.json b/cicd/golden-images.json index 0f338a3992b..75341e64aeb 100644 --- a/cicd/golden-images.json +++ b/cicd/golden-images.json @@ -97,7 +97,7 @@ "cloudwatch-agent-available": "true", "instance_type": "m6g.large", "is_windows": "false", - "ssh_username": "cloud-user" + "ssh_username": "centos" }, "centosstream-8": { "ami": "ami-055e35dc7180defad", diff --git a/cicd/shared-gh-workflows-context.yml b/cicd/shared-gh-workflows-context.yml index c6e88fc0c3d..ec3d939fe03 100644 --- a/cicd/shared-gh-workflows-context.yml +++ b/cicd/shared-gh-workflows-context.yml @@ -1,4 +1,4 @@ python_version_linux: "3.10.11" python_version_macos: "3.10.11" python_version_windows: "3.10.11" -relenv_version: "0.11.2" +relenv_version: "0.12.3" diff --git a/conf/master b/conf/master index f542051d762..2c0a5c9cb87 100644 --- a/conf/master +++ b/conf/master @@ -1025,6 +1025,7 @@ # If and only if a master has set ``pillar_cache: True``, the cache TTL controls the amount # of time, in seconds, before the cache is considered invalid by a master and a fresh # pillar is recompiled and stored. +# The cache TTL does not prevent pillar cache from being refreshed before its TTL expires. #pillar_cache_ttl: 3600 # If and only if a master has set `pillar_cache: True`, one of several storage providers diff --git a/conf/suse/master b/conf/suse/master index 7168441dc41..863d8790240 100644 --- a/conf/suse/master +++ b/conf/suse/master @@ -950,6 +950,7 @@ syndic_user: salt # If and only if a master has set ``pillar_cache: True``, the cache TTL controls the amount # of time, in seconds, before the cache is considered invalid by a master and a fresh # pillar is recompiled and stored. +# The cache TTL does not prevent pillar cache from being refreshed before its TTL expires. 
#pillar_cache_ttl: 3600 # If and only if a master has set `pillar_cache: True`, one of several storage providers diff --git a/doc/man/salt-api.1 b/doc/man/salt-api.1 index 0935a743675..379d345536a 100644 --- a/doc/man/salt-api.1 +++ b/doc/man/salt-api.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-API" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt" +.TH "SALT-API" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" .SH NAME salt-api \- salt-api Command .sp diff --git a/doc/man/salt-call.1 b/doc/man/salt-call.1 index f81101ae003..460cf91dddb 100644 --- a/doc/man/salt-call.1 +++ b/doc/man/salt-call.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-CALL" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt" +.TH "SALT-CALL" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" .SH NAME salt-call \- salt-call Documentation .SH SYNOPSIS diff --git a/doc/man/salt-cloud.1 b/doc/man/salt-cloud.1 index c0bc776c4f1..2a75e218e04 100644 --- a/doc/man/salt-cloud.1 +++ b/doc/man/salt-cloud.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-CLOUD" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt" +.TH "SALT-CLOUD" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" .SH NAME salt-cloud \- Salt Cloud Command .sp diff --git a/doc/man/salt-cp.1 b/doc/man/salt-cp.1 index c9cfd69ba8b..74ab95a2bcc 100644 --- a/doc/man/salt-cp.1 +++ b/doc/man/salt-cp.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-CP" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt" +.TH "SALT-CP" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" .SH NAME salt-cp \- salt-cp Documentation .sp diff --git a/doc/man/salt-key.1 b/doc/man/salt-key.1 index 0ff7822c6f9..c4723cae0e0 100644 --- a/doc/man/salt-key.1 +++ b/doc/man/salt-key.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-KEY" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt" +.TH "SALT-KEY" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" .SH NAME salt-key \- salt-key Documentation .SH SYNOPSIS diff --git a/doc/man/salt-master.1 b/doc/man/salt-master.1 index 8eb527cadf3..72fa39ba91d 100644 --- a/doc/man/salt-master.1 +++ b/doc/man/salt-master.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-MASTER" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt" +.TH "SALT-MASTER" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." 
"3006.1" "Salt" .SH NAME salt-master \- salt-master Documentation .sp diff --git a/doc/man/salt-minion.1 b/doc/man/salt-minion.1 index 32421a3f24f..fc550d0085f 100644 --- a/doc/man/salt-minion.1 +++ b/doc/man/salt-minion.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-MINION" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt" +.TH "SALT-MINION" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" .SH NAME salt-minion \- salt-minion Documentation .sp diff --git a/doc/man/salt-proxy.1 b/doc/man/salt-proxy.1 index 57084cc6101..9a78879db3a 100644 --- a/doc/man/salt-proxy.1 +++ b/doc/man/salt-proxy.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-PROXY" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt" +.TH "SALT-PROXY" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" .SH NAME salt-proxy \- salt-proxy Documentation .sp diff --git a/doc/man/salt-run.1 b/doc/man/salt-run.1 index 69b75f76854..d4fbc53dc98 100644 --- a/doc/man/salt-run.1 +++ b/doc/man/salt-run.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-RUN" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt" +.TH "SALT-RUN" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" .SH NAME salt-run \- salt-run Documentation .sp diff --git a/doc/man/salt-ssh.1 b/doc/man/salt-ssh.1 index d3d9749f531..3519bb75e1f 100644 --- a/doc/man/salt-ssh.1 +++ b/doc/man/salt-ssh.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-SSH" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt" +.TH "SALT-SSH" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" .SH NAME salt-ssh \- salt-ssh Documentation .SH SYNOPSIS diff --git a/doc/man/salt-syndic.1 b/doc/man/salt-syndic.1 index 9480755a145..3b50a769071 100644 --- a/doc/man/salt-syndic.1 +++ b/doc/man/salt-syndic.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-SYNDIC" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt" +.TH "SALT-SYNDIC" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" .SH NAME salt-syndic \- salt-syndic Documentation .sp diff --git a/doc/man/salt.1 b/doc/man/salt.1 index 0d522736c30..1c6873a02e1 100644 --- a/doc/man/salt.1 +++ b/doc/man/salt.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt" +.TH "SALT" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." 
"3006.1" "Salt" .SH NAME salt \- salt .SH SYNOPSIS diff --git a/doc/man/salt.7 b/doc/man/salt.7 index 7a08e3aac1b..d50a2d55401 100644 --- a/doc/man/salt.7 +++ b/doc/man/salt.7 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT" "7" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt" +.TH "SALT" "7" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" .SH NAME salt \- Salt Documentation .SH SALT PROJECT @@ -1346,7 +1346,7 @@ You can enable or disable test groups locally by passing their respected flag: \-\-flaky\-jail \- Test that need to be temporarily skipped. .UNINDENT .sp -In Your PR, you can enable or disable test groups by setting a label. +In your PR, you can enable or disable test groups by setting a label. All fast, slow, and core tests specified in the change file will always run. .INDENT 0.0 .IP \(bu 2 @@ -1380,7 +1380,7 @@ But that advice is backwards for the changelog. We follow the our changelog, and use towncrier to generate it for each release. As a contributor, all that means is that you need to add a file to the \fBsalt/changelog\fP directory, using the \fB.\fP format. For -instanch, if you fixed issue 123, you would do: +instance, if you fixed issue 123, you would do: .INDENT 0.0 .INDENT 3.5 .sp @@ -7377,6 +7377,7 @@ Default: \fB3600\fP If and only if a master has set \fBpillar_cache: True\fP, the cache TTL controls the amount of time, in seconds, before the cache is considered invalid by a master and a fresh pillar is recompiled and stored. +The cache TTL does not prevent pillar cache from being refreshed before its TTL expires. .SS \fBpillar_cache_backend\fP .sp New in version 2015.8.8. @@ -11635,10 +11636,7 @@ Changed in version 2018.3.0: Renamed from \fBenvironment\fP to \fBsaltenv\fP\&. ignored and \fBsaltenv\fP will be used. .sp -Normally the minion is not isolated to any single environment on the master -when running states, but the environment can be isolated on the minion side -by statically setting it. Remember that the recommended way to manage -environments is to isolate via the top file. +The default fileserver environment to use when copying files and applying states. .INDENT 0.0 .INDENT 3.5 .sp @@ -14934,6 +14932,7 @@ For reference, see: # If and only if a master has set \(ga\(gapillar_cache: True\(ga\(ga, the cache TTL controls the amount # of time, in seconds, before the cache is considered invalid by a master and a fresh # pillar is recompiled and stored. +# The cache TTL does not prevent pillar cache from being refreshed before its TTL expires. #pillar_cache_ttl: 3600 # If and only if a master has set \(gapillar_cache: True\(ga, one of several storage providers @@ -50676,7 +50675,7 @@ You can enable or disable test groups locally by passing there respected flag: \-\-flaky\-jail .UNINDENT .sp -In Your PR you can enable or disable test groups by setting a label. +In your PR you can enable or disable test groups by setting a label. All thought the fast, slow and core tests specified in the change file will always run. 
.INDENT 0.0 .IP \(bu 2 @@ -61394,7 +61393,7 @@ Add the following to \fB/srv/reactor/revert.sls\fP: .ft C revert\-file: local.state.apply: - \- tgt: {{ data[\(aqdata\(aq][\(aqid\(aq] }} + \- tgt: {{ data[\(aqid\(aq] }} \- arg: \- maintain_important_file .ft P @@ -61411,13 +61410,6 @@ to modify the watched file, it is important to ensure the state applied is also \fI\%idempotent\fP\&. .UNINDENT .UNINDENT -.sp -\fBNOTE:\fP -.INDENT 0.0 -.INDENT 3.5 -The expression \fB{{ data[\(aqdata\(aq][\(aqid\(aq] }}\fP \fI\%is correct\fP as it matches the event structure \fI\%shown above\fP\&. -.UNINDENT -.UNINDENT .SS State SLS .sp Create the state sls file referenced by the reactor sls file. This state file @@ -61838,6 +61830,14 @@ in \fI\%local reactions\fP, but as noted above this is not very user\-friendly. Therefore, the new config schema is recommended if the master is running a supported release. .sp +\fBNOTE:\fP +.INDENT 0.0 +.INDENT 3.5 +State ids of reactors for runners and wheels should all be unique. They can +overwrite each other when added to the async queue causing lost reactions. +.UNINDENT +.UNINDENT +.sp The below two examples are equivalent: .TS center; @@ -61909,6 +61909,14 @@ Like \fI\%runner reactions\fP, the old config schema called for wheel reactions to have arguments passed directly under the name of the \fI\%wheel function\fP (or in \fBarg\fP or \fBkwarg\fP parameters). .sp +\fBNOTE:\fP +.INDENT 0.0 +.INDENT 3.5 +State ids of reactors for runners and wheels should all be unique. They can +overwrite each other when added to the async queue causing lost reactions. +.UNINDENT +.UNINDENT +.sp The below two examples are equivalent: .TS center; @@ -193929,7 +193937,7 @@ Passes through all the parameters described in the \fI\%utils.http.query function\fP: .INDENT 7.0 .TP -.B salt.utils.http.query(url, method=\(aqGET\(aq, params=None, data=None, data_file=None, header_dict=None, header_list=None, header_file=None, username=None, password=None, auth=None, decode=False, decode_type=\(aqauto\(aq, status=False, headers=False, text=False, cookies=None, cookie_jar=None, cookie_format=\(aqlwp\(aq, persist_session=False, session_cookie_jar=None, data_render=False, data_renderer=None, header_render=False, header_renderer=None, template_dict=None, test=False, test_url=None, node=\(aqminion\(aq, port=80, opts=None, backend=None, ca_bundle=None, verify_ssl=None, cert=None, text_out=None, headers_out=None, decode_out=None, stream=False, streaming_callback=None, header_callback=None, handle=False, agent=\(aqSalt/3006.0\(aq, hide_fields=None, raise_error=True, formdata=False, formdata_fieldname=None, formdata_filename=None, decode_body=True, **kwargs) +.B salt.utils.http.query(url, method=\(aqGET\(aq, params=None, data=None, data_file=None, header_dict=None, header_list=None, header_file=None, username=None, password=None, auth=None, decode=False, decode_type=\(aqauto\(aq, status=False, headers=False, text=False, cookies=None, cookie_jar=None, cookie_format=\(aqlwp\(aq, persist_session=False, session_cookie_jar=None, data_render=False, data_renderer=None, header_render=False, header_renderer=None, template_dict=None, test=False, test_url=None, node=\(aqminion\(aq, port=80, opts=None, backend=None, ca_bundle=None, verify_ssl=None, cert=None, text_out=None, headers_out=None, decode_out=None, stream=False, streaming_callback=None, header_callback=None, handle=False, agent=\(aqSalt/3006.1\(aq, hide_fields=None, raise_error=True, formdata=False, formdata_fieldname=None, formdata_filename=None, 
decode_body=True, **kwargs) Query a resource, and decode the return data .UNINDENT .INDENT 7.0 @@ -280078,6 +280086,10 @@ proceess, as grains can affect which modules are available. .B refresh_pillar True Set to \fBFalse\fP to keep pillar data from being refreshed. +.TP +.B clean_pillar_cache +False +Set to \fBTrue\fP to refresh pillar cache. .UNINDENT .sp CLI Examples: @@ -280273,7 +280285,7 @@ salt \(aq*\(aq saltutil.signal_job 15 .UNINDENT .INDENT 0.0 .TP -.B salt.modules.saltutil.sync_all(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None) +.B salt.modules.saltutil.sync_all(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None, clean_pillar_cache=False) Changed in version 2015.8.11,2016.3.2: On masterless minions, pillar modules are now synced, and refreshed when \fBrefresh\fP is set to \fBTrue\fP\&. @@ -280323,6 +280335,10 @@ dictionary of modules to sync based on type .B extmod_blacklist None dictionary of modules to blacklist based on type +.TP +.B clean_pillar_cache +False +Set to \fBTrue\fP to refresh pillar cache. .UNINDENT .sp CLI Examples: @@ -280519,7 +280535,7 @@ salt \(aq*\(aq saltutil.sync_executors saltenv=base,dev .UNINDENT .INDENT 0.0 .TP -.B salt.modules.saltutil.sync_grains(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None) +.B salt.modules.saltutil.sync_grains(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None, clean_pillar_cache=False) New in version 0.10.0. .sp @@ -280547,6 +280563,10 @@ comma\-separated list of modules to sync .B extmod_blacklist None comma\-separated list of modules to blacklist based on type +.TP +.B clean_pillar_cache +False +Set to \fBTrue\fP to refresh pillar cache. .UNINDENT .sp CLI Examples: @@ -280815,7 +280835,7 @@ salt \(aq*\(aq saltutil.sync_output saltenv=base,dev .UNINDENT .INDENT 0.0 .TP -.B salt.modules.saltutil.sync_pillar(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None) +.B salt.modules.saltutil.sync_pillar(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None, clean_pillar_cache=False) New in version 2015.8.11,2016.3.2. .sp @@ -280837,6 +280857,10 @@ comma\-separated list of modules to sync .B extmod_blacklist None comma\-separated list of modules to blacklist based on type +.TP +.B clean_pillar_cache +False +Set to \fBTrue\fP to refresh pillar cache. 
.UNINDENT .sp \fBNOTE:\fP @@ -325208,6 +325232,14 @@ User .sp Default is \fBMachine\fP +.UNINDENT +.TP +.B Raises +.INDENT 7.0 +.IP \(bu 2 +\fI\%SaltInvocationError\fP \-\- Invalid policy_class +.IP \(bu 2 +\fI\%CommandExecutionError\fP \-\- On failure .UNINDENT .TP .B Returns None: Key/value not present .TP .B Return type \fI\%bool\fP -.TP -.B Raises -\fI\%SaltInvocationError\fP \-\- Invalid policy_class .UNINDENT .sp CLI Example: @@ -325264,6 +325293,14 @@ User .sp Default is \fBMachine\fP +.UNINDENT +.TP +.B Raises +.INDENT 7.0 +.IP \(bu 2 +\fI\%SaltInvocationError\fP \-\- Invalid policy_class +.IP \(bu 2 +\fI\%CommandExecutionError\fP \-\- On failure .UNINDENT .TP .B Returns None: If already disabled .TP .B Return type \fI\%bool\fP -.TP -.B Raises -\fI\%SaltInvocationError\fP \-\- Invalid policy_class .UNINDENT .sp CLI Example: @@ -325470,12 +325504,6 @@ Default is \fBMachine\fP .UNINDENT .TP -.B Returns -\fBTrue\fP if successful, otherwise \fBFalse\fP -.TP -.B Return type -\fI\%bool\fP -.TP .B Raises .INDENT 7.0 .IP \(bu 2 @@ -325485,6 +325513,12 @@ Default is \fBMachine\fP .IP \(bu 2 \fI\%SaltInvocationError\fP \-\- v_data doesn\(aqt match v_type .UNINDENT +.TP +.B Returns +\fBTrue\fP if successful, otherwise \fBFalse\fP +.TP +.B Return type +\fI\%bool\fP .UNINDENT .sp CLI Example: @@ -325533,10 +325567,18 @@ Default is \fBMachine\fP .UNINDENT .TP .B Raises +.INDENT 7.0 +.IP \(bu 2 \fI\%SaltInvocationError\fP \-\- Invalid policy class +.IP \(bu 2 +\fI\%CommandExecutionError\fP \-\- On failure +.UNINDENT .TP .B Returns -None +True if successful +.TP +.B Return type +\fI\%bool\fP .UNINDENT .sp CLI Example: @@ -412564,6 +412606,8 @@ tomdroid\-src\-0.7.3.tar.gz: .fi .UNINDENT .UNINDENT +.sp +source_hash is ignored if the file hosted is not on a HTTP, HTTPS or FTP server. .UNINDENT .UNINDENT .INDENT 7.0 @@ -444129,6 +444173,13 @@ If your service states are running into trouble with init system detection, please see the \fI\%Overriding Virtual Module Providers\fP section of Salt\(aqs module documentation to work around possible errors. .sp +For services managed by systemd, the systemd_service module includes a built\-in +feature to reload the daemon when unit files are changed or extended. This +feature is used automatically by the service state and the systemd_service +module when running on a systemd minion, so there is no need to set up your own +methods of reloading the daemon. If you need to manually reload the daemon for +some reason, you can use the \fI\%systemd_service.systemctl_reload\fP function provided by Salt. +.sp \fBNOTE:\fP .INDENT 0.0 .INDENT 3.5 @@ -466637,7 +466688,7 @@ You can enable or disable test groups locally by passing their respected flag: \-\-flaky\-jail \- Test that need to be temporarily skipped. .UNINDENT .sp -In Your PR, you can enable or disable test groups by setting a label. +In your PR, you can enable or disable test groups by setting a label. All fast, slow, and core tests specified in the change file will always run. .INDENT 0.0 .IP \(bu 2 @@ -466671,7 +466722,7 @@ But that advice is backwards for the changelog. We follow the our changelog, and use towncrier to generate it for each release. As a contributor, all that means is that you need to add a file to the \fBsalt/changelog\fP directory, using the \fB<issue #>.<type>\fP format.
For -instanch, if you fixed issue 123, you would do: +instance, if you fixed issue 123, you would do: .INDENT 0.0 .INDENT 3.5 .sp @@ -476582,6 +476633,66 @@ Update to \fBmarkdown\-it\-py==2.2.0\fP due to: .UNINDENT .UNINDENT .UNINDENT +(release\-3006.1)= +.SS Salt 3006.1 release notes +.SS Changelog +.SS Fixed +.INDENT 0.0 +.IP \(bu 2 +Check that the return data from the cloud create function is a dictionary before attempting to pull values out. \fI\%#61236\fP +.IP \(bu 2 +Ensure NamedLoaderContext\(aqs have their value() used if passing to other modules \fI\%#62477\fP +.IP \(bu 2 +add documentation note about reactor state ids. \fI\%#63589\fP +.IP \(bu 2 +Added support for \fBtest=True\fP to the \fBfile.cached\fP state module \fI\%#63785\fP +.IP \(bu 2 +Updated \fBsource_hash\fP documentation and added a log warning when \fBsource_hash\fP is used with a source other than \fBhttp\fP, \fBhttps\fP and \fBftp\fP\&. \fI\%#63810\fP +.IP \(bu 2 +Fixed clear pillar cache on every highstate and added clean_pillar_cache=False to saltutil functions. \fI\%#64081\fP +.IP \(bu 2 +Fix dmsetup device names with hyphen being picked up. \fI\%#64082\fP +.IP \(bu 2 +Update all the scheduler functions to include a fire_event argument which will determine whether to fire the completion event onto the event bus. +This event is only used when these functions are called via the schedule execution modules. +Update all the calls to the schedule related functions in the deltaproxy proxy minion to include fire_event=False, as the event bus is not available when these functions are called. \fI\%#64102\fP, \fI\%#64103\fP +.IP \(bu 2 +Default to a 0 timeout if none is given for the terraform roster to avoid \fB\-o ConnectTimeout=None\fP when using \fBsalt\-ssh\fP \fI\%#64109\fP +.IP \(bu 2 +Disable class level caching of the file client on \fBSaltCacheLoader\fP and properly use context managers to take care of initialization and termination of the file client. \fI\%#64111\fP +.IP \(bu 2 +Fixed several file client uses which were not properly terminating it by switching to using it as a context manager +whenever possible or making sure \fB\&.destroy()\fP was called when using a context manager was not possible. \fI\%#64113\fP +.IP \(bu 2 +Fix running \fI\%setup.py\fP when passing in \-\-salt\-config\-dir and \-\-salt\-cache\-dir arguments. \fI\%#64114\fP +.IP \(bu 2 +Moved /etc/salt/proxy and /lib/systemd/system/salt\-proxy@.service to the salt\-minion DEB package \fI\%#64117\fP +.IP \(bu 2 +Stop passing \fB**kwargs\fP and be explicit about the keyword arguments to pass, namely, to \fBcp.cache_file\fP call in \fBsalt.states.pkg\fP \fI\%#64118\fP +.IP \(bu 2 +lgpo_reg.set_value now returns \fBTrue\fP on success instead of \fBNone\fP \fI\%#64126\fP +.IP \(bu 2 +Make salt user\(aqs home /opt/saltstack/salt \fI\%#64141\fP +.IP \(bu 2 +Fix cmd.run doesn\(aqt output changes in test mode \fI\%#64150\fP +.IP \(bu 2 +Move salt user and group creation to common package \fI\%#64158\fP +.IP \(bu 2 +Fixed issue in salt\-cloud so that multiple masters specified in the cloud +are written to the minion config properly \fI\%#64170\fP +.IP \(bu 2 +Make sure the \fBsalt\-ssh\fP CLI calls it\(aqs \fBfsclient.destroy()\fP method when done. \fI\%#64184\fP +.IP \(bu 2 +Stop using the deprecated \fBsalt.transport.client\fP imports. \fI\%#64186\fP +.IP \(bu 2 +Add a \fB\&.pth\fP to the Salt onedir env to ensure packages in extras are importable. Bump relenv to 0.12.3. 
\fI\%#64192\fP +.IP \(bu 2 +Fix \fBlgpo_reg\fP state to work with User policy \fI\%#64200\fP +.IP \(bu 2 +Cloud deployment directories are owned by salt user and group \fI\%#64204\fP +.IP \(bu 2 +\fBlgpo_reg\fP state now enforces and reports changes to the registry \fI\%#64222\fP +.UNINDENT .sp See \fI\%Install a release candidate\fP for more information about installing an RC when one is available. diff --git a/doc/man/spm.1 b/doc/man/spm.1 index b680a20ddab..90cc6e3d2d7 100644 --- a/doc/man/spm.1 +++ b/doc/man/spm.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SPM" "1" "Generated on April 18, 2023 at 08:56:37 PM UTC." "3006.0" "Salt" +.TH "SPM" "1" "Generated on May 05, 2023 at 05:45:04 PM UTC." "3006.1" "Salt" .SH NAME spm \- Salt Package Manager Command .sp diff --git a/doc/ref/configuration/master.rst b/doc/ref/configuration/master.rst index a6022c94ee1..74d4b58b084 100644 --- a/doc/ref/configuration/master.rst +++ b/doc/ref/configuration/master.rst @@ -5013,6 +5013,7 @@ Default: ``3600`` If and only if a master has set ``pillar_cache: True``, the cache TTL controls the amount of time, in seconds, before the cache is considered invalid by a master and a fresh pillar is recompiled and stored. +The cache TTL does not prevent pillar cache from being refreshed before its TTL expires. .. conf_master:: pillar_cache_backend diff --git a/doc/topics/releases/3006.1.md b/doc/topics/releases/3006.1.md new file mode 100644 index 00000000000..2bf2dea1d31 --- /dev/null +++ b/doc/topics/releases/3006.1.md @@ -0,0 +1,52 @@ +(release-3006.1)= +# Salt 3006.1 release notes + + + + + + + + +## Changelog + +### Fixed + +- Check that the return data from the cloud create function is a dictionary before attempting to pull values out. [#61236](https://github.com/saltstack/salt/issues/61236) +- Ensure NamedLoaderContext's have their value() used if passing to other modules [#62477](https://github.com/saltstack/salt/issues/62477) +- add documentation note about reactor state ids. [#63589](https://github.com/saltstack/salt/issues/63589) +- Added support for ``test=True`` to the ``file.cached`` state module [#63785](https://github.com/saltstack/salt/issues/63785) +- Updated `source_hash` documentation and added a log warning when `source_hash` is used with a source other than `http`, `https` and `ftp`. [#63810](https://github.com/saltstack/salt/issues/63810) +- Fixed clear pillar cache on every highstate and added clean_pillar_cache=False to saltutil functions. [#64081](https://github.com/saltstack/salt/issues/64081) +- Fix dmsetup device names with hyphen being picked up. [#64082](https://github.com/saltstack/salt/issues/64082) +- Update all the scheduler functions to include a fire_event argument which will determine whether to fire the completion event onto the event bus. + This event is only used when these functions are called via the schedule execution modules. + Update all the calls to the schedule related functions in the deltaproxy proxy minion to include fire_event=False, as the event bus is not available when these functions are called. 
[#64102](https://github.com/saltstack/salt/issues/64102), [#64103](https://github.com/saltstack/salt/issues/64103) +- Default to a 0 timeout if none is given for the terraform roster to avoid `-o ConnectTimeout=None` when using `salt-ssh` [#64109](https://github.com/saltstack/salt/issues/64109) +- Disable class level caching of the file client on `SaltCacheLoader` and properly use context managers to take care of initialization and termination of the file client. [#64111](https://github.com/saltstack/salt/issues/64111) +- Fixed several file client uses which were not properly terminating it by switching to using it as a context manager + whenever possible or making sure `.destroy()` was called when using a context manager was not possible. [#64113](https://github.com/saltstack/salt/issues/64113) +- Fix running setup.py when passing in --salt-config-dir and --salt-cache-dir arguments. [#64114](https://github.com/saltstack/salt/issues/64114) +- Moved /etc/salt/proxy and /lib/systemd/system/salt-proxy@.service to the salt-minion DEB package [#64117](https://github.com/saltstack/salt/issues/64117) +- Stop passing `**kwargs` and be explicit about the keyword arguments to pass, namely, to `cp.cache_file` call in `salt.states.pkg` [#64118](https://github.com/saltstack/salt/issues/64118) +- lgpo_reg.set_value now returns ``True`` on success instead of ``None`` [#64126](https://github.com/saltstack/salt/issues/64126) +- Make salt user's home /opt/saltstack/salt [#64141](https://github.com/saltstack/salt/issues/64141) +- Fix cmd.run doesn't output changes in test mode [#64150](https://github.com/saltstack/salt/issues/64150) +- Move salt user and group creation to common package [#64158](https://github.com/saltstack/salt/issues/64158) +- Fixed issue in salt-cloud so that multiple masters specified in the cloud + are written to the minion config properly [#64170](https://github.com/saltstack/salt/issues/64170) +- Make sure the `salt-ssh` CLI calls it's `fsclient.destroy()` method when done. [#64184](https://github.com/saltstack/salt/issues/64184) +- Stop using the deprecated `salt.transport.client` imports. [#64186](https://github.com/saltstack/salt/issues/64186) +- Add a `.pth` to the Salt onedir env to ensure packages in extras are importable. Bump relenv to 0.12.3. [#64192](https://github.com/saltstack/salt/issues/64192) +- Fix ``lgpo_reg`` state to work with User policy [#64200](https://github.com/saltstack/salt/issues/64200) +- Cloud deployment directories are owned by salt user and group [#64204](https://github.com/saltstack/salt/issues/64204) +- ``lgpo_reg`` state now enforces and reports changes to the registry [#64222](https://github.com/saltstack/salt/issues/64222) diff --git a/doc/topics/releases/templates/3006.1.md.template b/doc/topics/releases/templates/3006.1.md.template new file mode 100644 index 00000000000..f5302a4eab2 --- /dev/null +++ b/doc/topics/releases/templates/3006.1.md.template @@ -0,0 +1,15 @@ +(release-3006.1)= +# Salt 3006.1 release notes{{ unreleased }} +{{ warning }} + + + + + +## Changelog +{{ changelog }} diff --git a/pkg/common/conf/master b/pkg/common/conf/master index fcad1961c10..4f0fa646d49 100644 --- a/pkg/common/conf/master +++ b/pkg/common/conf/master @@ -1025,6 +1025,7 @@ user: salt # If and only if a master has set ``pillar_cache: True``, the cache TTL controls the amount # of time, in seconds, before the cache is considered invalid by a master and a fresh # pillar is recompiled and stored. 
diff --git a/pkg/common/env-cleanup-rules.yml b/pkg/common/env-cleanup-rules.yml
index c04e99fdc0a..6a0ff594a97 100644
--- a/pkg/common/env-cleanup-rules.yml
+++ b/pkg/common/env-cleanup-rules.yml
@@ -148,7 +148,6 @@ ci:
       - "**/site-packages/salt/modules/runit.py*"
       - "**/site-packages/salt/modules/s6.py*"
       - "**/site-packages/salt/modules/scsi.py*"
-      - "**/site-packages/salt/modules/seed.py*"
       - "**/site-packages/salt/modules/sensors.py*"
       - "**/site-packages/salt/modules/service.py*"
       - "**/site-packages/salt/modules/shadow.py*"
diff --git a/pkg/common/onedir/_salt_onedir_extras.pth b/pkg/common/onedir/_salt_onedir_extras.pth
new file mode 100644
index 00000000000..1e7742532df
--- /dev/null
+++ b/pkg/common/onedir/_salt_onedir_extras.pth
@@ -0,0 +1 @@
+import _salt_onedir_extras; _salt_onedir_extras.setup(__file__)
diff --git a/pkg/common/onedir/_salt_onedir_extras.py b/pkg/common/onedir/_salt_onedir_extras.py
new file mode 100644
index 00000000000..366136ba2a9
--- /dev/null
+++ b/pkg/common/onedir/_salt_onedir_extras.py
@@ -0,0 +1,18 @@
+import pathlib
+import sys
+
+
+def setup(pth_file_path):
+    # Discover the extras-<major>.<minor> directory
+    extras_parent_path = pathlib.Path(pth_file_path).resolve().parent.parent
+    if not sys.platform.startswith("win"):
+        extras_parent_path = extras_parent_path.parent
+
+    extras_path = str(extras_parent_path / "extras-{}.{}".format(*sys.version_info))
+
+    if extras_path in sys.path and sys.path[0] != extras_path:
+        # The extras directory must come first
+        sys.path.remove(extras_path)
+
+    if extras_path not in sys.path:
+        sys.path.insert(0, extras_path)
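The `.pth`/module pair above is what makes the onedir's `extras-<major>.<minor>` directory importable: CPython's `site` module executes any line in a `.pth` file that begins with `import` at interpreter startup. A self-contained sketch of the same mechanism, with hypothetical paths rather than the actual onedir layout:

```python
import pathlib
import sys

# A .pth file dropped into site-packages whose line begins with "import" is
# executed by the site module at startup. This sketch mirrors the onedir
# trick: prepend a sibling "extras" directory to sys.path so packages
# installed there win import resolution. The directory name is illustrative.


def prepend_to_sys_path(directory):
    """Idempotently put *directory* at the front of sys.path."""
    directory = str(pathlib.Path(directory).resolve())
    if directory in sys.path:
        sys.path.remove(directory)
    sys.path.insert(0, directory)


if __name__ == "__main__":
    prepend_to_sys_path("extras-{}.{}".format(*sys.version_info))
    print(sys.path[0])  # the extras directory now shadows bundled packages
```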
diff --git a/pkg/debian/changelog b/pkg/debian/changelog
index b014ff40a01..035085104b3 100644
--- a/pkg/debian/changelog
+++ b/pkg/debian/changelog
@@ -1,3 +1,41 @@
+salt (3006.1) stable; urgency=medium
+
+
+  # Fixed
+
+  * Check that the return data from the cloud create function is a dictionary before attempting to pull values out. [#61236](https://github.com/saltstack/salt/issues/61236)
+  * Ensure NamedLoaderContext's have their value() used if passing to other modules [#62477](https://github.com/saltstack/salt/issues/62477)
+  * add documentation note about reactor state ids. [#63589](https://github.com/saltstack/salt/issues/63589)
+  * Added support for ``test=True`` to the ``file.cached`` state module [#63785](https://github.com/saltstack/salt/issues/63785)
+  * Updated `source_hash` documentation and added a log warning when `source_hash` is used with a source other than `http`, `https` and `ftp`. [#63810](https://github.com/saltstack/salt/issues/63810)
+  * Fixed clear pillar cache on every highstate and added clean_pillar_cache=False to saltutil functions. [#64081](https://github.com/saltstack/salt/issues/64081)
+  * Fix dmsetup device names with hyphen being picked up. [#64082](https://github.com/saltstack/salt/issues/64082)
+  * Update all the scheduler functions to include a fire_event argument which will determine whether to fire the completion event onto the event bus.
+    This event is only used when these functions are called via the schedule execution modules.
+    Update all the calls to the schedule related functions in the deltaproxy proxy minion to include fire_event=False, as the event bus is not available when these functions are called. [#64102](https://github.com/saltstack/salt/issues/64102), [#64103](https://github.com/saltstack/salt/issues/64103)
+  * Default to a 0 timeout if none is given for the terraform roster to avoid `-o ConnectTimeout=None` when using `salt-ssh` [#64109](https://github.com/saltstack/salt/issues/64109)
+  * Disable class level caching of the file client on `SaltCacheLoader` and properly use context managers to take care of initialization and termination of the file client. [#64111](https://github.com/saltstack/salt/issues/64111)
+  * Fixed several file client uses that were not being properly terminated, by switching to a context manager
+    whenever possible or making sure `.destroy()` was called when a context manager could not be used. [#64113](https://github.com/saltstack/salt/issues/64113)
+  * Fix running setup.py when passing in --salt-config-dir and --salt-cache-dir arguments. [#64114](https://github.com/saltstack/salt/issues/64114)
+  * Moved /etc/salt/proxy and /lib/systemd/system/salt-proxy@.service to the salt-minion DEB package [#64117](https://github.com/saltstack/salt/issues/64117)
+  * Stop passing `**kwargs` and be explicit about the keyword arguments to pass, namely, to `cp.cache_file` call in `salt.states.pkg` [#64118](https://github.com/saltstack/salt/issues/64118)
+  * lgpo_reg.set_value now returns ``True`` on success instead of ``None`` [#64126](https://github.com/saltstack/salt/issues/64126)
+  * Make salt user's home /opt/saltstack/salt [#64141](https://github.com/saltstack/salt/issues/64141)
+  * Fix `cmd.run` not outputting changes in test mode [#64150](https://github.com/saltstack/salt/issues/64150)
+  * Move salt user and group creation to common package [#64158](https://github.com/saltstack/salt/issues/64158)
+  * Fixed issue in salt-cloud so that multiple masters specified in the cloud
+    are written to the minion config properly [#64170](https://github.com/saltstack/salt/issues/64170)
+  * Make sure the `salt-ssh` CLI calls its `fsclient.destroy()` method when done. [#64184](https://github.com/saltstack/salt/issues/64184)
+  * Stop using the deprecated `salt.transport.client` imports. [#64186](https://github.com/saltstack/salt/issues/64186)
+  * Add a `.pth` to the Salt onedir env to ensure packages in extras are importable. Bump relenv to 0.12.3. [#64192](https://github.com/saltstack/salt/issues/64192)
+  * Fix ``lgpo_reg`` state to work with User policy [#64200](https://github.com/saltstack/salt/issues/64200)
+  * Cloud deployment directories are owned by salt user and group [#64204](https://github.com/saltstack/salt/issues/64204)
+  * ``lgpo_reg`` state now enforces and reports changes to the registry [#64222](https://github.com/saltstack/salt/issues/64222)
+
+
+ -- Salt Project Packaging  Fri, 05 May 2023 17:44:35 +0000
+
 salt (3006.0) stable; urgency=medium
diff --git a/pkg/debian/rules b/pkg/debian/rules
index ebc6bdff52d..a73b38b4041 100755
--- a/pkg/debian/rules
+++ b/pkg/debian/rules
@@ -29,6 +29,7 @@ override_dh_auto_build:
 	build/onedir/venv/bin/tools pkg build salt-onedir . --package-name build/onedir/salt --platform linux
 	build/onedir/venv/bin/tools pkg pre-archive-cleanup --pkg build/onedir/salt
 
+else
 override_dh_auto_build:
 # The relenv onedir is being provided, all setup up until Salt is installed
@@ -38,6 +39,7 @@ override_dh_auto_build:
 # Fix any hardcoded paths to the relenv python binary on any of the scripts installed in the /bin directory
 	find build/onedir/salt/bin/ -type f -exec sed -i 's:#!/\(.*\)salt/bin/python3:#!/bin/sh\n"exec" "$$(dirname $$(readlink -f $$0))/python3" "$$0" "$$@":g' {} \;
 
+endif
 
 # dh_auto_install tries to invoke distutils causing failures.
@@ -47,4 +49,9 @@ override_dh_auto_install:
 override_dh_install:
 	mkdir -p debian/salt-common/opt/saltstack
 	cp -R build/onedir/salt debian/salt-common/opt/saltstack/
+
+	# Generate master config
+	mkdir -p debian/salt-master/etc/salt
+	sed 's/#user: root/user: salt/g' conf/master > debian/salt-master/etc/salt/master
+
 	dh_install
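The `find ... -exec sed` line in the rules file above turns a hardcoded `#!/...salt/bin/python3` shebang into a relocatable `/bin/sh` trampoline (the doubled `$$` is Makefile escaping for `$`). A rough Python rendering of that substitution, included only to make the sed expression readable; it is a sketch, not the packaging tooling itself:

```python
import re

# The sed expression replaces "#!/<anything>salt/bin/python3" with a POSIX
# shell trampoline that re-executes the script with the python3 binary that
# sits next to the script, making the package relocatable on disk.
SHEBANG_RE = re.compile(r"^#!/(.*)salt/bin/python3")
TRAMPOLINE = '#!/bin/sh\n"exec" "$(dirname $(readlink -f $0))/python3" "$0" "$@"'


def rewrite_shebang(script_text):
    """Return script_text with a hardcoded relenv shebang made relocatable."""
    first_line, sep, rest = script_text.partition("\n")
    if SHEBANG_RE.match(first_line):
        return TRAMPOLINE + sep + rest
    return script_text


print(rewrite_shebang("#!/opt/saltstack/salt/bin/python3\nprint('hi')"))
```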
diff --git a/pkg/debian/salt-cloud.postinst b/pkg/debian/salt-cloud.postinst
new file mode 100644
index 00000000000..12a955b9349
--- /dev/null
+++ b/pkg/debian/salt-cloud.postinst
@@ -0,0 +1,5 @@
+case "$1" in
+  configure)
+    chown -R salt:salt /etc/salt/cloud.deploy.d /opt/saltstack/salt/lib/python3.10/site-packages/salt/cloud/deploy
+    ;;
+esac
diff --git a/pkg/debian/salt-common.preinst b/pkg/debian/salt-common.preinst
new file mode 100644
index 00000000000..48816330357
--- /dev/null
+++ b/pkg/debian/salt-common.preinst
@@ -0,0 +1,39 @@
+case "$1" in
+  install|upgrade)
+    [ -z "$SALT_HOME" ] && SALT_HOME=/opt/saltstack/salt
+    [ -z "$SALT_USER" ] && SALT_USER=salt
+    [ -z "$SALT_NAME" ] && SALT_NAME="Salt"
+    [ -z "$SALT_GROUP" ] && SALT_GROUP=salt
+
+    # create user to avoid running server as root
+    # 1. create group if not existing
+    if ! getent group | grep -q "^$SALT_GROUP:" ; then
+      echo -n "Adding group $SALT_GROUP.."
+      addgroup --quiet --system $SALT_GROUP 2>/dev/null ||true
+      echo "..done"
+    fi
+    # 2. create homedir if not existing
+    test -d $SALT_HOME || mkdir -p $SALT_HOME
+    # 3. create user if not existing
+    if ! getent passwd | grep -q "^$SALT_USER:"; then
+      echo -n "Adding system user $SALT_USER.."
+      useradd --system \
+        --no-create-home \
+        -s /sbin/nologin \
+        -g $SALT_GROUP \
+        $SALT_USER 2>/dev/null || true
+      echo "..done"
+    fi
+    # 4. adjust passwd entry
+    usermod -c "$SALT_NAME" \
+      -d $SALT_HOME \
+      -g $SALT_GROUP \
+      $SALT_USER
+    # 5. adjust file and directory permissions
+    if !
dpkg-statoverride --list $SALT_HOME >/dev/null + then + chown -R $SALT_USER:$SALT_GROUP $SALT_HOME + chmod u=rwx,g=rwx,o=rx $SALT_HOME + fi + ;; +esac diff --git a/pkg/debian/salt-master.install b/pkg/debian/salt-master.install index 35ea3571d08..3d665d5b164 100644 --- a/pkg/debian/salt-master.install +++ b/pkg/debian/salt-master.install @@ -1,2 +1 @@ -pkg/common/conf/master /etc/salt pkg/common/salt-master.service /lib/systemd/system diff --git a/pkg/debian/salt-master.postinst b/pkg/debian/salt-master.postinst index 6ac58f198f9..1c78ee73478 100644 --- a/pkg/debian/salt-master.postinst +++ b/pkg/debian/salt-master.postinst @@ -1,3 +1,6 @@ -adduser --system salt --group -chown -R salt:salt /etc/salt /var/log/salt /opt/saltstack/salt/ /var/cache/salt/ /var/run/salt -if command -v systemctl; then systemctl enable salt-master; fi +case "$1" in + configure) + chown -R salt:salt /etc/salt /var/log/salt /opt/saltstack/salt/ /var/cache/salt/ /var/run/salt + if command -v systemctl; then systemctl enable salt-master; fi + ;; +esac diff --git a/pkg/macos/package.sh b/pkg/macos/package.sh index 6221fde2947..70734b89b78 100755 --- a/pkg/macos/package.sh +++ b/pkg/macos/package.sh @@ -250,6 +250,7 @@ else fi +PKG_FILE="$SCRIPT_DIR/salt-$VERSION-py3-$CPU_ARCH.pkg" if [ "${SIGN}" -eq 1 ]; then _msg "Building the product package (signed)" # This is not a nightly build, so we want to sign it @@ -260,7 +261,7 @@ if [ "${SIGN}" -eq 1 ]; then --version="$VERSION" \ --sign "$DEV_INSTALL_CERT" \ --timestamp \ - "$FILE" > "$CMD_OUTPUT" 2>&1; then + "$PKG_FILE" > "$CMD_OUTPUT" 2>&1; then _success else _failure @@ -268,12 +269,11 @@ if [ "${SIGN}" -eq 1 ]; then else _msg "Building the product package (unsigned)" # This is a nightly build, so we don't sign it - FILE="$SCRIPT_DIR/salt-$VERSION-py3-$CPU_ARCH-unsigned.pkg" if productbuild --resources="$SCRIPT_DIR/pkg-resources" \ --distribution="$DIST_XML" \ --package-path="$SCRIPT_DIR/salt-src-$VERSION-py3-$CPU_ARCH.pkg" \ --version="$VERSION" \ - "$FILE" > "$CMD_OUTPUT" 2>&1; then + "$PKG_FILE" > "$CMD_OUTPUT" 2>&1; then _success else _failure diff --git a/pkg/rpm/salt.spec b/pkg/rpm/salt.spec index 25a28ea59dd..75b186f1cc1 100644 --- a/pkg/rpm/salt.spec +++ b/pkg/rpm/salt.spec @@ -14,6 +14,10 @@ %global __requires_exclude_from ^.*\\.so.*$ %define _source_payload w2.gzdio %define _binary_payload w2.gzdio +%define _SALT_GROUP salt +%define _SALT_USER salt +%define _SALT_NAME Salt +%define _SALT_HOME /opt/saltstack/salt # Disable python bytecompile for MANY reasons %global __os_install_post %(echo '%{__os_install_post}' | sed -e 's!/usr/lib[^[:space:]]*/brp-python-bytecompile[[:space:]].*$!!g') @@ -21,7 +25,7 @@ %define fish_dir %{_datadir}/fish/vendor_functions.d Name: salt -Version: 3006.0 +Version: 3006.1 Release: 0 Summary: A parallel remote execution system Group: System Environment/Daemons @@ -43,9 +47,13 @@ BuildRequires: python3 BuildRequires: python3-pip BuildRequires: openssl BuildRequires: git + +# rhel is not defined on all rpm based distros. +%if %{?rhel:1}%{!?rhel:0} %if %{rhel} >= 9 BuildRequires: libxcrypt-compat %endif +%endif %description Salt is a distributed remote execution system used to execute commands and @@ -140,8 +148,12 @@ cd $RPM_BUILD_DIR # the /bin directory find $RPM_BUILD_DIR/build/salt/bin/ -type f -exec sed -i 's:#!/\(.*\)salt/bin/python3:#!/bin/sh\n"exec" "$(dirname $(readlink -f $0))/python3" "$0" "$@":g' {} \; - $RPM_BUILD_DIR/build/venv/bin/tools pkg build salt-onedir . 
--package-name $RPM_BUILD_DIR/build/salt --platform linux + $RPM_BUILD_DIR/build/venv/bin/tools pkg build salt-onedir . --package-name $RPM_BUILD_DIR/build/salt --platform linux $RPM_BUILD_DIR/build/venv/bin/tools pkg pre-archive-cleanup --pkg $RPM_BUILD_DIR/build/salt + + # Generate master config + sed 's/#user: root/user: salt/g' %{_salt_src}/conf/master > $RPM_BUILD_DIR/build/master + %else # The relenv onedir is being provided, all setup up until Salt is installed # is expected to be done @@ -151,6 +163,9 @@ cd $RPM_BUILD_DIR # Fix any hardcoded paths to the relenv python binary on any of the scripts installed in the /bin directory find salt/bin/ -type f -exec sed -i 's:#!/\(.*\)salt/bin/python3:#!/bin/sh\n"exec" "$$(dirname $$(readlink -f $$0))/python3" "$$0" "$$@":g' {} \; + # Generate master config + sed 's/#user: root/user: salt/g' %{_salt_src}/conf/master > $RPM_BUILD_DIR/build/master + cd $RPM_BUILD_DIR %endif @@ -208,7 +223,7 @@ install -m 0755 %{buildroot}/opt/saltstack/salt/salt-pip %{buildroot}%{_bindir}/ # Add the config files install -p -m 0640 %{_salt_src}/conf/minion %{buildroot}%{_sysconfdir}/salt/minion -install -p -m 0640 %{_salt_src}/pkg/common/conf/master %{buildroot}%{_sysconfdir}/salt/master +install -p -m 0640 $RPM_BUILD_DIR/build/master %{buildroot}%{_sysconfdir}/salt/master install -p -m 0640 %{_salt_src}/conf/cloud %{buildroot}%{_sysconfdir}/salt/cloud install -p -m 0640 %{_salt_src}/conf/roster %{buildroot}%{_sysconfdir}/salt/roster install -p -m 0640 %{_salt_src}/conf/proxy %{buildroot}%{_sysconfdir}/salt/proxy @@ -274,8 +289,6 @@ rm -rf %{buildroot} %dir %{_sysconfdir}/salt/pki - - %files master %defattr(-,root,root) %doc %{_mandir}/man7/salt.7* @@ -307,6 +320,7 @@ rm -rf %{buildroot} %dir %attr(0750, salt, salt) %{_var}/cache/salt/master/syndics/ %dir %attr(0750, salt, salt) %{_var}/cache/salt/master/tokens/ + %files minion %defattr(-,root,root) %doc %{_mandir}/man1/salt-call.1* @@ -323,17 +337,20 @@ rm -rf %{buildroot} %dir %{_sysconfdir}/salt/minion.d %dir %attr(0750, root, root) %{_var}/cache/salt/minion/ + %files syndic %doc %{_mandir}/man1/salt-syndic.1* %{_bindir}/salt-syndic %{_unitdir}/salt-syndic.service + %files api %defattr(-,root,root) %doc %{_mandir}/man1/salt-api.1* %{_bindir}/salt-api %{_unitdir}/salt-api.service + %files cloud %doc %{_mandir}/man1/salt-cloud.1* %{_bindir}/salt-cloud @@ -344,36 +361,65 @@ rm -rf %{buildroot} %{_sysconfdir}/salt/cloud.providers.d %config(noreplace) %{_sysconfdir}/salt/cloud + %files ssh %doc %{_mandir}/man1/salt-ssh.1* %{_bindir}/salt-ssh %config(noreplace) %{_sysconfdir}/salt/roster -# Add salt user/group for Salt Master -%pre master -getent group salt >/dev/null || groupadd -r salt -getent passwd salt >/dev/null || \ - useradd -r -g salt -s /sbin/nologin \ - -c "Salt user for Salt Master" salt + +%pre +# create user to avoid running server as root +# 1. create group if not existing +if ! getent group %{_SALT_GROUP}; then + groupadd --system %{_SALT_GROUP} 2>/dev/null ||true +fi +# 2. create homedir if not existing +test -d %{_SALT_HOME} || mkdir -p %{_SALT_HOME} +# 3. create user if not existing +# -g %{_SALT_GROUP} \ +if ! getent passwd | grep -q "^%{_SALT_USER}:"; then + useradd --system \ + --no-create-home \ + -s /sbin/nologin \ + -g %{_SALT_GROUP} \ + %{_SALT_USER} 2>/dev/null || true +fi +# 4. adjust passwd entry +usermod -c "%{_SALT_NAME}" \ + -d %{_SALT_HOME} \ + -g %{_SALT_GROUP} \ + %{_SALT_USER} +# 5. 
adjust file and directory permissions
+chown -R %{_SALT_USER}:%{_SALT_GROUP} %{_SALT_HOME}
 
 # assumes systemd for RHEL 7 & 8 & 9
 %preun master
 # RHEL 9 is giving warning msg if syndic is not installed, suppress it
 %systemd_preun salt-syndic.service > /dev/null 2>&1
+
 %preun minion
 %systemd_preun salt-minion.service
+
 %preun api
 %systemd_preun salt-api.service
+
 %post
+chown -R %{_SALT_USER}:%{_SALT_GROUP} %{_SALT_HOME}
+chmod u=rwx,g=rwx,o=rx %{_SALT_HOME}
 ln -s -f /opt/saltstack/salt/spm %{_bindir}/spm
 ln -s -f /opt/saltstack/salt/salt-pip %{_bindir}/salt-pip
+
 %post cloud
+chown -R salt:salt /etc/salt/cloud.deploy.d
+chown -R salt:salt /opt/saltstack/salt/lib/python3.10/site-packages/salt/cloud/deploy
 ln -s -f /opt/saltstack/salt/salt-cloud %{_bindir}/salt-cloud
+
 %post master
 %systemd_post salt-master.service
 ln -s -f /opt/saltstack/salt/salt %{_bindir}/salt
@@ -450,6 +496,41 @@ fi
 
 %changelog
+* Fri May 05 2023 Salt Project Packaging - 3006.1
+
+# Fixed
+
+- Check that the return data from the cloud create function is a dictionary before attempting to pull values out. [#61236](https://github.com/saltstack/salt/issues/61236)
+- Ensure NamedLoaderContext's have their value() used if passing to other modules [#62477](https://github.com/saltstack/salt/issues/62477)
+- add documentation note about reactor state ids. [#63589](https://github.com/saltstack/salt/issues/63589)
+- Added support for ``test=True`` to the ``file.cached`` state module [#63785](https://github.com/saltstack/salt/issues/63785)
+- Updated `source_hash` documentation and added a log warning when `source_hash` is used with a source other than `http`, `https` and `ftp`. [#63810](https://github.com/saltstack/salt/issues/63810)
+- Fixed clear pillar cache on every highstate and added clean_pillar_cache=False to saltutil functions. [#64081](https://github.com/saltstack/salt/issues/64081)
+- Fix dmsetup device names with hyphen being picked up. [#64082](https://github.com/saltstack/salt/issues/64082)
+- Update all the scheduler functions to include a fire_event argument which will determine whether to fire the completion event onto the event bus.
+  This event is only used when these functions are called via the schedule execution modules.
+  Update all the calls to the schedule related functions in the deltaproxy proxy minion to include fire_event=False, as the event bus is not available when these functions are called. [#64102](https://github.com/saltstack/salt/issues/64102), [#64103](https://github.com/saltstack/salt/issues/64103)
+- Default to a 0 timeout if none is given for the terraform roster to avoid `-o ConnectTimeout=None` when using `salt-ssh` [#64109](https://github.com/saltstack/salt/issues/64109)
+- Disable class level caching of the file client on `SaltCacheLoader` and properly use context managers to take care of initialization and termination of the file client. [#64111](https://github.com/saltstack/salt/issues/64111)
+- Fixed several file client uses that were not being properly terminated, by switching to a context manager
+  whenever possible or making sure `.destroy()` was called when a context manager could not be used. [#64113](https://github.com/saltstack/salt/issues/64113)
+- Fix running setup.py when passing in --salt-config-dir and --salt-cache-dir arguments. [#64114](https://github.com/saltstack/salt/issues/64114)
+- Moved /etc/salt/proxy and /lib/systemd/system/salt-proxy@.service to the salt-minion DEB package [#64117](https://github.com/saltstack/salt/issues/64117)
+- Stop passing `**kwargs` and be explicit about the keyword arguments to pass, namely, to `cp.cache_file` call in `salt.states.pkg` [#64118](https://github.com/saltstack/salt/issues/64118)
+- lgpo_reg.set_value now returns ``True`` on success instead of ``None`` [#64126](https://github.com/saltstack/salt/issues/64126)
+- Make salt user's home /opt/saltstack/salt [#64141](https://github.com/saltstack/salt/issues/64141)
+- Fix `cmd.run` not outputting changes in test mode [#64150](https://github.com/saltstack/salt/issues/64150)
+- Move salt user and group creation to common package [#64158](https://github.com/saltstack/salt/issues/64158)
+- Fixed issue in salt-cloud so that multiple masters specified in the cloud
+  are written to the minion config properly [#64170](https://github.com/saltstack/salt/issues/64170)
+- Make sure the `salt-ssh` CLI calls its `fsclient.destroy()` method when done. [#64184](https://github.com/saltstack/salt/issues/64184)
+- Stop using the deprecated `salt.transport.client` imports. [#64186](https://github.com/saltstack/salt/issues/64186)
+- Add a `.pth` to the Salt onedir env to ensure packages in extras are importable. Bump relenv to 0.12.3. [#64192](https://github.com/saltstack/salt/issues/64192)
+- Fix ``lgpo_reg`` state to work with User policy [#64200](https://github.com/saltstack/salt/issues/64200)
+- Cloud deployment directories are owned by salt user and group [#64204](https://github.com/saltstack/salt/issues/64204)
+- ``lgpo_reg`` state now enforces and reports changes to the registry [#64222](https://github.com/saltstack/salt/issues/64222)
+
+
 * Tue Apr 18 2023 Salt Project Packaging - 3006.0
 
 # Removed
diff --git a/pkg/tests/conftest.py b/pkg/tests/conftest.py
index d8b39e7070a..89f54fe2f72 100644
--- a/pkg/tests/conftest.py
+++ b/pkg/tests/conftest.py
@@ -21,6 +21,7 @@ from tests.support.helpers import (
     SaltPkgInstall,
     TestUser,
 )
+from tests.support.sminion import create_sminion
 
 log = logging.getLogger(__name__)
 
@@ -33,6 +34,16 @@ def version(install_salt):
     return install_salt.get_version(version_only=True)
 
 
+@pytest.fixture(scope="session")
+def sminion():
+    return create_sminion()
+
+
+@pytest.fixture(scope="session")
+def grains(sminion):
+    return sminion.opts["grains"].copy()
+
+
 def pytest_addoption(parser):
     """
     register argparse-style options and ini-style config values.
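The session-scoped `sminion`/`grains` fixtures added to the conftest above give package tests cheap access to OS metadata. A hedged usage sketch, assuming these fixtures are available to the test; the test name and grain values are illustrative:

```python
import pytest


def test_only_on_debian_family(grains):
    """Illustrative: branch package assertions on the grains fixture."""
    if grains["os_family"] != "Debian":
        pytest.skip(f"Not a Debian-family host: {grains['osfinger']}")
    assert grains["os"] in ("Debian", "Ubuntu")
```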
@@ -463,12 +474,17 @@ def extras_pypath(): extras_dir = "extras-{}.{}".format(*sys.version_info) if platform.is_windows(): return pathlib.Path( - os.getenv("ProgramFiles"), "Salt Project", "Salt", extras_dir, "bin" + os.getenv("ProgramFiles"), "Salt Project", "Salt", extras_dir ) elif platform.is_darwin(): - return pathlib.Path(f"/opt", "salt", extras_dir, "bin") + return pathlib.Path("/opt", "salt", extras_dir) else: - return pathlib.Path(f"/opt", "saltstack", "salt", extras_dir, "bin") + return pathlib.Path("/opt", "saltstack", "salt", extras_dir) + + +@pytest.fixture(scope="module") +def extras_pypath_bin(extras_pypath): + return extras_pypath / "bin" @pytest.fixture(scope="module") @@ -476,7 +492,7 @@ def salt_api(salt_master, install_salt, extras_pypath): """ start up and configure salt_api """ - shutil.rmtree(str(extras_pypath.parent), ignore_errors=True) + shutil.rmtree(str(extras_pypath), ignore_errors=True) start_timeout = None if platform.is_windows() and install_salt.singlebin: start_timeout = 240 diff --git a/pkg/tests/download/test_pkg_download.py b/pkg/tests/download/test_pkg_download.py index bbe26df549f..5bb0e3a96a4 100644 --- a/pkg/tests/download/test_pkg_download.py +++ b/pkg/tests/download/test_pkg_download.py @@ -1,229 +1,55 @@ """ Test Salt Pkg Downloads """ +import contextlib import logging import os import pathlib -import re import shutil -import attr import packaging import pytest from pytestskipmarkers.utils import platform -from saltfactories.utils import random_string log = logging.getLogger(__name__) -@attr.s(kw_only=True, slots=True) -class PkgImage: - name = attr.ib() - os_type = attr.ib() - os_version = attr.ib() - os_codename = attr.ib(default=None) - container_id = attr.ib() - container = attr.ib(default=None) - - def __str__(self): - return f"{self.container_id}" - - -def get_test_versions(): - test_versions = [] - - containers = [ - { - "image": "ghcr.io/saltstack/salt-ci-containers/amazon-linux:2", - "os_type": "amazon", - "os_version": 2, - "container_id": "amazon_2", - }, - { - "image": "ghcr.io/saltstack/salt-ci-containers/centos:7", - "os_type": "redhat", - "os_version": 7, - "container_id": "centos_7", - }, - { - "image": "ghcr.io/saltstack/salt-ci-containers/centos-stream:8", - "os_type": "redhat", - "os_version": 8, - "container_id": "centosstream_8", - }, - { - "image": "ghcr.io/saltstack/salt-ci-containers/centos-stream:9", - "os_type": "redhat", - "os_version": 9, - "container_id": "centosstream_9", - }, - { - "image": "ghcr.io/saltstack/salt-ci-containers/fedora:36", - "os_type": "fedora", - "os_version": 36, - "container_id": "fedora_36", - }, - { - "image": "ghcr.io/saltstack/salt-ci-containers/fedora:37", - "os_type": "fedora", - "os_version": 37, - "container_id": "fedora_37", - }, - { - "image": "ghcr.io/saltstack/salt-ci-containers/fedora:38", - "os_type": "fedora", - "os_version": 38, - "container_id": "fedora_38", - }, - { - "image": "ghcr.io/saltstack/salt-ci-containers/debian:10", - "os_type": "debian", - "os_version": 10, - "os_codename": "buster", - "container_id": "debian_10", - }, - { - "image": "ghcr.io/saltstack/salt-ci-containers/debian:11", - "os_type": "debian", - "os_version": 11, - "os_codename": "bullseye", - "container_id": "debian_11", - }, - { - "image": "ghcr.io/saltstack/salt-ci-containers/ubuntu:20.04", - "os_type": "ubuntu", - "os_version": 20.04, - "os_codename": "focal", - "container_id": "ubuntu_20_04", - }, - { - "image": "ghcr.io/saltstack/salt-ci-containers/ubuntu:22.04", - "os_type": "ubuntu", - 
"os_version": 22.04, - "os_codename": "jammy", - "container_id": "ubuntu_22_04", - }, - { - "image": "ghcr.io/saltstack/salt-ci-containers/photon:3", - "os_type": "photon", - "os_version": 3, - "container_id": "photon_3", - }, - { - "image": "ghcr.io/saltstack/salt-ci-containers/photon:4", - "os_type": "photon", - "os_version": 4, - "container_id": "photon_4", - }, - ] - for container in containers: - test_versions.append( - PkgImage( - name=container["image"], - os_type=container["os_type"], - os_version=container["os_version"], - os_codename=container.get("os_codename", ""), - container_id=container["container_id"], - ) - ) - - return test_versions - - -def get_container_type_id(value): - return f"{value}" - - -@pytest.fixture(scope="module", params=get_test_versions(), ids=get_container_type_id) -def download_test_image(request): - return request.param - - def get_salt_test_commands(): - salt_release = get_salt_release() if platform.is_windows(): if packaging.version.parse(salt_release) > packaging.version.parse("3005"): salt_test_commands = [ - "salt-call.exe --local test.versions", - "salt-call.exe --local grains.items", - "salt-minion.exe --version", + ["salt-call.exe", "--local", "test.versions"], + ["salt-call.exe", "--local", "grains.items"], + ["salt-minion.exe", "--version"], ] else: salt_test_commands = [ - "salt-call.bat --local test.versions", - "salt-call.bat --local grains.items", - "salt.bat --version", - "salt-master.bat --version", - "salt-minion.bat --version", - "salt-ssh.bat --version", - "salt-syndic.bat --version", - "salt-api.bat --version", - "salt-cloud.bat --version", + ["salt-call.bat", "--local", "test.versions"], + ["salt-call.bat", "--local", "grains.items"], + ["salt.bat", "--version"], + ["salt-master.bat", "--version"], + ["salt-minion.bat", "--version"], + ["salt-ssh.bat", "--version"], + ["salt-syndic.bat", "--version"], + ["salt-api.bat", "--version"], + ["salt-cloud.bat", "--version"], ] else: salt_test_commands = [ - "salt-call --local test.versions", - "salt-call --local grains.items", - "salt --version", - "salt-master --version", - "salt-minion --version", - "salt-ssh --version", - "salt-syndic --version", - "salt-api --version", - "salt-cloud --version", + ["salt-call", "--local", "test.versions"], + ["salt-call", "--local", "grains.items"], + ["salt", "--version"], + ["salt-master", "--version"], + ["salt-minion", "--version"], + ["salt-ssh", "--version"], + ["salt-syndic", "--version"], + ["salt-api", "--version"], + ["salt-cloud", "--version"], ] return salt_test_commands -@pytest.fixture(scope="module") -def pkg_container( - salt_factories, - download_test_image, - root_url, - salt_release, - tmp_path_factory, - gpg_key_name, -): - downloads_path = tmp_path_factory.mktemp("downloads") - container = salt_factories.get_container( - random_string(f"{download_test_image.container_id}_"), - download_test_image.name, - pull_before_start=True, - skip_on_pull_failure=True, - skip_if_docker_client_not_connectable=True, - container_run_kwargs=dict( - volumes={ - str(downloads_path): {"bind": "/downloads", "mode": "z"}, - } - ), - ) - try: - container_setup_func = globals()[f"setup_{download_test_image.os_type}"] - except KeyError: - raise pytest.skip.Exception( - f"Unable to handle {download_test_image.os_type}. 
Skipping.", - _use_item_location=True, - ) - container.before_terminate(shutil.rmtree, str(downloads_path), ignore_errors=True) - - with container.started(): - download_test_image.container = container - try: - container_setup_func( - container, - download_test_image.os_version, - download_test_image.os_codename, - root_url, - salt_release, - downloads_path, - gpg_key_name, - ) - yield download_test_image - except Exception as exc: - pytest.fail(f"Failed to setup {pkg_container.os_type}: {exc}") - - @pytest.fixture(scope="module") def root_url(salt_release): if os.environ.get("SALT_REPO_TYPE", "release") == "staging": @@ -270,6 +96,28 @@ def get_salt_release(): return salt_release +def get_repo_subpath_params(): + current_release = packaging.version.parse(get_salt_release()) + params = ["minor", current_release.major] + latest_env_var = os.environ.get("LATEST_SALT_RELEASE") + if latest_env_var is not None: + latest_release = packaging.version.parse(latest_env_var) + if current_release >= latest_release: + log.debug( + f"Running the tests for the latest release since {str(current_release)} >= {str(latest_release)}" + ) + params.append("latest") + return params + + +@pytest.fixture( + scope="module", + params=get_repo_subpath_params(), +) +def repo_subpath(request): + return request.param + + @pytest.fixture(scope="module") def gpg_key_name(salt_release): if packaging.version.parse(salt_release) > packaging.version.parse("3005"): @@ -282,28 +130,123 @@ def salt_release(): yield get_salt_release() +@pytest.fixture(scope="module") +def _setup_system( + tmp_path_factory, grains, shell, root_url, salt_release, gpg_key_name, repo_subpath +): + downloads_path = tmp_path_factory.mktemp("downloads") + try: + # Windows is a special case, because sometimes we need to uninstall the packages + if grains["os_family"] == "Windows": + with setup_windows( + shell, + root_url=root_url, + salt_release=salt_release, + downloads_path=downloads_path, + repo_subpath=repo_subpath, + ): + yield + else: + if grains["os_family"] == "MacOS": + setup_macos( + shell, + root_url=root_url, + salt_release=salt_release, + downloads_path=downloads_path, + repo_subpath=repo_subpath, + ) + elif grains["os"] == "Amazon": + setup_redhat_family( + shell, + os_name=grains["os"].lower(), + os_version=grains["osmajorrelease"], + root_url=root_url, + salt_release=salt_release, + downloads_path=downloads_path, + gpg_key_name=gpg_key_name, + repo_subpath=repo_subpath, + ) + elif grains["os"] == "Fedora": + setup_redhat_family( + shell, + os_name=grains["os"].lower(), + os_version=grains["osmajorrelease"], + root_url=root_url, + salt_release=salt_release, + downloads_path=downloads_path, + gpg_key_name=gpg_key_name, + repo_subpath=repo_subpath, + ) + elif grains["os"] == "VMware Photon OS": + setup_redhat_family( + shell, + os_name="photon", + os_version=grains["osmajorrelease"], + root_url=root_url, + salt_release=salt_release, + downloads_path=downloads_path, + gpg_key_name=gpg_key_name, + repo_subpath=repo_subpath, + ) + elif grains["os_family"] == "RedHat": + setup_redhat_family( + shell, + os_name="redhat", + os_version=grains["osmajorrelease"], + root_url=root_url, + salt_release=salt_release, + downloads_path=downloads_path, + gpg_key_name=gpg_key_name, + repo_subpath=repo_subpath, + ) + elif grains["os_family"] == "Debian": + setup_debian_family( + shell, + os_name=grains["os"].lower(), + os_version=grains["osrelease"], + os_codename=grains["oscodename"], + root_url=root_url, + salt_release=salt_release, + 
downloads_path=downloads_path,
+                    gpg_key_name=gpg_key_name,
+                    repo_subpath=repo_subpath,
+                )
+            else:
+                pytest.fail(f"Don't know how to handle {grains['osfinger']}")
+        yield
+    finally:
+        shutil.rmtree(downloads_path, ignore_errors=True)
+
+
 def setup_redhat_family(
-    container,
+    shell,
+    os_name,
     os_version,
-    os_codename,
     root_url,
     salt_release,
     downloads_path,
-    os_name,
     gpg_key_name,
+    repo_subpath,
 ):
     arch = os.environ.get("SALT_REPO_ARCH") or "x86_64"
     if arch == "aarch64":
         arch = "arm64"
 
-    repo_url_base = f"{root_url}/{os_name}/{os_version}/{arch}/minor/{salt_release}"
-    gpg_file_url = f"{repo_url_base}/{gpg_key_name}"
+    if repo_subpath == "minor":
+        repo_url_base = (
+            f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}/{salt_release}"
+        )
+    else:
+        repo_url_base = f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}"
+
+    gpg_file_url = f"{root_url}/{os_name}/{os_version}/{arch}/{gpg_key_name}"
+
     try:
         pytest.helpers.download_file(gpg_file_url, downloads_path / gpg_key_name)
     except Exception as exc:
         pytest.fail(f"Failed to download {gpg_file_url}: {exc}")
 
-    ret = container.run("rpm", "--import", f"/downloads/{gpg_key_name}")
+    ret = shell.run("rpm", "--import", str(downloads_path / gpg_key_name), check=False)
     if ret.returncode != 0:
         pytest.fail("Failed to import gpg key")
 
@@ -311,11 +254,9 @@ def setup_redhat_family(
         f"{repo_url_base}.repo", downloads_path / f"salt-{os_name}.repo"
     )
 
-    clean_command = "all" if os_name == "photon" else "expire-cache"
-    install_dmesg = ("yum", "install", "-y", "util-linux")
     commands = [
-        ("mv", f"/downloads/{repo_file.name}", f"/etc/yum.repos.d/salt-{os_name}.repo"),
-        ("yum", "clean", clean_command),
+        ("mv", str(repo_file), "/etc/yum.repos.d/salt.repo"),
+        ("yum", "clean", "all" if os_name == "photon" else "expire-cache"),
         (
             "yum",
             "install",
@@ -329,109 +270,22 @@ def setup_redhat_family(
         ),
     ]
 
-    # For some reason, the centosstream9 container doesn't have dmesg installed
-    if os_version == 9 and os_name == "redhat":
-        commands.insert(2, install_dmesg)
-
     for cmd in commands:
-        ret = container.run(*cmd)
+        ret = shell.run(*cmd, check=False)
         if ret.returncode != 0:
-            pytest.fail(f"Failed to run: {' '.join(cmd)!r}")
-
-
-def setup_amazon(
-    container,
-    os_version,
-    os_codename,
-    root_url,
-    salt_release,
-    downloads_path,
-    gpg_key_name,
-):
-    setup_redhat_family(
-        container,
-        os_version,
-        os_codename,
-        root_url,
-        salt_release,
-        downloads_path,
-        "amazon",
-        gpg_key_name,
-    )
-
-
-def setup_redhat(
-    container,
-    os_version,
-    os_codename,
-    root_url,
-    salt_release,
-    downloads_path,
-    gpg_key_name,
-):
-    setup_redhat_family(
-        container,
-        os_version,
-        os_codename,
-        root_url,
-        salt_release,
-        downloads_path,
-        "redhat",
-        gpg_key_name,
-    )
-
-
-def setup_fedora(
-    container,
-    os_version,
-    os_codename,
-    root_url,
-    salt_release,
-    downloads_path,
-    gpg_key_name,
-):
-    setup_redhat_family(
-        container,
-        os_version,
-        os_codename,
-        root_url,
-        salt_release,
-        downloads_path,
-        "fedora",
-        gpg_key_name,
-    )
-
-
-def setup_photon(
-    container,
-    os_version,
-    os_codename,
-    root_url,
-    salt_release,
-    downloads_path,
-    gpg_key_name,
-):
-    setup_redhat_family(
-        container,
-        os_version,
-        os_codename,
-        root_url,
-        salt_release,
-        downloads_path,
-        "photon",
-        gpg_key_name,
-    )
+            pytest.fail(f"Failed to run '{' '.join(cmd)!r}':\n{ret}")
 
 
 def setup_debian_family(
-    container,
+    shell,
+    os_name,
     os_version,
     os_codename,
     root_url,
     salt_release,
     downloads_path,
-    os_name,
     gpg_key_name,
+    repo_subpath,
 ):
     arch = os.environ.get("SALT_REPO_ARCH") or "amd64"
     if arch == "aarch64":
@@ -439,12 +293,18 @@ def setup_debian_family(
         arch = "arm64"
     elif arch == "x86_64":
         arch = "amd64"
 
-    ret = container.run("apt-get", "update", "-y")
+    ret = shell.run("apt-get", "update", "-y", check=False)
     if ret.returncode != 0:
-        pytest.fail("Failed to run: 'apt-get update -y'")
+        pytest.fail(str(ret))
+
+    if repo_subpath == "minor":
+        repo_url_base = (
+            f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}/{salt_release}"
+        )
+    else:
+        repo_url_base = f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}"
+
+    gpg_file_url = f"{root_url}/{os_name}/{os_version}/{arch}/{gpg_key_name}"
 
-    repo_url_base = f"{root_url}/{os_name}/{os_version}/{arch}/minor/{salt_release}"
-    gpg_file_url = f"{repo_url_base}/{gpg_key_name}"
     try:
         pytest.helpers.download_file(gpg_file_url, downloads_path / gpg_key_name)
     except Exception as exc:
@@ -455,10 +315,14 @@ def setup_debian_family(
 
     salt_sources_path = downloads_path / "salt.list"
     salt_sources_path.write_text(
         f"deb [signed-by=/usr/share/keyrings/{gpg_key_name} arch={arch}] {repo_url_base} {os_codename} main\n"
     )
     commands = [
-        ("mv", f"/downloads/{gpg_key_name}", f"/usr/share/keyrings/{gpg_key_name}"),
         (
             "mv",
-            f"/downloads/{salt_sources_path.name}",
+            str(downloads_path / gpg_key_name),
+            f"/usr/share/keyrings/{gpg_key_name}",
+        ),
+        (
+            "mv",
+            str(salt_sources_path),
             "/etc/apt/sources.list.d/salt.list",
         ),
         ("apt-get", "install", "-y", "ca-certificates"),
@@ -477,156 +341,110 @@ def setup_debian_family(
         ),
     ]
     for cmd in commands:
-        ret = container.run(*cmd)
+        ret = shell.run(*cmd)
         if ret.returncode != 0:
-            pytest.fail(f"Failed to run: {' '.join(cmd)!r}\n{ret}")
+            pytest.fail(str(ret))
 
 
-def setup_debian(
-    container,
-    os_version,
-    os_codename,
-    root_url,
-    salt_release,
-    downloads_path,
-    gpg_key_name,
-):
-    setup_debian_family(
-        container,
-        os_version,
-        os_codename,
-        root_url,
-        salt_release,
-        downloads_path,
-        "debian",
-        gpg_key_name,
-    )
-
-
-def setup_ubuntu(
-    container,
-    os_version,
-    os_codename,
-    root_url,
-    salt_release,
-    downloads_path,
-    gpg_key_name,
-):
-    setup_debian_family(
-        container,
-        os_version,
-        os_codename,
-        root_url,
-        salt_release,
-        downloads_path,
-        "ubuntu",
-        gpg_key_name,
-    )
-
-
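The `repo_subpath` branches above encode the published repo layout: a `minor` subpath pins an exact release, while other subpaths (the major version or `latest`) track a channel. A small sketch of that URL scheme; the root URL and values below are illustrative, not fixtures from the suite:

```python
# Sketch of the repo URL layout exercised by the setup helpers above.
def build_repo_url(root_url, os_name, os_version, arch, repo_subpath, salt_release):
    """Mirror the minor/major/latest subpath logic used by the setup helpers."""
    base = f"{root_url}/{os_name}/{os_version}/{arch}"
    if repo_subpath == "minor":
        # Pinned to an exact release, e.g. .../minor/3006.1
        return f"{base}/{repo_subpath}/{salt_release}"
    # Tracking channels, e.g. .../3006 or .../latest
    return f"{base}/{repo_subpath}"


print(build_repo_url("https://repo.example.org/salt/py3", "debian", 11, "amd64", "minor", "3006.1"))
print(build_repo_url("https://repo.example.org/salt/py3", "debian", 11, "amd64", "latest", "3006.1"))
```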
os.environ.get("SALT_REPO_ARCH") or "amd64" if arch == "aarch64": @@ -439,12 +293,18 @@ def setup_debian_family( elif arch == "x86_64": arch = "amd64" - ret = container.run("apt-get", "update", "-y") + ret = shell.run("apt-get", "update", "-y", check=False) if ret.returncode != 0: - pytest.fail("Failed to run: 'apt-get update -y'") + pytest.fail(str(ret)) + + if repo_subpath == "minor": + repo_url_base = ( + f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}/{salt_release}" + ) + else: + repo_url_base = f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}" + gpg_file_url = f"{root_url}/{os_name}/{os_version}/{arch}/{gpg_key_name}" - repo_url_base = f"{root_url}/{os_name}/{os_version}/{arch}/minor/{salt_release}" - gpg_file_url = f"{repo_url_base}/{gpg_key_name}" try: pytest.helpers.download_file(gpg_file_url, downloads_path / gpg_key_name) except Exception as exc: @@ -455,10 +315,14 @@ def setup_debian_family( f"deb [signed-by=/usr/share/keyrings/{gpg_key_name} arch={arch}] {repo_url_base} {os_codename} main\n" ) commands = [ - ("mv", f"/downloads/{gpg_key_name}", f"/usr/share/keyrings/{gpg_key_name}"), ( "mv", - f"/downloads/{salt_sources_path.name}", + str(downloads_path / gpg_key_name), + f"/usr/share/keyrings/{gpg_key_name}", + ), + ( + "mv", + str(salt_sources_path), "/etc/apt/sources.list.d/salt.list", ), ("apt-get", "install", "-y", "ca-certificates"), @@ -477,156 +341,110 @@ def setup_debian_family( ), ] for cmd in commands: - ret = container.run(*cmd) + ret = shell.run(*cmd) if ret.returncode != 0: - pytest.fail(f"Failed to run: {' '.join(cmd)!r}\n{ret}") + pytest.fail(str(ret)) -def setup_debian( - container, - os_version, - os_codename, - root_url, - salt_release, - downloads_path, - gpg_key_name, -): - setup_debian_family( - container, - os_version, - os_codename, - root_url, - salt_release, - downloads_path, - "debian", - gpg_key_name, - ) - - -def setup_ubuntu( - container, - os_version, - os_codename, - root_url, - salt_release, - downloads_path, - gpg_key_name, -): - setup_debian_family( - container, - os_version, - os_codename, - root_url, - salt_release, - downloads_path, - "ubuntu", - gpg_key_name, - ) - - -@pytest.fixture(scope="module") -def setup_macos(root_url, salt_release, shell): - +def setup_macos(shell, root_url, salt_release, downloads_path, repo_subpath): arch = os.environ.get("SALT_REPO_ARCH") or "x86_64" if arch == "aarch64": arch = "arm64" if packaging.version.parse(salt_release) > packaging.version.parse("3005"): mac_pkg = f"salt-{salt_release}-py3-{arch}.pkg" - mac_pkg_url = f"{root_url}/macos/minor/{salt_release}/{mac_pkg}" + if repo_subpath == "minor": + mac_pkg_url = f"{root_url}/macos/{repo_subpath}/{salt_release}/{mac_pkg}" + else: + mac_pkg_url = f"{root_url}/macos/{repo_subpath}/{mac_pkg}" else: mac_pkg_url = f"{root_url}/macos/{salt_release}/{mac_pkg}" mac_pkg = f"salt-{salt_release}-macos-{arch}.pkg" - mac_pkg_path = f"/tmp/{mac_pkg}" - pytest.helpers.download_file(mac_pkg_url, f"/tmp/{mac_pkg}") + mac_pkg_path = downloads_path / mac_pkg + pytest.helpers.download_file(mac_pkg_url, mac_pkg_path) ret = shell.run( "installer", "-pkg", - mac_pkg_path, + str(mac_pkg_path), "-target", "/", check=False, ) assert ret.returncode == 0, ret - yield + +@contextlib.contextmanager +def setup_windows(shell, root_url, salt_release, downloads_path, repo_subpath): + try: + root_dir = pathlib.Path(r"C:\Program Files\Salt Project\Salt") + + arch = os.environ.get("SALT_REPO_ARCH") or "amd64" + install_type = os.environ.get("INSTALL_TYPE") or "msi" + if 
packaging.version.parse(salt_release) > packaging.version.parse("3005"): + if install_type.lower() == "nsis": + if arch.lower() != "x86": + arch = arch.upper() + win_pkg = f"Salt-Minion-{salt_release}-Py3-{arch}-Setup.exe" + else: + if arch.lower() != "x86": + arch = arch.upper() + win_pkg = f"Salt-Minion-{salt_release}-Py3-{arch}.msi" + if repo_subpath == "minor": + win_pkg_url = ( + f"{root_url}/windows/{repo_subpath}/{salt_release}/{win_pkg}" + ) + else: + win_pkg_url = f"{root_url}/windows/{repo_subpath}/{win_pkg}" + ssm_bin = root_dir / "ssm.exe" + else: + win_pkg = f"salt-{salt_release}-windows-{arch}.exe" + win_pkg_url = f"{root_url}/windows/{salt_release}/{win_pkg}" + ssm_bin = root_dir / "bin" / "ssm_bin" + + pkg_path = downloads_path / win_pkg + + pytest.helpers.download_file(win_pkg_url, pkg_path) + if install_type.lower() == "nsis": + ret = shell.run(str(pkg_path), "/start-minion=0", "/S", check=False) + else: + ret = shell.run("msiexec", "/qn", "/i", str(pkg_path), 'START_MINION=""') + assert ret.returncode == 0, ret + + log.debug("Removing installed salt-minion service") + ret = shell.run( + "cmd", "/c", str(ssm_bin), "remove", "salt-minion", "confirm", check=False + ) + assert ret.returncode == 0, ret + yield + finally: + # We need to uninstall the MSI packages, otherwise they will not install correctly + if install_type.lower() == "msi": + ret = shell.run("msiexec", "/qn", "/x", str(pkg_path)) + assert ret.returncode == 0, ret @pytest.fixture(scope="module") -def setup_windows(root_url, salt_release, shell): +def install_dir(_setup_system): + if platform.is_windows(): + return pathlib.Path(os.getenv("ProgramFiles"), "Salt Project", "Salt").resolve() + if platform.is_darwin(): + return pathlib.Path("/opt", "salt") + return pathlib.Path("/opt", "saltstack", "salt") - root_dir = pathlib.Path(r"C:\Program Files\Salt Project\Salt") - arch = os.environ.get("SALT_REPO_ARCH") or "amd64" - install_type = os.environ.get("INSTALL_TYPE") or "msi" - if packaging.version.parse(salt_release) > packaging.version.parse("3005"): - if install_type.lower() == "nsis": - if arch.lower() != "x86": - arch = arch.upper() - win_pkg = f"Salt-Minion-{salt_release}-Py3-{arch}-Setup.exe" - else: - if arch.lower() != "x86": - arch = arch.upper() - win_pkg = f"Salt-Minion-{salt_release}-Py3-{arch}.msi" - win_pkg_url = f"{root_url}/windows/minor/{salt_release}/{win_pkg}" - ssm_bin = root_dir / "ssm.exe" - else: - win_pkg = f"salt-{salt_release}-windows-{arch}.exe" - win_pkg_url = f"{root_url}/windows/{salt_release}/{win_pkg}" - ssm_bin = root_dir / "bin" / "ssm_bin" +@pytest.fixture(scope="module") +def salt_test_command(request, install_dir): + command = request.param + command[0] = str(install_dir / command[0]) + return command - pkg_path = pathlib.Path(r"C:\TEMP", win_pkg) - pkg_path.parent.mkdir(exist_ok=True) - pytest.helpers.download_file(win_pkg_url, pkg_path) - if install_type.lower() == "nsis": - ret = shell.run(str(pkg_path), "/start-minion=0", "/S", check=False) - else: - ret = shell.run("msiexec", "/qn", "/i", str(pkg_path), 'START_MINION=""') - assert ret.returncode == 0, ret - - log.debug("Removing installed salt-minion service") - ret = shell.run( - "cmd", "/c", str(ssm_bin), "remove", "salt-minion", "confirm", check=False - ) - assert ret.returncode == 0, ret - - -@pytest.mark.skip_unless_on_linux -@pytest.mark.parametrize("salt_test_command", get_salt_test_commands()) -@pytest.mark.skip_if_binaries_missing("dockerd") -def test_download_linux(salt_test_command, pkg_container, root_url, 
salt_release): - """ - Test downloading of Salt packages and running various commands on Linux hosts - """ - res = pkg_container.container.run(salt_test_command) - assert res.returncode == 0 - - -@pytest.mark.skip_unless_on_darwin -@pytest.mark.usefixtures("setup_macos") -@pytest.mark.parametrize("salt_test_command", get_salt_test_commands()) -def test_download_macos(salt_test_command, shell): - """ - Test downloading of Salt packages and running various commands on Mac OS hosts - """ - _cmd = salt_test_command.split() - ret = shell.run(*_cmd, check=False) - assert ret.returncode == 0, ret - - -@pytest.mark.skip_unless_on_windows -@pytest.mark.usefixtures("setup_windows") -@pytest.mark.parametrize("salt_test_command", get_salt_test_commands()) -def test_download_windows(salt_test_command, shell): - """ - Test downloading of Salt packages and running various commands on Windows hosts - """ - _cmd = salt_test_command.split() - root_dir = pathlib.Path(r"C:\Program Files\Salt Project\Salt") - _cmd[0] = str(root_dir / _cmd[0]) - - ret = shell.run(*_cmd, check=False) +@pytest.mark.parametrize("salt_test_command", get_salt_test_commands(), indirect=True) +def test_download(shell, salt_test_command): + """ + Test downloading of Salt packages and running various commands. + """ + ret = shell.run(*salt_test_command, check=False) assert ret.returncode == 0, ret diff --git a/pkg/tests/integration/test_pip.py b/pkg/tests/integration/test_pip.py index ea9e6a81b84..7037763064e 100644 --- a/pkg/tests/integration/test_pip.py +++ b/pkg/tests/integration/test_pip.py @@ -13,25 +13,21 @@ def pypath(): if platform.is_windows(): return pathlib.Path(os.getenv("ProgramFiles"), "Salt Project", "Salt") elif platform.is_darwin(): - return pathlib.Path(f"{os.sep}opt", "salt", "bin") + return pathlib.Path("/opt", "salt", "bin") else: - return pathlib.Path(f"{os.sep}opt", "saltstack", "salt", "bin") + return pathlib.Path("/opt", "saltstack", "salt", "bin") @pytest.fixture(autouse=True) -def wipe_pydeps(install_salt, extras_pypath): +def wipe_pydeps(shell, install_salt, extras_pypath): try: yield finally: # Note, uninstalling anything with an associated script will leave the script. # This is due to a bug in pip. 
for dep in ["pep8", "PyGithub"]:
-            subprocess.run(
-                install_salt.binary_paths["pip"] + ["uninstall", "-y", dep],
-                stdout=subprocess.PIPE,
-                stderr=subprocess.PIPE,
-                check=False,
-                universal_newlines=True,
+            shell.run(
+                *(install_salt.binary_paths["pip"] + ["uninstall", "-y", dep]),
             )
         shutil.rmtree(extras_pypath, ignore_errors=True)
 
@@ -56,32 +52,24 @@ def test_pip_install(salt_call_cli):
     assert "The github execution module cannot be loaded" in use_lib.stderr
 
 
-def test_pip_install_extras(install_salt, extras_pypath):
+def test_pip_install_extras(shell, install_salt, extras_pypath_bin):
     """
     Test salt-pip installs into the correct directory
     """
     dep = "pep8"
-    extras_keyword = "extras"
+    extras_keyword = "extras-3"
     if platform.is_windows():
-        check_path = extras_pypath / f"{dep}.exe"
+        check_path = extras_pypath_bin / f"{dep}.exe"
     else:
-        check_path = extras_pypath / dep
+        check_path = extras_pypath_bin / dep
 
-    install_ret = subprocess.run(
-        install_salt.binary_paths["pip"] + ["install", dep],
-        stdout=subprocess.PIPE,
-        stderr=subprocess.PIPE,
-    )
+    install_ret = shell.run(*(install_salt.binary_paths["pip"] + ["install", dep]))
     assert install_ret.returncode == 0
 
-    ret = subprocess.run(
-        install_salt.binary_paths["pip"] + ["list", "--format=json"],
-        stdout=subprocess.PIPE,
-        stderr=subprocess.PIPE,
-    )
+    ret = shell.run(*(install_salt.binary_paths["pip"] + ["list", "--format=json"]))
     assert ret.returncode == 0
-    pkgs_installed = json.loads(ret.stdout.strip().decode())
+    assert ret.data  # We can parse the JSON output
+    pkgs_installed = ret.data
     for pkg in pkgs_installed:
         if pkg["name"] == dep:
             break
     else:
@@ -89,15 +77,14 @@ def test_pip_install_extras(install_salt, extras_pypath):
             f"The {dep!r} package was not found installed. Packages Installed: {pkgs_installed}"
         )
 
-    show_ret = subprocess.run(
-        install_salt.binary_paths["pip"] + ["show", dep],
-        stdout=subprocess.PIPE,
-        stderr=subprocess.PIPE,
-    )
+    show_ret = shell.run(*(install_salt.binary_paths["pip"] + ["show", dep]))
     assert show_ret.returncode == 0
-    assert extras_keyword in show_ret.stdout.decode()
+    assert extras_keyword in show_ret.stdout
     assert check_path.exists()
 
+    ret = shell.run(str(check_path), "--version")
+    assert ret.returncode == 0
+
 
 def demote(user_uid, user_gid):
     def result():
@@ -108,8 +95,8 @@ def demote(user_uid, user_gid):
 
 
 @pytest.mark.skip_on_windows(reason="We can't easily demote users on Windows")
-def test_pip_non_root(install_salt, test_account, extras_pypath):
-    check_path = extras_pypath / "pep8"
+def test_pip_non_root(shell, install_salt, test_account, extras_pypath_bin):
+    check_path = extras_pypath_bin / "pep8"
     # We should be able to issue a --help without being root
     ret = subprocess.run(
        install_salt.binary_paths["salt"] + ["--help"],
@@ -156,3 +143,44 @@ def test_pip_non_root(install_salt, test_account, extras_pypath):
 
     assert check_path.exists()
     assert ret.returncode == 0, ret.stderr
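The for/else scan over `pip list --format=json` used in `test_pip_install_extras` above, and again in the extension test that follows, amounts to a membership check on the parsed package list. A standalone sketch of that check; `is_installed` is a name of my choosing, not a helper from the suite:

```python
import json
import subprocess
import sys


def is_installed(pip_argv, dist_name):
    """Return True if *dist_name* shows up in `pip list --format=json`."""
    out = subprocess.run(
        pip_argv + ["list", "--format=json"],
        capture_output=True,
        text=True,
        check=True,
    ).stdout
    # pip emits a JSON array of {"name": ..., "version": ...} objects.
    return any(pkg["name"] == dist_name for pkg in json.loads(out))


print(is_installed([sys.executable, "-m", "pip"], "pip"))  # normally True
```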
+
+
+def test_pip_install_salt_extension_in_extras(install_salt, extras_pypath, shell):
+    """
+    Test salt-pip installs into the correct directory and the salt extension
+    is properly loaded.
+    """
+    dep = "salt-analytics-framework"
+    dep_version = "0.1.0"
+
+    install_ret = shell.run(
+        *(install_salt.binary_paths["pip"] + ["install", f"{dep}=={dep_version}"]),
+    )
+    assert install_ret.returncode == 0
+
+    ret = shell.run(
+        *(install_salt.binary_paths["pip"] + ["list", "--format=json"]),
+    )
+    assert ret.returncode == 0
+    pkgs_installed = json.loads(ret.stdout.strip())
+    for pkg in pkgs_installed:
+        if pkg["name"] == dep:
+            break
+    else:
+        pytest.fail(
+            f"The {dep!r} package was not found installed. Packages Installed: {pkgs_installed}"
+        )
+
+    show_ret = shell.run(
+        *(install_salt.binary_paths["pip"] + ["show", dep]),
+    )
+    assert show_ret.returncode == 0
+
+    assert extras_pypath.joinpath("saf").is_dir()
+
+    ret = shell.run(
+        *(install_salt.binary_paths["minion"] + ["--versions-report"]),
+    )
+    assert ret.returncode == 0
+    assert "Salt Extensions" in ret.stdout
+    assert f"{dep}: {dep_version}" in ret.stdout
diff --git a/pkg/tests/integration/test_salt_user.py b/pkg/tests/integration/test_salt_user.py
index 4e7ddfda0a0..d1c8d504fa0 100644
--- a/pkg/tests/integration/test_salt_user.py
+++ b/pkg/tests/integration/test_salt_user.py
@@ -1,3 +1,6 @@
+import pathlib
+import subprocess
+
 import psutil
 import pytest
 import yaml
@@ -5,6 +8,7 @@ from pytestskipmarkers.utils import platform
 
 pytestmark = [
     pytest.mark.skip_on_windows,
+    pytest.mark.skip_on_darwin,
 ]
 
 
@@ -12,11 +16,56 @@ def test_salt_user_master(salt_master, install_salt):
     """
     Test the correct user is running the Salt Master
     """
-    if platform.is_windows() or platform.is_darwin():
-        pytest.skip("Package does not have user set. Not testing user")
-
     match = False
     for proc in psutil.Process(salt_master.pid).children():
         assert proc.username() == "salt"
         match = True
 
     assert match
+
+
+def test_salt_user_home(install_salt):
+    """
+    Test the salt user's home directory is /opt/saltstack/salt
+    """
+    proc = subprocess.run(
+        ["getent", "passwd", "salt"], check=False, capture_output=True
+    )
+    assert proc.returncode == 0
+    home = ""
+    try:
+        home = proc.stdout.decode().split(":")[5]
+    except (IndexError, UnicodeDecodeError):
+        pass
+    assert home == "/opt/saltstack/salt"
+
+
+def test_salt_user_group(install_salt):
+    """
+    Test the salt user is in the salt group
+    """
+    proc = subprocess.run(["id", "salt"], check=False, capture_output=True)
+    assert proc.returncode == 0
+    in_group = False
+    try:
+        for group in proc.stdout.decode().split(" "):
+            if "salt" in group:
+                in_group = True
+    except UnicodeDecodeError:
+        pass
+    assert in_group is True
+
+
+def test_salt_cloud_dirs(install_salt):
+    """
+    Test the salt-cloud deployment directories are owned by the salt user and group
+    """
+    paths = [
+        "/opt/saltstack/salt/lib/python3.10/site-packages/salt/cloud/deploy",
+        "/etc/salt/cloud.deploy.d",
+    ]
+    for name in paths:
+        path = pathlib.Path(name)
+        assert path.exists()
+        assert path.owner() == "salt"
+        assert path.group() == "salt"
diff --git a/pkg/tests/integration/test_ssm.py b/pkg/tests/integration/test_ssm.py
new file mode 100644
index 00000000000..059766caf17
--- /dev/null
+++ b/pkg/tests/integration/test_ssm.py
@@ -0,0 +1,15 @@
+import os
+
+import pytest
+
+pytestmark = [
+    pytest.mark.skip_unless_on_windows,
+]
+
+
+def test_ssm_present(install_salt):
+    """
+    The ssm.exe binary needs to be present in both the zip and the exe/msi
+    builds
+    """
+    assert os.path.exists(install_salt.ssm_bin)
diff --git a/pkg/tests/support/helpers.py b/pkg/tests/support/helpers.py
index 90abf8b88e4..57b6ccd4d00 100644
--- a/pkg/tests/support/helpers.py
+++ b/pkg/tests/support/helpers.py
@@ -601,7 +601,7 @@ class SaltPkgInstall:
         else:
log.info("Installing packages:\n%s", pprint.pformat(self.pkgs)) ret = self.proc.run(self.pkg_mngr, "install", "-y", *self.pkgs) - if not (platform.is_darwin() or platform.is_windows()): + if not platform.is_darwin() and not platform.is_windows(): # Make sure we don't have any trailing references to old package file locations assert "No such file or directory" not in ret.stdout assert "/saltstack/salt/run" not in ret.stdout diff --git a/pkg/tests/support/paths.py b/pkg/tests/support/paths.py new file mode 100644 index 00000000000..a8a82bce0e0 --- /dev/null +++ b/pkg/tests/support/paths.py @@ -0,0 +1,102 @@ +""" + :codeauthor: Pedro Algarvio (pedro@algarvio.me) + :copyright: Copyright 2017 by the SaltStack Team, see AUTHORS for more details. + :license: Apache 2.0, see LICENSE for more details. + + + tests.support.paths + ~~~~~~~~~~~~~~~~~~~ + + Tests related paths +""" + +import logging +import os +import re +import sys +import tempfile + +log = logging.getLogger(__name__) + +SALT_CODE_DIR = os.path.join( + os.path.dirname( + os.path.dirname( + os.path.dirname( + os.path.dirname(os.path.normpath(os.path.abspath(__file__))) + ) + ) + ), + "salt", +) +TESTS_DIR = os.path.join(os.path.dirname(SALT_CODE_DIR), "tests") +if TESTS_DIR.startswith("//"): + # Have we been given an initial double forward slash? Ditch it! + TESTS_DIR = TESTS_DIR[1:] +if sys.platform.startswith("win"): + TESTS_DIR = os.path.normcase(TESTS_DIR) +CODE_DIR = os.path.dirname(TESTS_DIR) +if sys.platform.startswith("win"): + CODE_DIR = CODE_DIR.replace("\\", "\\\\") +UNIT_TEST_DIR = os.path.join(TESTS_DIR, "unit") +INTEGRATION_TEST_DIR = os.path.join(TESTS_DIR, "integration") + +# Let's inject CODE_DIR so salt is importable if not there already +if TESTS_DIR in sys.path: + sys.path.remove(TESTS_DIR) +if CODE_DIR in sys.path and sys.path[0] != CODE_DIR: + sys.path.remove(CODE_DIR) +if CODE_DIR not in sys.path: + sys.path.insert(0, CODE_DIR) +if TESTS_DIR not in sys.path: + sys.path.insert(1, TESTS_DIR) + +SYS_TMP_DIR = os.path.abspath( + os.path.realpath( + # Avoid ${TMPDIR} and gettempdir() on MacOS as they yield a base path too long + # for unix sockets: ``error: AF_UNIX path too long`` + # Gentoo Portage prefers ebuild tests are rooted in ${TMPDIR} + os.environ.get("TMPDIR", tempfile.gettempdir()) + if not sys.platform.startswith("darwin") + else "/tmp" + ) +) +TMP = os.path.join(SYS_TMP_DIR, "salt-tests-tmpdir") +TMP_ROOT_DIR = os.path.join(TMP, "rootdir") +FILES = os.path.join(INTEGRATION_TEST_DIR, "files") +BASE_FILES = os.path.join(INTEGRATION_TEST_DIR, "files", "file", "base") +PROD_FILES = os.path.join(INTEGRATION_TEST_DIR, "files", "file", "prod") +PYEXEC = "python{}.{}".format(*sys.version_info) +MOCKBIN = os.path.join(INTEGRATION_TEST_DIR, "mockbin") +SCRIPT_DIR = os.path.join(CODE_DIR, "scripts") +TMP_STATE_TREE = os.path.join(SYS_TMP_DIR, "salt-temp-state-tree") +TMP_PILLAR_TREE = os.path.join(SYS_TMP_DIR, "salt-temp-pillar-tree") +TMP_PRODENV_STATE_TREE = os.path.join(SYS_TMP_DIR, "salt-temp-prodenv-state-tree") +TMP_PRODENV_PILLAR_TREE = os.path.join(SYS_TMP_DIR, "salt-temp-prodenv-pillar-tree") +TMP_CONF_DIR = TMP_MINION_CONF_DIR = os.path.join(TMP, "config") +TMP_SUB_MINION_CONF_DIR = os.path.join(TMP_CONF_DIR, "sub-minion") +TMP_SYNDIC_MINION_CONF_DIR = os.path.join(TMP_CONF_DIR, "syndic-minion") +TMP_SYNDIC_MASTER_CONF_DIR = os.path.join(TMP_CONF_DIR, "syndic-master") +TMP_SSH_CONF_DIR = TMP_MINION_CONF_DIR +CONF_DIR = os.path.join(INTEGRATION_TEST_DIR, "files", "conf") +PILLAR_DIR = os.path.join(FILES, 
"pillar") +TMP_SCRIPT_DIR = os.path.join(TMP, "scripts") +ENGINES_DIR = os.path.join(FILES, "engines") +LOG_HANDLERS_DIR = os.path.join(FILES, "log_handlers") + + +def list_test_mods(): + """ + A generator which returns all of the test files + """ + test_re = re.compile(r"^test_.+\.py$") + for dirname in (UNIT_TEST_DIR, INTEGRATION_TEST_DIR): + test_type = os.path.basename(dirname) + for root, _, files in os.walk(dirname): + parent_mod = root[len(dirname) :].lstrip(os.sep).replace(os.sep, ".") + for filename in files: + if test_re.match(filename): + mod_name = test_type + if parent_mod: + mod_name += "." + parent_mod + mod_name += "." + filename[:-3] + yield mod_name diff --git a/pkg/tests/support/runtests.py b/pkg/tests/support/runtests.py new file mode 100644 index 00000000000..ce5c9644cd3 --- /dev/null +++ b/pkg/tests/support/runtests.py @@ -0,0 +1,209 @@ +""" + :codeauthor: Pedro Algarvio (pedro@algarvio.me) + + .. _runtime_vars: + + Runtime Variables + ----------------- + + :command:`salt-runtests` provides a variable, :py:attr:`RUNTIME_VARS` which has some common paths defined at + startup: + + .. autoattribute:: tests.support.runtests.RUNTIME_VARS + :annotation: + + :TMP: Tests suite temporary directory + :TMP_CONF_DIR: Configuration directory from where the daemons that :command:`salt-runtests` starts get their + configuration files. + :TMP_CONF_MASTER_INCLUDES: Salt Master configuration files includes directory. See + :salt_conf_master:`default_include`. + :TMP_CONF_MINION_INCLUDES: Salt Minion configuration files includes directory. Seei + :salt_conf_minion:`include`. + :TMP_CONF_CLOUD_INCLUDES: Salt cloud configuration files includes directory. The same as the salt master and + minion includes configuration, though under a different directory name. + :TMP_CONF_CLOUD_PROFILE_INCLUDES: Salt cloud profiles configuration files includes directory. Same as above. + :TMP_CONF_CLOUD_PROVIDER_INCLUDES: Salt cloud providers configuration files includes directory. Same as above. + :TMP_SCRIPT_DIR: Temporary scripts directory from where the Salt CLI tools will be called when running tests. + :TMP_SALT_INTEGRATION_FILES: Temporary directory from where Salt's test suite integration files are copied to. + :TMP_BASEENV_STATE_TREE: Salt master's **base** environment state tree directory + :TMP_PRODENV_STATE_TREE: Salt master's **production** environment state tree directory + :TMP_BASEENV_PILLAR_TREE: Salt master's **base** environment pillar tree directory + :TMP_PRODENV_PILLAR_TREE: Salt master's **production** environment pillar tree directory + + + Use it on your test case in case of need. As simple as: + + .. code-block:: python + + import os + from tests.support.runtests import RUNTIME_VARS + + # Path to the testing minion configuration file + minion_config_path = os.path.join(RUNTIME_VARS.TMP_CONF_DIR, 'minion') + + .. _`pytest`: http://pytest.org + """ + +import logging +import os +import shutil + +import salt.utils.path +import salt.utils.platform +import tests.support.paths as paths + +try: + import pwd +except ImportError: + import salt.utils.win_functions + +log = logging.getLogger(__name__) + + +def this_user(): + """ + Get the user associated with the current process. 
+ """ + if salt.utils.platform.is_windows(): + return salt.utils.win_functions.get_current_user(with_domain=False) + return pwd.getpwuid(os.getuid())[0] + + +class RootsDict(dict): + def merge(self, data): + for key, values in data.items(): + if key not in self: + self[key] = values + continue + for value in values: + if value not in self[key]: + self[key].append(value) + return self + + def to_dict(self): + return dict(self) + + +def recursive_copytree(source, destination, overwrite=False): + for root, dirs, files in os.walk(source): + for item in dirs: + src_path = os.path.join(root, item) + dst_path = os.path.join( + destination, src_path.replace(source, "").lstrip(os.sep) + ) + if not os.path.exists(dst_path): + log.debug("Creating directory: %s", dst_path) + os.makedirs(dst_path) + for item in files: + src_path = os.path.join(root, item) + dst_path = os.path.join( + destination, src_path.replace(source, "").lstrip(os.sep) + ) + if os.path.exists(dst_path) and not overwrite: + if os.stat(src_path).st_mtime > os.stat(dst_path).st_mtime: + log.debug("Copying %s to %s", src_path, dst_path) + shutil.copy2(src_path, dst_path) + else: + if not os.path.isdir(os.path.dirname(dst_path)): + log.debug("Creating directory: %s", os.path.dirname(dst_path)) + os.makedirs(os.path.dirname(dst_path)) + log.debug("Copying %s to %s", src_path, dst_path) + shutil.copy2(src_path, dst_path) + + +class RuntimeVars: + + __self_attributes__ = ("_vars", "_locked", "lock") + + def __init__(self, **kwargs): + self._vars = kwargs + self._locked = False + + def lock(self): + # Late import + from salt.utils.immutabletypes import freeze + + frozen_vars = freeze(self._vars.copy()) + self._vars = frozen_vars + self._locked = True + + def __iter__(self): + yield from self._vars.items() + + def __getattribute__(self, name): + if name in object.__getattribute__(self, "_vars"): + return object.__getattribute__(self, "_vars")[name] + return object.__getattribute__(self, name) + + def __setattr__(self, name, value): + if getattr(self, "_locked", False) is True: + raise RuntimeError( + "After {} is locked, no additional data can be added to it".format( + self.__class__.__name__ + ) + ) + if name in object.__getattribute__(self, "__self_attributes__"): + object.__setattr__(self, name, value) + return + self._vars[name] = value + + +# <---- Helper Methods ----------------------------------------------------------------------------------------------- + + +# ----- Global Variables --------------------------------------------------------------------------------------------> +XML_OUTPUT_DIR = os.environ.get( + "SALT_XML_TEST_REPORTS_DIR", os.path.join(paths.TMP, "xml-test-reports") +) +# <---- Global Variables --------------------------------------------------------------------------------------------- + + +# ----- Tests Runtime Variables -------------------------------------------------------------------------------------> + +RUNTIME_VARS = RuntimeVars( + TMP=paths.TMP, + SYS_TMP_DIR=paths.SYS_TMP_DIR, + FILES=paths.FILES, + CONF_DIR=paths.CONF_DIR, + PILLAR_DIR=paths.PILLAR_DIR, + ENGINES_DIR=paths.ENGINES_DIR, + LOG_HANDLERS_DIR=paths.LOG_HANDLERS_DIR, + TMP_ROOT_DIR=paths.TMP_ROOT_DIR, + TMP_CONF_DIR=paths.TMP_CONF_DIR, + TMP_MINION_CONF_DIR=paths.TMP_MINION_CONF_DIR, + TMP_CONF_MASTER_INCLUDES=os.path.join(paths.TMP_CONF_DIR, "master.d"), + TMP_CONF_MINION_INCLUDES=os.path.join(paths.TMP_CONF_DIR, "minion.d"), + TMP_CONF_PROXY_INCLUDES=os.path.join(paths.TMP_CONF_DIR, "proxy.d"), + 
TMP_CONF_CLOUD_INCLUDES=os.path.join(paths.TMP_CONF_DIR, "cloud.conf.d"), + TMP_CONF_CLOUD_PROFILE_INCLUDES=os.path.join( + paths.TMP_CONF_DIR, "cloud.profiles.d" + ), + TMP_CONF_CLOUD_PROVIDER_INCLUDES=os.path.join( + paths.TMP_CONF_DIR, "cloud.providers.d" + ), + TMP_SUB_MINION_CONF_DIR=paths.TMP_SUB_MINION_CONF_DIR, + TMP_SYNDIC_MASTER_CONF_DIR=paths.TMP_SYNDIC_MASTER_CONF_DIR, + TMP_SYNDIC_MINION_CONF_DIR=paths.TMP_SYNDIC_MINION_CONF_DIR, + TMP_SSH_CONF_DIR=paths.TMP_SSH_CONF_DIR, + TMP_SCRIPT_DIR=paths.TMP_SCRIPT_DIR, + TMP_STATE_TREE=paths.TMP_STATE_TREE, + TMP_BASEENV_STATE_TREE=paths.TMP_STATE_TREE, + TMP_PILLAR_TREE=paths.TMP_PILLAR_TREE, + TMP_BASEENV_PILLAR_TREE=paths.TMP_PILLAR_TREE, + TMP_PRODENV_STATE_TREE=paths.TMP_PRODENV_STATE_TREE, + TMP_PRODENV_PILLAR_TREE=paths.TMP_PRODENV_PILLAR_TREE, + SHELL_TRUE_PATH=salt.utils.path.which("true") + if not salt.utils.platform.is_windows() + else "cmd /c exit 0 > nul", + SHELL_FALSE_PATH=salt.utils.path.which("false") + if not salt.utils.platform.is_windows() + else "cmd /c exit 1 > nul", + RUNNING_TESTS_USER=this_user(), + RUNTIME_CONFIGS={}, + CODE_DIR=paths.CODE_DIR, + SALT_CODE_DIR=paths.SALT_CODE_DIR, + BASE_FILES=paths.BASE_FILES, + PROD_FILES=paths.PROD_FILES, + TESTS_DIR=paths.TESTS_DIR, +) +# <---- Tests Runtime Variables -------------------------------------------------------------------------------------- diff --git a/pkg/tests/support/sminion.py b/pkg/tests/support/sminion.py new file mode 100644 index 00000000000..abf45fd7bde --- /dev/null +++ b/pkg/tests/support/sminion.py @@ -0,0 +1,256 @@ +""" +tests.support.sminion +~~~~~~~~~~~~~~~~~~~~~ + +SMinion's support functions +""" + +import fnmatch +import hashlib +import logging +import os +import shutil +import sys + +import salt.minion +import salt.utils.path +import salt.utils.stringutils +from tests.support.runtests import RUNTIME_VARS + +log = logging.getLogger(__name__) + +DEFAULT_SMINION_ID = "pytest-internal-sminion" + + +def build_minion_opts( + minion_id=None, + root_dir=None, + initial_conf_file=None, + minion_opts_overrides=None, + skip_cached_opts=False, + cache_opts=True, + minion_role=None, +): + if minion_id is None: + minion_id = DEFAULT_SMINION_ID + if skip_cached_opts is False: + try: + opts_cache = build_minion_opts.__cached_opts__ + except AttributeError: + opts_cache = build_minion_opts.__cached_opts__ = {} + cached_opts = opts_cache.get(minion_id) + if cached_opts: + return cached_opts + + log.info("Generating testing minion %r configuration...", minion_id) + if root_dir is None: + hashed_minion_id = hashlib.sha1() + hashed_minion_id.update(salt.utils.stringutils.to_bytes(minion_id)) + root_dir = os.path.join( + RUNTIME_VARS.TMP_ROOT_DIR, hashed_minion_id.hexdigest()[:6] + ) + + if initial_conf_file is not None: + minion_opts = salt.config._read_conf_file( + initial_conf_file + ) # pylint: disable=protected-access + else: + minion_opts = {} + + conf_dir = os.path.join(root_dir, "conf") + conf_file = os.path.join(conf_dir, "minion") + + minion_opts["id"] = minion_id + minion_opts["conf_file"] = conf_file + minion_opts["root_dir"] = root_dir + minion_opts["cachedir"] = "cache" + minion_opts["user"] = RUNTIME_VARS.RUNNING_TESTS_USER + minion_opts["pki_dir"] = "pki" + minion_opts["hosts.file"] = os.path.join(RUNTIME_VARS.TMP_ROOT_DIR, "hosts") + minion_opts["aliases.file"] = os.path.join(RUNTIME_VARS.TMP_ROOT_DIR, "aliases") + minion_opts["file_client"] = "local" + minion_opts["server_id_use_crc"] = "adler32" + minion_opts["pillar_roots"] = {"base": 
[RUNTIME_VARS.TMP_PILLAR_TREE]} + minion_opts["file_roots"] = { + "base": [ + # Let's support runtime created files that can be used like: + # salt://my-temp-file.txt + RUNTIME_VARS.TMP_STATE_TREE + ], + # Alternate root to test __env__ choices + "prod": [ + os.path.join(RUNTIME_VARS.FILES, "file", "prod"), + RUNTIME_VARS.TMP_PRODENV_STATE_TREE, + ], + } + if initial_conf_file and initial_conf_file.startswith(RUNTIME_VARS.FILES): + # We assume we were passed a minion configuration file defined for testing and, as such, + # we define the file and pillar roots to include the testing states/pillar trees + minion_opts["pillar_roots"]["base"].append( + os.path.join(RUNTIME_VARS.FILES, "pillar", "base"), + ) + minion_opts["file_roots"]["base"].append( + os.path.join(RUNTIME_VARS.FILES, "file", "base"), + ) + minion_opts["file_roots"]["prod"].append( + os.path.join(RUNTIME_VARS.FILES, "file", "prod"), + ) + + # We need to copy the extension modules into the new master root_dir or + # the configured path will be prefixed by it + extension_modules_path = os.path.join(root_dir, "extension_modules") + if not os.path.exists(extension_modules_path): + shutil.copytree( + os.path.join(RUNTIME_VARS.FILES, "extension_modules"), + extension_modules_path, + ) + minion_opts["extension_modules"] = extension_modules_path + + # Custom grains + if "grains" not in minion_opts: + minion_opts["grains"] = {} + if minion_role is not None: + minion_opts["grains"]["role"] = minion_role + + # Under Windows we can't seem to properly create a virtualenv off of another + # virtualenv; we can on Linux, but we will still point to the virtualenv binary + # outside the virtualenv running the test suite, if that's the case. + try: + real_prefix = sys.real_prefix + # The above attribute exists, so this is a virtualenv + if salt.utils.platform.is_windows(): + virtualenv_binary = os.path.join(real_prefix, "Scripts", "virtualenv.exe") + else: + # We need to remove the virtualenv from PATH or we'll get the virtualenv binary + # from within the virtualenv; we don't want that + path = os.environ.get("PATH") + if path is not None: + path_items = path.split(os.pathsep) + for item in path_items[:]: + if item.startswith(sys.base_prefix): + path_items.remove(item) + os.environ["PATH"] = os.pathsep.join(path_items) + virtualenv_binary = salt.utils.path.which("virtualenv") + if path is not None: + # Restore previous environ PATH + os.environ["PATH"] = path + if not virtualenv_binary.startswith(real_prefix): + virtualenv_binary = None + if virtualenv_binary and not os.path.exists(virtualenv_binary): + # It doesn't exist?!
+ virtualenv_binary = None + except AttributeError: + # We're not running inside a virtualenv + virtualenv_binary = None + if virtualenv_binary: + minion_opts["venv_bin"] = virtualenv_binary + + # Override minion_opts with minion_opts_overrides + if minion_opts_overrides: + minion_opts.update(minion_opts_overrides) + + if not os.path.exists(conf_dir): + os.makedirs(conf_dir) + + with salt.utils.files.fopen(conf_file, "w") as fp_: + salt.utils.yaml.safe_dump(minion_opts, fp_, default_flow_style=False) + + log.info("Generating testing minion %r configuration completed.", minion_id) + minion_opts = salt.config.minion_config( + conf_file, minion_id=minion_id, cache_minion_id=True + ) + salt.utils.verify.verify_env( + [ + os.path.join(minion_opts["pki_dir"], "accepted"), + os.path.join(minion_opts["pki_dir"], "rejected"), + os.path.join(minion_opts["pki_dir"], "pending"), + os.path.dirname(minion_opts["log_file"]), + minion_opts["extension_modules"], + minion_opts["cachedir"], + minion_opts["sock_dir"], + RUNTIME_VARS.TMP_STATE_TREE, + RUNTIME_VARS.TMP_PILLAR_TREE, + RUNTIME_VARS.TMP_PRODENV_STATE_TREE, + RUNTIME_VARS.TMP, + ], + RUNTIME_VARS.RUNNING_TESTS_USER, + root_dir=root_dir, + ) + if cache_opts: + try: + opts_cache = build_minion_opts.__cached_opts__ + except AttributeError: + opts_cache = build_minion_opts.__cached_opts__ = {} + opts_cache[minion_id] = minion_opts + return minion_opts + + +def create_sminion( + minion_id=None, + root_dir=None, + initial_conf_file=None, + sminion_cls=salt.minion.SMinion, + minion_opts_overrides=None, + skip_cached_minion=False, + cache_sminion=True, +): + if minion_id is None: + minion_id = DEFAULT_SMINION_ID + if skip_cached_minion is False: + try: + minions_cache = create_sminion.__cached_minions__ + except AttributeError: + create_sminion.__cached_minions__ = {} + cached_minion = create_sminion.__cached_minions__.get(minion_id) + if cached_minion: + return cached_minion + minion_opts = build_minion_opts( + minion_id=minion_id, + root_dir=root_dir, + initial_conf_file=initial_conf_file, + minion_opts_overrides=minion_opts_overrides, + skip_cached_opts=skip_cached_minion, + cache_opts=cache_sminion, + ) + log.info("Instantiating a testing %s(%s)", sminion_cls.__name__, minion_id) + sminion = sminion_cls(minion_opts) + if cache_sminion: + try: + minions_cache = create_sminion.__cached_minions__ + except AttributeError: + minions_cache = create_sminion.__cached_minions__ = {} + minions_cache[minion_id] = sminion + return sminion + + +def check_required_sminion_attributes(sminion_attr, required_items): + """ + :param sminion_attr: The name of the sminion attribute to check, such as 'functions' or 'states' + :param required_items: The items that must be part of the designated sminion attribute for the decorated test + :return The packages that are not available + """ + required_salt_items = set(required_items) + sminion = create_sminion(minion_id=DEFAULT_SMINION_ID) + available_items = list(getattr(sminion, sminion_attr)) + not_available_items = set() + + name = "__not_available_{items}s__".format(items=sminion_attr) + if not hasattr(sminion, name): + setattr(sminion, name, set()) + + cached_not_available_items = getattr(sminion, name) + + for not_available_item in cached_not_available_items: + if not_available_item in required_salt_items: + not_available_items.add(not_available_item) + required_salt_items.remove(not_available_item) + + for required_item_name in required_salt_items: + search_name = required_item_name + if "." 
not in search_name: + search_name += ".*" + if not fnmatch.filter(available_items, search_name): + not_available_items.add(required_item_name) + cached_not_available_items.add(required_item_name) + + return not_available_items diff --git a/pkg/windows/prep_salt.ps1 b/pkg/windows/prep_salt.ps1 index a56af4075b8..02991063f1b 100644 --- a/pkg/windows/prep_salt.ps1 +++ b/pkg/windows/prep_salt.ps1 @@ -360,7 +360,6 @@ $modules = "acme", "runit", "s6", "scsi", - "seed", "sensors", "service", "shadow", diff --git a/salt/__init__.py b/salt/__init__.py index e06b8ad7127..6649fdf5683 100644 --- a/salt/__init__.py +++ b/salt/__init__.py @@ -140,9 +140,3 @@ del __define_global_system_encoding_variable__ import salt._logging # isort:skip # pylint: enable=unused-import - - -# When we are running in a 'onedir' environment, setup the path for user -# installed packages. -if hasattr(sys, "RELENV"): - sys.path.insert(0, str(sys.RELENV / "extras-{}.{}".format(*sys.version_info))) diff --git a/salt/cli/ssh.py b/salt/cli/ssh.py index 6048cb5f58f..78522a044a9 100644 --- a/salt/cli/ssh.py +++ b/salt/cli/ssh.py @@ -16,4 +16,7 @@ class SaltSSH(salt.utils.parsers.SaltSSHOptionParser): self.parse_args() ssh = salt.client.ssh.SSH(self.config) - ssh.run() + try: + ssh.run() + finally: + ssh.fsclient.destroy() diff --git a/salt/cloud/__init__.py b/salt/cloud/__init__.py index 2b21483599d..a4d11eed59a 100644 --- a/salt/cloud/__init__.py +++ b/salt/cloud/__init__.py @@ -1427,7 +1427,8 @@ class Cloud: raise SaltCloudSystemExit("Failed to deploy VM") continue if self.opts.get("show_deploy_args", False) is False: - ret[name].pop("deploy_kwargs", None) + if isinstance(ret[name], dict): + ret[name].pop("deploy_kwargs", None) except (SaltCloudSystemExit, SaltCloudConfigError) as exc: if len(names) == 1: raise diff --git a/salt/metaproxy/deltaproxy.py b/salt/metaproxy/deltaproxy.py index c3003b368f7..d866d6f4c1d 100644 --- a/salt/metaproxy/deltaproxy.py +++ b/salt/metaproxy/deltaproxy.py @@ -231,10 +231,11 @@ def post_master_init(self, master): } }, persist=True, + fire_event=False, ) log.info("Added mine.update to scheduler") else: - self.schedule.delete_job("__mine_interval", persist=True) + self.schedule.delete_job("__mine_interval", persist=True, fire_event=False) # add master_alive job if enabled if self.opts["transport"] != "tcp" and self.opts["master_alive_interval"] > 0: @@ -250,6 +251,7 @@ def post_master_init(self, master): } }, persist=True, + fire_event=False, ) if ( self.opts["master_failback"] @@ -268,18 +270,24 @@ def post_master_init(self, master): } }, persist=True, + fire_event=False, ) else: self.schedule.delete_job( - salt.minion.master_event(type="failback"), persist=True + salt.minion.master_event(type="failback"), + persist=True, + fire_event=False, ) else: self.schedule.delete_job( salt.minion.master_event(type="alive", master=self.opts["master"]), persist=True, + fire_event=False, ) self.schedule.delete_job( - salt.minion.master_event(type="failback"), persist=True + salt.minion.master_event(type="failback"), + persist=True, + fire_event=False, ) # proxy keepalive @@ -304,10 +312,15 @@ def post_master_init(self, master): } }, persist=True, + fire_event=False, ) - self.schedule.enable_schedule() + self.schedule.enable_schedule(fire_event=False) else: - self.schedule.delete_job("__proxy_keepalive", persist=True) + self.schedule.delete_job( + "__proxy_keepalive", + persist=True, + fire_event=False, + ) # Sync the grains here so the proxy can communicate them to the master 
self.functions["saltutil.sync_grains"](saltenv="base") @@ -321,10 +334,11 @@ def post_master_init(self, master): self.proxy_context = {} self.add_periodic_callback("cleanup", self.cleanup_subprocesses) + _failed = list() if self.opts["proxy"].get("parallel_startup"): log.debug("Initiating parallel startup for proxies") with concurrent.futures.ThreadPoolExecutor() as executor: - futures = [ + futures = { executor.submit( subproxy_post_master_init, _id, @@ -332,12 +346,22 @@ def post_master_init(self, master): self.opts, self.proxy, self.utils, - ) + ): _id for _id in self.opts["proxy"].get("ids", []) - ] + } - for f in concurrent.futures.as_completed(futures): - sub_proxy_data = f.result() + for future in concurrent.futures.as_completed(futures): + try: + sub_proxy_data = future.result() + except Exception as exc: # pylint: disable=broad-except + _id = futures[future] + log.info( + "An exception occured during initialization for %s, skipping: %s", + _id, + exc, + ) + _failed.append(_id) + continue minion_id = sub_proxy_data["proxy_opts"].get("id") if sub_proxy_data["proxy_minion"]: @@ -347,16 +371,24 @@ def post_master_init(self, master): if self.deltaproxy_opts[minion_id] and self.deltaproxy_objs[minion_id]: self.deltaproxy_objs[ minion_id - ].req_channel = salt.transport.client.AsyncReqChannel.factory( + ].req_channel = salt.channel.client.AsyncReqChannel.factory( sub_proxy_data["proxy_opts"], io_loop=self.io_loop ) else: log.debug("Initiating non-parallel startup for proxies") for _id in self.opts["proxy"].get("ids", []): - sub_proxy_data = subproxy_post_master_init( - _id, uid, self.opts, self.proxy, self.utils - ) - + try: + sub_proxy_data = subproxy_post_master_init( + _id, uid, self.opts, self.proxy, self.utils + ) + except Exception as exc: # pylint: disable=broad-except + log.info( + "An exception occured during initialization for %s, skipping: %s", + _id, + exc, + ) + _failed.append(_id) + continue minion_id = sub_proxy_data["proxy_opts"].get("id") if sub_proxy_data["proxy_minion"]: @@ -366,10 +398,12 @@ def post_master_init(self, master): if self.deltaproxy_opts[minion_id] and self.deltaproxy_objs[minion_id]: self.deltaproxy_objs[ minion_id - ].req_channel = salt.transport.client.AsyncReqChannel.factory( + ].req_channel = salt.channel.client.AsyncReqChannel.factory( sub_proxy_data["proxy_opts"], io_loop=self.io_loop ) + if _failed: + log.info("Following sub proxies failed %s", _failed) self.ready = True @@ -535,10 +569,13 @@ def subproxy_post_master_init(minion_id, uid, opts, main_proxy, main_utils): } }, persist=True, + fire_event=False, ) - _proxy_minion.schedule.enable_schedule() + _proxy_minion.schedule.enable_schedule(fire_event=False) else: - _proxy_minion.schedule.delete_job("__proxy_keepalive", persist=True) + _proxy_minion.schedule.delete_job( + "__proxy_keepalive", persist=True, fire_event=False + ) return {"proxy_minion": _proxy_minion, "proxy_opts": proxyopts} diff --git a/salt/minion.py b/salt/minion.py index 6237fcc4b7f..3a7c26366fc 100644 --- a/salt/minion.py +++ b/salt/minion.py @@ -1363,7 +1363,7 @@ class Minion(MinionBase): ) # a long-running req channel - self.req_channel = salt.transport.client.AsyncReqChannel.factory( + self.req_channel = salt.channel.client.AsyncReqChannel.factory( self.opts, io_loop=self.io_loop ) @@ -2817,10 +2817,8 @@ class Minion(MinionBase): self.opts["master"], ) - self.req_channel = ( - salt.transport.client.AsyncReqChannel.factory( - self.opts, io_loop=self.io_loop - ) + self.req_channel = salt.channel.client.AsyncReqChannel.factory( 
+ self.opts, io_loop=self.io_loop ) # put the current schedule into the new loaders diff --git a/salt/modules/saltutil.py b/salt/modules/saltutil.py index a692c3f34d4..ecf467046aa 100644 --- a/salt/modules/saltutil.py +++ b/salt/modules/saltutil.py @@ -381,6 +381,9 @@ def refresh_grains(**kwargs): refresh_pillar : True Set to ``False`` to keep pillar data from being refreshed. + clean_pillar_cache : False + Set to ``True`` to refresh pillar cache. + CLI Examples: .. code-block:: bash @@ -389,6 +392,7 @@ def refresh_grains(**kwargs): """ kwargs = salt.utils.args.clean_kwargs(**kwargs) _refresh_pillar = kwargs.pop("refresh_pillar", True) + clean_pillar_cache = kwargs.pop("clean_pillar_cache", False) if kwargs: salt.utils.args.invalid_kwargs(kwargs) # Modules and pillar need to be refreshed in case grains changes affected @@ -396,14 +400,18 @@ def refresh_grains(**kwargs): # newly-reloaded grains to each execution module's __grains__ dunder. if _refresh_pillar: # we don't need to call refresh_modules here because it's done by refresh_pillar - refresh_pillar() + refresh_pillar(clean_cache=clean_pillar_cache) else: refresh_modules() return True def sync_grains( - saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None + saltenv=None, + refresh=True, + extmod_whitelist=None, + extmod_blacklist=None, + clean_pillar_cache=False, ): """ .. versionadded:: 0.10.0 @@ -430,6 +438,9 @@ def sync_grains( extmod_blacklist : None comma-separated list of modules to blacklist based on type + clean_pillar_cache : False + Set to ``True`` to refresh pillar cache. + CLI Examples: .. code-block:: bash @@ -441,7 +452,7 @@ def sync_grains( ret = _sync("grains", saltenv, extmod_whitelist, extmod_blacklist) if refresh: # we don't need to call refresh_modules here because it's done by refresh_pillar - refresh_pillar() + refresh_pillar(clean_cache=clean_pillar_cache) return ret @@ -915,7 +926,11 @@ def sync_log_handlers( def sync_pillar( - saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None + saltenv=None, + refresh=True, + extmod_whitelist=None, + extmod_blacklist=None, + clean_pillar_cache=False, ): """ .. versionadded:: 2015.8.11,2016.3.2 @@ -935,6 +950,9 @@ def sync_pillar( extmod_blacklist : None comma-separated list of modules to blacklist based on type + clean_pillar_cache : False + Set to ``True`` to refresh pillar cache. + .. note:: This function will raise an error if executed on a traditional (i.e. not masterless) minion @@ -953,7 +971,7 @@ def sync_pillar( ret = _sync("pillar", saltenv, extmod_whitelist, extmod_blacklist) if refresh: # we don't need to call refresh_modules here because it's done by refresh_pillar - refresh_pillar() + refresh_pillar(clean_cache=clean_pillar_cache) return ret @@ -998,7 +1016,13 @@ def sync_executors( return ret -def sync_all(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None): +def sync_all( + saltenv=None, + refresh=True, + extmod_whitelist=None, + extmod_blacklist=None, + clean_pillar_cache=False, +): """ .. versionchanged:: 2015.8.11,2016.3.2 On masterless minions, pillar modules are now synced, and refreshed @@ -1036,6 +1060,9 @@ def sync_all(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist extmod_blacklist : None dictionary of modules to blacklist based on type + clean_pillar_cache : False + Set to ``True`` to refresh pillar cache. + CLI Examples: .. 
code-block:: bash @@ -1080,7 +1107,7 @@ def sync_all(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist ret["pillar"] = sync_pillar(saltenv, False, extmod_whitelist, extmod_blacklist) if refresh: # we don't need to call refresh_modules here because it's done by refresh_pillar - refresh_pillar() + refresh_pillar(clean_cache=clean_pillar_cache) return ret diff --git a/salt/modules/win_lgpo_reg.py b/salt/modules/win_lgpo_reg.py index 4052de62bd3..2fd04bd3c73 100644 --- a/salt/modules/win_lgpo_reg.py +++ b/salt/modules/win_lgpo_reg.py @@ -381,7 +381,7 @@ def set_value( else: pol_data[key] = {v_name: {"data": v_data, "type": v_type}} - write_reg_pol(pol_data) + write_reg_pol(pol_data, policy_class=policy_class) return salt.utils.win_reg.set_value( hive=hive, @@ -464,7 +464,7 @@ def disable_value(key, v_name, policy_class="machine"): else: pol_data[key] = {"**del.{}".format(v_name): {"data": " ", "type": "REG_SZ"}} - write_reg_pol(pol_data) + write_reg_pol(pol_data, policy_class=policy_class) return salt.utils.win_reg.delete_value(hive=hive, key=key, vname=v_name) @@ -534,7 +534,7 @@ def delete_value(key, v_name, policy_class="Machine"): else: return None - write_reg_pol(pol_data) + write_reg_pol(pol_data, policy_class=policy_class) return salt.utils.win_reg.delete_value(hive=hive, key=key, vname=v_name) diff --git a/salt/roster/terraform.py b/salt/roster/terraform.py index 0c9f13df2cf..626f0f103c5 100644 --- a/salt/roster/terraform.py +++ b/salt/roster/terraform.py @@ -92,7 +92,9 @@ def _handle_old_salt_host_resource(resource): ret[MINION_ID] = attrs.get(MINION_ID) valid_attrs = set(attrs.keys()).intersection(TF_ROSTER_ATTRS.keys()) for attr in valid_attrs: - ret[attr] = _cast_output_to_type(attrs.get(attr), TF_ROSTER_ATTRS.get(attr)) + ret[attr] = _cast_output_to_type( + attr, attrs.get(attr), TF_ROSTER_ATTRS.get(attr) + ) return ret @@ -110,7 +112,9 @@ def _handle_new_salt_host_resource(resource): ret[MINION_ID] = attrs.get(MINION_ID) valid_attrs = set(attrs.keys()).intersection(TF_ROSTER_ATTRS.keys()) for attr in valid_attrs: - ret[attr] = _cast_output_to_type(attrs.get(attr), TF_ROSTER_ATTRS.get(attr)) + ret[attr] = _cast_output_to_type( + attr, attrs.get(attr), TF_ROSTER_ATTRS.get(attr) + ) log.info(ret) rets.append(ret) return rets @@ -134,8 +138,16 @@ def _add_ssh_key(ret): ret["priv"] = priv -def _cast_output_to_type(value, typ): +def _cast_output_to_type(attr, value, typ): """cast the value depending on the terraform type""" + if value is None: + # Timeout needs to default to 0 if the value is None + # The ssh command that is run cannot handle `-o ConnectTimeout=None` + if attr == "timeout": + return 0 + else: + return value + if value is None: return value if typ == "b": diff --git a/salt/states/file.py b/salt/states/file.py index 9f32151b8b1..f481537d529 100644 --- a/salt/states/file.py +++ b/salt/states/file.py @@ -334,6 +334,15 @@ __func_alias__ = { } +def _http_ftp_check(source): + """ + Check if source, or each source in a list, is http, https or ftp. + """ + if isinstance(source, str): + return source.lower().startswith(("http:", "https:", "ftp:")) + return any(s.lower().startswith(("http:", "https:", "ftp:")) for s in source) + + def _get_accumulator_filepath(): """ Return accumulator data path. @@ -2414,6 +2423,8 @@ def managed( - source: https://launchpad.net/tomdroid/beta/0.7.3/+download/tomdroid-src-0.7.3.tar.gz - source_hash: md5=79eef25f9b0b2c642c62b7f737d4f53f + source_hash is ignored if the hosted file is not on an HTTP, HTTPS or FTP server.
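A minimal standalone sketch of the check behind this note (a re-implementation for illustration only; the real logic is the `_http_ftp_check` helper added above, and the URLs are placeholders):

.. code-block:: python

    # Sketch of the scheme check gating the new
    # "source_hash is only used with 'http', 'https' or 'ftp'" warning.
    def _http_ftp_check(source):
        if isinstance(source, str):
            return source.lower().startswith(("http:", "https:", "ftp:"))
        return any(s.lower().startswith(("http:", "https:", "ftp:")) for s in source)

    assert _http_ftp_check("https://example.com/tomdroid-src-0.7.3.tar.gz")
    assert not _http_ftp_check("salt://tomdroid-src-0.7.3.tar.gz")  # would warn
    assert not _http_ftp_check(["salt://a.sls", "/srv/files/b.txt"])  # would warn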
+ Known issues: If the remote server URL has the hash file as an apparent sub-directory of the source file, the module will discover that it @@ -2946,6 +2957,9 @@ def managed( "'contents_grains' is permitted", ) + if source is not None and not _http_ftp_check(source) and source_hash: + log.warning("source_hash is only used with 'http', 'https' or 'ftp'") + # If no source is specified, set replace to False, as there is nothing # with which to replace the file. if not source and contents_count == 0 and replace: @@ -5998,6 +6012,9 @@ def blockreplace( if not name: return _error(ret, "Must provide name to file.blockreplace") + if source is not None and not _http_ftp_check(source) and source_hash: + log.warning("source_hash is only used with 'http', 'https' or 'ftp'") + if sources is None: sources = [] if source_hashes is None: @@ -6434,6 +6451,9 @@ def append( if not name: return _error(ret, "Must provide name to file.append") + if source is not None and not _http_ftp_check(source) and source_hash: + log.warning("source_hash is only used with 'http', 'https' or 'ftp'") + name = os.path.expanduser(name) if sources is None: @@ -6718,6 +6738,9 @@ def prepend( if not name: return _error(ret, "Must provide name to file.prepend") + if source is not None and not _http_ftp_check(source) and source_hash: + log.warning("source_hash is only used with 'http', 'https' or 'ftp'") + if sources is None: sources = [] @@ -8937,6 +8960,25 @@ def cached( else: source_sum = {} + if __opts__["test"]: + local_copy = __salt__["cp.is_cached"](name, saltenv=saltenv) + if local_copy: + if source_sum: + hash = __salt__["file.get_hash"](local_copy, __opts__["hash_type"]) + if hash == source_sum["hsum"]: + ret["comment"] = "File already cached: {}".format(name) + else: + ret[ + "comment" + ] = "Hashes don't match.\nFile will be cached: {}".format(name) + else: + ret["comment"] = "No hash found. File will be cached: {}".format(name) + else: + ret["comment"] = "File will be cached: {}".format(name) + ret["changes"] = {} + ret["result"] = None + return ret + if parsed.scheme in salt.utils.files.LOCAL_PROTOS: # Source is a local file path full_path = os.path.realpath(os.path.expanduser(parsed.path)) diff --git a/salt/states/win_lgpo_reg.py b/salt/states/win_lgpo_reg.py index 7a514068acb..8377817a198 100644 --- a/salt/states/win_lgpo_reg.py +++ b/salt/states/win_lgpo_reg.py @@ -72,6 +72,27 @@ def __virtual__(): return __virtualname__ +def _get_current(key, name, policy_class): + """ + Helper function to get the current state of the policy + """ + hive = "HKLM" + if policy_class == "User": + hive = "HKCU" + pol = __salt__["lgpo_reg.get_value"]( + key=key, v_name=name, policy_class=policy_class + ) + reg_raw = __utils__["reg.read_value"](hive=hive, key=key, vname=name) + + reg = {} + if reg_raw["vdata"] is not None: + reg["data"] = reg_raw["vdata"] + if reg_raw["vtype"] is not None: + reg["type"] = reg_raw["vtype"] + + return {"pol": pol, "reg": reg} + + def value_present(name, key, v_data, v_type="REG_DWORD", policy_class="Machine"): r""" Ensure a registry setting is present in the Registry.pol file. 
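Both locations returned by the `_get_current` helper above now have to agree before these states report success. A condensed sketch of that comparison (the `_both_correct` name is hypothetical, and plain dicts stand in for `_get_current`'s return value):

.. code-block:: python

    # Condensed form of the pol/reg agreement check value_present performs:
    # the Registry.pol entry and the live registry value must both match.
    def _both_correct(current, v_data, v_type):
        pol_correct = (
            str(current["pol"].get("data", "")) == str(v_data)
            and current["pol"].get("type", "") == v_type
        )
        reg_correct = (
            str(current["reg"].get("data", "")) == str(v_data)
            and current["reg"].get("type", "") == v_type
        )
        return pol_correct, reg_correct

    # Registry.pol already holds the value, the live registry does not yet:
    current = {"pol": {"data": 1, "type": "REG_DWORD"}, "reg": {}}
    assert _both_correct(current, 1, "REG_DWORD") == (True, False)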
@@ -128,16 +149,29 @@ def value_present(name, key, v_data, v_type="REG_DWORD", policy_class="Machine") """ ret = {"name": name, "changes": {}, "result": False, "comment": ""} - old = __salt__["lgpo_reg.get_value"]( - key=key, v_name=name, policy_class=policy_class + old = _get_current(key=key, name=name, policy_class=policy_class) + + pol_correct = ( + str(old["pol"].get("data", "")) == str(v_data) + and old["pol"].get("type", "") == v_type ) - if old.get("data", "") == v_data and old.get("type", "") == v_type: - ret["comment"] = "Registry.pol value already present" + reg_correct = ( + str(old["reg"].get("data", "")) == str(v_data) + and old["reg"].get("type", "") == v_type + ) + + if pol_correct and reg_correct: + ret["comment"] = "Policy value already present\nRegistry value already present" ret["result"] = True return ret if __opts__["test"]: - ret["comment"] = "Registry.pol value will be set" + if not pol_correct: + ret["comment"] = "Policy value will be set" + if not reg_correct: + if ret["comment"]: + ret["comment"] += "\n" + ret["comment"] += "Registry value will be set" ret["result"] = None return ret @@ -149,15 +183,24 @@ def value_present(name, key, v_data, v_type="REG_DWORD", policy_class="Machine") policy_class=policy_class, ) - new = __salt__["lgpo_reg.get_value"]( - key=key, v_name=name, policy_class=policy_class + new = _get_current(key=key, name=name, policy_class=policy_class) + + pol_correct = ( + str(new["pol"]["data"]) == str(v_data) and new["pol"]["type"] == v_type + ) + reg_correct = ( + str(new["reg"]["data"]) == str(v_data) and new["reg"]["type"] == v_type ) - if str(new["data"]) == v_data and new["type"] == v_type: - ret["comment"] = "Registry.pol value has been set" + if pol_correct and reg_correct: + ret["comment"] = "Registry policy value has been set" ret["result"] = True - else: - ret["comment"] = "Failed to set Registry.pol value" + elif not pol_correct: + ret["comment"] = "Failed to set policy value" + elif not reg_correct: + if ret["comment"]: + ret["comment"] += "\n" + ret["comment"] += "Failed to set registry value" changes = salt.utils.data.recursive_diff(old, new) @@ -206,30 +249,42 @@ def value_disabled(name, key, policy_class="Machine"): """ ret = {"name": name, "changes": {}, "result": False, "comment": ""} - old = __salt__["lgpo_reg.get_value"]( - key=key, v_name=name, policy_class=policy_class - ) - if old.get("data", "") == "**del.{}".format(name): - ret["comment"] = "Registry.pol value already disabled" + old = _get_current(key=key, name=name, policy_class=policy_class) + + pol_correct = old["pol"].get("data", "") == "**del.{}".format(name) + reg_correct = old["reg"] == {} + + if pol_correct and reg_correct: + ret["comment"] = "Registry policy value already disabled" ret["result"] = True return ret if __opts__["test"]: - ret["comment"] = "Registry.pol value will be disabled" + if not pol_correct: + ret["comment"] = "Policy value will be disabled" + if not reg_correct: + if ret["comment"]: + ret["comment"] += "\n" + ret["comment"] += "Registry value will be removed" ret["result"] = None return ret __salt__["lgpo_reg.disable_value"](key=key, v_name=name, policy_class=policy_class) - new = __salt__["lgpo_reg.get_value"]( - key=key, v_name=name, policy_class=policy_class - ) + new = _get_current(key=key, name=name, policy_class=policy_class) - if "**del." 
in str(new["data"]) and new["type"] == "REG_SZ": - ret["comment"] = "Registry.pol value disabled" + pol_correct = new["pol"].get("data", "") == "**del.{}".format(name) + reg_correct = new["reg"] == {} + + if pol_correct and reg_correct: + ret["comment"] = "Registry policy value disabled" ret["result"] = True - else: - ret["comment"] = "Failed to disable Registry.pol value" + elif not pol_correct: + ret["comment"] = "Failed to disable policy value" + elif not reg_correct: + if ret["comment"]: + ret["comment"] += "\n" + ret["comment"] += "Failed to remove registry value" changes = salt.utils.data.recursive_diff(old, new) @@ -278,32 +333,42 @@ def value_absent(name, key, policy_class="Machine"): """ ret = {"name": name, "changes": {}, "result": False, "comment": ""} - old = __salt__["lgpo_reg.get_value"]( - key=key, v_name=name, policy_class=policy_class - ) - if not old: - ret["comment"] = "Registry.pol value already absent" + old = _get_current(key=key, name=name, policy_class=policy_class) + + pol_correct = old["pol"] == {} + reg_correct = old["reg"] == {} + + if pol_correct and reg_correct: + ret["comment"] = "Registry policy value already deleted" ret["result"] = True return ret if __opts__["test"]: - ret["comment"] = "Registry.pol value will be deleted" + if not pol_correct: + ret["comment"] = "Policy value will be deleted" + if not reg_correct: + if ret["comment"]: + ret["comment"] += "\n" + ret["comment"] += "Registry value will be deleted" ret["result"] = None return ret __salt__["lgpo_reg.delete_value"](key=key, v_name=name, policy_class=policy_class) - new = __salt__["lgpo_reg.get_value"]( - key=key, v_name=name, policy_class=policy_class - ) + new = _get_current(key=key, name=name, policy_class=policy_class) - if not new: - ret["comment"] = "Registry.pol value deleted" + pol_correct = new["pol"] == {} + reg_correct = new["reg"] == {} + + if pol_correct and reg_correct: + ret["comment"] = "Registry policy value deleted" ret["result"] = True - # We're setting this here in case new is None - new = {} - else: - ret["comment"] = "Failed to delete Registry.pol value" + elif not pol_correct: + ret["comment"] = "Failed to delete policy value" + elif not reg_correct: + if ret["comment"]: + ret["comment"] += "\n" + ret["comment"] += "Failed to delete registry value" changes = salt.utils.data.recursive_diff(old, new) diff --git a/salt/transport/client.py b/salt/transport/client.py index 7ffc97fe8e7..bd79ac357b4 100644 --- a/salt/transport/client.py +++ b/salt/transport/client.py @@ -13,8 +13,6 @@ from salt.utils.versions import warn_until log = logging.getLogger(__name__) -# XXX: Add depreication warnings to start using salt.channel.client - class ReqChannel: """ diff --git a/salt/transport/ipc.py b/salt/transport/ipc.py index ca13a498e3e..453afaaad78 100644 --- a/salt/transport/ipc.py +++ b/salt/transport/ipc.py @@ -13,7 +13,6 @@ import salt.ext.tornado.concurrent import salt.ext.tornado.gen import salt.ext.tornado.ioloop import salt.ext.tornado.netutil -import salt.transport.client import salt.transport.frame import salt.utils.msgpack from salt.ext.tornado.ioloop import IOLoop diff --git a/salt/transport/local.py b/salt/transport/local.py index 49fb1e0b588..e0a22b78cb1 100644 --- a/salt/transport/local.py +++ b/salt/transport/local.py @@ -1,7 +1,7 @@ import logging import salt.utils.files -from salt.transport.client import ReqChannel +from salt.channel.client import ReqChannel log = logging.getLogger(__name__) diff --git a/salt/transport/tcp.py b/salt/transport/tcp.py index 
6a9e1138940..ddde882e764 100644 --- a/salt/transport/tcp.py +++ b/salt/transport/tcp.py @@ -25,10 +25,8 @@ import salt.ext.tornado.tcpclient import salt.ext.tornado.tcpserver import salt.master import salt.payload -import salt.transport.client import salt.transport.frame import salt.transport.ipc -import salt.transport.server import salt.utils.asynchronous import salt.utils.files import salt.utils.msgpack diff --git a/salt/utils/schedule.py b/salt/utils/schedule.py index 814c2980d4a..6565dda59e6 100644 --- a/salt/utils/schedule.py +++ b/salt/utils/schedule.py @@ -315,7 +315,7 @@ class Schedule: exc_info_on_loglevel=logging.DEBUG, ) - def delete_job(self, name, persist=True): + def delete_job(self, name, persist=True, fire_event=True): """ Deletes a job from the scheduler. Ignore jobs from pillar """ @@ -325,12 +325,15 @@ class Schedule: elif name in self._get_schedule(include_opts=False): log.warning("Cannot delete job %s, it's in the pillar!", name) - # Fire the complete event back along with updated list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "schedule": self._get_schedule()}, - tag="/salt/minion/minion_schedule_delete_complete", - ) + if fire_event: + # Fire the complete event back along with updated list of schedule + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "schedule": self._get_schedule()}, + tag="/salt/minion/minion_schedule_delete_complete", + ) # remove from self.intervals if name in self.intervals: @@ -349,7 +352,7 @@ class Schedule: self.splay = None self.opts["schedule"] = {} - def delete_job_prefix(self, name, persist=True): + def delete_job_prefix(self, name, persist=True, fire_event=True): """ Deletes a job from the scheduler. Ignores jobs from pillar """ @@ -361,12 +364,15 @@ class Schedule: if job.startswith(name): log.warning("Cannot delete job %s, it's in the pillar!", job) - # Fire the complete event back along with updated list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "schedule": self._get_schedule()}, - tag="/salt/minion/minion_schedule_delete_complete", - ) + if fire_event: + # Fire the complete event back along with updated list of schedule + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "schedule": self._get_schedule()}, + tag="/salt/minion/minion_schedule_delete_complete", + ) # remove from self.intervals for job in list(self.intervals.keys()): @@ -376,7 +382,7 @@ class Schedule: if persist: self.persist() - def add_job(self, data, persist=True): + def add_job(self, data, persist=True, fire_event=True): """ Adds a new job to the scheduler. The format is the same as required in the configuration file. 
See the docs on how YAML is interpreted into @@ -410,16 +416,19 @@ class Schedule: self.opts["schedule"].update(data) # Fire the complete event back along with updated list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "schedule": self._get_schedule()}, - tag="/salt/minion/minion_schedule_add_complete", - ) + if fire_event: + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "schedule": self._get_schedule()}, + tag="/salt/minion/minion_schedule_add_complete", + ) if persist: self.persist() - def enable_job(self, name, persist=True): + def enable_job(self, name, persist=True, fire_event=True): """ Enable a job in the scheduler. Ignores jobs from pillar """ @@ -430,17 +439,20 @@ class Schedule: elif name in self._get_schedule(include_opts=False): log.warning("Cannot modify job %s, it's in the pillar!", name) - # Fire the complete event back along with updated list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "schedule": self._get_schedule()}, - tag="/salt/minion/minion_schedule_enabled_job_complete", - ) + if fire_event: + # Fire the complete event back along with updated list of schedule + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "schedule": self._get_schedule()}, + tag="/salt/minion/minion_schedule_enabled_job_complete", + ) if persist: self.persist() - def disable_job(self, name, persist=True): + def disable_job(self, name, persist=True, fire_event=True): """ Disable a job in the scheduler. Ignores jobs from pillar """ @@ -451,23 +463,26 @@ class Schedule: elif name in self._get_schedule(include_opts=False): log.warning("Cannot modify job %s, it's in the pillar!", name) - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - # Fire the complete event back along with updated list of schedule - evt.fire_event( - {"complete": True, "schedule": self._get_schedule()}, - tag="/salt/minion/minion_schedule_disabled_job_complete", - ) + if fire_event: + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + # Fire the complete event back along with updated list of schedule + evt.fire_event( + {"complete": True, "schedule": self._get_schedule()}, + tag="/salt/minion/minion_schedule_disabled_job_complete", + ) if persist: self.persist() - def modify_job(self, name, schedule, persist=True): + def modify_job(self, name, schedule, persist=True, fire_event=True): """ Modify a job in the scheduler. Ignores jobs from pillar """ # ensure job exists, then replace it if name in self.opts["schedule"]: - self.delete_job(name, persist) + self.delete_job(name, persist, fire_event) elif name in self._get_schedule(include_opts=False): log.warning("Cannot modify job %s, it's in the pillar!", name) return @@ -511,34 +526,40 @@ class Schedule: log.info("Running Job: %s", name) self._run_job(func, data) - def enable_schedule(self, persist=True): + def enable_schedule(self, persist=True, fire_event=True): """ Enable the scheduler. 
""" self.opts["schedule"]["enabled"] = True - # Fire the complete event back along with updated list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "schedule": self._get_schedule()}, - tag="/salt/minion/minion_schedule_enabled_complete", - ) + if fire_event: + # Fire the complete event back along with updated list of schedule + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "schedule": self._get_schedule()}, + tag="/salt/minion/minion_schedule_enabled_complete", + ) if persist: self.persist() - def disable_schedule(self, persist=True): + def disable_schedule(self, persist=True, fire_event=True): """ Disable the scheduler. """ self.opts["schedule"]["enabled"] = False - # Fire the complete event back along with updated list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "schedule": self._get_schedule()}, - tag="/salt/minion/minion_schedule_disabled_complete", - ) + if fire_event: + # Fire the complete event back along with updated list of schedule + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "schedule": self._get_schedule()}, + tag="/salt/minion/minion_schedule_disabled_complete", + ) if persist: self.persist() @@ -554,7 +575,7 @@ class Schedule: schedule = schedule["schedule"] self.opts.setdefault("schedule", {}).update(schedule) - def list(self, where): + def list(self, where, fire_event=True): """ List the current schedule items """ @@ -565,24 +586,32 @@ class Schedule: else: schedule = self._get_schedule() - # Fire the complete event back along with the list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "schedule": schedule}, - tag="/salt/minion/minion_schedule_list_complete", - ) + if fire_event: + # Fire the complete event back along with the list of schedule + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "schedule": schedule}, + tag="/salt/minion/minion_schedule_list_complete", + ) - def save_schedule(self): + def save_schedule(self, fire_event=True): """ Save the current schedule """ self.persist() - # Fire the complete event back along with the list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event({"complete": True}, tag="/salt/minion/minion_schedule_saved") + if fire_event: + # Fire the complete event back along with the list of schedule + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True}, tag="/salt/minion/minion_schedule_saved" + ) - def postpone_job(self, name, data): + def postpone_job(self, name, data, fire_event=True): """ Postpone a job in the scheduler. 
Ignores jobs from pillar @@ -608,14 +637,17 @@ class Schedule: elif name in self._get_schedule(include_opts=False): log.warning("Cannot modify job %s, it's in the pillar!", name) - # Fire the complete event back along with updated list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "schedule": self._get_schedule()}, - tag="/salt/minion/minion_schedule_postpone_job_complete", - ) + if fire_event: + # Fire the complete event back along with updated list of schedule + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "schedule": self._get_schedule()}, + tag="/salt/minion/minion_schedule_postpone_job_complete", + ) - def skip_job(self, name, data): + def skip_job(self, name, data, fire_event=True): """ Skip a job at a specific time in the scheduler. Ignores jobs from pillar @@ -634,14 +666,17 @@ class Schedule: elif name in self._get_schedule(include_opts=False): log.warning("Cannot modify job %s, it's in the pillar!", name) - # Fire the complete event back along with updated list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "schedule": self._get_schedule()}, - tag="/salt/minion/minion_schedule_skip_job_complete", - ) + if fire_event: + # Fire the complete event back along with updated list of schedule + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "schedule": self._get_schedule()}, + tag="/salt/minion/minion_schedule_skip_job_complete", + ) - def get_next_fire_time(self, name, fmt="%Y-%m-%dT%H:%M:%S"): + def get_next_fire_time(self, name, fmt="%Y-%m-%dT%H:%M:%S", fire_event=True): """ Return the next fire time for the specified job """ @@ -653,12 +688,15 @@ class Schedule: if _next_fire_time: _next_fire_time = _next_fire_time.strftime(fmt) - # Fire the complete event back along with updated list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "next_fire_time": _next_fire_time}, - tag="/salt/minion/minion_schedule_next_fire_time_complete", - ) + if fire_event: + # Fire the complete event back along with updated list of schedule + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "next_fire_time": _next_fire_time}, + tag="/salt/minion/minion_schedule_next_fire_time_complete", + ) def job_status(self, name, fire_event=False): """ diff --git a/salt/utils/win_reg.py b/salt/utils/win_reg.py index cde01a9556a..74aa17b5d81 100644 --- a/salt/utils/win_reg.py +++ b/salt/utils/win_reg.py @@ -527,6 +527,7 @@ def read_value(hive, key, vname=None, use_32bit_registry=False): "key": local_key, "vname": local_vname, "vdata": None, + "vtype": None, "success": True, } diff --git a/tests/pytests/functional/cli/test_salt.py b/tests/pytests/functional/cli/test_salt.py index cc7fa703859..8b9468ff068 100644 --- a/tests/pytests/functional/cli/test_salt.py +++ b/tests/pytests/functional/cli/test_salt.py @@ -1,8 +1,35 @@ +import logging import os +import shutil import pytest import salt.version +from tests.conftest import CODE_DIR + +log = logging.getLogger(__name__) + + +@pytest.fixture(autouse=True) +def _install_salt_extension(shell): + if os.environ.get("ONEDIR_TESTRUN", "0") == "0": + return + + script_name = "salt-pip" + if 
salt.utils.platform.is_windows(): + script_name += ".exe" + + script_path = CODE_DIR / "artifacts" / "salt" / script_name + assert script_path.exists() + try: + ret = shell.run(str(script_path), "install", "salt-analytics-framework==0.1.0") + assert ret.returncode == 0 + log.info(ret) + yield + finally: + ret = shell.run(str(script_path), "uninstall", "-y", "salt-analytics-framework") + log.info(ret) + shutil.rmtree(script_path.parent / "extras-3.10", ignore_errors=True) @@ -52,5 +79,10 @@ def test_versions_report(salt_cli): assert key in expected_keys expected_keys.remove(key) assert not expected_keys - if os.environ.get("ONEDIR_TESTRUN", "0") == "1": - assert "relenv" in ret_dict["Dependency Versions"] + if os.environ.get("ONEDIR_TESTRUN", "0") == "0": + # Stop any more testing + return + + assert "relenv" in ret_dict["Dependency Versions"] + assert "Salt Extensions" in ret_dict + assert "salt-analytics-framework" in ret_dict["Salt Extensions"] diff --git a/tests/pytests/functional/cli/test_salt_deltaproxy.py b/tests/pytests/functional/cli/test_salt_deltaproxy.py new file mode 100644 index 00000000000..5bc7604c84a --- /dev/null +++ b/tests/pytests/functional/cli/test_salt_deltaproxy.py @@ -0,0 +1,225 @@ +""" +:codeauthor: Gareth J. Greenaway (ggreenaway@vmware.com) +""" + +import logging +import os +import random + +import pytest +from saltfactories.utils import random_string + +import salt.defaults.exitcodes +from tests.support.helpers import PRE_PYTEST_SKIP_REASON + +log = logging.getLogger(__name__) + + +pytestmark = [ + pytest.mark.skip_on_spawning_platform( + reason="Deltaproxy minions do not currently work on spawning platforms.", + ), + pytest.mark.core_test, +] + + +@pytest.fixture(scope="package") +def salt_master(salt_factories): + config_defaults = { + "open_mode": True, + } + salt_master = salt_factories.salt_master_daemon( + "deltaproxy-functional-master", defaults=config_defaults + ) + with salt_master.started(): + yield salt_master + + +@pytest.fixture(scope="package") +def salt_cli(salt_master): + """ + The ``salt`` CLI as a fixture against the running master + """ + assert salt_master.is_running() + return salt_master.salt_cli(timeout=30) + + +@pytest.fixture(scope="package", autouse=True) +def skip_on_tcp_transport(request): + if request.config.getoption("--transport") == "tcp": + pytest.skip("Deltaproxy under the TCP transport is not working. See #61367") + + +@pytest.fixture +def proxy_minion_id(salt_master): + _proxy_minion_id = random_string("proxy-minion-") + + try: + yield _proxy_minion_id + finally: + # Remove stale key if it exists + pytest.helpers.remove_stale_minion_key(salt_master, _proxy_minion_id) + + +def clear_proxy_minions(salt_master, proxy_minion_id): + for proxy in [proxy_minion_id, "dummy_proxy_one", "dummy_proxy_two"]: + pytest.helpers.remove_stale_minion_key(salt_master, proxy) + + cachefile = os.path.join( + salt_master.config["cachedir"], "{}.cache".format(proxy) + ) + if os.path.exists(cachefile): + os.unlink(cachefile) + + +# Hangs on Windows. You can add a timeout to the proxy.run command, but then +# it just times out.
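The startup-failure tolerance exercised by the test below comes from the futures-to-id mapping added to salt/metaproxy/deltaproxy.py earlier in this diff. A minimal standalone sketch of that pattern, with the hypothetical `start_one` standing in for `subproxy_post_master_init`:

.. code-block:: python

    import concurrent.futures

    def start_one(_id):
        # Stand-in for subproxy_post_master_init(); one sub-proxy misbehaves.
        if _id == "proxy_two":
            raise RuntimeError("boom")
        return {"id": _id}

    _failed = []
    with concurrent.futures.ThreadPoolExecutor() as executor:
        # Keying the futures dict by id lets the caller report *which* one failed.
        futures = {
            executor.submit(start_one, _id): _id
            for _id in ("proxy_one", "proxy_two", "proxy_three")
        }
        for future in concurrent.futures.as_completed(futures):
            try:
                future.result()
            except Exception:  # a failed sub-proxy no longer aborts the rest
                _failed.append(futures[future])

    assert _failed == ["proxy_two"]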
+@pytest.mark.skip_on_windows(reason=PRE_PYTEST_SKIP_REASON) +@pytest.mark.parametrize( + "parallel_startup", + [True, False], + ids=["parallel_startup=True", "parallel_startup=False"], +) +def test_exit_status_correct_usage_large_number_of_minions( + salt_master, + salt_cli, + proxy_minion_id, + parallel_startup, +): + """ + Ensure the salt-proxy control proxy starts and + is able to respond to test.ping; additionally, ensure that + the proxies being controlled also respond to test.ping. + + Finally, ensure the correct exit status when salt-proxy exits. + + Skip on Windows because daemonization is not supported. + """ + + config_defaults = { + "metaproxy": "deltaproxy", + } + sub_proxies = [ + "proxy_one", + "proxy_two", + "proxy_three", + "proxy_four", + "proxy_five", + "proxy_six", + "proxy_seven", + "proxy_eight", + "proxy_nine", + "proxy_ten", + "proxy_eleven", + "proxy_twelve", + "proxy_thirteen", + "proxy_fourteen", + "proxy_fifteen", + "proxy_sixteen", + "proxy_seventeen", + "proxy_eighteen", + "proxy_nineteen", + "proxy_twenty", + "proxy_twenty_one", + "proxy_twenty_two", + "proxy_twenty_three", + "proxy_twenty_four", + "proxy_twenty_five", + "proxy_twenty_six", + "proxy_twenty_seven", + "proxy_twenty_eight", + "proxy_twenty_nine", + "proxy_thirty", + "proxy_thirty_one", + "proxy_thirty_two", + ] + + top_file = """ + base: + {control}: + - controlproxy + """.format( + control=proxy_minion_id, + ) + controlproxy_pillar_file = """ + proxy: + proxytype: deltaproxy + parallel_startup: {} + ids: + """.format( + parallel_startup + ) + + dummy_proxy_pillar_file = """ + proxy: + proxytype: dummy + """ + + for minion_id in sub_proxies: + top_file += """ + {minion_id}: + - dummy""".format( + minion_id=minion_id, + ) + + controlproxy_pillar_file += """ + - {} + """.format( + minion_id, + ) + + top_tempfile = salt_master.pillar_tree.base.temp_file("top.sls", top_file) + controlproxy_tempfile = salt_master.pillar_tree.base.temp_file( + "controlproxy.sls", controlproxy_pillar_file + ) + dummy_proxy_tempfile = salt_master.pillar_tree.base.temp_file( + "dummy.sls", + dummy_proxy_pillar_file, + ) + with top_tempfile, controlproxy_tempfile, dummy_proxy_tempfile: + with salt_master.started(): + assert salt_master.is_running() + + factory = salt_master.salt_proxy_minion_daemon( + proxy_minion_id, + defaults=config_defaults, + extra_cli_arguments_after_first_start_failure=["--log-level=info"], + start_timeout=240, + ) + + for minion_id in [proxy_minion_id] + sub_proxies: + factory.before_start( + pytest.helpers.remove_stale_proxy_minion_cache_file, + factory, + minion_id, + ) + factory.after_terminate( + pytest.helpers.remove_stale_minion_key, salt_master, minion_id + ) + factory.after_terminate( + pytest.helpers.remove_stale_proxy_minion_cache_file, + factory, + minion_id, + ) + + with factory.started(): + assert factory.is_running() + + # Let's issue a ping to the control proxy + ret = salt_cli.run("test.ping", minion_tgt=proxy_minion_id) + assert ret.returncode == 0 + assert ret.data is True + + for minion_id in random.sample(sub_proxies, 4): + # Let's issue a ping to one of the controlled proxies + ret = salt_cli.run("test.ping", minion_tgt=minion_id) + assert ret.returncode == 0 + assert ret.data is True + + # Terminate the proxy minion + ret = factory.terminate() + assert ret.returncode == salt.defaults.exitcodes.EX_OK, ret + + # Terminate the salt master + ret = salt_master.terminate() + assert ret.returncode == salt.defaults.exitcodes.EX_OK, ret diff --git
a/tests/pytests/functional/states/file/test_cached.py b/tests/pytests/functional/states/file/test_cached.py new file mode 100644 index 00000000000..1b052382071 --- /dev/null +++ b/tests/pytests/functional/states/file/test_cached.py @@ -0,0 +1,96 @@ +import secrets + +import pytest + +import salt.states.file as file +from tests.support.mock import MagicMock, patch + +pytestmark = [ + pytest.mark.windows_whitelisted, +] + + +@pytest.fixture +def configure_loader_modules(): + return { + file: {"__opts__": {"test": False}}, + } + + +def test_cached_test_true(): + name = "salt://test/file.exe" + source_hash = secrets.token_hex(nbytes=32) + expected = { + "changes": {}, + "comment": "File will be cached: {}".format(name), + "name": name, + "result": None, + } + salt = { + "cp.is_cached": MagicMock(return_value=""), + "file.get_source_sum": MagicMock(return_value={"hsum": source_hash}), + } + opts = {"test": True} + with patch.dict(file.__salt__, salt), patch.dict(file.__opts__, opts): + result = file.cached(name=name, source_hash=source_hash) + assert result == expected + + +def test_cached_present_test_true(): + name = "salt://test/file.exe" + source_hash = secrets.token_hex(nbytes=32) + expected = { + "changes": {}, + "comment": "File already cached: {}".format(name), + "name": name, + "result": None, + } + salt = { + "cp.is_cached": MagicMock(return_value="path/to/file"), + "file.get_hash": MagicMock(return_value=source_hash), + "file.get_source_sum": MagicMock(return_value={"hsum": source_hash}), + } + opts = {"test": True, "hash_type": "sha256"} + with patch.dict(file.__salt__, salt), patch.dict(file.__opts__, opts): + result = file.cached(name=name, source_hash=source_hash) + assert result == expected + + +def test_cached_present_different_hash_test_true(): + name = "salt://test/file.exe" + source_hash = secrets.token_hex(nbytes=32) + existing_hash = secrets.token_hex(nbytes=32) + expected = { + "changes": {}, + "comment": "Hashes don't match.\nFile will be cached: {}".format(name), + "name": name, + "result": None, + } + salt = { + "cp.is_cached": MagicMock(return_value="path/to/file"), + "file.get_hash": MagicMock(return_value=existing_hash), + "file.get_source_sum": MagicMock(return_value={"hsum": source_hash}), + } + opts = {"test": True, "hash_type": "sha256"} + with patch.dict(file.__salt__, salt), patch.dict(file.__opts__, opts): + result = file.cached(name=name, source_hash=source_hash) + assert result == expected + + +def test_cached_present_no_source_hash_test_true(): + name = "salt://test/file.exe" + existing_hash = secrets.token_hex(nbytes=32) + expected = { + "changes": {}, + "comment": "No hash found. 
File will be cached: {}".format(name), + "name": name, + "result": None, + } + salt = { + "cp.is_cached": MagicMock(return_value="path/to/file"), + "file.get_hash": MagicMock(return_value=existing_hash), + } + opts = {"test": True, "hash_type": "sha256"} + with patch.dict(file.__salt__, salt), patch.dict(file.__opts__, opts): + result = file.cached(name=name) + assert result == expected diff --git a/tests/pytests/functional/transport/server/test_req_channel.py b/tests/pytests/functional/transport/server/test_req_channel.py index 4a74802a0d0..46a3b2fe0e5 100644 --- a/tests/pytests/functional/transport/server/test_req_channel.py +++ b/tests/pytests/functional/transport/server/test_req_channel.py @@ -11,8 +11,6 @@ import salt.config import salt.exceptions import salt.ext.tornado.gen import salt.master -import salt.transport.client -import salt.transport.server import salt.utils.platform import salt.utils.process import salt.utils.stringutils diff --git a/tests/pytests/integration/ssh/test_terraform.py b/tests/pytests/integration/ssh/test_terraform.py new file mode 100644 index 00000000000..12194a48bae --- /dev/null +++ b/tests/pytests/integration/ssh/test_terraform.py @@ -0,0 +1,92 @@ +import textwrap + +import pytest + +import salt.utils.platform +from tests.support.runtests import RUNTIME_VARS + +pytestmark = [ + pytest.mark.skip_on_windows(reason="salt-ssh not available on Windows"), + pytest.mark.slow_test, +] + + +@pytest.fixture(scope="module") +def minion_id(): + return "terraform_ssh_minion" + + +@pytest.fixture(scope="module") +def terraform_roster_file(sshd_server, salt_master, tmp_path_factory, minion_id): + darwin_addon = "" + if salt.utils.platform.is_darwin(): + darwin_addon = ',\n "set_path": "$PATH:/usr/local/bin/"\n' + roster_contents = textwrap.dedent( + """ {{ + "version": 4, + "terraform_version": "1.4.3", + "serial": 1, + "outputs": {{}}, + "resources": [ + {{ + "mode": "managed", + "type": "salt_host", + "name": "{minion}", + "instances": [ + {{ + "schema_version": 0, + "attributes": {{ + "cmd_umask": null, + "host": "localhost", + "id": "{minion}", + "minion_opts": null, + "passwd": "", + "port": {port}, + "priv": null, + "salt_id": "{minion}", + "sudo": null, + "sudo_user": null, + "thin_dir": null, + "timeout": null, + "tty": null, + "user": "{user}"{darwin_addon} + }} + }} + ] + }} + ], + "check_results": null + }} + """ + ).format( + minion=minion_id, + port=sshd_server.listen_port, + user=RUNTIME_VARS.RUNNING_TESTS_USER, + darwin_addon=darwin_addon, + ) + roster_file = tmp_path_factory.mktemp("terraform_roster") / "terraform.tfstate" + roster_file.write_text(roster_contents) + yield roster_file + roster_file.unlink() + + +@pytest.fixture(scope="module") +def salt_ssh_cli(salt_master, terraform_roster_file, sshd_config_dir): + """ + The ``salt-ssh`` CLI as a fixture against the running master + """ + assert salt_master.is_running() + return salt_master.salt_ssh_cli( + roster_file=terraform_roster_file, + target_host="*", + client_key=str(sshd_config_dir / "client_key"), + base_script_args=["--ignore-host-keys"], + ) + + +def test_terraform_roster(salt_ssh_cli, minion_id): + """ + Test that the terraform roster operates as intended + """ + ret = salt_ssh_cli.run("--roster=terraform", "test.ping") + assert ret.data.get(minion_id) is True diff --git a/tests/pytests/unit/cli/test_ssh.py b/tests/pytests/unit/cli/test_ssh.py new file mode 100644 index 00000000000..3cc4a5c0419 --- /dev/null +++ b/tests/pytests/unit/cli/test_ssh.py @@ -0,0 +1,16 @@ +from salt.cli.ssh import 
SaltSSH +from tests.support.mock import MagicMock, call, patch + + +def test_fsclient_destroy_called(minion_opts): + """ + Test that `salt.client.ssh.SSH.fsclient.destroy()` is called. + """ + ssh_mock = MagicMock() + with patch( + "salt.utils.parsers.SaltSSHOptionParser.parse_args", return_value=MagicMock() + ), patch("salt.client.ssh.SSH", return_value=ssh_mock): + parser = SaltSSH() + parser.config = minion_opts + parser.run() + assert ssh_mock.fsclient.mock_calls == [call.destroy()] diff --git a/tests/pytests/unit/cloud/test_cloud.py b/tests/pytests/unit/cloud/test_cloud.py index 303374a3715..bd8595dcf86 100644 --- a/tests/pytests/unit/cloud/test_cloud.py +++ b/tests/pytests/unit/cloud/test_cloud.py @@ -1,6 +1,7 @@ import pytest from salt.cloud import Cloud +from salt.exceptions import SaltCloudSystemExit from tests.support.mock import MagicMock, patch @@ -123,3 +124,25 @@ def test_vm_config_merger(): } vm = Cloud.vm_config("test_vm", main, provider, profile, {}) assert expected == vm + + +def test_cloud_run_profile_create_returns_boolean(master_config): + + master_config["profiles"] = {"test_profile": {"provider": "test_provider:saltify"}} + master_config["providers"] = { + "test_provider": { + "saltify": {"profiles": {"provider": "test_provider:saltify"}} + } + } + master_config["show_deploy_args"] = False + + cloud = Cloud(master_config) + with patch.object(cloud, "create", return_value=True): + ret = cloud.run_profile("test_profile", ["test_vm"]) + assert ret == {"test_vm": True} + + cloud = Cloud(master_config) + with patch.object(cloud, "create", return_value=False): + with pytest.raises(SaltCloudSystemExit): + ret = cloud.run_profile("test_profile", ["test_vm"]) + assert ret == {"test_vm": False} diff --git a/tests/pytests/unit/modules/test_reg.py b/tests/pytests/unit/modules/test_reg.py new file mode 100644 index 00000000000..480af192086 --- /dev/null +++ b/tests/pytests/unit/modules/test_reg.py @@ -0,0 +1,827 @@ +import pytest +from saltfactories.utils import random_string + +import salt.modules.reg as reg +import salt.utils.stringutils +import salt.utils.win_reg +from salt.exceptions import CommandExecutionError +from tests.support.mock import MagicMock, patch + +try: + import win32api + + HAS_WIN32 = True +except ImportError: + HAS_WIN32 = False + +pytestmark = [ + pytest.mark.windows_whitelisted, + pytest.mark.skip_unless_on_windows, + pytest.mark.destructive_test, + pytest.mark.skipif(HAS_WIN32 is False, reason="Tests require win32 libraries"), +] + + +UNICODE_KEY = "Unicode Key \N{TRADE MARK SIGN}" +UNICODE_VALUE = ( + "Unicode Value \N{COPYRIGHT SIGN},\N{TRADE MARK SIGN},\N{REGISTERED SIGN}" +) +FAKE_KEY = "SOFTWARE\\{}".format(random_string("SaltTesting-", lowercase=False)) + + +@pytest.fixture +def configure_loader_modules(): + return { + reg: { + "__utils__": { + "reg.delete_value": salt.utils.win_reg.delete_value, + "reg.delete_key_recursive": salt.utils.win_reg.delete_key_recursive, + "reg.key_exists": salt.utils.win_reg.key_exists, + "reg.list_keys": salt.utils.win_reg.list_keys, + "reg.list_values": salt.utils.win_reg.list_values, + "reg.read_value": salt.utils.win_reg.read_value, + "reg.set_value": salt.utils.win_reg.set_value, + "reg.value_exists": salt.utils.win_reg.value_exists, + } + } + } + + +def test_key_exists_existing(): + """ + Tests the key_exists function using a well known registry key + """ + assert reg.key_exists(hive="HKLM", key="SOFTWARE\\Microsoft") + + +def test_key_exists_non_existing(): + """ + Tests the key_exists function using a non 
existing registry key + """ + assert not reg.key_exists(hive="HKLM", key=FAKE_KEY) + + +def test_key_exists_invalid_hive(): + """ + Tests the key_exists function using an invalid hive + """ + with pytest.raises(CommandExecutionError): + reg.key_exists(hive="BADHIVE", key="SOFTWARE\\Microsoft") + + +def test_key_exists_unknown_key_error(): + """ + Tests the key_exists function with an unknown key error + """ + mock_error = MagicMock( + side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error") + ) + with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): + with pytest.raises(win32api.error): + reg.key_exists(hive="HKLM", key="SOFTWARE\\Microsoft") + + +def test_value_exists_existing(): + """ + Tests the value_exists function using a well known registry key + """ + result = reg.value_exists( + hive="HKLM", + key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", + vname="CommonFilesDir", + ) + assert result + + +def test_value_exists_non_existing(): + """ + Tests the value_exists function using a non existing registry key + """ + result = reg.value_exists( + hive="HKLM", + key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", + vname="NonExistingValueName", + ) + assert not result + + +def test_value_exists_invalid_hive(): + """ + Tests the value_exists function using an invalid hive + """ + with pytest.raises(CommandExecutionError): + reg.value_exists( + hive="BADHIVE", + key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", + vname="CommonFilesDir", + ) + + +def test_value_exists_key_not_exist(): + """ + Tests the value_exists function when the key does not exist + """ + mock_error = MagicMock( + side_effect=win32api.error(2, "RegOpenKeyEx", "Unknown error") + ) + with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): + result = reg.value_exists( + hive="HKLM", + key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", + vname="CommonFilesDir", + ) + assert not result + + +def test_value_exists_unknown_key_error(): + """ + Tests the value_exists function with an unknown error when opening the + key + """ + mock_error = MagicMock( + side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error") + ) + with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): + with pytest.raises(win32api.error): + reg.value_exists( + hive="HKLM", + key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", + vname="CommonFilesDir", + ) + + +def test_value_exists_empty_default_value(): + """ + Tests the value_exists function when querying the default value + """ + mock_error = MagicMock( + side_effect=win32api.error(2, "RegQueryValueEx", "Empty Value") + ) + with patch("salt.utils.win_reg.win32api.RegQueryValueEx", mock_error): + result = reg.value_exists( + hive="HKLM", + key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", + vname=None, + ) + assert result + + +def test_value_exists_no_vname(): + """ + Tests the value_exists function when the vname does not exist + """ + mock_error = MagicMock( + side_effect=win32api.error(123, "RegQueryValueEx", "Empty Value") + ) + with patch("salt.utils.win_reg.win32api.RegQueryValueEx", mock_error): + result = reg.value_exists( + hive="HKLM", + key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", + vname="NonExistingValuePair", + ) + assert not result + + +def test_list_keys_existing(): + """ + Test the list_keys function using a well known registry key + """ + assert "Microsoft" in reg.list_keys(hive="HKLM", key="SOFTWARE") + + +def test_list_keys_non_existing(): + """ + Test the list_keys function using a non existing registry key + """ + 
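    # Note on the win32api.error codes mocked in this module: code 2 is
    # ERROR_FILE_NOT_FOUND, which salt.utils.win_reg treats as "key or
    # value absent" (the helpers return False or None), while an
    # unexpected code such as 123 (ERROR_INVALID_NAME) is re-raised to
    # the caller, as the *_unknown_key_error tests assert.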
expected = (False, "Cannot find key: HKLM\\{}".format(FAKE_KEY)) + result = reg.list_keys(hive="HKLM", key=FAKE_KEY) + assert result == expected + + +def test_list_keys_invalid_hive(): + """ + Test the list_keys function when passing an invalid hive + """ + with pytest.raises(CommandExecutionError): + reg.list_keys(hive="BADHIVE", key="SOFTWARE\\Microsoft") + + +def test_list_keys_unknown_key_error(): + """ + Tests the list_keys function with an unknown key error + """ + mock_error = MagicMock( + side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error") + ) + with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): + with pytest.raises(win32api.error): + reg.list_keys(hive="HKLM", key="SOFTWARE\\Microsoft") + + +def test_list_values_existing(): + """ + Test the list_values function using a well known registry key + """ + values = reg.list_values( + hive="HKLM", key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion" + ) + keys = [] + for value in values: + keys.append(value["vname"]) + assert "ProgramFilesDir" in keys + + +def test_list_values_non_existing(): + """ + Test the list_values function using a non existing registry key + """ + expected = (False, "Cannot find key: HKLM\\{}".format(FAKE_KEY)) + result = reg.list_values(hive="HKLM", key=FAKE_KEY) + assert result == expected + + +def test_list_values_invalid_hive(): + """ + Test the list_values function when passing an invalid hive + """ + with pytest.raises(CommandExecutionError): + reg.list_values(hive="BADHIVE", key="SOFTWARE\\Microsoft") + + +def test_list_values_unknown_key_error(): + """ + Tests the list_values function with an unknown key error + """ + mock_error = MagicMock( + side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error") + ) + with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): + with pytest.raises(win32api.error): + reg.list_values(hive="HKLM", key="SOFTWARE\\Microsoft") + + +def test_read_value_existing(): + """ + Test the read_value function using a well known registry value + """ + ret = reg.read_value( + hive="HKLM", + key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", + vname="ProgramFilesPath", + ) + assert ret["vdata"] == "%ProgramFiles%" + + +def test_read_value_default(): + """ + Test the read_value function reading the default value using a well + known registry key + """ + ret = reg.read_value( + hive="HKLM", key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion" + ) + assert ret["vdata"] == "(value not set)" + + +def test_read_value_non_existing(): + """ + Test the read_value function using a non existing value pair + """ + expected = { + "comment": ( + "Cannot find fake_name in HKLM\\SOFTWARE\\Microsoft\\" + "Windows\\CurrentVersion" + ), + "vdata": None, + "vtype": None, + "vname": "fake_name", + "success": False, + "hive": "HKLM", + "key": "SOFTWARE\\Microsoft\\Windows\\CurrentVersion", + } + result = reg.read_value( + hive="HKLM", + key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", + vname="fake_name", + ) + assert result == expected + + +def test_read_value_non_existing_key(): + """ + Test the read_value function using a non existing registry key + """ + expected = { + "comment": "Cannot find key: HKLM\\{}".format(FAKE_KEY), + "vdata": None, + "vtype": None, + "vname": "fake_name", + "success": False, + "hive": "HKLM", + "key": FAKE_KEY, + } + result = reg.read_value(hive="HKLM", key=FAKE_KEY, vname="fake_name") + assert result == expected + + +def test_read_value_invalid_hive(): + """ + Test the read_value function when passing an invalid hive + """ + with 
pytest.raises(CommandExecutionError): + reg.read_value( + hive="BADHIVE", + key="SOFTWARE\\Microsoft", + vname="ProgramFilesPath", + ) + + +def test_read_value_unknown_key_error(): + """ + Tests the read_value function with an unknown key error + """ + mock_error = MagicMock( + side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error") + ) + with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): + with pytest.raises(win32api.error): + reg.read_value( + hive="HKLM", + key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", + vname="ProgramFilesPath", + ) + + +def test_read_value_unknown_value_error(): + """ + Tests the read_value function with an unknown value error + """ + mock_error = MagicMock( + side_effect=win32api.error(123, "RegQueryValueEx", "Unknown error") + ) + with patch("salt.utils.win_reg.win32api.RegQueryValueEx", mock_error): + with pytest.raises(win32api.error): + reg.read_value( + hive="HKLM", + key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", + vname="ProgramFilesPath", + ) + + +@pytest.mark.destructive_test +def test_read_value_multi_sz_empty_list(): + """ + An empty REG_MULTI_SZ value should return an empty list, not None + """ + try: + assert reg.set_value( + hive="HKLM", + key=FAKE_KEY, + vname="empty_list", + vdata=[], + vtype="REG_MULTI_SZ", + ) + expected = { + "hive": "HKLM", + "key": FAKE_KEY, + "success": True, + "vdata": [], + "vname": "empty_list", + "vtype": "REG_MULTI_SZ", + } + result = reg.read_value(hive="HKLM", key=FAKE_KEY, vname="empty_list") + assert result == expected + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +@pytest.mark.destructive_test +def test_set_value(): + """ + Test the set_value function + """ + try: + assert reg.set_value( + hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data" + ) + expected = { + "hive": "HKLM", + "key": FAKE_KEY, + "success": True, + "vdata": "fake_data", + "vname": "fake_name", + "vtype": "REG_SZ", + } + result = reg.read_value(hive="HKLM", key=FAKE_KEY, vname="fake_name") + assert result == expected + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +@pytest.mark.destructive_test +def test_set_value_default(): + """ + Test the set_value function on the default value + """ + try: + assert reg.set_value(hive="HKLM", key=FAKE_KEY, vdata="fake_default_data") + expected = { + "hive": "HKLM", + "key": FAKE_KEY, + "success": True, + "vdata": "fake_default_data", + "vname": "(Default)", + "vtype": "REG_SZ", + } + result = reg.read_value(hive="HKLM", key=FAKE_KEY) + assert result == expected + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +@pytest.mark.destructive_test +def test_set_value_unicode_key(): + """ + Test the set_value function on a unicode key + """ + try: + assert reg.set_value( + hive="HKLM", + key="\\".join([FAKE_KEY, UNICODE_KEY]), + vname="fake_name", + vdata="fake_value", + ) + expected = { + "hive": "HKLM", + "key": "\\".join([FAKE_KEY, UNICODE_KEY]), + "success": True, + "vdata": "fake_value", + "vname": "fake_name", + "vtype": "REG_SZ", + } + result = reg.read_value( + hive="HKLM", + key="\\".join([FAKE_KEY, UNICODE_KEY]), + vname="fake_name", + ) + assert result == expected + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +@pytest.mark.destructive_test +def test_set_value_unicode_value(): + """ + Test the set_value function on a unicode value + """ + try: + assert reg.set_value( + hive="HKLM", key=FAKE_KEY, vname="fake_unicode", vdata=UNICODE_VALUE + ) + expected = { + "hive": "HKLM", + "key": 
FAKE_KEY, + "success": True, + "vdata": UNICODE_VALUE, + "vname": "fake_unicode", + "vtype": "REG_SZ", + } + result = reg.read_value(hive="HKLM", key=FAKE_KEY, vname="fake_unicode") + assert result == expected + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +@pytest.mark.destructive_test +def test_set_value_reg_dword(): + """ + Test the set_value function on a REG_DWORD value + """ + try: + assert reg.set_value( + hive="HKLM", + key=FAKE_KEY, + vname="dword_value", + vdata=123, + vtype="REG_DWORD", + ) + expected = { + "hive": "HKLM", + "key": FAKE_KEY, + "success": True, + "vdata": 123, + "vname": "dword_value", + "vtype": "REG_DWORD", + } + result = reg.read_value(hive="HKLM", key=FAKE_KEY, vname="dword_value") + assert result == expected + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +@pytest.mark.destructive_test +def test_set_value_reg_qword(): + """ + Test the set_value function on a REG_QWORD value + """ + try: + assert reg.set_value( + hive="HKLM", + key=FAKE_KEY, + vname="qword_value", + vdata=123, + vtype="REG_QWORD", + ) + expected = { + "hive": "HKLM", + "key": FAKE_KEY, + "success": True, + "vdata": 123, + "vname": "qword_value", + "vtype": "REG_QWORD", + } + result = reg.read_value(hive="HKLM", key=FAKE_KEY, vname="qword_value") + assert result == expected + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +def test_set_value_invalid_hive(): + """ + Test the set_value function when passing an invalid hive + """ + with pytest.raises(CommandExecutionError): + reg.set_value( + hive="BADHIVE", + key=FAKE_KEY, + vname="fake_name", + vdata="fake_data", + ) + + +def test_set_value_open_create_failure(): + """ + Test the set_value function when there is a problem opening/creating + the key + """ + mock_error = MagicMock( + side_effect=win32api.error(123, "RegCreateKeyEx", "Unknown error") + ) + with patch("salt.utils.win_reg.win32api.RegCreateKeyEx", mock_error): + result = reg.set_value( + hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data" + ) + assert not result + + +def test_set_value_type_error(): + """ + Test the set_value function when the wrong type of data is passed + """ + mock_error = MagicMock(side_effect=TypeError("Mocked TypeError")) + with patch("salt.utils.win_reg.win32api.RegSetValueEx", mock_error): + assert not reg.set_value( + hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data" + ) + + +def test_set_value_system_error(): + """ + Test the set_value function when a SystemError occurs while setting the + value + """ + mock_error = MagicMock(side_effect=SystemError("Mocked SystemError")) + with patch("salt.utils.win_reg.win32api.RegSetValueEx", mock_error): + assert not reg.set_value( + hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data" + ) + + +def test_set_value_value_error(): + """ + Test the set_value function when a ValueError occurs while setting the + value + """ + mock_error = MagicMock(side_effect=ValueError("Mocked ValueError")) + with patch("salt.utils.win_reg.win32api.RegSetValueEx", mock_error): + assert not reg.set_value( + hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data" + ) + + +@pytest.mark.destructive_test +def test_delete_value(): + """ + Test the delete_value function + """ + try: + assert reg.set_value( + hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data" + ) + assert reg.delete_value(hive="HKLM", key=FAKE_KEY, vname="fake_name") + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +def 
test_delete_value_non_existing(): + """ + Test the delete_value function on non existing value + """ + mock_error = MagicMock( + side_effect=win32api.error(2, "RegOpenKeyEx", "Unknown error") + ) + with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): + result = reg.delete_value(hive="HKLM", key=FAKE_KEY, vname="fake_name") + assert result is None + + +def test_delete_value_invalid_hive(): + """ + Test the delete_value function when passing an invalid hive + """ + with pytest.raises(CommandExecutionError): + reg.delete_value(hive="BADHIVE", key=FAKE_KEY, vname="fake_name") + + +def test_delete_value_unknown_error(): + """ + Test the delete_value function when there is a problem opening the key + """ + mock_error = MagicMock( + side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error") + ) + with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): + with pytest.raises(win32api.error): + reg.delete_value( + hive="HKLM", + key=FAKE_KEY, + vname="fake_name", + ) + + +@pytest.mark.destructive_test +def test_delete_value_unicode(): + """ + Test the delete_value function on a unicode value + """ + try: + assert reg.set_value( + hive="HKLM", key=FAKE_KEY, vname="fake_unicode", vdata=UNICODE_VALUE + ) + assert reg.delete_value(hive="HKLM", key=FAKE_KEY, vname="fake_unicode") + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +@pytest.mark.destructive_test +def test_delete_value_unicode_vname(): + """ + Test the delete_value function on a unicode vname + """ + try: + assert reg.set_value( + hive="HKLM", key=FAKE_KEY, vname=UNICODE_KEY, vdata="junk data" + ) + assert reg.delete_value(hive="HKLM", key=FAKE_KEY, vname=UNICODE_KEY) + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +@pytest.mark.destructive_test +def test_delete_value_unicode_key(): + """ + Test the delete_value function on a unicode key + """ + try: + assert reg.set_value( + hive="HKLM", + key="\\".join([FAKE_KEY, UNICODE_KEY]), + vname="fake_name", + vdata="junk data", + ) + assert reg.delete_value( + hive="HKLM", + key="\\".join([FAKE_KEY, UNICODE_KEY]), + vname="fake_name", + ) + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +def test_delete_key_recursive_invalid_hive(): + """ + Test the delete_key_recursive function when passing an invalid hive + """ + with pytest.raises(CommandExecutionError): + reg.delete_key_recursive(hive="BADHIVE", key=FAKE_KEY) + + +def test_delete_key_recursive_key_not_found(): + """ + Test the delete_key_recursive function when the passed key to delete is + not found. 
+ """ + assert not reg.key_exists(hive="HKLM", key=FAKE_KEY) + assert not reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +def test_delete_key_recursive_too_close(): + """ + Test the delete_key_recursive function when the passed key to delete is + too close to root, such as + """ + mock_true = MagicMock(return_value=True) + with patch("salt.utils.win_reg.key_exists", mock_true): + assert not reg.delete_key_recursive(hive="HKLM", key="FAKE_KEY") + + +@pytest.mark.destructive_test +def test_delete_key_recursive(): + """ + Test the delete_key_recursive function + """ + try: + assert reg.set_value( + hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_value" + ) + expected = {"Deleted": ["\\".join(["HKLM", FAKE_KEY])], "Failed": []} + result = reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + assert result == expected + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +@pytest.mark.destructive_test +def test_delete_key_recursive_failed_to_open_key(): + """ + Test the delete_key_recursive function on failure to open the key + """ + try: + assert reg.set_value( + hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_value" + ) + expected = { + "Deleted": [], + "Failed": ["\\".join(["HKLM", FAKE_KEY]) + " Failed to connect to key"], + } + mock_true = MagicMock(return_value=True) + mock_error = MagicMock( + side_effect=[ + 1, + win32api.error(3, "RegOpenKeyEx", "Failed to connect to key"), + ] + ) + with patch("salt.utils.win_reg.key_exists", mock_true), patch( + "salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error + ): + result = reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + assert result == expected + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +@pytest.mark.destructive_test +def test_delete_key_recursive_failed_to_delete(): + """ + Test the delete_key_recursive function on failure to delete a key + """ + try: + assert reg.set_value( + hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_value" + ) + expected = { + "Deleted": [], + "Failed": ["\\".join(["HKLM", FAKE_KEY]) + " Unknown error"], + } + # pylint: disable=undefined-variable + mock_error = MagicMock(side_effect=WindowsError("Unknown error")) + # pylint: enable=undefined-variable + with patch("salt.utils.win_reg.win32api.RegDeleteKey", mock_error): + result = reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + assert result == expected + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) + + +@pytest.mark.destructive_test +def test_delete_key_recursive_unicode(): + """ + Test the delete_key_recursive function on value within a unicode key + """ + try: + assert reg.set_value( + hive="HKLM", + key="\\".join([FAKE_KEY, UNICODE_KEY]), + vname="fake_name", + vdata="fake_value", + ) + expected = { + "Deleted": ["\\".join(["HKLM", FAKE_KEY, UNICODE_KEY])], + "Failed": [], + } + result = reg.delete_key_recursive( + hive="HKLM", key="\\".join([FAKE_KEY, UNICODE_KEY]) + ) + assert result == expected + finally: + reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) diff --git a/tests/pytests/unit/modules/test_saltutil.py b/tests/pytests/unit/modules/test_saltutil.py index 889543c9454..97527d3dc24 100644 --- a/tests/pytests/unit/modules/test_saltutil.py +++ b/tests/pytests/unit/modules/test_saltutil.py @@ -2,13 +2,13 @@ import pytest import salt.modules.saltutil as saltutil from salt.client import LocalClient -from tests.support.mock import create_autospec +from tests.support.mock import create_autospec, patch from tests.support.mock import sentinel as s 
@pytest.fixture def configure_loader_modules(): - return {saltutil: {}} + return {saltutil: {"__opts__": {"file_client": "local"}}} def test_exec_kwargs(): @@ -82,3 +82,57 @@ def test_exec_kwargs(): **{"subset": s.subset, "batch": s.batch} ) client.cmd_batch.assert_called_with(batch=s.batch, **_cmd_expected_kwargs) + + +def test_refresh_grains_default_clean_pillar_cache(): + with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: + saltutil.refresh_grains() + refresh_pillar.assert_called_with(clean_cache=False) + + +def test_refresh_grains_clean_pillar_cache(): + with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: + saltutil.refresh_grains(clean_pillar_cache=True) + refresh_pillar.assert_called_with(clean_cache=True) + + +def test_sync_grains_default_clean_pillar_cache(): + with patch("salt.modules.saltutil._sync"): + with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: + saltutil.sync_grains() + refresh_pillar.assert_called_with(clean_cache=False) + + +def test_sync_grains_clean_pillar_cache(): + with patch("salt.modules.saltutil._sync"): + with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: + saltutil.sync_grains(clean_pillar_cache=True) + refresh_pillar.assert_called_with(clean_cache=True) + + +def test_sync_pillar_default_clean_pillar_cache(): + with patch("salt.modules.saltutil._sync"): + with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: + saltutil.sync_pillar() + refresh_pillar.assert_called_with(clean_cache=False) + + +def test_sync_pillar_clean_pillar_cache(): + with patch("salt.modules.saltutil._sync"): + with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: + saltutil.sync_pillar(clean_pillar_cache=True) + refresh_pillar.assert_called_with(clean_cache=True) + + +def test_sync_all_default_clean_pillar_cache(): + with patch("salt.modules.saltutil._sync"): + with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: + saltutil.sync_all() + refresh_pillar.assert_called_with(clean_cache=False) + + +def test_sync_all_clean_pillar_cache(): + with patch("salt.modules.saltutil._sync"): + with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: + saltutil.sync_all(clean_pillar_cache=True) + refresh_pillar.assert_called_with(clean_cache=True) diff --git a/tests/pytests/unit/modules/test_win_lgpo_reg.py b/tests/pytests/unit/modules/test_win_lgpo_reg.py index 6d4a824b308..04284ee2727 100644 --- a/tests/pytests/unit/modules/test_win_lgpo_reg.py +++ b/tests/pytests/unit/modules/test_win_lgpo_reg.py @@ -30,7 +30,7 @@ def configure_loader_modules(): @pytest.fixture -def empty_reg_pol(): +def empty_reg_pol_mach(): class_info = salt.utils.win_lgpo_reg.CLASS_INFO reg_pol_file = pathlib.Path(class_info["Machine"]["policy_path"]) if not reg_pol_file.parent.exists(): @@ -47,7 +47,24 @@ def empty_reg_pol(): @pytest.fixture -def reg_pol(): +def empty_reg_pol_user(): + class_info = salt.utils.win_lgpo_reg.CLASS_INFO + reg_pol_file = pathlib.Path(class_info["User"]["policy_path"]) + if not reg_pol_file.parent.exists(): + reg_pol_file.parent.mkdir(parents=True) + with salt.utils.files.fopen(str(reg_pol_file), "wb") as f: + f.write(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey1") + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey2") + yield + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey1") + 
salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey2") + with salt.utils.files.fopen(str(reg_pol_file), "wb") as f: + f.write(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) + + +@pytest.fixture +def reg_pol_mach(): data_to_write = { "SOFTWARE\\MyKey1": { "MyValue1": { @@ -90,43 +107,75 @@ def reg_pol(): f.write(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) -def test_read_reg_pol(empty_reg_pol): - expected = {} - result = lgpo_reg.read_reg_pol() - assert result == expected - - -def test_read_reg_pol_invalid_policy_class(): - pytest.raises(SaltInvocationError, lgpo_reg.read_reg_pol, policy_class="Invalid") - - -def test_write_reg_pol(empty_reg_pol): +@pytest.fixture +def reg_pol_user(): data_to_write = { - r"SOFTWARE\MyKey": { - "MyValue": { - "data": "string", + "SOFTWARE\\MyKey1": { + "MyValue1": { + "data": "squidward", + "type": "REG_SZ", + }, + "**del.MyValue2": { + "data": " ", "type": "REG_SZ", }, }, + "SOFTWARE\\MyKey2": { + "MyValue3": { + "data": ["spongebob", "squarepants"], + "type": "REG_MULTI_SZ", + }, + }, } - lgpo_reg.write_reg_pol(data_to_write) - result = lgpo_reg.read_reg_pol() - assert result == data_to_write + lgpo_reg.write_reg_pol(data_to_write, policy_class="User") + salt.utils.win_reg.set_value( + hive="HKCU", + key="SOFTWARE\\MyKey1", + vname="MyValue1", + vdata="squidward", + vtype="REG_SZ", + ) + salt.utils.win_reg.set_value( + hive="HKCU", + key="SOFTWARE\\MyKey2", + vname="MyValue3", + vdata=["spongebob", "squarepants"], + vtype="REG_MULTI_SZ", + ) + yield + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey1") + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey2") + class_info = salt.utils.win_lgpo_reg.CLASS_INFO + reg_pol_file = class_info["User"]["policy_path"] + with salt.utils.files.fopen(reg_pol_file, "wb") as f: + f.write(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) -def test_write_reg_pol_invalid_policy_class(): +def test_invalid_policy_class_delete_value(): pytest.raises( - SaltInvocationError, lgpo_reg.write_reg_pol, data={}, policy_class="Invalid" + SaltInvocationError, + lgpo_reg.delete_value, + key="", + v_name="", + policy_class="Invalid", ) -def test_get_value(reg_pol): - expected = {"data": "squidward", "type": "REG_SZ"} - result = lgpo_reg.get_value(key="SOFTWARE\\MyKey1", v_name="MyValue1") - assert result == expected +def test_invalid_policy_class_disable_value(): + pytest.raises( + SaltInvocationError, + lgpo_reg.disable_value, + key="", + v_name="", + policy_class="Invalid", + ) -def test_get_value_invalid_policy_class(): +def test_invalid_policy_class_get_key(): + pytest.raises(SaltInvocationError, lgpo_reg.get_key, key="", policy_class="Invalid") + + +def test_invalid_policy_class_get_value(): pytest.raises( SaltInvocationError, lgpo_reg.get_value, @@ -136,73 +185,11 @@ def test_get_value_invalid_policy_class(): ) -def test_get_key(reg_pol): - expected = { - "MyValue3": { - "data": ["spongebob", "squarepants"], - "type": "REG_MULTI_SZ", - }, - } - result = lgpo_reg.get_key(key="SOFTWARE\\MyKey2") - assert result == expected +def test_invalid_policy_class_read_reg_pol(): + pytest.raises(SaltInvocationError, lgpo_reg.read_reg_pol, policy_class="Invalid") -def test_get_key_invalid_policy_class(): - pytest.raises(SaltInvocationError, lgpo_reg.get_key, key="", policy_class="Invalid") - - -def test_set_value(empty_reg_pol): - key = "SOFTWARE\\MyKey" - v_name = "MyValue" - # Test command return - result = lgpo_reg.set_value(key=key, 
v_name=v_name, v_data="1") - assert result is True - # Test value actually set in Registry.pol - expected = {"data": 1, "type": "REG_DWORD"} - result = lgpo_reg.get_value(key=key, v_name=v_name) - assert result == expected - # Test that the registry value has been set - expected = { - "hive": "HKLM", - "key": key, - "vname": v_name, - "vdata": 1, - "vtype": "REG_DWORD", - "success": True, - } - result = salt.utils.win_reg.read_value(hive="HKLM", key=key, vname=v_name) - assert result == expected - - -def test_set_value_existing_change(reg_pol): - expected = {"data": 1, "type": "REG_DWORD"} - key = "SOFTWARE\\MyKey" - v_name = "MyValue1" - lgpo_reg.set_value(key=key, v_name=v_name, v_data="1") - result = lgpo_reg.get_value(key=key, v_name=v_name) - assert result == expected - expected = { - "hive": "HKLM", - "key": key, - "vname": v_name, - "vdata": 1, - "vtype": "REG_DWORD", - "success": True, - } - result = salt.utils.win_reg.read_value(hive="HKLM", key=key, vname=v_name) - assert result == expected - - -def test_set_value_existing_no_change(reg_pol): - expected = {"data": "squidward", "type": "REG_SZ"} - key = "SOFTWARE\\MyKey" - v_name = "MyValue1" - lgpo_reg.set_value(key=key, v_name=v_name, v_data="squidward", v_type="REG_SZ") - result = lgpo_reg.get_value(key=key, v_name=v_name) - assert result == expected - - -def test_set_value_invalid_policy_class(): +def test_invalid_policy_class_set_value(): pytest.raises( SaltInvocationError, lgpo_reg.set_value, @@ -213,6 +200,12 @@ def test_set_value_invalid_policy_class(): ) +def test_invalid_policy_class_write_reg_pol(): + pytest.raises( + SaltInvocationError, lgpo_reg.write_reg_pol, data={}, policy_class="Invalid" + ) + + def test_set_value_invalid_reg_type(): pytest.raises( SaltInvocationError, @@ -252,7 +245,95 @@ def test_set_value_invalid_reg_dword(): ) -def test_disable_value(reg_pol): +def test_mach_read_reg_pol(empty_reg_pol_mach): + expected = {} + result = lgpo_reg.read_reg_pol() + assert result == expected + + +def test_mach_write_reg_pol(empty_reg_pol_mach): + data_to_write = { + r"SOFTWARE\MyKey": { + "MyValue": { + "data": "string", + "type": "REG_SZ", + }, + }, + } + lgpo_reg.write_reg_pol(data_to_write) + result = lgpo_reg.read_reg_pol() + assert result == data_to_write + + +def test_mach_get_value(reg_pol_mach): + expected = {"data": "squidward", "type": "REG_SZ"} + result = lgpo_reg.get_value(key="SOFTWARE\\MyKey1", v_name="MyValue1") + assert result == expected + + +def test_mach_get_key(reg_pol_mach): + expected = { + "MyValue3": { + "data": ["spongebob", "squarepants"], + "type": "REG_MULTI_SZ", + }, + } + result = lgpo_reg.get_key(key="SOFTWARE\\MyKey2") + assert result == expected + + +def test_mach_set_value(empty_reg_pol_mach): + key = "SOFTWARE\\MyKey" + v_name = "MyValue" + # Test command return + result = lgpo_reg.set_value(key=key, v_name=v_name, v_data="1") + assert result is True + # Test value actually set in Registry.pol + expected = {"data": 1, "type": "REG_DWORD"} + result = lgpo_reg.get_value(key=key, v_name=v_name) + assert result == expected + # Test that the registry value has been set + expected = { + "hive": "HKLM", + "key": key, + "vname": v_name, + "vdata": 1, + "vtype": "REG_DWORD", + "success": True, + } + result = salt.utils.win_reg.read_value(hive="HKLM", key=key, vname=v_name) + assert result == expected + + +def test_mach_set_value_existing_change(reg_pol_mach): + expected = {"data": 1, "type": "REG_DWORD"} + key = "SOFTWARE\\MyKey" + v_name = "MyValue1" + lgpo_reg.set_value(key=key, 
v_name=v_name, v_data="1") + result = lgpo_reg.get_value(key=key, v_name=v_name) + assert result == expected + expected = { + "hive": "HKLM", + "key": key, + "vname": v_name, + "vdata": 1, + "vtype": "REG_DWORD", + "success": True, + } + result = salt.utils.win_reg.read_value(hive="HKLM", key=key, vname=v_name) + assert result == expected + + +def test_mach_set_value_existing_no_change(reg_pol_mach): + expected = {"data": "squidward", "type": "REG_SZ"} + key = "SOFTWARE\\MyKey" + v_name = "MyValue1" + lgpo_reg.set_value(key=key, v_name=v_name, v_data="squidward", v_type="REG_SZ") + result = lgpo_reg.get_value(key=key, v_name=v_name) + assert result == expected + + +def test_mach_disable_value(reg_pol_mach): key = "SOFTWARE\\MyKey1" # Test that the command completed successfully result = lgpo_reg.disable_value(key=key, v_name="MyValue1") @@ -269,7 +350,7 @@ def test_disable_value(reg_pol): assert result is False -def test_disable_value_no_change(reg_pol): +def test_mach_disable_value_no_change(reg_pol_mach): expected = { "MyValue1": {"data": "squidward", "type": "REG_SZ"}, "**del.MyValue2": {"data": " ", "type": "REG_SZ"}, @@ -280,17 +361,7 @@ def test_disable_value_no_change(reg_pol): assert result == expected -def test_disable_value_invalid_policy_class(): - pytest.raises( - SaltInvocationError, - lgpo_reg.disable_value, - key="", - v_name="", - policy_class="Invalid", - ) - - -def test_delete_value_existing(reg_pol): +def test_mach_delete_value_existing(reg_pol_mach): key = "SOFTWARE\\MyKey1" # Test that the command completes successfully result = lgpo_reg.delete_value(key=key, v_name="MyValue1") @@ -309,7 +380,7 @@ def test_delete_value_existing(reg_pol): assert result is False -def test_delete_value_no_change(empty_reg_pol): +def test_mach_delete_value_no_change(empty_reg_pol_mach): expected = {} key = "SOFTWARE\\MyKey1" lgpo_reg.delete_value(key=key, v_name="MyValue2") @@ -317,11 +388,159 @@ def test_delete_value_no_change(empty_reg_pol): assert result == expected -def test_delete_value_invalid_policy_class(): - pytest.raises( - SaltInvocationError, - lgpo_reg.delete_value, - key="", - v_name="", - policy_class="Invalid", +def test_user_read_reg_pol(empty_reg_pol_user): + expected = {} + result = lgpo_reg.read_reg_pol(policy_class="User") + assert result == expected + + +def test_user_write_reg_pol(empty_reg_pol_user): + data_to_write = { + r"SOFTWARE\MyKey": { + "MyValue": { + "data": "string", + "type": "REG_SZ", + }, + }, + } + lgpo_reg.write_reg_pol(data_to_write, policy_class="User") + result = lgpo_reg.read_reg_pol(policy_class="User") + assert result == data_to_write + + +def test_user_get_value(reg_pol_user): + expected = {"data": "squidward", "type": "REG_SZ"} + result = lgpo_reg.get_value( + key="SOFTWARE\\MyKey1", + v_name="MyValue1", + policy_class="User", ) + assert result == expected + + +def test_user_get_key(reg_pol_user): + expected = { + "MyValue3": { + "data": ["spongebob", "squarepants"], + "type": "REG_MULTI_SZ", + }, + } + result = lgpo_reg.get_key(key="SOFTWARE\\MyKey2", policy_class="User") + assert result == expected + + +def test_user_set_value(empty_reg_pol_user): + key = "SOFTWARE\\MyKey" + v_name = "MyValue" + # Test command return + result = lgpo_reg.set_value( + key=key, + v_name=v_name, + v_data="1", + policy_class="User", + ) + assert result is True + # Test value actually set in Registry.pol + expected = {"data": 1, "type": "REG_DWORD"} + result = lgpo_reg.get_value(key=key, v_name=v_name, policy_class="User") + assert result == expected + # Test that 
the registry value has been set + expected = { + "hive": "HKCU", + "key": key, + "vname": v_name, + "vdata": 1, + "vtype": "REG_DWORD", + "success": True, + } + result = salt.utils.win_reg.read_value(hive="HKCU", key=key, vname=v_name) + assert result == expected + + +def test_user_set_value_existing_change(reg_pol_user): + expected = {"data": 1, "type": "REG_DWORD"} + key = "SOFTWARE\\MyKey" + v_name = "MyValue1" + lgpo_reg.set_value(key=key, v_name=v_name, v_data="1", policy_class="User") + result = lgpo_reg.get_value(key=key, v_name=v_name, policy_class="User") + assert result == expected + expected = { + "hive": "HKCU", + "key": key, + "vname": v_name, + "vdata": 1, + "vtype": "REG_DWORD", + "success": True, + } + result = salt.utils.win_reg.read_value(hive="HKCU", key=key, vname=v_name) + assert result == expected + + +def test_user_set_value_existing_no_change(reg_pol_user): + expected = {"data": "squidward", "type": "REG_SZ"} + key = "SOFTWARE\\MyKey" + v_name = "MyValue1" + lgpo_reg.set_value( + key=key, + v_name=v_name, + v_data="squidward", + v_type="REG_SZ", + policy_class="User", + ) + result = lgpo_reg.get_value(key=key, v_name=v_name, policy_class="User") + assert result == expected + + +def test_user_disable_value(reg_pol_user): + key = "SOFTWARE\\MyKey1" + # Test that the command completed successfully + result = lgpo_reg.disable_value(key=key, v_name="MyValue1", policy_class="User") + assert result is True + # Test that the value was actually set in Registry.pol + expected = { + "**del.MyValue1": {"data": " ", "type": "REG_SZ"}, + "**del.MyValue2": {"data": " ", "type": "REG_SZ"}, + } + result = lgpo_reg.get_key(key=key, policy_class="User") + assert result == expected + # Test that the registry value has been removed + result = salt.utils.win_reg.value_exists(hive="HKCU", key=key, vname="MyValue1") + assert result is False + + +def test_user_disable_value_no_change(reg_pol_user): + expected = { + "MyValue1": {"data": "squidward", "type": "REG_SZ"}, + "**del.MyValue2": {"data": " ", "type": "REG_SZ"}, + } + key = "SOFTWARE\\MyKey1" + lgpo_reg.disable_value(key=key, v_name="MyValue2", policy_class="User") + result = lgpo_reg.get_key(key=key, policy_class="User") + assert result == expected + + +def test_user_delete_value_existing(reg_pol_user): + key = "SOFTWARE\\MyKey1" + # Test that the command completes successfully + result = lgpo_reg.delete_value(key=key, v_name="MyValue1", policy_class="User") + assert result is True + # Test that the value is actually removed from Registry.pol + expected = { + "**del.MyValue2": { + "data": " ", + "type": "REG_SZ", + }, + } + result = lgpo_reg.get_key(key=key, policy_class="User") + assert result == expected + # Test that the registry entry has been removed + result = salt.utils.win_reg.value_exists(hive="HKCU", key=key, vname="MyValue2") + assert result is False + + +def test_user_delete_value_no_change(empty_reg_pol_user): + expected = {} + key = "SOFTWARE\\MyKey1" + lgpo_reg.delete_value(key=key, v_name="MyValue2", policy_class="User") + result = lgpo_reg.get_key(key=key, policy_class="User") + assert result == expected diff --git a/tests/pytests/unit/roster/test_terraform.py b/tests/pytests/unit/roster/test_terraform.py index 730c640fab2..b79d7985461 100644 --- a/tests/pytests/unit/roster/test_terraform.py +++ b/tests/pytests/unit/roster/test_terraform.py @@ -27,10 +27,6 @@ def pki_dir(): @pytest.fixture def configure_loader_modules(roster_file, pki_dir): - # opts = salt.config.master_config( - # 
os.path.join(RUNTIME_VARS.TMP_CONF_DIR, "master") - # ) - # utils = salt.loader.utils(opts, whitelist=["roster_matcher"]) return { terraform: { "__utils__": { diff --git a/tests/pytests/unit/states/file/test_managed.py b/tests/pytests/unit/states/file/test_managed.py index 0f5da2dac27..4a826c26869 100644 --- a/tests/pytests/unit/states/file/test_managed.py +++ b/tests/pytests/unit/states/file/test_managed.py @@ -405,3 +405,29 @@ def test_managed_test_mode_user_group_not_present(): ) assert ret["result"] is not False assert "is not available" not in ret["comment"] + + +@pytest.mark.parametrize( + "source,check_result", + [ + ("http://@$@dead_link@$@/src.tar.gz", True), + ("https://@$@dead_link@$@/src.tar.gz", True), + ("ftp://@$@dead_link@$@/src.tar.gz", True), + ("salt://@$@dead_link@$@/src.tar.gz", False), + ("file://@$@dead_link@$@/src.tar.gz", False), + ( + ["http://@$@dead_link@$@/src.tar.gz", "https://@$@dead_link@$@/src.tar.gz"], + True, + ), + ( + ["salt://@$@dead_link@$@/src.tar.gz", "file://@$@dead_link@$@/src.tar.gz"], + False, + ), + ( + ["http://@$@dead_link@$@/src.tar.gz", "file://@$@dead_link@$@/src.tar.gz"], + True, + ), + ], +) +def test_sources_source_hash_check(source, check_result): + assert filestate._http_ftp_check(source) is check_result diff --git a/tests/pytests/unit/states/test_win_lgpo_reg.py b/tests/pytests/unit/states/test_win_lgpo_reg.py index d2ca5cc7433..ea345deae23 100644 --- a/tests/pytests/unit/states/test_win_lgpo_reg.py +++ b/tests/pytests/unit/states/test_win_lgpo_reg.py @@ -1,9 +1,14 @@ +import pathlib + import pytest +import salt.modules.win_file as file import salt.modules.win_lgpo_reg as win_lgpo_reg import salt.states.win_lgpo_reg as lgpo_reg import salt.utils.files +import salt.utils.win_dacl import salt.utils.win_lgpo_reg +import salt.utils.win_reg from tests.support.mock import patch pytestmark = [ @@ -24,23 +29,48 @@ def configure_loader_modules(): "lgpo_reg.disable_value": win_lgpo_reg.disable_value, "lgpo_reg.delete_value": win_lgpo_reg.delete_value, }, + "__utils__": { + "reg.read_value": salt.utils.win_reg.read_value, + }, + }, + file: { + "__utils__": { + "dacl.set_perms": salt.utils.win_dacl.set_perms, + }, }, } @pytest.fixture -def empty_reg_pol(): +def empty_reg_pol_mach(): class_info = salt.utils.win_lgpo_reg.CLASS_INFO - reg_pol_file = class_info["Machine"]["policy_path"] - with salt.utils.files.fopen(reg_pol_file, "wb") as f: - f.write(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) + reg_pol_file = pathlib.Path(class_info["Machine"]["policy_path"]) + reg_pol_file.parent.mkdir(parents=True, exist_ok=True) + reg_pol_file.write_bytes(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) + salt.utils.win_reg.delete_key_recursive(hive="HKLM", key="SOFTWARE\\MyKey1") + salt.utils.win_reg.delete_key_recursive(hive="HKLM", key="SOFTWARE\\MyKey2") yield - with salt.utils.files.fopen(reg_pol_file, "wb") as f: - f.write(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) + salt.utils.win_reg.delete_key_recursive(hive="HKLM", key="SOFTWARE\\MyKey1") + salt.utils.win_reg.delete_key_recursive(hive="HKLM", key="SOFTWARE\\MyKey2") + reg_pol_file.write_bytes(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) @pytest.fixture -def reg_pol(): +def empty_reg_pol_user(): + class_info = salt.utils.win_lgpo_reg.CLASS_INFO + reg_pol_file = pathlib.Path(class_info["User"]["policy_path"]) + reg_pol_file.parent.mkdir(parents=True, exist_ok=True) + 
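Stepping back to the _http_ftp_check cases parametrized above: they pin down that only http, https, and ftp sources (or any such source within a list) trigger the remote source_hash check. A plausible sketch consistent with those cases (the real helper lives in salt.states.file and may differ in detail):

    import urllib.parse


    def _http_ftp_check(source):
        # True when at least one source URL must be fetched over
        # http/https/ftp and therefore needs its source_hash verified
        if isinstance(source, str):
            source = [source]
        return any(
            urllib.parse.urlparse(single).scheme in ("http", "https", "ftp")
            for single in source
        )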
reg_pol_file.write_bytes(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey1") + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey2") + yield + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey1") + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey2") + reg_pol_file.write_bytes(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) + + +@pytest.fixture +def reg_pol_mach(): data_to_write = { r"SOFTWARE\MyKey1": { "MyValue1": { @@ -51,6 +81,10 @@ def reg_pol(): "data": " ", "type": "REG_SZ", }, + "MyValue3": { + "data": 0, + "type": "REG_DWORD", + }, }, r"SOFTWARE\MyKey2": { "MyValue3": { @@ -60,45 +94,174 @@ def reg_pol(): }, } win_lgpo_reg.write_reg_pol(data_to_write) + salt.utils.win_reg.set_value( + hive="HKLM", + key="SOFTWARE\\MyKey1", + vname="MyValue1", + vdata="squidward", + vtype="REG_SZ", + ) + salt.utils.win_reg.set_value( + hive="HKLM", + key="SOFTWARE\\MyKey1", + vname="MyValue3", + vdata=0, + vtype="REG_DWORD", + ) + salt.utils.win_reg.set_value( + hive="HKLM", + key="SOFTWARE\\MyKey2", + vname="MyValue3", + vdata=["spongebob", "squarepants"], + vtype="REG_MULTI_SZ", + ) yield + salt.utils.win_reg.delete_key_recursive(hive="HKLM", key="SOFTWARE\\MyKey1") + salt.utils.win_reg.delete_key_recursive(hive="HKLM", key="SOFTWARE\\MyKey2") class_info = salt.utils.win_lgpo_reg.CLASS_INFO reg_pol_file = class_info["Machine"]["policy_path"] with salt.utils.files.fopen(reg_pol_file, "wb") as f: f.write(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) +@pytest.fixture +def reg_pol_user(): + data_to_write = { + r"SOFTWARE\MyKey1": { + "MyValue1": { + "data": "squidward", + "type": "REG_SZ", + }, + "**del.MyValue2": { + "data": " ", + "type": "REG_SZ", + }, + "MyValue3": { + "data": 0, + "type": "REG_DWORD", + }, + }, + r"SOFTWARE\MyKey2": { + "MyValue3": { + "data": ["spongebob", "squarepants"], + "type": "REG_MULTI_SZ", + }, + }, + } + win_lgpo_reg.write_reg_pol(data_to_write, policy_class="User") + salt.utils.win_reg.set_value( + hive="HKCU", + key="SOFTWARE\\MyKey1", + vname="MyValue1", + vdata="squidward", + vtype="REG_SZ", + ) + salt.utils.win_reg.set_value( + hive="HKCU", + key="SOFTWARE\\MyKey1", + vname="MyValue3", + vdata=0, + vtype="REG_DWORD", + ) + salt.utils.win_reg.set_value( + hive="HKCU", + key="SOFTWARE\\MyKey2", + vname="MyValue3", + vdata=["spongebob", "squarepants"], + vtype="REG_MULTI_SZ", + ) + yield + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey1") + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey2") + class_info = salt.utils.win_lgpo_reg.CLASS_INFO + reg_pol_file = class_info["User"]["policy_path"] + with salt.utils.files.fopen(reg_pol_file, "wb") as f: + f.write(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) + + def test_virtual_name(): assert lgpo_reg.__virtual__() == "lgpo_reg" -def test_value_present(empty_reg_pol): +def test_machine_value_present(empty_reg_pol_mach): """ - Test value.present + Test value.present in Machine policy """ result = lgpo_reg.value_present( name="MyValue", - key="SOFTWARE\\MyKey", + key="SOFTWARE\\MyKey1", v_data="1", v_type="REG_DWORD", ) expected = { "changes": { "new": { - "data": 1, - "type": "REG_DWORD", + "pol": { + "data": 1, + "type": "REG_DWORD", + }, + "reg": { + "data": 1, + "type": "REG_DWORD", + }, + }, + "old": { + "pol": {}, + "reg": {}, }, - "old": {}, }, - "comment": "Registry.pol 
value has been set", + "comment": "Registry policy value has been set", "name": "MyValue", "result": True, } assert result == expected -def test_value_present_existing_change(reg_pol): +def test_machine_value_present_enforce(reg_pol_mach): """ - Test value.present with existing incorrect value + Issue #64222 + Test value.present in Machine policy when the registry changes after the + state is applied. This would cause a discrepancy between the registry + setting and the value in the registry.pol file + """ + # reg_pol_mach has MyValue3 with REG_DWORD value of 0, let's set it to 1 + salt.utils.win_reg.set_value( + hive="HKLM", + key="SOFTWARE\\MyKey1", + vname="MyValue3", + vdata="1", + vtype="REG_DWORD", + ) + # Now the registry and Registry.pol file are out of sync + result = lgpo_reg.value_present( + name="MyValue3", + key="SOFTWARE\\MyKey1", + v_data="0", + v_type="REG_DWORD", + ) + expected = { + "changes": { + "new": { + "reg": { + "data": 0, + } + }, + "old": { + "reg": { + "data": 1, + } + }, + }, + "comment": "Registry policy value has been set", + "name": "MyValue3", + "result": True, + } + assert result == expected + + +def test_machine_value_present_existing_change(reg_pol_mach): + """ + Test value.present with existing incorrect value in Machine policy """ result = lgpo_reg.value_present( name="MyValue1", @@ -109,24 +272,72 @@ def test_value_present_existing_change(reg_pol): expected = { "changes": { "new": { - "data": 2, - "type": "REG_DWORD", + "pol": { + "data": 2, + "type": "REG_DWORD", + }, + "reg": { + "data": 2, + "type": "REG_DWORD", + }, }, "old": { - "data": "squidward", - "type": "REG_SZ", + "pol": { + "data": "squidward", + "type": "REG_SZ", + }, + "reg": { + "data": "squidward", + "type": "REG_SZ", + }, }, }, - "comment": "Registry.pol value has been set", + "comment": "Registry policy value has been set", "name": "MyValue1", "result": True, } assert result == expected -def test_value_present_existing_no_change(reg_pol): +def test_machine_value_present_existing_change_dword(reg_pol_mach): """ - Test value.present with existing correct value + Test value.present with existing incorrect value in Machine policy + """ + result = lgpo_reg.value_present( + name="MyValue3", + key="SOFTWARE\\MyKey1", + v_data=1, + v_type="REG_DWORD", + ) + expected = { + "changes": { + "new": { + "pol": { + "data": 1, + }, + "reg": { + "data": 1, + }, + }, + "old": { + "pol": { + "data": 0, + }, + "reg": { + "data": 0, + }, + }, + }, + "comment": "Registry policy value has been set", + "name": "MyValue3", + "result": True, + } + assert result == expected + + +def test_machine_value_present_existing_no_change(reg_pol_mach): + """ + Test value.present with existing correct value in Machine policy """ result = lgpo_reg.value_present( name="MyValue1", @@ -136,36 +347,36 @@ def test_value_present_existing_no_change(reg_pol): ) expected = { "changes": {}, - "comment": "Registry.pol value already present", + "comment": "Policy value already present\nRegistry value already present", "name": "MyValue1", "result": True, } assert result == expected -def test_value_present_test_true(empty_reg_pol): +def test_machine_value_present_test_true(empty_reg_pol_mach): """ - Test value.present with test=True + Test value.present with test=True in Machine policy """ with patch.dict(lgpo_reg.__opts__, {"test": True}): result = lgpo_reg.value_present( name="MyValue", - key="SOFTWARE\\MyKey", + key="SOFTWARE\\MyKey1", v_data="1", v_type="REG_DWORD", ) expected = { "changes": {}, - "comment": "Registry.pol value 
will be set", + "comment": "Policy value will be set\nRegistry value will be set", "name": "MyValue", "result": None, } assert result == expected -def test_value_present_existing_disabled(reg_pol): +def test_machine_value_present_existing_disabled(reg_pol_mach): """ - Test value.present with existing value that is disabled + Test value.present with existing value that is disabled in Machine policy """ result = lgpo_reg.value_present( name="MyValue2", @@ -176,24 +387,33 @@ def test_value_present_existing_disabled(reg_pol): expected = { "changes": { "new": { - "data": 2, - "type": "REG_DWORD", + "pol": { + "data": 2, + "type": "REG_DWORD", + }, + "reg": { + "data": 2, + "type": "REG_DWORD", + }, }, "old": { - "data": "**del.MyValue2", - "type": "REG_SZ", + "pol": { + "data": "**del.MyValue2", + "type": "REG_SZ", + }, + "reg": {}, }, }, - "comment": "Registry.pol value has been set", + "comment": "Registry policy value has been set", "name": "MyValue2", "result": True, } assert result == expected -def test_value_disabled(empty_reg_pol): +def test_machine_value_disabled(empty_reg_pol_mach): """ - Test value.disabled + Test value.disabled in Machine policy """ result = lgpo_reg.value_disabled( name="MyValue1", @@ -202,21 +422,24 @@ def test_value_disabled(empty_reg_pol): expected = { "changes": { "new": { - "data": "**del.MyValue1", - "type": "REG_SZ", + "pol": { + "data": "**del.MyValue1", + "type": "REG_SZ", + }, }, - "old": {}, + "old": {"pol": {}}, }, - "comment": "Registry.pol value disabled", + "comment": "Registry policy value disabled", "name": "MyValue1", "result": True, } assert result == expected -def test_value_disabled_existing_change(reg_pol): +def test_machine_value_disabled_existing_change(reg_pol_mach): """ - Test value.disabled with an existing value that is not disabled + Test value.disabled with an existing value that is not disabled in Machine + policy """ result = lgpo_reg.value_disabled( name="MyValue1", @@ -225,22 +448,28 @@ def test_value_disabled_existing_change(reg_pol): expected = { "changes": { "new": { - "data": "**del.MyValue1", + "pol": { + "data": "**del.MyValue1", + }, + "reg": {}, }, "old": { - "data": "squidward", + "pol": { + "data": "squidward", + }, + "reg": {"data": "squidward", "type": "REG_SZ"}, }, }, - "comment": "Registry.pol value disabled", + "comment": "Registry policy value disabled", "name": "MyValue1", "result": True, } assert result == expected -def test_value_disabled_existing_no_change(reg_pol): +def test_machine_value_disabled_existing_no_change(reg_pol_mach): """ - Test value.disabled with an existing disabled value + Test value.disabled with an existing disabled value in Machine policy """ result = lgpo_reg.value_disabled( name="MyValue2", @@ -248,94 +477,486 @@ def test_value_disabled_existing_no_change(reg_pol): ) expected = { "changes": {}, - "comment": "Registry.pol value already disabled", + "comment": "Registry policy value already disabled", "name": "MyValue2", "result": True, } assert result == expected -def test_value_disabled_test_true(empty_reg_pol): +def test_machine_value_disabled_test_true(empty_reg_pol_mach): """ - Test value.disabled when test=True + Test value.disabled when test=True in Machine policy """ with patch.dict(lgpo_reg.__opts__, {"test": True}): result = lgpo_reg.value_disabled( name="MyValue", - key="SOFTWARE\\MyKey", + key="SOFTWARE\\MyKey1", ) expected = { "changes": {}, - "comment": "Registry.pol value will be disabled", + "comment": "Policy value will be disabled", "name": "MyValue", "result": None, } 
assert result == expected -def test_value_absent(reg_pol): +def test_machine_value_absent(reg_pol_mach): """ - Test value.absent + Test value.absent in Machine policy """ result = lgpo_reg.value_absent(name="MyValue1", key="SOFTWARE\\MyKey1") expected = { "changes": { - "new": {}, + "new": {"pol": {}, "reg": {}}, "old": { - "data": "squidward", - "type": "REG_SZ", + "pol": { + "data": "squidward", + "type": "REG_SZ", + }, + "reg": { + "data": "squidward", + "type": "REG_SZ", + }, }, }, - "comment": "Registry.pol value deleted", + "comment": "Registry policy value deleted", "name": "MyValue1", "result": True, } assert result == expected -def test_value_absent_no_change(empty_reg_pol): +def test_machine_value_absent_no_change(empty_reg_pol_mach): """ - Test value.absent when the value is already absent + Test value.absent when the value is already absent in Machine policy """ result = lgpo_reg.value_absent(name="MyValue1", key="SOFTWARE\\MyKey1") expected = { "changes": {}, - "comment": "Registry.pol value already absent", + "comment": "Registry policy value already deleted", "name": "MyValue1", "result": True, } assert result == expected -def test_value_absent_disabled(reg_pol): +def test_machine_value_absent_disabled(reg_pol_mach): """ - Test value.absent when the value is disabled + Test value.absent when the value is disabled in Machine policy """ result = lgpo_reg.value_absent(name="MyValue2", key="SOFTWARE\\MyKey1") expected = { "changes": { - "new": {}, + "new": {"pol": {}}, "old": { - "data": "**del.MyValue2", - "type": "REG_SZ", + "pol": { + "data": "**del.MyValue2", + "type": "REG_SZ", + }, }, }, - "comment": "Registry.pol value deleted", + "comment": "Registry policy value deleted", "name": "MyValue2", "result": True, } assert result == expected -def test_value_absent_test_true(reg_pol): +def test_machine_value_absent_test_true(reg_pol_mach): """ - Test value.absent with test=True + Test value.absent with test=True in Machine policy """ with patch.dict(lgpo_reg.__opts__, {"test": True}): result = lgpo_reg.value_absent(name="MyValue1", key="SOFTWARE\\MyKey1") expected = { "changes": {}, - "comment": "Registry.pol value will be deleted", + "comment": "Policy value will be deleted\nRegistry value will be deleted", + "name": "MyValue1", + "result": None, + } + assert result == expected + + +def test_user_value_present(empty_reg_pol_user): + """ + Test value.present in User policy + """ + result = lgpo_reg.value_present( + name="MyValue", + key="SOFTWARE\\MyKey1", + v_data="1", + v_type="REG_DWORD", + policy_class="User", + ) + expected = { + "changes": { + "new": { + "pol": { + "data": 1, + "type": "REG_DWORD", + }, + "reg": { + "data": 1, + "type": "REG_DWORD", + }, + }, + "old": { + "pol": {}, + "reg": {}, + }, + }, + "comment": "Registry policy value has been set", + "name": "MyValue", + "result": True, + } + assert result == expected + + +def test_user_value_present_existing_change(reg_pol_user): + """ + Test value.present with existing incorrect value in User policy + """ + result = lgpo_reg.value_present( + name="MyValue1", + key="SOFTWARE\\MyKey1", + v_data="2", + v_type="REG_DWORD", + policy_class="User", + ) + expected = { + "changes": { + "new": { + "pol": { + "data": 2, + "type": "REG_DWORD", + }, + "reg": { + "data": 2, + "type": "REG_DWORD", + }, + }, + "old": { + "pol": { + "data": "squidward", + "type": "REG_SZ", + }, + "reg": { + "data": "squidward", + "type": "REG_SZ", + }, + }, + }, + "comment": "Registry policy value has been set", + "name": "MyValue1", + "result": 
True, + } + assert result == expected + + +def test_user_value_present_existing_change_dword(reg_pol_user): + """ + Test value.present with existing incorrect value in User policy + """ + result = lgpo_reg.value_present( + name="MyValue3", + key="SOFTWARE\\MyKey1", + v_data=1, + v_type="REG_DWORD", + policy_class="User", + ) + expected = { + "changes": { + "new": { + "pol": { + "data": 1, + }, + "reg": { + "data": 1, + }, + }, + "old": { + "pol": { + "data": 0, + }, + "reg": { + "data": 0, + }, + }, + }, + "comment": "Registry policy value has been set", + "name": "MyValue3", + "result": True, + } + assert result == expected + + +def test_user_value_present_existing_no_change(reg_pol_user): + """ + Test value.present with existing correct value in User policy + """ + result = lgpo_reg.value_present( + name="MyValue1", + key="SOFTWARE\\MyKey1", + v_data="squidward", + v_type="REG_SZ", + policy_class="User", + ) + expected = { + "changes": {}, + "comment": "Policy value already present\nRegistry value already present", + "name": "MyValue1", + "result": True, + } + assert result == expected + + +def test_user_value_present_test_true(empty_reg_pol_user): + """ + Test value.present with test=True in User policy + """ + with patch.dict(lgpo_reg.__opts__, {"test": True}): + result = lgpo_reg.value_present( + name="MyValue", + key="SOFTWARE\\MyKey1", + v_data="1", + v_type="REG_DWORD", + policy_class="User", + ) + expected = { + "changes": {}, + "comment": "Policy value will be set\nRegistry value will be set", + "name": "MyValue", + "result": None, + } + assert result == expected + + +def test_user_value_present_existing_disabled(reg_pol_user): + """ + Test value.present with existing value that is disabled in User policy + """ + result = lgpo_reg.value_present( + name="MyValue2", + key="SOFTWARE\\MyKey1", + v_data="2", + v_type="REG_DWORD", + policy_class="User", + ) + expected = { + "changes": { + "new": { + "pol": { + "data": 2, + "type": "REG_DWORD", + }, + "reg": { + "data": 2, + "type": "REG_DWORD", + }, + }, + "old": { + "pol": { + "data": "**del.MyValue2", + "type": "REG_SZ", + }, + "reg": {}, + }, + }, + "comment": "Registry policy value has been set", + "name": "MyValue2", + "result": True, + } + assert result == expected + + +def test_user_value_disabled(empty_reg_pol_user): + """ + Test value.disabled in User policy + """ + result = lgpo_reg.value_disabled( + name="MyValue1", key="SOFTWARE\\MyKey1", policy_class="User" + ) + expected = { + "changes": { + "new": { + "pol": { + "data": "**del.MyValue1", + "type": "REG_SZ", + }, + }, + "old": {"pol": {}}, + }, + "comment": "Registry policy value disabled", + "name": "MyValue1", + "result": True, + } + assert result == expected + + +def test_user_value_disabled_existing_change(reg_pol_user): + """ + Test value.disabled with an existing value that is not disabled in User + policy + """ + result = lgpo_reg.value_disabled( + name="MyValue1", + key="SOFTWARE\\MyKey1", + policy_class="User", + ) + expected = { + "changes": { + "new": { + "pol": { + "data": "**del.MyValue1", + }, + "reg": {}, + }, + "old": { + "pol": { + "data": "squidward", + }, + "reg": { + "data": "squidward", + "type": "REG_SZ", + }, + }, + }, + "comment": "Registry policy value disabled", + "name": "MyValue1", + "result": True, + } + assert result == expected + + +def test_user_value_disabled_existing_no_change(reg_pol_user): + """ + Test value.disabled with an existing disabled value in User policy + """ + result = lgpo_reg.value_disabled( + name="MyValue2", + 
key="SOFTWARE\\MyKey1", + policy_class="User", + ) + expected = { + "changes": {}, + "comment": "Registry policy value already disabled", + "name": "MyValue2", + "result": True, + } + assert result == expected + + +def test_user_value_disabled_test_true(empty_reg_pol_user): + """ + Test value.disabled when test=True in User policy + """ + with patch.dict(lgpo_reg.__opts__, {"test": True}): + result = lgpo_reg.value_disabled( + name="MyValue", + key="SOFTWARE\\MyKey1", + policy_class="User", + ) + expected = { + "changes": {}, + "comment": "Policy value will be disabled", + "name": "MyValue", + "result": None, + } + assert result == expected + + +def test_user_value_absent(reg_pol_user): + """ + Test value.absent in User policy + """ + result = lgpo_reg.value_absent( + name="MyValue1", + key="SOFTWARE\\MyKey1", + policy_class="User", + ) + expected = { + "changes": { + "new": { + "pol": {}, + "reg": {}, + }, + "old": { + "pol": { + "data": "squidward", + "type": "REG_SZ", + }, + "reg": { + "data": "squidward", + "type": "REG_SZ", + }, + }, + }, + "comment": "Registry policy value deleted", + "name": "MyValue1", + "result": True, + } + assert result == expected + + +def test_user_value_absent_no_change(empty_reg_pol_user): + """ + Test value.absent when the value is already absent in User policy + """ + result = lgpo_reg.value_absent( + name="MyValue1", + key="SOFTWARE\\MyKey1", + policy_class="User", + ) + expected = { + "changes": {}, + "comment": "Registry policy value already deleted", + "name": "MyValue1", + "result": True, + } + assert result == expected + + +def test_user_value_absent_disabled(reg_pol_user): + """ + Test value.absent when the value is disabled in User policy + """ + result = lgpo_reg.value_absent( + name="MyValue2", + key="SOFTWARE\\MyKey1", + policy_class="User", + ) + expected = { + "changes": { + "new": {"pol": {}}, + "old": { + "pol": { + "data": "**del.MyValue2", + "type": "REG_SZ", + }, + }, + }, + "comment": "Registry policy value deleted", + "name": "MyValue2", + "result": True, + } + assert result == expected + + +def test_user_value_absent_test_true(reg_pol_user): + """ + Test value.absent with test=True in User policy + """ + with patch.dict(lgpo_reg.__opts__, {"test": True}): + result = lgpo_reg.value_absent( + name="MyValue1", + key="SOFTWARE\\MyKey1", + policy_class="User", + ) + expected = { + "changes": {}, + "comment": "Policy value will be deleted\nRegistry value will be deleted", "name": "MyValue1", "result": None, } diff --git a/tests/pytests/unit/utils/test_win_reg.py b/tests/pytests/unit/utils/test_win_reg.py index fa7c0186553..206c40b3089 100644 --- a/tests/pytests/unit/utils/test_win_reg.py +++ b/tests/pytests/unit/utils/test_win_reg.py @@ -338,6 +338,7 @@ def test_read_value_non_existing(): "Windows\\CurrentVersion" ), "vdata": None, + "vtype": None, "vname": "fake_name", "success": False, "hive": "HKLM", @@ -360,6 +361,7 @@ def test_read_value_non_existing_key(fake_key): expected = { "comment": "Cannot find key: HKLM\\{}".format(fake_key), "vdata": None, + "vtype": None, "vname": "fake_name", "success": False, "hive": "HKLM", @@ -375,6 +377,7 @@ def test_read_value_access_denied(fake_key): expected = { "comment": "Access is denied: HKLM\\{}".format(fake_key), "vdata": None, + "vtype": None, "vname": "fake_name", "success": False, "hive": "HKLM", diff --git a/tests/unit/modules/test_reg.py b/tests/unit/modules/test_reg.py deleted file mode 100644 index 3afe79af223..00000000000 --- a/tests/unit/modules/test_reg.py +++ /dev/null @@ -1,872 +0,0 
@@ -import pytest -from saltfactories.utils import random_string - -import salt.modules.reg as reg -import salt.utils.stringutils -import salt.utils.win_reg -from salt.exceptions import CommandExecutionError -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - -try: - import win32api - - HAS_WIN32 = True -except ImportError: - HAS_WIN32 = False - -UNICODE_KEY = "Unicode Key \N{TRADE MARK SIGN}" -UNICODE_VALUE = ( - "Unicode Value \N{COPYRIGHT SIGN},\N{TRADE MARK SIGN},\N{REGISTERED SIGN}" -) -FAKE_KEY = "SOFTWARE\\{}".format(random_string("SaltTesting-", lowercase=False)) - - -@pytest.mark.skipif(not HAS_WIN32, reason="Tests require win32 libraries") -class WinFunctionsTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.modules.reg - """ - - def setup_loader_modules(self): - return { - reg: { - "__utils__": { - "reg.delete_value": salt.utils.win_reg.delete_value, - "reg.delete_key_recursive": salt.utils.win_reg.delete_key_recursive, - "reg.key_exists": salt.utils.win_reg.key_exists, - "reg.list_keys": salt.utils.win_reg.list_keys, - "reg.list_values": salt.utils.win_reg.list_values, - "reg.read_value": salt.utils.win_reg.read_value, - "reg.set_value": salt.utils.win_reg.set_value, - "reg.value_exists": salt.utils.win_reg.value_exists, - } - } - } - - def test_key_exists_existing(self): - """ - Tests the key_exists function using a well known registry key - """ - self.assertTrue(reg.key_exists(hive="HKLM", key="SOFTWARE\\Microsoft")) - - def test_key_exists_non_existing(self): - """ - Tests the key_exists function using a non existing registry key - """ - self.assertFalse(reg.key_exists(hive="HKLM", key=FAKE_KEY)) - - def test_key_exists_invalid_hive(self): - """ - Tests the key_exists function using an invalid hive - """ - self.assertRaises( - CommandExecutionError, - reg.key_exists, - hive="BADHIVE", - key="SOFTWARE\\Microsoft", - ) - - def test_key_exists_unknown_key_error(self): - """ - Tests the key_exists function with an unknown key error - """ - mock_error = MagicMock( - side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error") - ) - with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): - self.assertRaises( - win32api.error, reg.key_exists, hive="HKLM", key="SOFTWARE\\Microsoft" - ) - - def test_value_exists_existing(self): - """ - Tests the value_exists function using a well known registry key - """ - self.assertTrue( - reg.value_exists( - hive="HKLM", - key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", - vname="CommonFilesDir", - ) - ) - - def test_value_exists_non_existing(self): - """ - Tests the value_exists function using a non existing registry key - """ - self.assertFalse( - reg.value_exists( - hive="HKLM", - key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", - vname="NonExistingValueName", - ) - ) - - def test_value_exists_invalid_hive(self): - """ - Tests the value_exists function using an invalid hive - """ - self.assertRaises( - CommandExecutionError, - reg.value_exists, - hive="BADHIVE", - key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", - vname="CommonFilesDir", - ) - - def test_value_exists_key_not_exist(self): - """ - Tests the value_exists function when the key does not exist - """ - mock_error = MagicMock( - side_effect=win32api.error(2, "RegOpenKeyEx", "Unknown error") - ) - with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): - self.assertFalse( - reg.value_exists( - hive="HKLM", - 
key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", - vname="CommonFilesDir", - ) - ) - - def test_value_exists_unknown_key_error(self): - """ - Tests the value_exists function with an unknown error when opening the - key - """ - mock_error = MagicMock( - side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error") - ) - with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): - self.assertRaises( - win32api.error, - reg.value_exists, - hive="HKLM", - key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", - vname="CommonFilesDir", - ) - - def test_value_exists_empty_default_value(self): - """ - Tests the value_exists function when querying the default value - """ - mock_error = MagicMock( - side_effect=win32api.error(2, "RegQueryValueEx", "Empty Value") - ) - with patch("salt.utils.win_reg.win32api.RegQueryValueEx", mock_error): - self.assertTrue( - reg.value_exists( - hive="HKLM", - key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", - vname=None, - ) - ) - - def test_value_exists_no_vname(self): - """ - Tests the value_exists function when the vname does not exist - """ - mock_error = MagicMock( - side_effect=win32api.error(123, "RegQueryValueEx", "Empty Value") - ) - with patch("salt.utils.win_reg.win32api.RegQueryValueEx", mock_error): - self.assertFalse( - reg.value_exists( - hive="HKLM", - key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", - vname="NonExistingValuePair", - ) - ) - - def test_list_keys_existing(self): - """ - Test the list_keys function using a well known registry key - """ - self.assertIn("Microsoft", reg.list_keys(hive="HKLM", key="SOFTWARE")) - - def test_list_keys_non_existing(self): - """ - Test the list_keys function using a non existing registry key - """ - expected = (False, "Cannot find key: HKLM\\{}".format(FAKE_KEY)) - self.assertEqual(reg.list_keys(hive="HKLM", key=FAKE_KEY), expected) - - def test_list_keys_invalid_hive(self): - """ - Test the list_keys function when passing an invalid hive - """ - self.assertRaises( - CommandExecutionError, - reg.list_keys, - hive="BADHIVE", - key="SOFTWARE\\Microsoft", - ) - - def test_list_keys_unknown_key_error(self): - """ - Tests the list_keys function with an unknown key error - """ - mock_error = MagicMock( - side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error") - ) - with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): - self.assertRaises( - win32api.error, reg.list_keys, hive="HKLM", key="SOFTWARE\\Microsoft" - ) - - def test_list_values_existing(self): - """ - Test the list_values function using a well known registry key - """ - values = reg.list_values( - hive="HKLM", key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion" - ) - keys = [] - for value in values: - keys.append(value["vname"]) - self.assertIn("ProgramFilesDir", keys) - - def test_list_values_non_existing(self): - """ - Test the list_values function using a non existing registry key - """ - expected = (False, "Cannot find key: HKLM\\{}".format(FAKE_KEY)) - self.assertEqual(reg.list_values(hive="HKLM", key=FAKE_KEY), expected) - - def test_list_values_invalid_hive(self): - """ - Test the list_values function when passing an invalid hive - """ - self.assertRaises( - CommandExecutionError, - reg.list_values, - hive="BADHIVE", - key="SOFTWARE\\Microsoft", - ) - - def test_list_values_unknown_key_error(self): - """ - Tests the list_values function with an unknown key error - """ - mock_error = MagicMock( - side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error") - ) - with 
patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): - self.assertRaises( - win32api.error, reg.list_values, hive="HKLM", key="SOFTWARE\\Microsoft" - ) - - def test_read_value_existing(self): - """ - Test the read_value function using a well known registry value - """ - ret = reg.read_value( - hive="HKLM", - key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", - vname="ProgramFilesPath", - ) - self.assertEqual(ret["vdata"], "%ProgramFiles%") - - def test_read_value_default(self): - """ - Test the read_value function reading the default value using a well - known registry key - """ - ret = reg.read_value( - hive="HKLM", key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion" - ) - self.assertEqual(ret["vdata"], "(value not set)") - - def test_read_value_non_existing(self): - """ - Test the read_value function using a non existing value pair - """ - expected = { - "comment": ( - "Cannot find fake_name in HKLM\\SOFTWARE\\Microsoft\\" - "Windows\\CurrentVersion" - ), - "vdata": None, - "vname": "fake_name", - "success": False, - "hive": "HKLM", - "key": "SOFTWARE\\Microsoft\\Windows\\CurrentVersion", - } - self.assertDictEqual( - reg.read_value( - hive="HKLM", - key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", - vname="fake_name", - ), - expected, - ) - - def test_read_value_non_existing_key(self): - """ - Test the read_value function using a non existing registry key - """ - expected = { - "comment": "Cannot find key: HKLM\\{}".format(FAKE_KEY), - "vdata": None, - "vname": "fake_name", - "success": False, - "hive": "HKLM", - "key": FAKE_KEY, - } - self.assertDictEqual( - reg.read_value(hive="HKLM", key=FAKE_KEY, vname="fake_name"), expected - ) - - def test_read_value_invalid_hive(self): - """ - Test the read_value function when passing an invalid hive - """ - self.assertRaises( - CommandExecutionError, - reg.read_value, - hive="BADHIVE", - key="SOFTWARE\\Microsoft", - vname="ProgramFilesPath", - ) - - def test_read_value_unknown_key_error(self): - """ - Tests the read_value function with an unknown key error - """ - mock_error = MagicMock( - side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error") - ) - with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): - self.assertRaises( - win32api.error, - reg.read_value, - hive="HKLM", - key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", - vname="ProgramFilesPath", - ) - - def test_read_value_unknown_value_error(self): - """ - Tests the read_value function with an unknown value error - """ - mock_error = MagicMock( - side_effect=win32api.error(123, "RegQueryValueEx", "Unknown error") - ) - with patch("salt.utils.win_reg.win32api.RegQueryValueEx", mock_error): - self.assertRaises( - win32api.error, - reg.read_value, - hive="HKLM", - key="SOFTWARE\\Microsoft\\Windows\\CurrentVersion", - vname="ProgramFilesPath", - ) - - @pytest.mark.destructive_test - def test_read_value_multi_sz_empty_list(self): - """ - An empty REG_MULTI_SZ value should return an empty list, not None - """ - try: - self.assertTrue( - reg.set_value( - hive="HKLM", - key=FAKE_KEY, - vname="empty_list", - vdata=[], - vtype="REG_MULTI_SZ", - ) - ) - expected = { - "hive": "HKLM", - "key": FAKE_KEY, - "success": True, - "vdata": [], - "vname": "empty_list", - "vtype": "REG_MULTI_SZ", - } - self.assertEqual( - reg.read_value( - hive="HKLM", - key=FAKE_KEY, - vname="empty_list", - ), - expected, - ) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) - - @pytest.mark.destructive_test - def test_set_value(self): - """ - Test the set_value function 
- """ - try: - self.assertTrue( - reg.set_value( - hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data" - ) - ) - expected = { - "hive": "HKLM", - "key": FAKE_KEY, - "success": True, - "vdata": "fake_data", - "vname": "fake_name", - "vtype": "REG_SZ", - } - self.assertEqual( - reg.read_value(hive="HKLM", key=FAKE_KEY, vname="fake_name"), expected - ) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) - - @pytest.mark.destructive_test - def test_set_value_default(self): - """ - Test the set_value function on the default value - """ - try: - self.assertTrue( - reg.set_value(hive="HKLM", key=FAKE_KEY, vdata="fake_default_data") - ) - expected = { - "hive": "HKLM", - "key": FAKE_KEY, - "success": True, - "vdata": "fake_default_data", - "vname": "(Default)", - "vtype": "REG_SZ", - } - self.assertEqual(reg.read_value(hive="HKLM", key=FAKE_KEY), expected) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) - - @pytest.mark.destructive_test - def test_set_value_unicode_key(self): - """ - Test the set_value function on a unicode key - """ - try: - self.assertTrue( - reg.set_value( - hive="HKLM", - key="\\".join([FAKE_KEY, UNICODE_KEY]), - vname="fake_name", - vdata="fake_value", - ) - ) - expected = { - "hive": "HKLM", - "key": "\\".join([FAKE_KEY, UNICODE_KEY]), - "success": True, - "vdata": "fake_value", - "vname": "fake_name", - "vtype": "REG_SZ", - } - self.assertEqual( - reg.read_value( - hive="HKLM", - key="\\".join([FAKE_KEY, UNICODE_KEY]), - vname="fake_name", - ), - expected, - ) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) - - @pytest.mark.destructive_test - def test_set_value_unicode_value(self): - """ - Test the set_value function on a unicode value - """ - try: - self.assertTrue( - reg.set_value( - hive="HKLM", key=FAKE_KEY, vname="fake_unicode", vdata=UNICODE_VALUE - ) - ) - expected = { - "hive": "HKLM", - "key": FAKE_KEY, - "success": True, - "vdata": UNICODE_VALUE, - "vname": "fake_unicode", - "vtype": "REG_SZ", - } - self.assertEqual( - reg.read_value(hive="HKLM", key=FAKE_KEY, vname="fake_unicode"), - expected, - ) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) - - @pytest.mark.destructive_test - def test_set_value_reg_dword(self): - """ - Test the set_value function on a REG_DWORD value - """ - try: - self.assertTrue( - reg.set_value( - hive="HKLM", - key=FAKE_KEY, - vname="dword_value", - vdata=123, - vtype="REG_DWORD", - ) - ) - expected = { - "hive": "HKLM", - "key": FAKE_KEY, - "success": True, - "vdata": 123, - "vname": "dword_value", - "vtype": "REG_DWORD", - } - self.assertEqual( - reg.read_value(hive="HKLM", key=FAKE_KEY, vname="dword_value"), expected - ) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) - - @pytest.mark.destructive_test - def test_set_value_reg_qword(self): - """ - Test the set_value function on a REG_QWORD value - """ - try: - self.assertTrue( - reg.set_value( - hive="HKLM", - key=FAKE_KEY, - vname="qword_value", - vdata=123, - vtype="REG_QWORD", - ) - ) - expected = { - "hive": "HKLM", - "key": FAKE_KEY, - "success": True, - "vdata": 123, - "vname": "qword_value", - "vtype": "REG_QWORD", - } - self.assertEqual( - reg.read_value(hive="HKLM", key=FAKE_KEY, vname="qword_value"), expected - ) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) - - def test_set_value_invalid_hive(self): - """ - Test the set_value function when passing an invalid hive - """ - self.assertRaises( - CommandExecutionError, - reg.set_value, - hive="BADHIVE", - key=FAKE_KEY, - 
vname="fake_name", - vdata="fake_data", - ) - - def test_set_value_open_create_failure(self): - """ - Test the set_value function when there is a problem opening/creating - the key - """ - mock_error = MagicMock( - side_effect=win32api.error(123, "RegCreateKeyEx", "Unknown error") - ) - with patch("salt.utils.win_reg.win32api.RegCreateKeyEx", mock_error): - self.assertFalse( - reg.set_value( - hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data" - ) - ) - - def test_set_value_type_error(self): - """ - Test the set_value function when the wrong type of data is passed - """ - mock_error = MagicMock(side_effect=TypeError("Mocked TypeError")) - with patch("salt.utils.win_reg.win32api.RegSetValueEx", mock_error): - self.assertFalse( - reg.set_value( - hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data" - ) - ) - - def test_set_value_system_error(self): - """ - Test the set_value function when a SystemError occurs while setting the - value - """ - mock_error = MagicMock(side_effect=SystemError("Mocked SystemError")) - with patch("salt.utils.win_reg.win32api.RegSetValueEx", mock_error): - self.assertFalse( - reg.set_value( - hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data" - ) - ) - - def test_set_value_value_error(self): - """ - Test the set_value function when a ValueError occurs while setting the - value - """ - mock_error = MagicMock(side_effect=ValueError("Mocked ValueError")) - with patch("salt.utils.win_reg.win32api.RegSetValueEx", mock_error): - self.assertFalse( - reg.set_value( - hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data" - ) - ) - - @pytest.mark.destructive_test - def test_delete_value(self): - """ - Test the delete_value function - """ - try: - self.assertTrue( - reg.set_value( - hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_data" - ) - ) - self.assertTrue( - reg.delete_value(hive="HKLM", key=FAKE_KEY, vname="fake_name") - ) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) - - def test_delete_value_non_existing(self): - """ - Test the delete_value function on non existing value - """ - mock_error = MagicMock( - side_effect=win32api.error(2, "RegOpenKeyEx", "Unknown error") - ) - with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): - self.assertIsNone( - reg.delete_value(hive="HKLM", key=FAKE_KEY, vname="fake_name") - ) - - def test_delete_value_invalid_hive(self): - """ - Test the delete_value function when passing an invalid hive - """ - self.assertRaises( - CommandExecutionError, - reg.delete_value, - hive="BADHIVE", - key=FAKE_KEY, - vname="fake_name", - ) - - def test_delete_value_unknown_error(self): - """ - Test the delete_value function when there is a problem opening the key - """ - mock_error = MagicMock( - side_effect=win32api.error(123, "RegOpenKeyEx", "Unknown error") - ) - with patch("salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error): - self.assertRaises( - win32api.error, - reg.delete_value, - hive="HKLM", - key=FAKE_KEY, - vname="fake_name", - ) - - @pytest.mark.destructive_test - def test_delete_value_unicode(self): - """ - Test the delete_value function on a unicode value - """ - try: - self.assertTrue( - reg.set_value( - hive="HKLM", key=FAKE_KEY, vname="fake_unicode", vdata=UNICODE_VALUE - ) - ) - self.assertTrue( - reg.delete_value(hive="HKLM", key=FAKE_KEY, vname="fake_unicode") - ) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) - - @pytest.mark.destructive_test - def test_delete_value_unicode_vname(self): - """ - Test the delete_value 
function on a unicode vname - """ - try: - self.assertTrue( - reg.set_value( - hive="HKLM", key=FAKE_KEY, vname=UNICODE_KEY, vdata="junk data" - ) - ) - self.assertTrue( - reg.delete_value(hive="HKLM", key=FAKE_KEY, vname=UNICODE_KEY) - ) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) - - @pytest.mark.destructive_test - def test_delete_value_unicode_key(self): - """ - Test the delete_value function on a unicode key - """ - try: - self.assertTrue( - reg.set_value( - hive="HKLM", - key="\\".join([FAKE_KEY, UNICODE_KEY]), - vname="fake_name", - vdata="junk data", - ) - ) - self.assertTrue( - reg.delete_value( - hive="HKLM", - key="\\".join([FAKE_KEY, UNICODE_KEY]), - vname="fake_name", - ) - ) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) - - def test_delete_key_recursive_invalid_hive(self): - """ - Test the delete_key_recursive function when passing an invalid hive - """ - self.assertRaises( - CommandExecutionError, - reg.delete_key_recursive, - hive="BADHIVE", - key=FAKE_KEY, - ) - - def test_delete_key_recursive_key_not_found(self): - """ - Test the delete_key_recursive function when the passed key to delete is - not found. - """ - self.assertFalse(reg.key_exists(hive="HKLM", key=FAKE_KEY)) - self.assertFalse(reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY)) - - def test_delete_key_recursive_too_close(self): - """ - Test the delete_key_recursive function when the passed key to delete is - too close to root, such as - """ - mock_true = MagicMock(return_value=True) - with patch("salt.utils.win_reg.key_exists", mock_true): - self.assertFalse(reg.delete_key_recursive(hive="HKLM", key="FAKE_KEY")) - - @pytest.mark.destructive_test - def test_delete_key_recursive(self): - """ - Test the delete_key_recursive function - """ - try: - self.assertTrue( - reg.set_value( - hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_value" - ) - ) - expected = {"Deleted": ["\\".join(["HKLM", FAKE_KEY])], "Failed": []} - self.assertDictEqual( - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY), expected - ) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) - - @pytest.mark.destructive_test - def test_delete_key_recursive_failed_to_open_key(self): - """ - Test the delete_key_recursive function on failure to open the key - """ - try: - self.assertTrue( - reg.set_value( - hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_value" - ) - ) - expected = { - "Deleted": [], - "Failed": ["\\".join(["HKLM", FAKE_KEY]) + " Failed to connect to key"], - } - mock_true = MagicMock(return_value=True) - mock_error = MagicMock( - side_effect=[ - 1, - win32api.error(3, "RegOpenKeyEx", "Failed to connect to key"), - ] - ) - with patch("salt.utils.win_reg.key_exists", mock_true), patch( - "salt.utils.win_reg.win32api.RegOpenKeyEx", mock_error - ): - self.assertDictEqual( - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY), expected - ) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) - - @pytest.mark.destructive_test - def test_delete_key_recursive_failed_to_delete(self): - """ - Test the delete_key_recursive function on failure to delete a key - """ - try: - self.assertTrue( - reg.set_value( - hive="HKLM", key=FAKE_KEY, vname="fake_name", vdata="fake_value" - ) - ) - expected = { - "Deleted": [], - "Failed": ["\\".join(["HKLM", FAKE_KEY]) + " Unknown error"], - } - # pylint: disable=undefined-variable - mock_error = MagicMock(side_effect=WindowsError("Unknown error")) - # pylint: enable=undefined-variable - with 
patch("salt.utils.win_reg.win32api.RegDeleteKey", mock_error): - self.assertDictEqual( - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY), expected - ) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) - - @pytest.mark.destructive_test - def test_delete_key_recursive_unicode(self): - """ - Test the delete_key_recursive function on value within a unicode key - """ - try: - self.assertTrue( - reg.set_value( - hive="HKLM", - key="\\".join([FAKE_KEY, UNICODE_KEY]), - vname="fake_name", - vdata="fake_value", - ) - ) - expected = { - "Deleted": ["\\".join(["HKLM", FAKE_KEY, UNICODE_KEY])], - "Failed": [], - } - self.assertDictEqual( - reg.delete_key_recursive( - hive="HKLM", key="\\".join([FAKE_KEY, UNICODE_KEY]) - ), - expected, - ) - finally: - reg.delete_key_recursive(hive="HKLM", key=FAKE_KEY) diff --git a/tools/ci.py b/tools/ci.py index 748bea85ac5..ba7a7c2f849 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -648,41 +648,27 @@ def pkg_matrix(ctx: Context, distro_slug: str, pkg_type: str): @ci.command( - name="pkg-download-matrix", + name="get-releases", arguments={ - "platform": { - "help": "The OS platform to generate the matrix for", - "choices": ("linux", "windows", "macos", "darwin"), + "repository": { + "help": "The repository to query for releases, e.g. saltstack/salt", }, }, ) -def pkg_download_matrix(ctx: Context, platform: str): +def get_releases(ctx: Context, repository: str = "saltstack/salt"): """ - Generate the test matrix. + Generate the latest salt release. """ github_output = os.environ.get("GITHUB_OUTPUT") - if github_output is None: - ctx.warn("The 'GITHUB_OUTPUT' variable is not set.") - tests = [] - arches = [] - if platform == "windows": - for arch in ("amd64", "x86"): - arches.append({"arch": arch}) - for install_type in ("msi", "nsis"): - tests.append({"arch": arch, "install_type": install_type}) + if github_output is None: + ctx.exit(1, "The 'GITHUB_OUTPUT' variable is not set.") else: - for arch in ("x86_64", "aarch64"): - if platform in ("macos", "darwin") and arch == "aarch64": - continue - arches.append({"arch": arch}) - tests.append({"arch": arch}) - ctx.info("Generated arch matrix:") - ctx.print(arches, soft_wrap=True) - ctx.info("Generated test matrix:") - ctx.print(tests, soft_wrap=True) - if github_output is not None: + releases = tools.utils.get_salt_releases(ctx, repository) + str_releases = [str(version) for version in releases] + latest = str_releases[-1] + with open(github_output, "a", encoding="utf-8") as wfh: - wfh.write(f"arch={json.dumps(arches)}\n") - wfh.write(f"tests={json.dumps(tests)}\n") - ctx.exit(0) + wfh.write(f"latest-release={latest}\n") + wfh.write(f"releases={json.dumps(str_releases)}\n") + ctx.exit(0) diff --git a/tools/pkg/build.py b/tools/pkg/build.py index b3f92ef615c..b373338a99e 100644 --- a/tools/pkg/build.py +++ b/tools/pkg/build.py @@ -4,6 +4,7 @@ These commands are used to build the salt onedir and system packages. 
# pylint: disable=resource-leakage,broad-except from __future__ import annotations +import json import logging import os import pathlib @@ -393,20 +394,47 @@ def onedir_dependencies( # We import relenv here because it is not a hard requirement for the rest of the tools commands try: - from relenv.create import create + import relenv.create except ImportError: ctx.exit(1, "Relenv not installed in the current environment.") dest = pathlib.Path(package_name).resolve() - create(dest, arch=arch, version=python_version) + relenv.create.create(dest, arch=arch, version=python_version) + + # Validate that we're using the relenv version we really want to + if platform == "windows": + env_scripts_dir = dest / "Scripts" + else: + env_scripts_dir = dest / "bin" + + ret = ctx.run( + str(env_scripts_dir / "relenv"), "--version", capture=True, check=False + ) + if ret.returncode: + ctx.error(f"Failed to get the relenv version: {ret}") + ctx.exit(1) + + target_relenv_version = _get_shared_constants()["relenv_version"] + env_relenv_version = ret.stdout.strip().decode() + if env_relenv_version != target_relenv_version: + ctx.error( + f"The onedir installed relenv version({env_relenv_version}) is not " + f"the relenv version which should be used({target_relenv_version})." + ) + ctx.exit(1) + + ctx.info( + f"The relenv version installed in the onedir env({env_relenv_version}) " + f"matches the version which must be used." + ) env = os.environ.copy() install_args = ["-v"] if platform == "windows": - python_bin = dest / "Scripts" / "python" + python_bin = env_scripts_dir / "python" else: env["RELENV_BUILDENV"] = "1" - python_bin = dest / "bin" / "python3" + python_bin = env_scripts_dir / "python3" install_args.extend( [ "--use-pep517", @@ -466,8 +494,6 @@ def onedir_dependencies( str(requirements_file), env=env, ) - extras_dir = dest / f"extras-{requirements_version}" - extras_dir.mkdir() @build.command( @@ -503,6 +529,33 @@ def salt_onedir( onedir_env = pathlib.Path(package_name).resolve() _check_pkg_build_files_exist(ctx, onedir_env=onedir_env, salt_archive=salt_archive) + # Validate that we're using the relenv version we really want to + if platform == "windows": + env_scripts_dir = onedir_env / "Scripts" + else: + env_scripts_dir = onedir_env / "bin" + + ret = ctx.run( + str(env_scripts_dir / "relenv"), "--version", capture=True, check=False + ) + if ret.returncode: + ctx.error(f"Failed to get the relenv version: {ret}") + ctx.exit(1) + + target_relenv_version = _get_shared_constants()["relenv_version"] + env_relenv_version = ret.stdout.strip().decode() + if env_relenv_version != target_relenv_version: + ctx.error( + f"The onedir installed relenv version({env_relenv_version}) is not " + f"the relenv version which should be used({target_relenv_version})." + ) + ctx.exit(1) + + ctx.info( + f"The relenv version installed in the onedir env({env_relenv_version}) " + f"matches the version which must be used." 
+ )
+
env = os.environ.copy()
env["USE_STATIC_REQUIREMENTS"] = "1"
env["RELENV_BUILDENV"] = "1"
@@ -525,18 +578,76 @@ def salt_onedir(
"-CICD",
env=env,
)
+ python_executable = str(env_scripts_dir / "python.exe")
+ ret = ctx.run(
+ python_executable,
+ "-c",
+ "import json, sys, site, pathlib; sys.stdout.write(json.dumps([pathlib.Path(p).as_posix() for p in site.getsitepackages()]))",
+ capture=True,
+ )
+ if ret.returncode:
+ ctx.error(f"Failed to get the path to `site-packages`: {ret}")
+ ctx.exit(1)
+ site_packages_json = json.loads(ret.stdout.strip().decode())
+ ctx.info(f"Discovered 'site-packages' paths: {site_packages_json}")
else:
env["RELENV_PIP_DIR"] = "1"
- pip_bin = onedir_env / "bin" / "pip3"
- ctx.run(str(pip_bin), "install", str(salt_archive), env=env)
+ pip_bin = env_scripts_dir / "pip3"
+ ctx.run(
+ str(pip_bin),
+ "install",
+ "--no-warn-script-location",
+ str(salt_archive),
+ env=env,
+ )
if platform == "darwin":

def errfn(fn, path, err):
ctx.info(f"Removing {path} failed: {err}")

- shutil.rmtree(onedir_env / "opt", onerror=errfn)
- shutil.rmtree(onedir_env / "etc", onerror=errfn)
- shutil.rmtree(onedir_env / "Library", onerror=errfn)
+ for subdir in ("opt", "etc", "Library"):
+ path = onedir_env / subdir
+ if path.exists():
+ shutil.rmtree(path, onerror=errfn)
+
+ python_executable = str(env_scripts_dir / "python3")
+ ret = ctx.run(
+ python_executable,
+ "-c",
+ "import json, sys, site, pathlib; sys.stdout.write(json.dumps(site.getsitepackages()))",
+ capture=True,
+ )
+ if ret.returncode:
+ ctx.error(f"Failed to get the path to `site-packages`: {ret}")
+ ctx.exit(1)
+ site_packages_json = json.loads(ret.stdout.strip().decode())
+ ctx.info(f"Discovered 'site-packages' paths: {site_packages_json}")
+
+ site_packages: str
+ for site_packages_path in site_packages_json:
+ if "site-packages" in site_packages_path:
+ site_packages = site_packages_path
+ break
+ else:
+ ctx.error("Could not find a site-packages path with 'site-packages' in it?!")
+ ctx.exit(1)
+
+ ret = ctx.run(
+ str(python_executable),
+ "-c",
+ "import sys; print('{}.{}'.format(*sys.version_info))",
+ capture=True,
+ )
+ python_version_info = ret.stdout.strip().decode()
+ extras_dir = onedir_env / f"extras-{python_version_info}"
+ ctx.info(f"Creating Salt's extras path: {extras_dir}")
+ extras_dir.mkdir(exist_ok=True)
+
+ for fname in ("_salt_onedir_extras.py", "_salt_onedir_extras.pth"):
+ src = tools.utils.REPO_ROOT / "pkg" / "common" / "onedir" / fname
+ dst = pathlib.Path(site_packages) / fname
+ ctx.info(f"Copying '{src.relative_to(tools.utils.REPO_ROOT)}' to '{dst}' ...")
+ shutil.copyfile(src, dst)


def _check_pkg_build_files_exist(ctx: Context, **kwargs):
diff --git a/tools/pkg/repo.py b/tools/pkg/repo.py
index 88f0115a810..d781cf3c8ff 100644
--- a/tools/pkg/repo.py
+++ b/tools/pkg/repo.py
@@ -23,7 +23,7 @@ from ptscripts import Context, command_group
import tools.pkg
import tools.utils
-from tools.utils import Version
+from tools.utils import Version, get_salt_releases

try:
import boto3
@@ -122,7 +122,7 @@ _deb_distro_info = {
),
"required": True,
},
- "nightly_build": {
+ "nightly_build_from": {
"help": "Developement repository target",
},
},
@@ -136,7 +136,7 @@ def debian(
repo_path: pathlib.Path = None,
key_id: str = None,
distro_arch: str = "amd64",
- nightly_build: bool = False,
+ nightly_build_from: str = None,
):
"""
Create the debian repository.
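# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the patch): the nightly directory layout
# implied by replacing the boolean ``nightly_build`` option with
# ``nightly_build_from``, as implemented in ``_create_top_level_repo_path``
# later in this diff. The helper name and the example branch value are
# hypothetical.
import datetime
import pathlib


def nightly_top_level_path(repo_path: pathlib.Path, nightly_build_from: str) -> pathlib.Path:
    # Nightlies nest under salt-dev/<source branch>/<build date>; the patch
    # also creates a "latest" symlink beside the dated directory.
    return (
        repo_path
        / "salt-dev"
        / nightly_build_from
        / datetime.datetime.utcnow().strftime("%Y-%m-%d")
    )


# e.g. nightly_top_level_path(pathlib.Path("repo"), "master") yields
# repo/salt-dev/master/<YYYY-MM-DD>, to which salt/py3/<distro>/<version>/<arch>
# and minor/<salt version> are appended further down in this diff.
# ---------------------------------------------------------------------------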
@@ -178,7 +178,7 @@ def debian( ftp_archive_config_suite = ( f"""\n APT::FTPArchive::Release::Suite "{suitename}";\n""" ) - archive_description = f"SaltProject {display_name} Python 3{'' if nightly_build else ' development'} Salt package repo" + archive_description = f"SaltProject {display_name} Python 3{'' if not nightly_build_from else ' development'} Salt package repo" ftp_archive_config = f"""\ APT::FTPArchive::Release::Origin "SaltProject"; APT::FTPArchive::Release::Label "{label}";{ftp_archive_config_suite} @@ -198,22 +198,25 @@ def debian( """ ctx.info("Creating repository directory structure ...") create_repo_path = _create_top_level_repo_path( + ctx, repo_path, salt_version, distro, distro_version=distro_version, distro_arch=distro_arch, + nightly_build_from=nightly_build_from, ) # Export the GPG key in use tools.utils.export_gpg_key(ctx, key_id, create_repo_path) create_repo_path = _create_repo_path( + ctx, repo_path, salt_version, distro, distro_version=distro_version, distro_arch=distro_arch, - nightly_build=nightly_build, + nightly_build_from=nightly_build_from, ) ftp_archive_config_file = create_repo_path / "apt-ftparchive.conf" ctx.info(f"Writing {ftp_archive_config_file} ...") @@ -300,7 +303,7 @@ def debian( ctx.info(f"Running '{' '.join(cmdline)}' ...") ctx.run(*cmdline, cwd=create_repo_path) - if nightly_build is False: + if not nightly_build_from: remote_versions = _get_remote_versions( tools.utils.STAGING_BUCKET_NAME, create_repo_path.parent.relative_to(repo_path), @@ -319,10 +322,6 @@ def debian( latest_link = create_repo_path.parent.parent / "latest" ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...") latest_link.symlink_to(f"minor/{salt_version}") - else: - latest_link = create_repo_path.parent / "latest" - ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...") - latest_link.symlink_to(create_repo_path.name) ctx.info("Done") @@ -373,7 +372,7 @@ _rpm_distro_info = { ), "required": True, }, - "nightly_build": { + "nightly_build_from": { "help": "Developement repository target", }, }, @@ -387,7 +386,7 @@ def rpm( repo_path: pathlib.Path = None, key_id: str = None, distro_arch: str = "amd64", - nightly_build: bool = False, + nightly_build_from: str = None, ): """ Create the redhat repository. 
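# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the patch): how the rpm hunks below choose
# the .repo file name once ``nightly_build_from`` carries a branch name. It
# assumes ``create_repo_path`` points at the ``.../minor/<salt version>``
# directory built above; the function name is hypothetical.
from __future__ import annotations

import pathlib


def rpm_repo_file_path(create_repo_path: pathlib.Path, nightly_build_from: str | None) -> pathlib.Path:
    if nightly_build_from:
        # Nightlies always write "nightly.repo", with a base URL of
        # salt-dev/<branch>/ instead of the old flat salt-dev/ prefix.
        return create_repo_path.parent / "nightly.repo"
    # Releases write "<salt version>.repo" beside the version directory and
    # also refresh "latest.repo" two levels up.
    return create_repo_path.parent / f"{create_repo_path.name}.repo"
# ---------------------------------------------------------------------------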
@@ -410,22 +409,25 @@ def rpm( ctx.info("Creating repository directory structure ...") create_repo_path = _create_top_level_repo_path( + ctx, repo_path, salt_version, distro, distro_version=distro_version, distro_arch=distro_arch, + nightly_build_from=nightly_build_from, ) # Export the GPG key in use tools.utils.export_gpg_key(ctx, key_id, create_repo_path) create_repo_path = _create_repo_path( + ctx, repo_path, salt_version, distro, distro_version=distro_version, distro_arch=distro_arch, - nightly_build=nightly_build, + nightly_build_from=nightly_build_from, ) # Export the GPG key in use @@ -472,7 +474,7 @@ def rpm( else: ctx.run("createrepo", ".", cwd=create_repo_path) - if nightly_build: + if nightly_build_from: repo_domain = os.environ.get("SALT_REPO_DOMAIN_RELEASE", "repo.saltproject.io") else: repo_domain = os.environ.get( @@ -496,8 +498,8 @@ def rpm( def _create_repo_file(create_repo_path, url_suffix): ctx.info(f"Creating '{repo_file_path.relative_to(repo_path)}' file ...") - if nightly_build: - base_url = "salt-dev/" + if nightly_build_from: + base_url = f"salt-dev/{nightly_build_from}/" repo_file_contents = "[salt-nightly-repo]" elif "rc" in salt_version: base_url = "salt_rc/" @@ -532,14 +534,14 @@ def rpm( ) create_repo_path.write_text(repo_file_contents) - if nightly_build: + if nightly_build_from: repo_file_path = create_repo_path.parent / "nightly.repo" else: repo_file_path = create_repo_path.parent / f"{create_repo_path.name}.repo" _create_repo_file(repo_file_path, f"minor/{salt_version}") - if nightly_build is False: + if not nightly_build_from: remote_versions = _get_remote_versions( tools.utils.STAGING_BUCKET_NAME, create_repo_path.parent.relative_to(repo_path), @@ -562,12 +564,6 @@ def rpm( latest_link.symlink_to(f"minor/{salt_version}") repo_file_path = create_repo_path.parent.parent / "latest.repo" _create_repo_file(repo_file_path, "latest") - else: - latest_link = create_repo_path.parent / "latest" - ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...") - latest_link.symlink_to(create_repo_path.name) - repo_file_path = create_repo_path.parent.parent / "latest.repo" - _create_repo_file(repo_file_path, "latest") ctx.info("Done") @@ -594,7 +590,7 @@ def rpm( ), "required": True, }, - "nightly_build": { + "nightly_build_from": { "help": "Developement repository target", }, }, @@ -605,7 +601,7 @@ def windows( incoming: pathlib.Path = None, repo_path: pathlib.Path = None, key_id: str = None, - nightly_build: bool = False, + nightly_build_from: str = None, ): """ Create the windows repository. @@ -618,7 +614,7 @@ def windows( _create_onedir_based_repo( ctx, salt_version=salt_version, - nightly_build=nightly_build, + nightly_build_from=nightly_build_from, repo_path=repo_path, incoming=incoming, key_id=key_id, @@ -650,7 +646,7 @@ def windows( ), "required": True, }, - "nightly_build": { + "nightly_build_from": { "help": "Developement repository target", }, }, @@ -661,7 +657,7 @@ def macos( incoming: pathlib.Path = None, repo_path: pathlib.Path = None, key_id: str = None, - nightly_build: bool = False, + nightly_build_from: str = None, ): """ Create the windows repository. 
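# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the patch): the onedir-based commands that
# follow (windows, macos, onedir, src) all thread ``nightly_build_from``
# through to ``_create_onedir_based_repo``, where it now selects the bucket
# and the repo.json location as mirrored here. Bucket names assume the
# default SPB_ENVIRONMENT of "prod" from tools/utils.py; the function name is
# hypothetical.
from __future__ import annotations

import pathlib


def repo_publish_inputs(create_repo_path: pathlib.Path, nightly_build_from: str | None) -> tuple[str, pathlib.Path]:
    if nightly_build_from:
        # Nightlies now publish against the release bucket (the dedicated
        # NIGHTLY bucket is removed) and keep repo.json one level above the
        # version directory.
        return (
            "salt-project-prod-salt-artifacts-release",
            create_repo_path.parent / "repo.json",
        )
    # Staged releases read from the staging bucket and keep repo.json two
    # levels up, beside the per-version "minor" tree.
    return (
        "salt-project-prod-salt-artifacts-staging",
        create_repo_path.parent.parent / "repo.json",
    )
# ---------------------------------------------------------------------------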
@@ -674,7 +670,7 @@ def macos( _create_onedir_based_repo( ctx, salt_version=salt_version, - nightly_build=nightly_build, + nightly_build_from=nightly_build_from, repo_path=repo_path, incoming=incoming, key_id=key_id, @@ -706,7 +702,7 @@ def macos( ), "required": True, }, - "nightly_build": { + "nightly_build_from": { "help": "Developement repository target", }, }, @@ -717,7 +713,7 @@ def onedir( incoming: pathlib.Path = None, repo_path: pathlib.Path = None, key_id: str = None, - nightly_build: bool = False, + nightly_build_from: str = None, ): """ Create the onedir repository. @@ -730,7 +726,7 @@ def onedir( _create_onedir_based_repo( ctx, salt_version=salt_version, - nightly_build=nightly_build, + nightly_build_from=nightly_build_from, repo_path=repo_path, incoming=incoming, key_id=key_id, @@ -762,7 +758,7 @@ def onedir( ), "required": True, }, - "nightly_build": { + "nightly_build_from": { "help": "Developement repository target", }, }, @@ -773,7 +769,7 @@ def src( incoming: pathlib.Path = None, repo_path: pathlib.Path = None, key_id: str = None, - nightly_build: bool = False, + nightly_build_from: str = None, ): """ Create the onedir repository. @@ -785,7 +781,16 @@ def src( assert key_id is not None ctx.info("Creating repository directory structure ...") - create_repo_path = repo_path / "salt" / "py3" / "src" / salt_version + create_repo_path = _create_top_level_repo_path( + ctx, + repo_path, + salt_version, + distro="src", + nightly_build_from=nightly_build_from, + ) + # Export the GPG key in use + tools.utils.export_gpg_key(ctx, key_id, create_repo_path) + create_repo_path = create_repo_path / salt_version create_repo_path.mkdir(exist_ok=True, parents=True) hashes_base_path = create_repo_path / f"salt-{salt_version}" for fpath in incoming.iterdir(): @@ -804,6 +809,8 @@ def src( wfh.write(f"{hexdigest} {dpath.name}\n") for fpath in create_repo_path.iterdir(): + if fpath.suffix in (".pub", ".gpg"): + continue tools.utils.gpg_sign(ctx, key_id, fpath) # Export the GPG key in use @@ -868,6 +875,10 @@ def restore_previous_releases(ctx: Context): Restore release bucket from backup. """ _rclone(ctx, tools.utils.BACKUP_BUCKET_NAME, tools.utils.RELEASE_BUCKET_NAME) + github_output = os.environ.get("GITHUB_OUTPUT") + if github_output is not None: + with open(github_output, "a", encoding="utf-8") as wfh: + wfh.write(f"backup-complete=true\n") ctx.info("Done") @@ -941,10 +952,13 @@ def release(ctx: Context, salt_version: str): with tempfile.TemporaryDirectory(prefix=f"{salt_version}_release_") as tsd: local_release_files_path = pathlib.Path(tsd) / repo_release_files_path.name try: + bucket_name = tools.utils.STAGING_BUCKET_NAME with local_release_files_path.open("wb") as wfh: - ctx.info(f"Downloading {repo_release_files_path} ...") + ctx.info( + f"Downloading {repo_release_files_path} from bucket {bucket_name} ..." + ) s3.download_fileobj( - Bucket=tools.utils.STAGING_BUCKET_NAME, + Bucket=bucket_name, Key=str(repo_release_files_path), Fileobj=wfh, ) @@ -968,9 +982,11 @@ def release(ctx: Context, salt_version: str): ) try: with local_release_symlinks_path.open("wb") as wfh: - ctx.info(f"Downloading {repo_release_symlinks_path} ...") + ctx.info( + f"Downloading {repo_release_symlinks_path} from bucket {bucket_name} ..." 
+ ) s3.download_fileobj( - Bucket=tools.utils.STAGING_BUCKET_NAME, + Bucket=bucket_name, Key=str(repo_release_symlinks_path), Fileobj=wfh, ) @@ -1053,6 +1069,7 @@ def release(ctx: Context, salt_version: str): for distro in ("windows", "macos", "onedir"): create_repo_path = _create_repo_path( + ctx, repo_path, salt_version, distro=distro, @@ -1285,7 +1302,7 @@ def github( with open(github_output, "a", encoding="utf-8") as wfh: wfh.write(f"release-messsage-file={release_message_path.resolve()}\n") - releases = _get_salt_releases(ctx, repository) + releases = get_salt_releases(ctx, repository) if Version(salt_version) >= releases[-1]: make_latest = True else: @@ -1326,7 +1343,7 @@ def confirm_unreleased( """ Confirm that the passed version is not yet tagged and/or released. """ - releases = _get_salt_releases(ctx, repository) + releases = get_salt_releases(ctx, repository) if Version(salt_version) in releases: ctx.error(f"There's already a '{salt_version}' tag or github release.") ctx.exit(1) @@ -1334,55 +1351,55 @@ def confirm_unreleased( ctx.exit(0) -def _get_salt_releases(ctx: Context, repository: str) -> list[Version]: - """ - Return a list of salt versions - """ - versions = set() - with ctx.web as web: - headers = { - "Accept": "application/vnd.github+json", - } - if "GITHUB_TOKEN" in os.environ: - headers["Authorization"] = f"Bearer {os.environ['GITHUB_TOKEN']}" - web.headers.update(headers) - ret = web.get(f"https://api.github.com/repos/{repository}/tags") - if ret.status_code != 200: - ctx.error( - f"Failed to get the tags for repository {repository!r}: {ret.reason}" +@repo.command( + name="confirm-staged", + arguments={ + "salt_version": { + "help": "The salt version to check", + }, + "repository": { + "help": ( + "The full repository name, ie, 'saltstack/salt' on GitHub " + "to run the checks against." ) - ctx.exit(1) - for tag in ret.json(): - name = tag["name"] - if name.startswith("v"): - name = name[1:] - if "-" in name: - # We're not going to parse dash tags - continue - if "docs" in name: - # We're not going to consider doc tags - continue - versions.add(Version(name)) - - # Now let's go through the github releases - ret = web.get(f"https://api.github.com/repos/{repository}/releases") - if ret.status_code != 200: - ctx.error( - f"Failed to get the releases for repository {repository!r}: {ret.reason}" + }, + }, +) +def confirm_staged(ctx: Context, salt_version: str, repository: str = "saltstack/salt"): + """ + Confirm that the passed version has been staged for release. + """ + s3 = boto3.client("s3") + repo_release_files_path = pathlib.Path( + f"release-artifacts/{salt_version}/.release-files.json" + ) + repo_release_symlinks_path = pathlib.Path( + f"release-artifacts/{salt_version}/.release-symlinks.json" + ) + for remote_path in (repo_release_files_path, repo_release_symlinks_path): + try: + bucket_name = tools.utils.STAGING_BUCKET_NAME + ctx.info( + f"Checking for the presence of {remote_path} on bucket {bucket_name} ..." 
)
+ s3.head_object(
+ Bucket=bucket_name,
+ Key=str(remote_path),
+ )
+ except ClientError as exc:
+ if "Error" not in exc.response:
+ log.exception(f"Could not get information about {remote_path}: {exc}")
+ ctx.exit(1)
+ if exc.response["Error"]["Code"] == "404":
+ ctx.error(f"Could not find {remote_path} in bucket.")
+ ctx.exit(1)
+ if exc.response["Error"]["Code"] == "400":
+ ctx.error(f"Could not get information about {remote_path}: {exc}")
+ ctx.exit(1)
+ log.exception(f"Error getting information about {remote_path}: {exc}")
ctx.exit(1)
- for release in ret.json():
- name = release["name"]
- if name.startswith("v"):
- name = name[1:]
- if name and "-" not in name and "docs" not in name:
- # We're not going to parse dash or docs releases
- versions.add(Version(name))
- name = release["tag_name"]
- if "-" not in name and "docs" not in name:
- # We're not going to parse dash or docs releases
- versions.add(Version(name))
- return sorted(versions)
+ ctx.info(f"Version {salt_version} has been staged for release")
+ ctx.exit(0)


def _get_repo_detailed_file_list(
@@ -1458,7 +1475,7 @@ def _get_remote_versions(bucket_name: str, remote_path: str):
def _create_onedir_based_repo(
ctx: Context,
salt_version: str,
- nightly_build: bool,
+ nightly_build_from: str | None,
repo_path: pathlib.Path,
incoming: pathlib.Path,
key_id: str,
@@ -1467,23 +1484,29 @@ def _create_onedir_based_repo(
):
ctx.info("Creating repository directory structure ...")
create_repo_path = _create_top_level_repo_path(
+ ctx,
repo_path,
salt_version,
distro,
+ nightly_build_from=nightly_build_from,
)
# Export the GPG key in use
tools.utils.export_gpg_key(ctx, key_id, create_repo_path)

create_repo_path = _create_repo_path(
- repo_path, salt_version, distro, nightly_build=nightly_build
+ ctx,
+ repo_path,
+ salt_version,
+ distro,
+ nightly_build_from=nightly_build_from,
)
- if nightly_build is False:
+ if not nightly_build_from:
repo_json_path = create_repo_path.parent.parent / "repo.json"
else:
repo_json_path = create_repo_path.parent / "repo.json"

- if nightly_build:
- bucket_name = tools.utils.NIGHTLY_BUCKET_NAME
+ if nightly_build_from:
+ bucket_name = tools.utils.RELEASE_BUCKET_NAME
else:
bucket_name = tools.utils.STAGING_BUCKET_NAME

@@ -1559,11 +1582,7 @@ def _create_onedir_based_repo(
repo_json = _get_repo_json_file_contents(
ctx, bucket_name=bucket_name, repo_path=repo_path, repo_json_path=repo_json_path
)
- if nightly_build is True:
- latest_link = create_repo_path.parent / "latest"
- ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...")
- latest_link.symlink_to(create_repo_path.name)
-
+ if nightly_build_from:
ctx.info(f"Writing {repo_json_path} ...")
repo_json_path.write_text(json.dumps(repo_json, sort_keys=True))
return
@@ -1706,7 +1725,7 @@ def _publish_repo(
Publish packaging repositories.
""" if nightly_build: - bucket_name = tools.utils.NIGHTLY_BUCKET_NAME + bucket_name = tools.utils.RELEASE_BUCKET_NAME elif stage: bucket_name = tools.utils.STAGING_BUCKET_NAME else: @@ -1816,16 +1835,32 @@ def _publish_repo( def _create_top_level_repo_path( + ctx: Context, repo_path: pathlib.Path, salt_version: str, distro: str, distro_version: str | None = None, # pylint: disable=bad-whitespace distro_arch: str | None = None, # pylint: disable=bad-whitespace - nightly_build: bool = False, + nightly_build_from: str | None = None, # pylint: disable=bad-whitespace ): create_repo_path = repo_path - if nightly_build: - create_repo_path = create_repo_path / "salt-dev" + if nightly_build_from: + create_repo_path = ( + create_repo_path + / "salt-dev" + / nightly_build_from + / datetime.utcnow().strftime("%Y-%m-%d") + ) + create_repo_path.mkdir(exist_ok=True, parents=True) + with ctx.chdir(create_repo_path.parent): + latest_nightly_symlink = pathlib.Path("latest") + if not latest_nightly_symlink.exists(): + ctx.info( + f"Creating 'latest' symlink to '{create_repo_path.relative_to(repo_path)}' ..." + ) + latest_nightly_symlink.symlink_to( + create_repo_path.name, target_is_directory=True + ) elif "rc" in salt_version: create_repo_path = create_repo_path / "salt_rc" create_repo_path = create_repo_path / "salt" / "py3" / distro @@ -1838,20 +1873,24 @@ def _create_top_level_repo_path( def _create_repo_path( + ctx: Context, repo_path: pathlib.Path, salt_version: str, distro: str, distro_version: str | None = None, # pylint: disable=bad-whitespace distro_arch: str | None = None, # pylint: disable=bad-whitespace - nightly_build: bool = False, + nightly_build_from: str | None = None, # pylint: disable=bad-whitespace ): create_repo_path = _create_top_level_repo_path( - repo_path, salt_version, distro, distro_version, distro_arch + ctx, + repo_path, + salt_version, + distro, + distro_version, + distro_arch, + nightly_build_from=nightly_build_from, ) - if nightly_build is False: - create_repo_path = create_repo_path / "minor" / salt_version - else: - create_repo_path = create_repo_path / datetime.utcnow().strftime("%Y-%m-%d") + create_repo_path = create_repo_path / "minor" / salt_version create_repo_path.mkdir(exist_ok=True, parents=True) return create_repo_path diff --git a/tools/pre_commit.py b/tools/pre_commit.py index 1ecfc2b0050..af054876d80 100644 --- a/tools/pre_commit.py +++ b/tools/pre_commit.py @@ -77,7 +77,7 @@ def generate_workflows(ctx: Context): "lint": False, "pkg-tests": False, "salt-tests": False, - "test-pkg-downloads": False, + "test-pkg-downloads": True, }, }, } diff --git a/tools/release.py b/tools/release.py index f13661d090c..c74e6b6cb45 100644 --- a/tools/release.py +++ b/tools/release.py @@ -119,6 +119,93 @@ def upload_artifacts(ctx: Context, salt_version: str, artifacts_path: pathlib.Pa pass +@release.command( + name="download-onedir-artifact", + arguments={ + "salt_version": { + "help": "The salt version to release.", + }, + "platform": { + "help": "The onedir platform archive to download.", + "required": True, + "choices": ("linux", "windows", "darwin", "macos"), + }, + "arch": { + "help": "The onedir arch archive to download.", + "required": True, + }, + }, +) +def download_onedir_artifact( + ctx: Context, salt_version: str, platform: str = "linux", arch: str = "x86_64" +): + """ + Download onedir artifact from staging bucket. 
+    """
+    s3 = boto3.client("s3")
+    if platform == "macos":
+        platform = "darwin"
+    if arch == "arm64":
+        arch = "aarch64"
+    arch = arch.lower()
+    platform = platform.lower()
+    if platform in ("linux", "darwin") and arch not in ("x86_64", "aarch64"):
+        ctx.error(
+            f"The 'arch' value for {platform} must be one of: 'x86_64', 'aarch64', 'arm64'"
+        )
+        ctx.exit(1)
+    if platform == "windows" and arch not in ("x86", "amd64"):
+        ctx.error(f"The 'arch' value for {platform} must be one of: 'x86', 'amd64'")
+        ctx.exit(1)
+
+    archive_name = f"salt-{salt_version}-onedir-{platform}-{arch}.tar.xz"
+    archive_path = tools.utils.REPO_ROOT / "artifacts" / archive_name
+    if "rc" in salt_version:
+        prefix = "salt_rc/salt"
+    else:
+        prefix = "salt"
+    remote_path = f"{prefix}/py3/onedir/minor/{salt_version}/{archive_name}"
+    # Make sure the local artifacts directory exists before downloading into it
+    archive_path.parent.mkdir(parents=True, exist_ok=True)
+    try:
+        ret = s3.head_object(Bucket=tools.utils.STAGING_BUCKET_NAME, Key=remote_path)
+        size = ret["ContentLength"]
+        with archive_path.open("wb") as wfh:
+            ctx.info(
+                f"Downloading s3://{tools.utils.STAGING_BUCKET_NAME}/{remote_path} to {archive_path} ..."
+            )
+            with tools.utils.create_progress_bar(file_progress=True) as progress:
+                task = progress.add_task(
+                    description="Downloading ...",
+                    total=size,
+                )
+                s3.download_fileobj(
+                    Bucket=tools.utils.STAGING_BUCKET_NAME,
+                    Key=remote_path,
+                    Fileobj=wfh,
+                    Callback=tools.utils.UpdateProgress(progress, task),
+                )
+    except ClientError as exc:
+        if "Error" not in exc.response:
+            log.exception(f"Error downloading {remote_path}: {exc}")
+            ctx.exit(1)
+        if exc.response["Error"]["Code"] == "404":
+            ctx.error(f"Could not find {remote_path} in bucket.")
+            ctx.exit(1)
+        elif exc.response["Error"]["Code"].startswith("4"):
+            ctx.error(f"Could not download {remote_path} from bucket: {exc}")
+            ctx.exit(1)
+        else:
+            log.exception(f"Failed to download {remote_path}: {exc}")
+            ctx.exit(1)
+
+    if not archive_path.exists():
+        ctx.error(f"The file {archive_path} does not exist")
+        ctx.exit(1)
+    if not archive_path.stat().st_size:
+        ctx.error(f"The file {archive_path} is empty!")
+        ctx.exit(1)
+
+
 @release.command(
     name="upload-virustotal",
     arguments={
diff --git a/tools/utils.py b/tools/utils.py
index 8c7c220e2b4..cb4379c61e0 100644
--- a/tools/utils.py
+++ b/tools/utils.py
@@ -19,7 +19,6 @@ from rich.progress import (
 REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
 GPG_KEY_FILENAME = "SALT-PROJECT-GPG-PUBKEY-2023"
 SPB_ENVIRONMENT = os.environ.get("SPB_ENVIRONMENT") or "prod"
-NIGHTLY_BUCKET_NAME = f"salt-project-{SPB_ENVIRONMENT}-salt-artifacts-nightly"
 STAGING_BUCKET_NAME = f"salt-project-{SPB_ENVIRONMENT}-salt-artifacts-staging"
 RELEASE_BUCKET_NAME = f"salt-project-{SPB_ENVIRONMENT}-salt-artifacts-release"
 BACKUP_BUCKET_NAME = f"salt-project-{SPB_ENVIRONMENT}-salt-artifacts-backup"
@@ -119,3 +118,54 @@ class Version(packaging.version.Version):
 
     def __hash__(self):
         return hash(str(self))
+
+
+def get_salt_releases(ctx: Context, repository: str) -> list[Version]:
+    """
+    Return a sorted list of salt versions, collected from the repository's
+    tags and releases.
+    """
+    versions = set()
+    with ctx.web as web:
+        headers = {
+            "Accept": "application/vnd.github+json",
+        }
+        if "GITHUB_TOKEN" in os.environ:
+            headers["Authorization"] = f"Bearer {os.environ['GITHUB_TOKEN']}"
+        web.headers.update(headers)
+        ret = web.get(f"https://api.github.com/repos/{repository}/tags")
+        if ret.status_code != 200:
+            ctx.error(
+                f"Failed to get the tags for repository {repository!r}: {ret.reason}"
+            )
+            ctx.exit(1)
+        for tag in ret.json():
+            name = tag["name"]
+            if name.startswith("v"):
+                name = name[1:]
+            if "-" in name:
+                # We're not going to parse dash tags
+                continue
+            if "docs" in name:
+                # We're not going to consider doc tags
+                continue
+            versions.add(Version(name))
+
+        # Now let's go through the github releases
+        ret = web.get(f"https://api.github.com/repos/{repository}/releases")
+        if ret.status_code != 200:
+            ctx.error(
+                f"Failed to get the releases for repository {repository!r}: {ret.reason}"
+            )
+            ctx.exit(1)
+        for release in ret.json():
+            # A GitHub release name can be null, so guard before stripping the 'v'
+            name = release["name"]
+            if name and name.startswith("v"):
+                name = name[1:]
+            if name and "-" not in name and "docs" not in name:
+                # We're not going to parse dash or docs releases
+                versions.add(Version(name))
+            name = release["tag_name"]
+            if "-" not in name and "docs" not in name:
+                # We're not going to parse dash or docs releases
+                versions.add(Version(name))
+    return sorted(versions)
diff --git a/tools/vm.py b/tools/vm.py
index 9500317f520..944f2fe6cc2 100644
--- a/tools/vm.py
+++ b/tools/vm.py
@@ -630,6 +630,11 @@ class VM:
             self.ctx.error(str(exc))
             self.ctx.exit(1)
             instance_id_path.unlink()
+        except AttributeError:
+            # This machine no longer exists?!
+            instance_id_path.unlink()
+            self.ctx.info("It appears the cached image no longer exists...")
+            self.ctx.exit(1)
         if not instance_id_path.exists():
             filters = [
                 {"Name": "tag:vm-name", "Values": [self.name]},