diff --git a/.github/actions/build-onedir-salt/action.yml b/.github/actions/build-onedir-salt/action.yml index 0e3888cada6..abac7a8d9e0 100644 --- a/.github/actions/build-onedir-salt/action.yml +++ b/.github/actions/build-onedir-salt/action.yml @@ -29,21 +29,13 @@ runs: steps: - - name: Download Cached Deps Onedir Package Directory - id: onedir-bare-cache - uses: ./.github/actions/cache + - name: Install Salt Packaging Dependencies into Relenv Onedir + uses: ./.github/actions/build-onedir-deps with: - path: artifacts/${{ inputs.package-name }} - key: > - ${{ inputs.cache-prefix }}|${{ inputs.python-version }}|deps|${{ inputs.platform }}|${{ inputs.arch }}|${{ inputs.package-name }}|${{ - hashFiles( - format('{0}/.relenv/**/*.xz', github.workspace), - 'requirements/static/pkg/*/*.txt', - '.github/actions/build-onedir-deps/action.yml', - '.github/workflows/build-deps-onedir-*.yml', - 'cicd/shared-gh-workflows-context.yml' - ) - }} + platform: ${{ inputs.platform }} + arch: ${{ inputs.arch }} + python-version: "${{ inputs.python-version }}" + cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ inputs.salt-version }} - name: Download Source Tarball uses: actions/download-artifact@v4 diff --git a/.github/actions/ssh-tunnel/README.md b/.github/actions/ssh-tunnel/README.md index f6f03e5b2d5..b88b4e233f6 100644 --- a/.github/actions/ssh-tunnel/README.md +++ b/.github/actions/ssh-tunnel/README.md @@ -92,3 +92,9 @@ OkZFOjhCOjI3OjFDOjFBOkJEOjUxOjQ2OjE4OjBBOjhFOjVBOjI1OjQzOjQzOjZGOkRBXHJcbmE9c2V0 dXA6YWN0aXZlXHJcbiIsICJ0eXBlIjogImFuc3dlciJ9 -- Message received -- ``` + +SSH to your local port. 
+ +``` +ssh -o StrictHostKeychecking=no -o TCPKeepAlive=no -o StrictHostKeyChecking=no -vv -p 5222 runner@localhost +``` diff --git a/.github/actions/ssh-tunnel/rtcforward.py b/.github/actions/ssh-tunnel/rtcforward.py index a0972d300db..460bd5c8488 100644 --- a/.github/actions/ssh-tunnel/rtcforward.py +++ b/.github/actions/ssh-tunnel/rtcforward.py @@ -6,6 +6,7 @@ import io import json import logging import os +import signal import sys import textwrap import time @@ -77,6 +78,42 @@ def print_pastable(data, message="offer"): sys.stdout.flush() +async def read_from_stdin(): + loop = asyncio.get_event_loop() + line = await loop.run_in_executor( + None, input, "-- Please enter a message from remote party --\n" + ) + data = line + while line: + try: + line = await loop.run_in_executor(None, input) + except EOFError: + break + data += line + print("-- Message received --") + return data + + +class Channels: + def __init__(self, channels=None): + if channels is None: + channels = [] + self.channels = channels + + def add(self, channel): + self.channels.append(channel) + + def close(self): + for channel in self.channels: + channel.close() + + +class ProxyConnection: + def __init__(self, pc, channel): + self.pc = pc + self.channel = channel + + class ProxyClient: def __init__(self, args, channel): @@ -219,29 +256,7 @@ class ProxyServer: log.exception("WTF") -class ProxyConnection: - def __init__(self, pc, channel): - self.pc = pc - self.channel = channel - - -async def read_from_stdin(): - loop = asyncio.get_event_loop() - line = await loop.run_in_executor( - None, input, "-- Please enter a message from remote party --\n" - ) - data = line - while line: - try: - line = await loop.run_in_executor(None, input) - except EOFError: - break - data += line - print("-- Message received --") - return data - - -async def run_answer(pc, args): +async def run_answer(stop, pc, args): """ Top level offer answer server. 
""" @@ -270,11 +285,11 @@ async def run_answer(pc, args): elif obj is BYE: print("Exiting") - while True: + while not stop.is_set(): await asyncio.sleep(0.3) -async def run_offer(pc, args): +async def run_offer(stop, pc, args): """ Top level offer server this will estabilsh a data channel and start a tcp server on the port provided. New connections to the server will start the @@ -324,10 +339,14 @@ async def run_offer(pc, args): elif obj is BYE: print("Exiting") - while True: + while not stop.is_set(): await asyncio.sleep(0.3) +async def signal_handler(stop, pc): + stop.set() + + if __name__ == "__main__": if sys.platform == "win32": asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) @@ -343,16 +362,22 @@ if __name__ == "__main__": logging.basicConfig(level=logging.DEBUG) else: logging.basicConfig(level=logging.INFO) - + stop = asyncio.Event() pc = RTCPeerConnection() if args.role == "offer": - coro = run_offer(pc, args) + coro = run_offer(stop, pc, args) else: - coro = run_answer(pc, args) + coro = run_answer(stop, pc, args) # run event loop loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) + for signame in ("SIGINT", "SIGTERM"): + loop.add_signal_handler( + getattr(signal, signame), + lambda: asyncio.create_task(signal_handler(stop, pc)), + ) + try: loop.run_until_complete(coro) except KeyboardInterrupt: diff --git a/.github/workflows/build-deps-onedir.yml b/.github/workflows/build-deps-onedir.yml deleted file mode 100644 index a5f4f4fc6f6..00000000000 --- a/.github/workflows/build-deps-onedir.yml +++ /dev/null @@ -1,192 +0,0 @@ ---- -name: Build Packaging Dependencies Onedir - -on: - workflow_call: - inputs: - salt-version: - type: string - required: true - description: The Salt version to set prior to building packages. 
- cache-seed: - required: true - type: string - description: Seed used to invalidate caches - relenv-version: - required: true - type: string - description: The version of relenv to use - python-version: - required: true - type: string - description: The version of python to use with relenv - matrix: - required: true - type: string - description: Json job matrix config - linux_arm_runner: - required: true - type: string - description: Json job matrix config - -env: - RELENV_DATA: "${{ github.workspace }}/.relenv" - COLUMNS: 190 - AWS_MAX_ATTEMPTS: "10" - AWS_RETRY_MODE: "adaptive" - PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }} - PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }} - PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }} - PIP_DISABLE_PIP_VERSION_CHECK: "1" - -jobs: - - build-deps-linux: - name: Linux - if: ${{ toJSON(fromJSON(inputs.matrix)['linux']) != '[]' }} - runs-on: - - ${{ matrix.arch == 'x86_64' && 'ubuntu-24.04' || inputs.linux_arm_runner }} - strategy: - fail-fast: false - matrix: - include: ${{ fromJSON(inputs.matrix)['linux'] }} - env: - USE_S3_CACHE: 'false' - steps: - - - name: "Throttle Builds" - shell: bash - run: | - t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t" - - - uses: actions/checkout@v4 - - - uses: actions/setup-python@v5 - with: - python-version: '3.10' - - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - with: - cache-prefix: ${{ inputs.cache-seed }}|build-deps|linux|${{ matrix.arch }} - - - name: Setup Relenv - id: setup-relenv - uses: ./.github/actions/setup-relenv - with: - platform: linux - arch: ${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }} - version: ${{ inputs.relenv-version }} - cache-seed: ${{ inputs.cache-seed }} - python-version: ${{ inputs.python-version }} - - - name: Install Salt Packaging Dependencies into Relenv Onedir - uses: ./.github/actions/build-onedir-deps - with: - platform: linux - 
arch: ${{ matrix.arch }} - python-version: "${{ inputs.python-version }}" - cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }} - - build-deps-macos: - name: macOS - if: ${{ toJSON(fromJSON(inputs.matrix)['macos']) != '[]' }} - strategy: - fail-fast: false - max-parallel: 2 - matrix: - include: ${{ fromJSON(inputs.matrix)['macos'] }} - runs-on: - - ${{ matrix.arch == 'arm64' && 'macos-14' || 'macos-13' }} - env: - USE_S3_CACHE: 'false' - PIP_INDEX_URL: https://pypi.org/simple - steps: - - - name: "Check cores" - shell: bash - run: sysctl -n hw.ncpu - - - name: "Throttle Builds" - shell: bash - run: | - t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t" - - - uses: actions/checkout@v4 - - - name: Set up Python 3.10 - uses: actions/setup-python@v5 - with: - python-version: "3.10" - - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - with: - cache-prefix: ${{ inputs.cache-seed }}|build-deps|macos - - - name: Setup Relenv - id: setup-relenv - uses: ./.github/actions/setup-relenv - with: - platform: macos - arch: ${{ matrix.arch }} - version: ${{ inputs.relenv-version }} - cache-seed: ${{ inputs.cache-seed }} - python-version: ${{ inputs.python-version }} - - - name: Install Salt Packaging Dependencies into Relenv Onedir - uses: ./.github/actions/build-onedir-deps - with: - platform: macos - arch: ${{ matrix.arch }} - python-version: "${{ inputs.python-version }}" - cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }} - - build-deps-windows: - name: Windows - if: ${{ toJSON(fromJSON(inputs.matrix)['windows']) != '[]' }} - strategy: - fail-fast: false - max-parallel: 2 - matrix: - include: ${{ fromJSON(inputs.matrix)['windows'] }} - runs-on: windows-latest - env: - USE_S3_CACHE: 'false' - PIP_INDEX_URL: https://pypi.org/simple - steps: - - - name: "Throttle Builds" - shell: bash - run: | - 
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t" - - - uses: actions/checkout@v4 - - - name: Set up Python 3.10 - uses: actions/setup-python@v5 - with: - python-version: "3.10" - - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - with: - cache-prefix: ${{ inputs.cache-seed }}|build-deps|windows|${{ matrix.arch }} - - - name: Setup Relenv - id: setup-relenv - uses: ./.github/actions/setup-relenv - with: - platform: windows - arch: ${{ matrix.arch }} - version: ${{ inputs.relenv-version }} - cache-seed: ${{ inputs.cache-seed }} - python-version: ${{ inputs.python-version }} - - - name: Install Salt Packaging Dependencies into Relenv Onedir - uses: ./.github/actions/build-onedir-deps - with: - platform: windows - arch: ${{ matrix.arch }} - python-version: "${{ inputs.python-version }}" - cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }} diff --git a/.github/workflows/build-salt-onedir.yml b/.github/workflows/build-salt-onedir.yml index 5c3078b2c96..39c5578eb0b 100644 --- a/.github/workflows/build-salt-onedir.yml +++ b/.github/workflows/build-salt-onedir.yml @@ -108,7 +108,7 @@ jobs: - ${{ matrix.arch == 'arm64' && 'macos-14' || 'macos-13' }} env: PIP_INDEX_URL: https://pypi.org/simple - + USE_S3_CACHE: 'false' steps: - name: "Check cores" shell: bash @@ -125,16 +125,6 @@ jobs: with: python-version: "3.10" - - name: Setup Relenv - id: setup-relenv - uses: ./.github/actions/setup-relenv - with: - platform: macos - arch: ${{ matrix.arch }} - version: ${{ inputs.relenv-version }} - cache-seed: ${{ inputs.cache-seed }} - python-version: ${{ inputs.python-version }} - - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts with: @@ -146,6 +136,16 @@ jobs: with: salt-version: "${{ inputs.salt-version }}" + - name: Setup Relenv + id: setup-relenv + uses: ./.github/actions/setup-relenv + with: + 
platform: macos + arch: ${{ matrix.arch }} + version: ${{ inputs.relenv-version }} + cache-seed: ${{ inputs.cache-seed }} + python-version: ${{ inputs.python-version }} + - name: Install Salt into Relenv Onedir uses: ./.github/actions/build-onedir-salt with: @@ -166,7 +166,7 @@ jobs: runs-on: windows-latest env: PIP_INDEX_URL: https://pypi.org/simple - + USE_S3_CACHE: 'false' steps: - name: "Throttle Builds" @@ -181,16 +181,6 @@ jobs: with: python-version: "3.10" - - name: Setup Relenv - id: setup-relenv - uses: ./.github/actions/setup-relenv - with: - platform: windows - arch: ${{ matrix.arch }} - version: ${{ inputs.relenv-version }} - cache-seed: ${{ inputs.cache-seed }} - python-version: ${{ inputs.python-version }} - - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts with: @@ -202,6 +192,16 @@ jobs: with: salt-version: "${{ inputs.salt-version }}" + - name: Setup Relenv + id: setup-relenv + uses: ./.github/actions/setup-relenv + with: + platform: windows + arch: ${{ matrix.arch }} + version: ${{ inputs.relenv-version }} + cache-seed: ${{ inputs.cache-seed }} + python-version: ${{ inputs.python-version }} + - name: Install Salt into Relenv Onedir uses: ./.github/actions/build-onedir-salt with: diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index db499c2bee5..24555cd88dc 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -239,7 +239,7 @@ jobs: lint: name: Lint - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['lint'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['lint'] }} uses: ./.github/workflows/lint-action.yml needs: - prepare-workflow @@ -257,7 +257,7 @@ jobs: name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}" runs-on: - ubuntu-22.04 - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['prepare-release'] }} + if: ${{ !cancelled() && 
fromJSON(needs.prepare-workflow.outputs.config)['jobs']['prepare-release'] }} needs: - prepare-workflow steps: @@ -379,7 +379,7 @@ jobs: build-docs: name: Documentation - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-docs'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-docs'] }} needs: - prepare-workflow - build-source-tarball @@ -390,7 +390,7 @@ jobs: build-source-tarball: name: Build Source Tarball - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-source-tarball'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-source-tarball'] }} needs: - prepare-workflow - prepare-release @@ -419,33 +419,18 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - build-deps-onedir: - name: Build Onedir Dependencies - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-deps-onedir'] }} - needs: - - prepare-workflow - uses: ./.github/workflows/build-deps-onedir.yml - with: - cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.18.0" - python-version: "3.10.15" - matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} - linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} - build-salt-onedir: name: Build Salt Onedir - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-salt-onedir'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-salt-onedir'] }} needs: - prepare-workflow - - build-deps-onedir - build-source-tarball uses: ./.github/workflows/build-salt-onedir.yml with: cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.18.0" - python-version: "3.10.15" + relenv-version: 
"0.18.1" + python-version: "3.10.16" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -459,8 +444,8 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - relenv-version: "0.18.0" - python-version: "3.10.15" + relenv-version: "0.18.1" + python-version: "3.10.16" source: "onedir" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -476,7 +461,7 @@ jobs: nox-version: 2022.8.7 python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16 nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -493,7 +478,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" nox-version: 2022.8.7 python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['pkg-test-matrix']) }} @@ -511,7 +496,7 @@ jobs: python-version: "3.10" testrun: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['testrun']) }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.15 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] }} workflow-slug: ci default-timeout: 180 @@ -520,7 +505,7 @@ jobs: combine-all-code-coverage: name: Combine Code Coverage - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] == false }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] == false }} runs-on: ubuntu-22.04 env: PIP_INDEX_URL: https://pypi.org/simple @@ -650,7 +635,7 @@ jobs: # This step is just so we can make github require this step, to pass checks # on a pull request instead of requiring all name: Set the ${{ github.workflow }} Pipeline Exit Status - if: always() + if: ${{ !cancelled() && always() }} runs-on: ubuntu-22.04 needs: - prepare-workflow @@ -658,7 +643,6 @@ jobs: - lint - nsis-tests - build-docs - - build-deps-onedir - build-salt-onedir - combine-all-code-coverage - build-ci-deps diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index e07118b20eb..28cef827373 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -238,7 +238,7 @@ jobs: lint: name: Lint - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['lint'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['lint'] }} uses: ./.github/workflows/lint-action.yml needs: - prepare-workflow @@ -256,7 +256,7 @@ jobs: name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}" runs-on: - ubuntu-22.04 - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['prepare-release'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['prepare-release'] }} needs: - prepare-workflow steps: @@ -378,7 +378,7 @@ jobs: build-docs: name: Documentation - if: ${{ 
fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-docs'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-docs'] }} needs: - prepare-workflow - build-source-tarball @@ -389,7 +389,7 @@ jobs: build-source-tarball: name: Build Source Tarball - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-source-tarball'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-source-tarball'] }} needs: - prepare-workflow - prepare-release @@ -418,33 +418,18 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - build-deps-onedir: - name: Build Onedir Dependencies - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-deps-onedir'] }} - needs: - - prepare-workflow - uses: ./.github/workflows/build-deps-onedir.yml - with: - cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.18.0" - python-version: "3.10.15" - matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} - linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} - build-salt-onedir: name: Build Salt Onedir - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-salt-onedir'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-salt-onedir'] }} needs: - prepare-workflow - - build-deps-onedir - build-source-tarball uses: ./.github/workflows/build-salt-onedir.yml with: cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.18.0" - python-version: "3.10.15" + relenv-version: "0.18.1" + python-version: "3.10.16" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ 
fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -458,8 +443,8 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - relenv-version: "0.18.0" - python-version: "3.10.15" + relenv-version: "0.18.1" + python-version: "3.10.16" source: "onedir" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -478,8 +463,8 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - relenv-version: "0.18.0" - python-version: "3.10.15" + relenv-version: "0.18.1" + python-version: "3.10.16" source: "src" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -499,7 +484,7 @@ jobs: nox-version: 2022.8.7 python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16 nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -516,7 +501,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" nox-version: 2022.8.7 python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16 skip-code-coverage: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} matrix: ${{ 
toJSON(fromJSON(needs.prepare-workflow.outputs.config)['pkg-test-matrix']) }} @@ -534,7 +519,7 @@ jobs: python-version: "3.10" testrun: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['testrun']) }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16 skip-code-coverage: true workflow-slug: nightly default-timeout: 360 @@ -544,7 +529,7 @@ jobs: # This step is just so we can make github require this step, to pass checks # on a pull request instead of requiring all name: Set the ${{ github.workflow }} Pipeline Exit Status - if: always() + if: ${{ !cancelled() && always() }} runs-on: ubuntu-22.04 environment: nightly needs: @@ -553,7 +538,6 @@ jobs: - lint - nsis-tests - build-docs - - build-deps-onedir - build-salt-onedir - build-pkgs-src - build-ci-deps diff --git a/.github/workflows/pre-commit-action.yml b/.github/workflows/pre-commit-action.yml index 26c119aa69a..b6c2478bc24 100644 --- a/.github/workflows/pre-commit-action.yml +++ b/.github/workflows/pre-commit-action.yml @@ -48,14 +48,14 @@ jobs: cache-seed: ${{ inputs.cache-seed }} - name: Check ALL Files On Branch - if: github.event_name != 'pull_request' + if: ${{ !cancelled() && github.event_name != 'pull_request' }} env: SKIP: lint-salt,lint-tests,remove-import-headers,pyupgrade run: | pre-commit run --show-diff-on-failure --color=always --all-files - name: Check Changed Files On PR - if: github.event_name == 'pull_request' && fromJSON(inputs.changed-files)['repo'] + if: ${{ !cancelled() && github.event_name == 'pull_request' && fromJSON(inputs.changed-files)['repo'] }} env: SKIP: lint-salt,lint-tests GH_ACTIONS_ANNOTATE: "1" @@ -63,6 +63,6 @@ jobs: pre-commit run --show-diff-on-failure --color=always --files ${{ join(fromJSON(inputs.changed-files)['repo_files'], ' ') }} - name: Check Docs On Deleted Files - if: github.event_name == 
'pull_request' && fromJSON(inputs.changed-files)['deleted'] + if: ${{ !cancelled() && github.event_name == 'pull_request' && fromJSON(inputs.changed-files)['deleted'] }} run: | pre-commit run --show-diff-on-failure --color=always check-docs --files ${{ join(fromJSON(inputs.changed-files)['deleted_files'], ' ') }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 794ecb486db..71ebbef5835 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -186,7 +186,7 @@ jobs: nox-version: 2022.8.7 python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16 nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -422,7 +422,7 @@ jobs: # This step is just so we can make github require this step, to pass checks # on a pull request instead of requiring all name: Set the ${{ github.workflow }} Pipeline Exit Status - if: always() + if: ${{ !cancelled() && always() }} runs-on: ubuntu-22.04 needs: - check-requirements diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index 135e7a3f995..6f8ad5c8655 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -286,7 +286,7 @@ jobs: lint: name: Lint - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['lint'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['lint'] }} uses: ./.github/workflows/lint-action.yml needs: - prepare-workflow @@ -304,7 +304,7 @@ jobs: name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}" runs-on: - ubuntu-22.04 - if: ${{ 
fromJSON(needs.prepare-workflow.outputs.config)['jobs']['prepare-release'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['prepare-release'] }} needs: - prepare-workflow steps: @@ -426,7 +426,7 @@ jobs: build-docs: name: Documentation - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-docs'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-docs'] }} needs: - prepare-workflow - build-source-tarball @@ -437,7 +437,7 @@ jobs: build-source-tarball: name: Build Source Tarball - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-source-tarball'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-source-tarball'] }} needs: - prepare-workflow - prepare-release @@ -466,33 +466,18 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - build-deps-onedir: - name: Build Onedir Dependencies - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-deps-onedir'] }} - needs: - - prepare-workflow - uses: ./.github/workflows/build-deps-onedir.yml - with: - cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.18.0" - python-version: "3.10.15" - matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} - linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} - build-salt-onedir: name: Build Salt Onedir - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-salt-onedir'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-salt-onedir'] }} needs: - prepare-workflow - - build-deps-onedir - build-source-tarball uses: ./.github/workflows/build-salt-onedir.yml with: cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.18.0" - python-version: "3.10.15" + relenv-version: "0.18.1" + python-version: "3.10.16" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -506,8 +491,8 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - relenv-version: "0.18.0" - python-version: "3.10.15" + relenv-version: "0.18.1" + python-version: "3.10.16" source: "onedir" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -523,7 +508,7 @@ jobs: nox-version: 2022.8.7 python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16 nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -540,7 +525,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" nox-version: 2022.8.7 python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16 skip-code-coverage: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['pkg-test-matrix']) }} @@ -558,7 +543,7 @@ jobs: python-version: "3.10" testrun: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['testrun']) }} salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16 skip-code-coverage: true workflow-slug: scheduled default-timeout: 360 @@ -568,7 +553,7 @@ jobs: # This step is just so we can make github require this step, to pass checks # on a pull request instead of requiring all name: Set the ${{ github.workflow }} Pipeline Exit Status - if: always() + if: ${{ !cancelled() && always() }} runs-on: ubuntu-22.04 needs: - workflow-requirements @@ -578,7 +563,6 @@ jobs: - lint - nsis-tests - build-docs - - build-deps-onedir - build-salt-onedir - build-ci-deps - test-packages diff --git a/.github/workflows/ssh-debug.yml b/.github/workflows/ssh-debug.yml index 212e31c3e57..992f6b6a7b2 100644 --- a/.github/workflows/ssh-debug.yml +++ b/.github/workflows/ssh-debug.yml @@ -33,6 +33,22 @@ jobs: - name: Checkout Source Code uses: actions/checkout@v4 + - name: Set up Python 3.10 + uses: actions/setup-python@v5 + with: + python-version: "3.10" + + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ssh-debug + + - name: Install Nox + run: | + python3 -m pip install 'nox==2022.8.7' + env: + PIP_INDEX_URL: https://pypi.org/simple + - uses: ./.github/actions/ssh-tunnel with: public_key: ${{ inputs.public_key }} diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 4668cf0cc79..16875879b5e 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -278,7 +278,7 @@ jobs: lint: name: Lint - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['lint'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['lint'] }} uses: ./.github/workflows/lint-action.yml needs: - prepare-workflow @@ -296,7 +296,7 @@ jobs: name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}" runs-on: - ubuntu-22.04 - 
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['prepare-release'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['prepare-release'] }} needs: - prepare-workflow steps: @@ -419,7 +419,7 @@ jobs: build-docs: name: Documentation - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-docs'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-docs'] }} needs: - prepare-workflow - build-source-tarball @@ -430,7 +430,7 @@ jobs: build-source-tarball: name: Build Source Tarball - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-source-tarball'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-source-tarball'] }} needs: - prepare-workflow - prepare-release @@ -459,33 +459,18 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - build-deps-onedir: - name: Build Onedir Dependencies - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-deps-onedir'] }} - needs: - - prepare-workflow - uses: ./.github/workflows/build-deps-onedir.yml - with: - cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.18.0" - python-version: "3.10.15" - matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} - linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} - build-salt-onedir: name: Build Salt Onedir - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-salt-onedir'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-salt-onedir'] }} needs: - prepare-workflow - - build-deps-onedir - build-source-tarball uses: ./.github/workflows/build-salt-onedir.yml with: cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.18.0" - python-version: "3.10.15" + relenv-version: "0.18.1" + python-version: "3.10.16" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -499,8 +484,8 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - relenv-version: "0.18.0" - python-version: "3.10.15" + relenv-version: "0.18.1" + python-version: "3.10.16" source: "onedir" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -519,8 +504,8 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - relenv-version: "0.18.0" - python-version: "3.10.15" + relenv-version: "0.18.1" + python-version: "3.10.16" source: "src" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -540,7 +525,7 @@ jobs: nox-version: 2022.8.7 python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16 nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -557,7 +542,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" nox-version: 2022.8.7 python-version: "3.10" - cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.15 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16 skip-code-coverage: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['pkg-test-matrix']) }} @@ -575,7 +560,7 @@ jobs: python-version: "3.10" testrun: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['testrun']) }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16 skip-code-coverage: true workflow-slug: staging default-timeout: 180 @@ -616,7 +601,7 @@ jobs: publish-pypi: name: Publish to PyPi(test) - if: ${{ inputs.skip-test-pypi-publish != true && github.event.repository.fork != true }} + if: ${{ !cancelled() && inputs.skip-test-pypi-publish != true && github.event.repository.fork != true }} needs: - prepare-workflow - upload-release-artifacts @@ -670,11 +655,10 @@ jobs: draft-release: name: Draft Github Release - if: | - always() && (needs.test.result == 'success' || needs.test.result == 'skipped') && + if: ${{ !cancelled() && (needs.test.result == 'success' || needs.test.result == 'skipped') && (needs.test-packages.result == 'success' || needs.test-packages.result == 'skipped') && needs.prepare-workflow.result == 'success' && needs.build-salt-onedir.result == 'success' && - needs.build-pkgs-onedir.result == 'success' && needs.pre-commit.result == 'success' + needs.build-pkgs-onedir.result == 'success' && needs.pre-commit.result == 'success' }} needs: - prepare-workflow - pre-commit @@ -695,7 +679,7 @@ jobs: # This step is just so we can make github require this step, to pass checks # on a pull request instead of requiring all name: Set the ${{ github.workflow }} Pipeline Exit Status - if: always() + if: ${{ !cancelled() && always() }} runs-on: ubuntu-22.04 needs: - 
check-requirements @@ -704,7 +688,6 @@ jobs: - lint - nsis-tests - build-docs - - build-deps-onedir - build-salt-onedir - build-pkgs-src - upload-release-artifacts diff --git a/.github/workflows/templates/ci.yml.jinja b/.github/workflows/templates/ci.yml.jinja index 208d74f63c1..e139f1d054d 100644 --- a/.github/workflows/templates/ci.yml.jinja +++ b/.github/workflows/templates/ci.yml.jinja @@ -29,7 +29,7 @@ lint: <%- do conclusion_needs.append('lint') %> name: Lint - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }} uses: ./.github/workflows/lint-action.yml needs: - prepare-workflow @@ -58,7 +58,7 @@ name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}" runs-on: - ubuntu-22.04 - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }} needs: - prepare-workflow steps: @@ -190,7 +190,7 @@ <{ job_name }>: <%- do conclusion_needs.append(job_name) %> name: Documentation - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }} needs: - prepare-workflow - build-source-tarball @@ -207,7 +207,7 @@ <{ job_name }>: name: Build Source Tarball - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }} needs: - prepare-workflow - prepare-release @@ -238,37 +238,15 @@ <%- endif %> - - <%- set job_name = "build-deps-onedir" %> - <%- if includes.get(job_name, True) %> - - <{ job_name }>: - <%- do conclusion_needs.append(job_name) %> - name: Build Onedir Dependencies - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ 
job_name }>'] }} - needs: - - prepare-workflow - uses: ./.github/workflows/build-deps-onedir.yml - with: - cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "<{ relenv_version }>" - python-version: "<{ python_version }>" - matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} - linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} - - <%- endif %> - <%- set job_name = "build-salt-onedir" %> <%- if includes.get(job_name, True) %> <{ job_name }>: <%- do conclusion_needs.append(job_name) %> name: Build Salt Onedir - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }} needs: - prepare-workflow - - build-deps-onedir - build-source-tarball uses: ./.github/workflows/build-salt-onedir.yml with: @@ -306,7 +284,7 @@ combine-all-code-coverage: <%- do conclusion_needs.append("combine-all-code-coverage") %> name: Combine Code Coverage - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] == false }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] == false }} runs-on: ubuntu-22.04 env: PIP_INDEX_URL: https://pypi.org/simple diff --git a/.github/workflows/templates/layout.yml.jinja b/.github/workflows/templates/layout.yml.jinja index fa69221b468..175e12c98c5 100644 --- a/.github/workflows/templates/layout.yml.jinja +++ b/.github/workflows/templates/layout.yml.jinja @@ -320,7 +320,7 @@ jobs: # This step is just so we can make github require this step, to pass checks # on a pull request instead of requiring all name: Set the ${{ github.workflow }} Pipeline Exit Status - if: always() + if: ${{ !cancelled() && always() }} runs-on: ubuntu-22.04 <%- if workflow_slug == "nightly" %> environment: <{ workflow_slug 
}> diff --git a/.github/workflows/templates/staging.yml.jinja b/.github/workflows/templates/staging.yml.jinja index c823da809da..2c0b9e0c446 100644 --- a/.github/workflows/templates/staging.yml.jinja +++ b/.github/workflows/templates/staging.yml.jinja @@ -116,7 +116,7 @@ on: publish-pypi: <%- do conclusion_needs.append('publish-pypi') %> name: Publish to PyPi(test) - if: ${{ inputs.skip-test-pypi-publish != true && github.event.repository.fork != true }} + if: ${{ !cancelled() && inputs.skip-test-pypi-publish != true && github.event.repository.fork != true }} needs: - prepare-workflow - upload-release-artifacts @@ -178,11 +178,10 @@ on: draft-release: name: Draft Github Release - if: | - always() && (needs.test.result == 'success' || needs.test.result == 'skipped') && + if: ${{ !cancelled() && (needs.test.result == 'success' || needs.test.result == 'skipped') && (needs.test-packages.result == 'success' || needs.test-packages.result == 'skipped') && needs.prepare-workflow.result == 'success' && needs.build-salt-onedir.result == 'success' && - needs.build-pkgs-onedir.result == 'success' && needs.pre-commit.result == 'success' + needs.build-pkgs-onedir.result == 'success' && needs.pre-commit.result == 'success' }} needs: - prepare-workflow - pre-commit diff --git a/.github/workflows/test-action.yml b/.github/workflows/test-action.yml index 6ac1fdccba2..7bef19bf189 100644 --- a/.github/workflows/test-action.yml +++ b/.github/workflows/test-action.yml @@ -71,7 +71,7 @@ jobs: test-linux: name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }} ${{ matrix.transport }}${{ matrix.fips && '(fips)' || '' }}${{ matrix.test-group && ' ' || '' }}${{ matrix.test-group && matrix.test-group || '' }} runs-on: ${{ matrix.arch == 'x86_64' && 'ubuntu-24.04' || inputs.linux_arm_runner }} - if: toJSON(fromJSON(inputs.matrix)['linux-x86_64']) != '[]' + if: ${{ !cancelled() && toJSON(fromJSON(inputs.matrix)['linux-x86_64']) != '[]' }} # Full test runs. 
Each chunk should never take more than 2 hours. # Partial test runs(no chunk parallelization), 6 Hours timeout-minutes: ${{ fromJSON(inputs.testrun)['type'] == 'full' && inputs.default-timeout || 360 }} @@ -80,10 +80,6 @@ jobs: matrix: include: ${{ fromJSON(inputs.matrix)['linux-x86_64'] }} steps: - - name: Set up Python ${{ inputs.python-version }} - uses: actions/setup-python@v5 - with: - python-version: "${{ inputs.python-version }}" - name: "Throttle Builds" shell: bash @@ -98,6 +94,16 @@ jobs: - name: Checkout Source Code uses: actions/checkout@v4 + - name: Setup Python ${{ inputs.python-version }} + uses: actions/setup-python@v5 + with: + python-version: "${{ inputs.python-version }}" + + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} + - name: Free Disk Space Before Build run: | echo "Disk space before cleanup:" @@ -154,39 +160,28 @@ jobs: free -h - name: "Create container ${{ matrix.container }}" + env: + GITHUB_ACTIONS: true + CI: true + SKIP_REQUIREMENTS_INSTALL: 1 + PRINT_TEST_SELECTION: 0 + PRINT_TEST_PLAN_ONLY: 0 + PRINT_SYSTEM_INFO: 0 + RERUN_FAILURES: 1 + GITHUB_ACTIONS_PIPELINE: 1 + SKIP_INITIAL_ONEDIR_FAILURES: 1 + SKIP_INITIAL_GH_ACTIONS_FAILURES: 1 + SKIP_CODE_COVERAGE: ${{ inputs.skip-code-coverage && '1' || '0' }} + CONVERAGE_CONTEXT: ${{ matrix.slug }} + COLUMNS: 190 + PIP_INDEX_URL: "${{ vars.PIP_INDEX_URL }}" + PIP_TRUSTED_HOST: "${{ vars.PIP_TRUSTED_HOST }}" + PIP_EXTRA_INDEX_URL: "${{ vars.PIP_EXTRA_INDEX_URL }}" + PIP_DISABLE_PIP_VERSION_CHECK: 1 + RAISE_DEPRECATIONS_RUNTIME_ERRORS: 1 + SALT_TRANSPORT: ${{ matrix.transport }} run: | - /usr/bin/docker \ - create --name ${{ github.run_id }}_salt-test \ - --workdir /__w/salt/salt \ - --privileged \ - --ulimit="nofile=262144:262144" \ - -e "HOME=/github/home" \ - -e GITHUB_ACTIONS=true \ - -e CI=true \ - -e SKIP_REQUIREMENTS_INSTALL=1 \ - -e PRINT_TEST_SELECTION=0 \ - -e PRINT_TEST_PLAN_ONLY=0 \ - -e 
PRINT_SYSTEM_INFO=0 \ - -e RERUN_FAILURES=1 \ - -e GITHUB_ACTIONS_PIPELINE=1 \ - -e SKIP_INITIAL_ONEDIR_FAILURES=1 \ - -e SKIP_INITIAL_GH_ACTIONS_FAILURES=1 \ - -e SKIP_CODE_COVERAGE=${{ inputs.skip-code-coverage && '1' || '0' }} \ - -e CONVERAGE_CONTEXT=${{ matrix.slug }} \ - -e COLUMNS=190 \ - -e PIP_INDEX_URL=${{ vars.PIP_INDEX_URL }} \ - -e PIP_TRUSTED_HOST=${{ vars.PIP_TRUSTED_HOST }} \ - -e PIP_EXTRA_INDEX_URL=${{ vars.PIP_EXTRA_INDEX_URL }} \ - -e PIP_DISABLE_PIP_VERSION_CHECK="1" \ - -e RAISE_DEPRECATIONS_RUNTIME_ERRORS="1" \ - -e SALT_TRANSPORT=${{ matrix.transport }} \ - -e LANG="en_US.UTF-8" \ - -e SHELL=/bin/bash \ - -v "/home/runner/work":"/__w" \ - -v "/tmp/":"/var/lib/docker" \ - --entrypoint "/usr/lib/systemd/systemd" \ - ${{ matrix.container }} \ - --systemd --unit rescue.target + tools container create ${{ matrix.container }} --name ${{ github.run_id }}_salt-test - name: "Start container ${{ matrix.container }}" run: | @@ -387,7 +382,7 @@ jobs: test-linux-arm64: name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }} ${{ matrix.transport }}${{ matrix.fips && '(fips)' || '' }}${{ matrix.test-group && ' ' || '' }}${{ matrix.test-group && matrix.test-group || '' }} runs-on: ${{ matrix.arch == 'x86_64' && 'ubuntu-22.04' || inputs.linux_arm_runner }} - if: toJSON(fromJSON(inputs.matrix)['linux-arm64']) != '[]' + if: ${{ !cancelled() && toJSON(fromJSON(inputs.matrix)['linux-arm64']) != '[]' }} # Full test runs. Each chunk should never take more than 2 hours. 
# Partial test runs(no chunk parallelization), 6 Hours timeout-minutes: ${{ fromJSON(inputs.testrun)['type'] == 'full' && inputs.default-timeout || 360 }} @@ -396,10 +391,6 @@ jobs: matrix: include: ${{ fromJSON(inputs.matrix)['linux-arm64'] }} steps: - - name: Set up Python ${{ inputs.python-version }} - uses: actions/setup-python@v5 - with: - python-version: "${{ inputs.python-version }}" - name: "Throttle Builds" shell: bash @@ -411,9 +402,25 @@ run: | echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV" + - name: Checkout Source Code uses: actions/checkout@v4 + - name: Setup Python ${{ inputs.python-version }} + uses: actions/setup-python@v5 + with: + python-version: "${{ inputs.python-version }}" + + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} + + - name: "Set `TIMESTAMP` environment variable" + shell: bash + run: | + echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV" + - name: Free Disk Space Before Build run: | echo "Disk space before cleanup:" @@ -470,39 +477,28 @@ free -h - name: "Create container ${{ matrix.container }}" + env: + GITHUB_ACTIONS: true + CI: true + SKIP_REQUIREMENTS_INSTALL: 1 + PRINT_TEST_SELECTION: 0 + PRINT_TEST_PLAN_ONLY: 0 + PRINT_SYSTEM_INFO: 0 + RERUN_FAILURES: 1 + GITHUB_ACTIONS_PIPELINE: 1 + SKIP_INITIAL_ONEDIR_FAILURES: 1 + SKIP_INITIAL_GH_ACTIONS_FAILURES: 1 + SKIP_CODE_COVERAGE: ${{ inputs.skip-code-coverage && '1' || '0' }} + COVERAGE_CONTEXT: ${{ matrix.slug }} + COLUMNS: 190 + PIP_INDEX_URL: "${{ vars.PIP_INDEX_URL }}" + PIP_TRUSTED_HOST: "${{ vars.PIP_TRUSTED_HOST }}" + PIP_EXTRA_INDEX_URL: "${{ vars.PIP_EXTRA_INDEX_URL }}" + PIP_DISABLE_PIP_VERSION_CHECK: 1 + RAISE_DEPRECATIONS_RUNTIME_ERRORS: 1 + SALT_TRANSPORT: ${{ matrix.transport }} run: | - /usr/bin/docker \ - create --name ${{ github.run_id }}_salt-test \ - --workdir /__w/salt/salt \ - --privileged \ - --ulimit="nofile=262144:262144" \ - -e "HOME=/github/home" \ 
- -e GITHUB_ACTIONS=true \ - -e CI=true \ - -e SKIP_REQUIREMENTS_INSTALL=1 \ - -e PRINT_TEST_SELECTION=0 \ - -e PRINT_TEST_PLAN_ONLY=0 \ - -e PRINT_SYSTEM_INFO=0 \ - -e RERUN_FAILURES=1 \ - -e GITHUB_ACTIONS_PIPELINE=1 \ - -e SKIP_INITIAL_ONEDIR_FAILURES=1 \ - -e SKIP_INITIAL_GH_ACTIONS_FAILURES=1 \ - -e SKIP_CODE_COVERAGE=${{ inputs.skip-code-coverage && '1' || '0' }} \ - -e CONVERAGE_CONTEXT=${{ matrix.slug }} \ - -e COLUMNS=190 \ - -e PIP_INDEX_URL=${{ vars.PIP_INDEX_URL }} \ - -e PIP_TRUSTED_HOST=${{ vars.PIP_TRUSTED_HOST }} \ - -e PIP_EXTRA_INDEX_URL=${{ vars.PIP_EXTRA_INDEX_URL }} \ - -e PIP_DISABLE_PIP_VERSION_CHECK="1" \ - -e RAISE_DEPRECATIONS_RUNTIME_ERRORS="1" \ - -e SALT_TRANSPORT=${{ matrix.transport }} \ - -e LANG="en_US.UTF-8" \ - -e SHELL=/bin/bash \ - -v "/home/runner/work":"/__w" \ - -v "/tmp/":"/var/lib/docker" \ - --entrypoint "/usr/lib/systemd/systemd" \ - ${{ matrix.container }} \ - --systemd --unit rescue.target + tools container create ${{ matrix.container }} --name ${{ github.run_id }}_salt-test - name: "Start container ${{ matrix.container }}" run: | @@ -705,7 +701,7 @@ jobs: runs-on: ${{ matrix.runner }} # Full test runs. Each chunk should never take more than 2 hours. 
# Partial test runs(no chunk parallelization), 6 Hours - if: toJSON(fromJSON(inputs.matrix)['macos']) != '[]' + if: ${{ !cancelled() && toJSON(fromJSON(inputs.matrix)['macos']) != '[]' }} timeout-minutes: ${{ fromJSON(inputs.testrun)['type'] == 'full' && inputs.default-timeout || 360 }} strategy: fail-fast: false @@ -726,6 +722,11 @@ jobs: - name: Checkout Source Code uses: actions/checkout@v4 + - name: Setup Python ${{ inputs.python-version }} + uses: actions/setup-python@v5 + with: + python-version: "${{ inputs.python-version }}" + - name: Setup Salt Version run: | echo "${{ inputs.salt-version }}" > salt/_version.txt @@ -751,12 +752,6 @@ jobs: uses: actions/download-artifact@v4 with: name: nox-macos-${{ matrix.arch }}-${{ inputs.nox-session }} - - - name: Set up Python ${{ inputs.python-version }} - uses: actions/setup-python@v5 - with: - python-version: "${{ inputs.python-version }}" - - name: Install Nox run: | python3 -m pip install 'nox==${{ inputs.nox-version }}' @@ -983,7 +978,7 @@ jobs: test-windows: name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }} ${{ matrix.transport }}${{ matrix.test-group && ' ' || '' }}${{ matrix.test-group && matrix.test-group || '' }} - if: toJSON(fromJSON(inputs.matrix)['windows']) != '[]' + if: ${{ !cancelled() && toJSON(fromJSON(inputs.matrix)['windows']) != '[]' }} runs-on: ${{ matrix.slug }} # Full test runs. Each chunk should never take more than 2 hours. 
# Partial test runs(no chunk parallelization), 6 Hours @@ -1031,7 +1026,7 @@ jobs: run: | echo true - - name: Set up Python ${{ inputs.python-version }} + - name: Setup Python ${{ inputs.python-version }} uses: actions/setup-python@v5 with: python-version: "${{ inputs.python-version }}" @@ -1349,7 +1344,7 @@ jobs: run: | tree -a artifacts - - name: Set up Python ${{ inputs.python-version }} + - name: Setup Python ${{ inputs.python-version }} uses: actions/setup-python@v5 with: python-version: "${{ inputs.python-version }}" diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index d5a8c9ee8e1..50c9cc98770 100644 --- a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -65,7 +65,7 @@ jobs: test-linux: name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }} runs-on: ${{ matrix.arch == 'x86_64' && 'ubuntu-24.04' || inputs.linux_arm_runner }} - if: ${{ toJSON(fromJSON(inputs.matrix)['linux']) != '[]' }} + if: ${{ !cancelled() && toJSON(fromJSON(inputs.matrix)['linux']) != '[]' }} timeout-minutes: 120 # 2 Hours - More than this and something is wrong strategy: fail-fast: false @@ -86,6 +86,16 @@ jobs: - name: Checkout Source Code uses: actions/checkout@v4 + - name: Set up Python ${{ inputs.python-version }} + uses: actions/setup-python@v5 + with: + python-version: "${{ inputs.python-version }}" + + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} + - name: Download Packages uses: actions/download-artifact@v4 with: @@ -105,11 +115,6 @@ jobs: cd artifacts tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz - - name: Set up Python ${{ inputs.python-version }} - uses: actions/setup-python@v5 - with: - python-version: "${{ inputs.python-version }}" - - name: Install Nox run: | python3 -m pip install 'nox==${{ inputs.nox-version 
}}' @@ -135,7 +140,7 @@ jobs: - name: "Create container ${{ matrix.container }}" run: | - /usr/bin/docker create --name ${{ github.run_id }}_salt-test-pkg --workdir /__w/salt/salt --privileged -e "HOME=/github/home" -e GITHUB_ACTIONS=true -e CI=true -v "/var/run/docker.sock":"/var/run/docker.sock" -v "/home/runner/work":"/__w" -v "/home/runner/work/_temp":"/__w/_temp" -v "/home/runner/work/_actions":"/__w/_actions" -v "/opt/hostedtoolcache":"/__t" -v "/home/runner/work/_temp/_github_home":"/github/home" -v "/home/runner/work/_temp/_github_workflow":"/github/workflow" --entrypoint "/usr/lib/systemd/systemd" ${{ matrix.container }} --systemd --unit rescue.target + tools container create ${{ matrix.container }} --name ${{ github.run_id }}_salt-test-pkg - name: "Start container ${{ matrix.container }}" run: | @@ -145,31 +150,21 @@ jobs: run: | docker exec ${{ github.run_id}}_salt-test-pkg python3 -m nox --force-color -e decompress-dependencies -- linux ${{ matrix.arch }} - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - with: - cache-prefix: ${{ inputs.cache-prefix }} - - name: List Free Space run: | df -h || true - name: Show System Info - env: - SKIP_REQUIREMENTS_INSTALL: "1" - PRINT_SYSTEM_INFO_ONLY: "1" run: | - docker exec ${{ github.run_id }}_salt-test-pkg python3 -m nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }} + docker exec \ + -e SKIP_REQUIREMENTS_INSTALL=1 \ + -e PRINT_SYSTEM_INFO_ONLY=1 \ + ${{ github.run_id }}_salt-test-pkg python3 -m nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }} - name: Run Package Tests - env: - SKIP_REQUIREMENTS_INSTALL: "1" - RERUN_FAILURES: "1" - GITHUB_ACTIONS_PIPELINE: "1" - SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" - COVERAGE_CONTEXT: ${{ matrix.slug }} run: | - /usr/bin/docker exec ${{ github.run_id }}_salt-test-pkg \ + docker exec \ + ${{ github.run_id }}_salt-test-pkg \ python3 -m nox --force-color -e ${{ inputs.nox-session 
}}-pkgs -- ${{ matrix.tests-chunk }} \ ${{ matrix.version && format('--prev-version={0}', matrix.version) || ''}} @@ -197,7 +192,7 @@ jobs: test-macos: name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }} runs-on: ${{ matrix.runner }} - if: ${{ toJSON(fromJSON(inputs.matrix)['macos']) != '[]' }} + if: ${{ !cancelled() && toJSON(fromJSON(inputs.matrix)['macos']) != '[]' }} timeout-minutes: 150 # 2 & 1/2 Hours - More than this and something is wrong (MacOS needs a little more time) strategy: fail-fast: false @@ -325,7 +320,7 @@ jobs: name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }} runs-on: ${{ matrix.slug }} timeout-minutes: 120 # 2 Hours - More than this and something is wrong - if: ${{ toJSON(fromJSON(inputs.matrix)['windows']) != '[]' }} + if: ${{ !cancelled() && toJSON(fromJSON(inputs.matrix)['windows']) != '[]' }} strategy: fail-fast: false matrix: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index eb3e9a231e6..17e7355f05f 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -30,7 +30,7 @@ repos: # case-insensitive filesystem like MacOS HFS+ or Windows FAT. - id: check-json # Attempts to load all json files to verify syntax. - id: check-symlinks # Checks for symlinks which do not point to anything. - - id: debug-statements # Check for debugger imports and py37+ breakpoint() calls in python source. + - id: debug-statements # Check for debugger imports and breakpoint() calls in python source. 
exclude: > (?x)^( templates/.* @@ -145,21 +145,6 @@ repos: rev: "4.8" hooks: - - id: pip-tools-compile - alias: compile-pkg-linux-3.8-zmq-requirements - name: Linux Packaging Py3.8 ZeroMQ Requirements - files: ^requirements/((base|zeromq|crypto)\.txt|static/pkg/(linux\.in|py3\.8/linux\.txt))$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.8 - - --platform=linux - - --include=requirements/base.txt - - --include=requirements/zeromq.txt - - --no-emit-index-url - - requirements/static/pkg/linux.in - - id: pip-tools-compile alias: compile-pkg-linux-3.9-zmq-requirements name: Linux Packaging Py3.9 ZeroMQ Requirements @@ -235,22 +220,6 @@ repos: - --no-emit-index-url - requirements/static/pkg/linux.in - - - id: pip-tools-compile - alias: compile-pkg-freebsd-3.8-zmq-requirements - name: FreeBSD Packaging Py3.8 ZeroMQ Requirements - files: ^requirements/((base|zeromq|crypto)\.txt|static/pkg/(freebsd\.in|py3\.8/freebsd\.txt))$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.8 - - --platform=freebsd - - --include=requirements/base.txt - - --include=requirements/zeromq.txt - - --no-emit-index-url - - requirements/static/pkg/freebsd.in - - id: pip-tools-compile alias: compile-pkg-freebsd-3.9-zmq-requirements name: FreeBSD Packaging Py3.9 ZeroMQ Requirements @@ -396,21 +365,6 @@ repos: - --no-emit-index-url - requirements/static/pkg/darwin.in - - - id: pip-tools-compile - alias: compile-pkg-windows-3.8-zmq-requirements - name: Windows Packaging Py3.8 ZeroMQ Requirements - files: ^requirements/((base|zeromq|crypto|windows)\.txt|static/pkg/(windows\.in|py3\.8/windows\.txt))$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.8 - - --platform=windows - - --include=requirements/windows.txt - - --no-emit-index-url - - requirements/static/pkg/windows.in - - id: pip-tools-compile alias: compile-pkg-windows-3.9-zmq-requirements name: Windows Packaging Py3.9 ZeroMQ Requirements @@ -485,23 +439,6 
@@ repos: # <---- Packaging Requirements ------------------------------------------------------------------------------------- # ----- CI Requirements -------------------------------------------------------------------------------------------> - - id: pip-tools-compile - alias: compile-ci-linux-3.8-zmq-requirements - name: Linux CI Py3.8 ZeroMQ Requirements - files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(linux\.in|common\.in)|py3\.8/linux\.txt))$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.8 - - --platform=linux - - --include=requirements/base.txt - - --include=requirements/zeromq.txt - - --include=requirements/pytest.txt - - --include=requirements/static/pkg/linux.in - - --include=requirements/static/ci/common.in - - --no-emit-index-url - - requirements/static/ci/linux.in - id: pip-tools-compile alias: compile-ci-linux-3.9-zmq-requirements @@ -593,21 +530,6 @@ repos: - --no-emit-index-url - requirements/static/ci/linux.in - - - id: pip-tools-compile - alias: compile-ci-linux-crypto-3.8-requirements - name: Linux CI Py3.8 Crypto Requirements - files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.8/linux-crypto\.txt))$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.8 - - --platform=linux - - --out-prefix=linux - - --no-emit-index-url - - requirements/static/ci/crypto.in - - id: pip-tools-compile alias: compile-ci-linux-crypto-3.9-requirements name: Linux CI Py3.9 Crypto Requirements @@ -678,24 +600,6 @@ repos: - --no-emit-index-url - requirements/static/ci/crypto.in - - id: pip-tools-compile - alias: compile-ci-freebsd-3.8-zmq-requirements - name: FreeBSD CI Py3.8 ZeroMQ Requirements - files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/((freebsd|common)\.in|py3\.8/freebsd\.txt)))$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.8 - - --platform=freebsd - - --include=requirements/base.txt - - --include=requirements/zeromq.txt 
- - --include=requirements/pytest.txt - - --include=requirements/static/pkg/freebsd.in - - --include=requirements/static/ci/common.in - - --no-emit-index-url - - requirements/static/ci/freebsd.in - - id: pip-tools-compile alias: compile-ci-freebsd-3.9-zmq-requirements name: FreeBSD CI Py3.9 ZeroMQ Requirements @@ -786,23 +690,10 @@ repos: - --no-emit-index-url - requirements/static/ci/freebsd.in - - id: pip-tools-compile - alias: compile-ci-freebsd-crypto-3.8-requirements - name: FreeBSD CI Py3.8 Crypto Requirements - files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.8/freebsd-crypto\.txt))$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.8 - - --platform=freebsd - - --out-prefix=freebsd - - --no-emit-index-url - - requirements/static/ci/crypto.in - - id: pip-tools-compile alias: compile-ci-freebsd-crypto-3.9-requirements name: FreeBSD CI Py3.9 Crypto Requirements + files: ^requirements/(crypto\.txt|static/ci/crypto\.in)$ files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.9/freebsd-crypto\.txt))$ pass_filenames: false args: @@ -1027,24 +918,6 @@ repos: - --no-emit-index-url - requirements/static/ci/crypto.in - - - id: pip-tools-compile - alias: compile-ci-windows-3.8-zmq-requirements - name: Windows CI Py3.8 ZeroMQ Requirements - files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/((windows|common)\.in|py3\.8/windows\.txt)))$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.8 - - --platform=windows - - --include=requirements/windows.txt - - --include=requirements/pytest.txt - - --include=requirements/static/pkg/windows.in - - --include=requirements/static/ci/common.in - - --no-emit-index-url - - requirements/static/ci/windows.in - - id: pip-tools-compile alias: compile-ci-windows-3.9-zmq-requirements name: Windows CI Py3.9 ZeroMQ Requirements @@ -1130,21 +1003,6 @@ repos: - --no-emit-index-url - requirements/static/ci/windows.in - - - id: pip-tools-compile - alias: 
compile-ci-windows-crypto-3.8-requirements - name: Windows CI Py3.8 Crypto Requirements - files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.8/windows-crypto\.txt))$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.8 - - --platform=windows - - --out-prefix=windows - - --no-emit-index-url - - requirements/static/ci/crypto.in - - id: pip-tools-compile alias: compile-ci-windows-crypto-3.9-requirements name: Windows CI Py3.9 Crypto Requirements @@ -1219,18 +1077,6 @@ repos: # <---- CI Requirements -------------------------------------------------------------------------------------------- # ----- Cloud CI Requirements -------------------------------------------------------------------------------------> - - id: pip-tools-compile - alias: compile-ci-cloud-3.8-requirements - name: Cloud CI Py3.8 Requirements - files: ^requirements/((base|zeromq|pytest)\.txt|static/(pkg/linux\.in|ci/((cloud|common)\.in|py3\.8/cloud\.txt)))$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.8 - - --no-emit-index-url - - requirements/static/ci/cloud.in - - id: pip-tools-compile alias: compile-ci-cloud-3.9-requirements name: Cloud CI Py3.9 Requirements @@ -1303,19 +1149,6 @@ repos: # <---- Cloud CI Requirements -------------------------------------------------------------------------------------- # ----- Doc CI Requirements ---------------------------------------------------------------------------------------> - - id: pip-tools-compile - alias: compile-doc-requirements - name: Docs CI Py3.8 Requirements - files: ^requirements/((base|zeromq|pytest)\.txt|static/ci/(docs|common|linux)\.in|static/pkg/linux\.in|static/pkg/.*/linux\.txt)$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.8 - - --platform=linux - - --no-emit-index-url - - requirements/static/ci/docs.in - - id: pip-tools-compile alias: compile-doc-requirements name: Docs CI Py3.9 Requirements @@ -1390,19 +1223,6 @@ repos: # ----- 
Lint CI Requirements --------------------------------------------------------------------------------------> - - id: pip-tools-compile - alias: compile-ci-lint-3.8-requirements - name: Lint CI Py3.8 Requirements - files: ^requirements/((base|zeromq)\.txt|static/(pkg/linux\.in|ci/(linux\.in|common\.in|lint\.in|py3\.8/linux\.txt)))$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.8 - - --platform=linux - - --no-emit-index-url - - requirements/static/ci/lint.in - - id: pip-tools-compile alias: compile-ci-lint-3.9-requirements name: Lint CI Py3.9 Requirements @@ -1481,19 +1301,6 @@ repos: # <---- Lint CI Requirements --------------------------------------------------------------------------------------- # ----- Changelog -------------------------------------------------------------------------------------------------> - - id: pip-tools-compile - alias: compile-ci-changelog-3.8-requirements - name: Changelog CI Py3.8 Requirements - files: ^requirements/static/ci/(changelog\.in|py3\.8/(changelog|linux)\.txt)$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.8 - - --platform=linux - - --no-emit-index-url - - requirements/static/ci/changelog.in - - id: pip-tools-compile alias: compile-ci-changelog-3.9-requirements name: Changelog CI Py3.9 Requirements @@ -1677,8 +1484,8 @@ repos: rev: v3.15.1 hooks: - id: pyupgrade - name: Upgrade code for Py3.8+ - args: [--py38-plus, --keep-mock, --keep-runtime-typing] + name: Upgrade code to Py3.10+ + args: [--py310-plus, --keep-mock] exclude: > (?x)^( salt/client/ssh/ssh_py_shim.py diff --git a/changelog/44736.fixed.md b/changelog/44736.fixed.md new file mode 100644 index 00000000000..eee06decc06 --- /dev/null +++ b/changelog/44736.fixed.md @@ -0,0 +1,2 @@ +Commands on Windows are now prefixed with ``cmd /c`` so that compound +commands (commands separated by ``&&``) run properly when using ``runas`` diff --git a/changelog/59977.fixed.md b/changelog/59977.fixed.md new file 
mode 100644 index 00000000000..9069a8d621b --- /dev/null +++ b/changelog/59977.fixed.md @@ -0,0 +1,2 @@ +Fixed an issue on Windows where checking success_retcodes when using the +runas parameter would fail. Now success_retcodes are checked correctly diff --git a/changelog/60884.fixed.md b/changelog/60884.fixed.md new file mode 100644 index 00000000000..85f074e7b67 --- /dev/null +++ b/changelog/60884.fixed.md @@ -0,0 +1,2 @@ +Fix an issue with cmd.script in Windows so that the exit code from a script will +be passed through to the retcode of the state diff --git a/changelog/61416.fixed.md b/changelog/61416.fixed.md new file mode 100644 index 00000000000..58dd7a27783 --- /dev/null +++ b/changelog/61416.fixed.md @@ -0,0 +1 @@ +Ensure file clients for runner, wheel, local and caller are available from the client_cache if called upon. diff --git a/changelog/66592.fixed.md b/changelog/66592.fixed.md new file mode 100644 index 00000000000..228e35292b0 --- /dev/null +++ b/changelog/66592.fixed.md @@ -0,0 +1 @@ +Fix minion config option startup_states diff --git a/changelog/66637.fixed.md b/changelog/66637.fixed.md new file mode 100644 index 00000000000..12b6759245f --- /dev/null +++ b/changelog/66637.fixed.md @@ -0,0 +1,4 @@ +Fixes an issue when getting account names using the get_name function in the +win_dacl.py salt util. Capability SIDs return ``None``. SIDs for deleted +accounts return the SID. SIDs for domain accounts where the system is not +connected to the domain return the SID. 
diff --git a/changelog/66932.fixed.md b/changelog/66932.fixed.md new file mode 100644 index 00000000000..6209b057fed --- /dev/null +++ b/changelog/66932.fixed.md @@ -0,0 +1 @@ +Ensure minion start event coroutines are run diff --git a/changelog/67057.fixed.md b/changelog/67057.fixed.md new file mode 100644 index 00000000000..2f719be4cde --- /dev/null +++ b/changelog/67057.fixed.md @@ -0,0 +1 @@ +Added support for dnf5 (backport from 3007) and update to its new command syntax changes since 2023 diff --git a/changelog/67091.fixed.md b/changelog/67091.fixed.md new file mode 100644 index 00000000000..8b4ef596813 --- /dev/null +++ b/changelog/67091.fixed.md @@ -0,0 +1 @@ +Fix yumpkg module for Python<3.8 diff --git a/changelog/67171.fixed.md b/changelog/67171.fixed.md new file mode 100644 index 00000000000..d0d51a507ac --- /dev/null +++ b/changelog/67171.fixed.md @@ -0,0 +1,3 @@ +Fix a stacktrace on Windows with pkg.installed and test=True. The +`pkg.list_repo_pkgs` function does not exist on Windows. This uses the +`pkg.list_available` function instead for Windows. 
diff --git a/changelog/67177.fixed.md b/changelog/67177.fixed.md new file mode 100644 index 00000000000..e91c8faead8 --- /dev/null +++ b/changelog/67177.fixed.md @@ -0,0 +1 @@ +Added support and tests for dnf5 to services_need_restart for yum packages diff --git a/changelog/67184.removed.md b/changelog/67184.removed.md new file mode 100644 index 00000000000..a088d8bcfbd --- /dev/null +++ b/changelog/67184.removed.md @@ -0,0 +1 @@ +Removed dependency on bsdmainutils package for Debian and Ubuntu diff --git a/changelog/67722.fixed.md b/changelog/67722.fixed.md new file mode 100644 index 00000000000..19d49f3f55d --- /dev/null +++ b/changelog/67722.fixed.md @@ -0,0 +1 @@ +Use os.walk to traverse git branches, and no longer replace slash '/' in git branch names diff --git a/changelog/67729.deprecated.md b/changelog/67729.deprecated.md new file mode 100644 index 00000000000..9f736524b67 --- /dev/null +++ b/changelog/67729.deprecated.md @@ -0,0 +1 @@ +Removed support for end of life Python 3.7 and 3.8 from pre-commit and requirements diff --git a/changelog/67733.fixed.md b/changelog/67733.fixed.md new file mode 100644 index 00000000000..242f65ec762 --- /dev/null +++ b/changelog/67733.fixed.md @@ -0,0 +1 @@ +Set correct virtual grain in systemd based Podman containers diff --git a/changelog/67743.fixed.md b/changelog/67743.fixed.md new file mode 100644 index 00000000000..2e926595677 --- /dev/null +++ b/changelog/67743.fixed.md @@ -0,0 +1 @@ +Corrected option --upgrades for dnf[5] for function list_upgrades diff --git a/changelog/67769.fixed.md b/changelog/67769.fixed.md new file mode 100644 index 00000000000..bd7da12a7f0 --- /dev/null +++ b/changelog/67769.fixed.md @@ -0,0 +1 @@ +Corrected dnf5 option --downloadonly for dnf5 install diff --git a/changelog/67792.fixed.md b/changelog/67792.fixed.md new file mode 100644 index 00000000000..ca00fc31ccf --- /dev/null +++ b/changelog/67792.fixed.md @@ -0,0 +1,2 @@ +Upgrade relenv to 0.18.1. 
Which includes python 3.10.16 and openssl 3.2.4. +Openssl 3.2.4 fixes CVE-2024-12797 and CVE-2024-13176 diff --git a/changelog/67794.fixed.md b/changelog/67794.fixed.md new file mode 100644 index 00000000000..0805655b1dc --- /dev/null +++ b/changelog/67794.fixed.md @@ -0,0 +1,2 @@ +Update jinja2 to 3.1.5, advisories GHSA-q2x7-8rv6-6q7h and GHSA-gmj6-6f8f-6699 +Update urllib3 to 1.26.18 advisories GHSA-34jh-p97f-mpxf diff --git a/cicd/shared-gh-workflows-context.yml b/cicd/shared-gh-workflows-context.yml index d9a08d10f46..9fa346fe303 100644 --- a/cicd/shared-gh-workflows-context.yml +++ b/cicd/shared-gh-workflows-context.yml @@ -1,6 +1,6 @@ nox_version: "2022.8.7" -python_version: "3.10.15" -relenv_version: "0.18.0" +python_version: "3.10.16" +relenv_version: "0.18.1" release_branches: - "3006.x" - "3007.x" diff --git a/pkg/debian/control b/pkg/debian/control index c08d99d5e23..4f1a2b96352 100644 --- a/pkg/debian/control +++ b/pkg/debian/control @@ -79,8 +79,7 @@ Package: salt-minion Architecture: amd64 arm64 Replaces: salt-common (<= 3006.4) Breaks: salt-common (<= 3006.4) -Depends: bsdmainutils, - dctrl-tools, +Depends: dctrl-tools, salt-common (= ${source:Version}), ${misc:Depends} Recommends: debconf-utils, dmidecode, net-tools diff --git a/pkg/windows/install_salt.ps1 b/pkg/windows/install_salt.ps1 index 670ea38a473..50bb0a71f5f 100644 --- a/pkg/windows/install_salt.ps1 +++ b/pkg/windows/install_salt.ps1 @@ -197,6 +197,25 @@ if ( $PKG ) { } } +# Create pywin32.pth file +if ( -not ( Test-Path -Path "$SCRIPTS_DIR\pywin32.pth" ) ) { + Write-Host "Creating pywin32.pth file: " -NoNewline + $content = "# .pth file for the PyWin32 extensions`n" + ` + "win32`n" + ` + "win32\lib`n" + ` + "Pythonwin`n" + ` + "# And some hackery to deal with environments where the post_install script`n" + ` + "# isn't run." 
+ ` + "import pywin32_bootstrap" + Set-Content -Path "$SCRIPTS_DIR\pywin32.pth" -Value $content + if ( Test-Path -Path "$SCRIPTS_DIR\pywin32.pth") { + Write-Result "Success" -ForegroundColor Green + } else { + Write-Result "Failed" -ForegroundColor Red + exit 1 + } +} + # Remove PyWin32 PostInstall & testall scripts if ( Test-Path -Path "$SCRIPTS_DIR\pywin32_*" ) { Write-Host "Removing pywin32 post-install scripts: " -NoNewline diff --git a/pkg/windows/msi/build_pkg.ps1 b/pkg/windows/msi/build_pkg.ps1 index 6eb028f9d76..1f15fd763b3 100644 --- a/pkg/windows/msi/build_pkg.ps1 +++ b/pkg/windows/msi/build_pkg.ps1 @@ -51,14 +51,11 @@ function VerifyOrDownload ($local_file, $URL, $SHA256) { $filename = Split-Path $local_file -leaf if ( Test-Path -Path $local_file ) { Write-Host "Verifying hash for $filename`: " -NoNewline - $hash = (Get-FileHash $local_file).Hash - if ( $hash -eq $SHA256 ) { + if ( (Get-FileHash $local_file).Hash -eq $SHA256 ) { Write-Result "Verified" -ForegroundColor Green return } else { Write-Result "Failed Hash" -ForegroundColor Red - Write-Host "Found Hash: $hash" - Write-Host "Expected Hash: $SHA256" Remove-Item -Path $local_file -Force } } @@ -90,11 +87,6 @@ $SCRIPT_DIR = (Get-ChildItem "$($myInvocation.MyCommand.Definition)").Direct $RUNTIME_DIR = [System.Runtime.InteropServices.RuntimeEnvironment]::GetRuntimeDirectory() $CSC_BIN = "$RUNTIME_DIR\csc.exe" -[DateTime]$origin = "1970-01-01 00:00:00" -$hash_time = $(git show -s --format=%at) -$TIME_STAMP = $origin.AddSeconds($hash_time) - - if ( $BUILD_ARCH -eq "64bit" ) { $BUILD_ARCH = "AMD64" } else { @@ -479,71 +471,6 @@ $states | ForEach-Object { } Write-Result "Success" -ForegroundColor Green -#------------------------------------------------------------------------------- -# Remove compiled files -#------------------------------------------------------------------------------- -# We have to do this again because we use the Relenv Python to get the build -# architecture. 
This recreates some of the pycache files that were removed -# in the prep_salt script -Write-Host "Removing __pycache__ directories: " -NoNewline -$found = Get-ChildItem -Path "$BUILDENV_DIR" -Filter "__pycache__" -Recurse -$found | ForEach-Object { - Remove-Item -Path "$($_.FullName)" -Recurse -Force - if ( Test-Path -Path "$($_.FullName)" ) { - Write-Result "Failed" -ForegroundColor Red - Write-Host "Failed to remove: $($_.FullName)" - exit 1 - } -} -Write-Result "Success" -ForegroundColor Green - -# If we try to remove *.pyc with the same Get-ChildItem that we used to remove -# __pycache__ directories, it won't be able to find them because they are no -# longer present -# This probably won't find any *.pyc files, but just in case -$remove = "*.pyc", - "*.chm" -$remove | ForEach-Object { - Write-Host "Removing unneeded $_ files: " -NoNewline - $found = Get-ChildItem -Path "$BUILDENV_DIR" -Filter $_ -Recurse - $found | ForEach-Object { - Remove-Item -Path "$($_.FullName)" -Recurse -Force - if ( Test-Path -Path "$($_.FullName)" ) { - Write-Result "Failed" -ForegroundColor Red - Write-Host "Failed to remove: $($_.FullName)" - exit 1 - } - } - Write-Result "Success" -ForegroundColor Green -} - -#------------------------------------------------------------------------------- -# Set timestamps on Files -#------------------------------------------------------------------------------- -# We're doing this on the dlls that were created above - -Write-Host "Setting time stamp on all files: " -NoNewline -$found = Get-ChildItem -Path $BUILDENV_DIR -Recurse -$found | ForEach-Object { - $_.CreationTime = $TIME_STAMP - $_.LastAccessTime = $TIME_STAMP - $_.LastWriteTime = $TIME_STAMP -} -Write-Result "Success" -ForegroundColor Green - -Write-Host "Setting time stamp on installer dlls: " -NoNewline -$found = Get-ChildItem -Path $SCRIPT_DIR -Filter "*.dll" -Recurse -$found | ForEach-Object { - $_.CreationTime = $TIME_STAMP - $_.LastAccessTime = $TIME_STAMP - $_.LastWriteTime = 
$TIME_STAMP -} -Write-Result "Success" -ForegroundColor Green - -#------------------------------------------------------------------------------- -# Let's start building the MSI -#------------------------------------------------------------------------------- - # move conf folder up one dir because it must not be discovered twice and xslt is difficult Write-Host "Remove configs from discovery: " -NoNewline Move-Item -Path "$DISCOVER_CONFDIR" ` diff --git a/pkg/windows/nsis/installer/Salt-Minion-Setup.nsi b/pkg/windows/nsis/installer/Salt-Minion-Setup.nsi index a93fd009171..346df26fdfe 100644 --- a/pkg/windows/nsis/installer/Salt-Minion-Setup.nsi +++ b/pkg/windows/nsis/installer/Salt-Minion-Setup.nsi @@ -1,3 +1,5 @@ +# This file must be UNICODE + !define PRODUCT_NAME "Salt Minion" !define PRODUCT_PUBLISHER "SaltStack, Inc" !define PRODUCT_WEB_SITE "http://saltproject.io" @@ -9,7 +11,6 @@ !define PRODUCT_RUN_REGKEY "Software\Microsoft\Windows\CurrentVersion\App Paths\salt-run.exe" !define PRODUCT_UNINST_KEY "Software\Microsoft\Windows\CurrentVersion\Uninstall\${PRODUCT_NAME}" !define PRODUCT_UNINST_ROOT_KEY "HKLM" -!define /date TIME_STAMP "%Y-%m-%d-%H-%M-%S" # Request admin rights RequestExecutionLevel admin @@ -25,6 +26,7 @@ RequestExecutionLevel admin !include "WinVer.nsh" !include "x64.nsh" ${StrLoc} +${StrRep} ${StrStrAdv} # Required by MoveFileFolder.nsh @@ -47,7 +49,7 @@ ${StrStrAdv} !endif # Get Estimated Size from CLI argument /DEstimatedSize -!ifdef PythonArchitecture +!ifdef EstimatedSize !define ESTIMATED_SIZE "${EstimatedSize}" !else # Default @@ -66,11 +68,26 @@ ${StrStrAdv} !define BUILD_TYPE "Python 3" !define OUTFILE "Salt-Minion-${PRODUCT_VERSION}-Py3-${CPUARCH}-Setup.exe" -# Part of the Trim function for Strings +VIProductVersion "1.0.0.0" # This actually updates File Version +VIAddVersionKey FileVersion "1.0.0.0" # This doesn't seem to do anything, but you'll get a warning without it +VIAddVersionKey "ProductName" "${PRODUCT_NAME}" 
+VIAddVersionKey "LegalTrademarks" "${PRODUCT_NAME} is a trademark of ${PRODUCT_PUBLISHER}" +VIAddVersionKey "LegalCopyright" "© ${PRODUCT_PUBLISHER}" +VIAddVersionKey "FileDescription" "${PRODUCT_NAME} Installer" +VIAddVersionKey "ProductVersion" "${PRODUCT_VERSION}" + +################################################################################ +# Early defines +################################################################################ + !define Trim "!insertmacro Trim" !macro Trim ResultVar String Push "${String}" - Call Trim + !ifdef __UNINSTALL__ + Call un.Trim + !else + Call Trim + !endif Pop "${ResultVar}" !macroend @@ -83,14 +100,50 @@ ${StrStrAdv} Pop "${Length}" !macroend -# Part of the StrContains function for Strings -!define StrContains "!insertmacro StrContains" -!macro StrContains OUT NEEDLE HAYSTACK - Push "${HAYSTACK}" - Push "${NEEDLE}" - Call StrContains - Pop "${OUT}" +# Variables for Logging +Var LogFile +Var TimeStamp +Var cmdLineParams +Var logFileHandle +Var msg + +# Followed this: https://nsis.sourceforge.io/StrRep +!define LogMsg '!insertmacro LogMsg' +!macro LogMsg _msg + Push "${_msg}" + !ifdef __UNINSTALL__ + Call un.LogMsg + !else + Call LogMsg + !endif !macroend +!macro Func_LogMsg un + Function ${un}LogMsg + Pop $msg + ${If} $TimeStamp == "" + ${GetTime} "" "L" $0 $1 $2 $3 $4 $5 $6 + StrCpy $TimeStamp "$2-$1-$0_$4-$5-$6" + ${EndIf} + ${If} $LogFile == "" + !ifdef __UNINSTALL__ + StrCpy $LogFile "$TEMP\SaltInstaller\$TimeStamp-uninstall.log" + !else + StrCpy $LogFile "$TEMP\SaltInstaller\$TimeStamp-install.log" + !endif + ${IfNot} ${FileExists} "$TEMP\SaltInstaller\*.*" + CreateDirectory "$TEMP\SaltInstaller" + ${EndIf} + ${EndIf} + ${Trim} $msg $msg + DetailPrint "$msg" + FileOpen $logFileHandle "$LogFile" a + FileSeek $logFileHandle 0 END + FileWrite $logFileHandle "$msg$\r$\n" + FileClose $logFileHandle + FunctionEnd +!macroend +!insertmacro Func_LogMsg "" +!insertmacro Func_LogMsg "un." 
############################################################################### @@ -538,31 +591,32 @@ SectionEnd # Hidden section (-) to install VCRedist Section -install_vcredist_2022 + ${DisableX64FSRedirection} + Var /GLOBAL VcRedistName - Var /GLOBAL VcRedistReg + # Determine which architecture needs to be installed + ${if} ${runningx64} + strcpy $VcRedistName "vcredist_x64_2022" + ${else} + strcpy $VcRedistName "vcredist_x86_2022" + ${endif} + detailPrint "Selected $VcRedistName installer" - # Only install 64bit VCRedist on 64bit machines - # Use RunningX64 here to get the Architecture for the system running the - # installer. - # 2013 >= 21005 - # 2015 >= 23026 - # 2017 >= 25008 - # 2019 >= 27508 - # 2022 >= 30704 - ${If} ${RunningX64} - StrCpy $VcRedistName "vcredist_x64_2022" - StrCpy $VcRedistReg "SOFTWARE\WOW6432Node\Microsoft\VisualStudio\14.0\VC\Runtimes\x64" - ${Else} - StrCpy $VcRedistName "vcredist_x86_2022" - StrCpy $VcRedistReg "SOFTWARE\Microsoft\VisualStudio\14.0\VC\Runtimes\x86" - ${EndIf} + # Check for the presence of vcruntime140.dll + IfFileExists "$WINDIR\System32\vcruntime140.dll" file_found - # Detecting VCRedist Installation - detailPrint "Checking for $VcRedistName..." - ReadRegDword $0 HKLM $VcRedistReg "Bld" - ${If} $0 < 30704 + file_not_found: + detailPrint "$VcRedistName not found" + # Install Call InstallVCRedist - ${EndIf} + Goto end_of_section + + file_found: + detailPrint "$VcRedistName found, install will continue" + + end_of_section: + + ${EnableX64FSRedirection} SectionEnd @@ -570,8 +624,7 @@ SectionEnd Function InstallVCRedist detailPrint "System requires $VcRedistName" MessageBox MB_ICONQUESTION|MB_YESNO|MB_DEFBUTTON2 \ - "$VcRedistName is currently not installed. Would you like to \ - install?" \ + "$VcRedistName is currently not installed. Would you like to install?" 
\ /SD IDYES IDYES InstallVcRedist detailPrint "$VcRedistName not installed" @@ -627,28 +680,44 @@ Function InstallVCRedist FunctionEnd -Section "MainSection" SEC01 +Section "Install" Install01 ${If} $MoveExistingConfig == 1 + ${LogMsg} "Moving existing config to $APPDATA\Salt Project\Salt" + # This makes the $APPDATA variable point to the ProgramData folder # instead of the current user's roaming AppData folder + ${LogMsg} "Set context to all users" SetShellVarContext all - detailPrint "Moving existing config to $APPDATA\Salt Project\Salt" # Make sure the target directory exists - nsExec::Exec "md $APPDATA\Salt Project\Salt" + ${LogMsg} "Creating rootdir in ProgramData" + CreateDirectory "$APPDATA\Salt Project\Salt" + # Take ownership of the C:\salt directory - detailPrint "Taking ownership: $RootDir" - nsExec::Exec "takeown /F $RootDir /R" + ${LogMsg} "Taking ownership: $RootDir" + nsExec::ExecToStack "takeown /F $RootDir /R" + pop $0 # ExitCode + pop $1 # StdOut + ${If} $0 == 0 + ${LogMsg} "Success" + ${Else} + ${LogMsg} "Failed$\r$\nExitCode: $0$\r$\nStdOut: $1" + ${EndIf} # Move the C:\salt directory to the new location StrCpy $switch_overwrite 0 - detailPrint "Moving $RootDir to $APPDATA" - IfFileExists "$RootDir\conf" 0 +2 - !insertmacro MoveFolder "$RootDir\conf" "$APPDATA\Salt Project\Salt\conf" "*.*" - IfFileExists "$RootDir\srv" 0 +2 - !insertmacro MoveFolder "$RootDir\srv" "$APPDATA\Salt Project\Salt\srv" "*.*" - IfFileExists "$RootDir\var" 0 +2 - !insertmacro MoveFolder "$RootDir\var" "$APPDATA\Salt Project\Salt\var" "*.*" + ${If} ${FileExists} "$RootDir\conf\*.*" + ${LogMsg} "Moving $RootDir\conf to $APPDATA" + !insertmacro MoveFolder "$RootDir\conf" "$APPDATA\Salt Project\Salt\conf" "*.*" + ${EndIf} + ${If} ${FileExists} "$RootDir\srv\*.*" + ${LogMsg} "Moving $RootDir\srv to $APPDATA" + !insertmacro MoveFolder "$RootDir\srv" "$APPDATA\Salt Project\Salt\srv" "*.*" + ${EndIf} + ${If} ${FileExists} "$RootDir\var\*.*" + ${LogMsg} "Moving $RootDir\var to 
$APPDATA" + !insertmacro MoveFolder "$RootDir\var" "$APPDATA\Salt Project\Salt\var" "*.*" + ${EndIf} # Make RootDir the new location StrCpy $RootDir "$APPDATA\Salt Project\Salt" ${EndIf} @@ -657,20 +726,32 @@ Section "MainSection" SEC01 Call BackupExistingConfig ${EndIf} - # Install files to the Installation Directory + ${LogMsg} "Setting outpath to $INSTDIR" SetOutPath "$INSTDIR\" + ${LogMsg} "Setting Overwrite off" SetOverwrite off + ${LogMsg} "Copying files" File /r "..\..\buildenv\" # Set up Root Directory + ${LogMsg} "Creating directory structure" CreateDirectory "$RootDir\conf\pki\minion" CreateDirectory "$RootDir\conf\minion.d" CreateDirectory "$RootDir\var\cache\salt\minion\extmods\grains" CreateDirectory "$RootDir\var\cache\salt\minion\proc" CreateDirectory "$RootDir\var\log\salt" CreateDirectory "$RootDir\var\run" - nsExec::Exec 'icacls $RootDir /inheritance:r /grant:r "*S-1-5-32-544":(OI)(CI)F /grant:r "*S-1-5-18":(OI)(CI)F' + + ${LogMsg} "Setting permissions on RootDir" + nsExec::ExecToStack 'icacls "$RootDir" /inheritance:r /grant:r "*S-1-5-32-544":(OI)(CI)F /grant:r "*S-1-5-18":(OI)(CI)F' + pop $0 # ExitCode + pop $1 # StdOut + ${If} $0 == 0 + ${LogMsg} "Success" + ${Else} + ${LogMsg} "Failed$\r$\nExitCode: $0$\r$\nStdOut: $1" + ${EndIf} SectionEnd @@ -679,24 +760,28 @@ Function .onInit # This function gets executed before any other. This is where we will # detect existing installations and config to be used by the installer + ${LogMsg} "Running ${OUTFILE}" + # Make sure we do not allow 32-bit Salt on 64-bit systems # This is the system the installer is running on ${If} ${RunningX64} # This is the Python architecture the installer was built with ${If} ${CPUARCH} == "x86" - MessageBox MB_OK|MB_ICONEXCLAMATION \ - "Detected 64-bit Operating system.$\n$\n\ - Please install the 64-bit version of Salt on this operating system." 
\ - /SD IDOK + StrCpy $msg "Detected 64-bit Operating system.$\n\ + Please install the 64-bit version of Salt on this operating system." + ${LogMsg} $msg + MessageBox MB_OK|MB_ICONEXCLAMATION $msg /SD IDOK + ${LogMsg} "Aborting" Abort ${EndIf} ${Else} # This is the Python architecture the installer was built with ${If} ${CPUARCH} == "AMD64" - MessageBox MB_OK|MB_ICONEXCLAMATION \ - "Detected 32-bit Operating system.$\n$\n\ - Please install the 32-bit version of Salt on this operating system." \ - /SD IDOK + StrCpy $msg "Detected 32-bit Operating system.$\n\ + Please install the 32-bit version of Salt on this operating system." + ${LogMsg} $msg + MessageBox MB_OK|MB_ICONEXCLAMATION $msg /SD IDOK + ${LogMsg} "Aborting" Abort ${EndIf} ${EndIf} @@ -706,14 +791,14 @@ Function .onInit # Uninstall msi-installed salt # Source: https://nsis-dev.github.io/NSIS-Forums/html/t-303468.html - # TODO: Add a message box here confirming the uninstall of the MSI !define upgradecode {FC6FB3A2-65DE-41A9-AD91-D10A402BD641} # Salt upgrade code StrCpy $0 0 + ${LogMsg} "Looking for MSI installation" loop: System::Call 'MSI::MsiEnumRelatedProducts(t "${upgradecode}",i0,i r0,t.r1)i.r2' ${If} $2 = 0 - # Now $1 contains the product code - DetailPrint product:$1 + # Now $1 contains the product code + ${LogMsg} product:$1 push $R0 StrCpy $R0 $1 Call UninstallMSI @@ -728,33 +813,48 @@ Function .onInit # NOTE: custom config is passed on the CLI. The GUI has its own checking # NOTE: when the user selects a custom config. ${If} $ConfigType == "Custom Config" - IfFileExists "$CustomConfig" checkExistingInstallation 0 + ${LogMsg} "Verifying custom config" + ${If} ${FileExists} "$CustomConfig" + ${LogMsg} "Found full path to custom config: $CustomConfig" + Goto checkExistingInstallation + ${EndIf} + ${If} ${FileExists} "$EXEDIR\$CustomConfig" + ${LogMsg} "Found custom config with the install binary: $CustomConfig" + Goto checkExistingInstallation + ${EndIf} + ${LogMsg} "Custom config not found. 
Aborting" Abort ${EndIf} checkExistingInstallation: # Check for existing installation + ${LogMsg} "Checking for existing installation" # The NSIS installer is a 32bit application and will use the WOW6432Node # in the registry by default. We need to look in the 64 bit location on # 64 bit systems ${If} ${RunningX64} + + ${LogMsg} "Setting registry context to 64-bit" # https://nsis.sourceforge.io/Docs/Chapter4.html#setregview SetRegView 64 # View the 64 bit portion of the registry - ${EndIf} - ReadRegStr $R0 HKLM \ - "Software\Microsoft\Windows\CurrentVersion\Uninstall\${PRODUCT_NAME}" \ - "UninstallString" - - # Puts the nullsoft installer back to its default - SetRegView 32 # Set it back to the 32 bit portion of the registry - - # If not found, look in 32 bit - ${If} $R0 == "" + ${LogMsg} "Reading uninstall string" ReadRegStr $R0 HKLM \ "Software\Microsoft\Windows\CurrentVersion\Uninstall\${PRODUCT_NAME}" \ "UninstallString" + + # Puts the nullsoft installer back to its default + ${LogMsg} "Setting registry context to 32-bit" + SetRegView 32 # Set it back to the 32 bit portion of the registry + + ${Else} + + ${LogMsg} "Reading uninstall string" + ReadRegStr $R0 HKLM \ + "Software\Microsoft\Windows\CurrentVersion\Uninstall\${PRODUCT_NAME}" \ + "UninstallString" + ${EndIf} # If it's empty it's not installed @@ -763,33 +863,41 @@ Function .onInit # Set InstDir to the parent directory so that we can uninstall it ${GetParent} $R0 $INSTDIR + ${LogMsg} "Found existing installation at $R0" + # Found existing installation, prompt to uninstall - MessageBox MB_OKCANCEL|MB_USERICON \ - "${PRODUCT_NAME} is already installed.$\n$\n\ - Click `OK` to remove the existing installation." \ - /SD IDOK IDOK uninst + StrCpy $msg "${PRODUCT_NAME} is already installed.$\n\ + Click `OK` to remove the existing installation." 
+ ${LogMsg} $msg + MessageBox MB_OKCANCEL|MB_USERICON $msg /SD IDOK IDOK uninst + ${LogMsg} "Aborting" Abort uninst: # Get current Silent status + ${LogMsg} "Getting current silent setting" StrCpy $R0 0 ${If} ${Silent} StrCpy $R0 1 ${EndIf} # Turn on Silent mode + ${LogMsg} "Setting to silent mode" SetSilent silent # Don't remove all directories when upgrading (old method) + ${LogMsg} "Setting uninstaller to not delete the install dir" StrCpy $DeleteInstallDir 0 # Don't remove RootDir when upgrading (new method) + ${LogMsg} "Setting uninstaller to not delete the root dir" StrCpy $DeleteRootDir 0 # Uninstall silently Call uninstallSalt + ${LogMsg} "Resetting silent setting to original" # Set it back to Normal mode, if that's what it was before ${If} $R0 == 0 SetSilent normal @@ -803,6 +911,7 @@ Function .onInit ${If} $ExistingConfigFound == 0 ${AndIf} $ConfigType == "Existing Config" + ${LogMsg} "Existing config not found, using Default config" StrCpy $ConfigType "Default Config" ${EndIf} @@ -811,13 +920,19 @@ FunctionEnd Function BackupExistingConfig - ${If} $ExistingConfigFound == 1 # If existing config found + ${If} $ExistingConfigFound == 1 # If existing config found ${AndIfNot} $ConfigType == "Existing Config" # If not using Existing Config # Backup the minion config - Rename "$RootDir\conf\minion" "$RootDir\conf\minion-${TIME_STAMP}.bak" - IfFileExists "$RootDir\conf\minion.d" 0 +2 - Rename "$RootDir\conf\minion.d" "$RootDir\conf\minion.d-${TIME_STAMP}.bak" + ${If} ${FileExists} "$RootDir\conf\minion" + ${LogMsg} "Renaming existing config file" + Rename "$RootDir\conf\minion" "$RootDir\conf\minion-$TimeStamp.bak" + ${EndIf} + + ${If} ${FileExists} "$RootDir\conf\minion.d\*.*" + ${LogMsg} "Renaming existing config directory" + Rename "$RootDir\conf\minion.d" "$RootDir\conf\minion.d-$TimeStamp.bak" + ${EndIf} ${EndIf} @@ -828,17 +943,21 @@ Function BackupExistingConfig # Check for a file name # Named file should be in the same directory as the installer + 
${LogMsg} "Make sure config directory is exists" + ${LogMsg} "Path: $RootDir\conf" CreateDirectory "$RootDir\conf" - IfFileExists "$EXEDIR\$CustomConfig" 0 checkFullPath + + ${If} ${FileExists} "$EXEDIR\$CustomConfig" + ${LogMsg} "Copying custom config from path relative to installer" + ${LogMsg} "Path: $EXEDIR\$CustomConfig" CopyFiles /SILENT /FILESONLY "$EXEDIR\$CustomConfig" "$RootDir\conf\minion" - goto finished - - # Maybe it was a full path to a file - checkFullPath: - IfFileExists "$CustomConfig" 0 finished + ${ElseIf} ${FileExists} "$CustomConfig" + ${LogMsg} "Copying custom config from full path" + ${LogMsg} "Path: $CustomConfig" CopyFiles /SILENT /FILESONLY "$CustomConfig" "$RootDir\conf\minion" - - finished: + ${Else} + ${LogMsg} "Custom config not found, default values will be used" + ${EndIf} ${EndIf} @@ -847,6 +966,7 @@ FunctionEnd Section -Post + ${LogMsg} "Writing uninstaller" WriteUninstaller "$INSTDIR\uninst.exe" # The NSIS installer is a 32bit application and will use the WOW6432Node in @@ -854,9 +974,11 @@ Section -Post # systems ${If} ${RunningX64} # https://nsis.sourceforge.io/Docs/Chapter4.html#setregview + ${LogMsg} "Setting registry context to 64-bit registry" SetRegView 64 # View 64 bit portion of the registry ${EndIf} + ${LogMsg} "Updating installation information in the registry" # Write Uninstall Registry Entries WriteRegStr ${PRODUCT_UNINST_ROOT_KEY} "${PRODUCT_UNINST_KEY}" \ "DisplayName" "$(^Name)" @@ -873,17 +995,20 @@ Section -Post WriteRegStr HKLM "SYSTEM\CurrentControlSet\services\salt-minion" \ "DependOnService" "nsi" + ${LogMsg} "Getting estimated size" # If ESTIMATED_SIZE is not set, calculated it ${If} ${ESTIMATED_SIZE} == 0 - ${GetSize} "$INSTDIR" "/S=OK" $0 $1 $2 + ${GetSize} "$INSTDIR" "/S=OK" $R0 $R1 $R2 ${Else} - StrCpy $0 ${ESTIMATED_SIZE} + StrCpy $R0 ${ESTIMATED_SIZE} ${Endif} - IntFmt $0 "0x%08X" $0 + IntFmt $R0 "0x%08X" $R0 + ${LogMsg} "Setting estimated size: $R0" WriteRegDWORD ${PRODUCT_UNINST_ROOT_KEY} 
"${PRODUCT_UNINST_KEY}" \ - "EstimatedSize" "$0" + "EstimatedSize" "$R0" # Write Commandline Registry Entries + ${LogMsg} "Registering salt commands for the cli" WriteRegStr HKLM "${PRODUCT_CALL_REGKEY}" "" "$INSTDIR\salt-call.exe" WriteRegStr HKLM "${PRODUCT_CALL_REGKEY}" "Path" "$INSTDIR\" WriteRegStr HKLM "${PRODUCT_MINION_REGKEY}" "" "$INSTDIR\salt-minion.exe" @@ -895,64 +1020,124 @@ Section -Post # variable so we'll just do that whether it's new location or old. # Check for Program Files - # Set the current setting for INSTDIR... we'll only change it if it contains - # Program Files - StrCpy $RegInstDir $INSTDIR - - # Program Files # We want to use the environment variables instead of the hardcoded path - ${StrContains} $0 "Program Files" $INSTDIR - StrCmp $0 "" +2 # If it's empty, skip the next line - StrCpy $RegInstDir "%ProgramFiles%\Salt Project\Salt" - - # Check for ProgramData - # Set the current setting for RootDir. we'll only change it if it contains - # ProgramData - StrCpy $RegRootDir $RootDir - - # We want to use the environment variables instead of the hardcoded path - ${StrContains} $0 "ProgramData" $RootDir - StrCmp $0 "" +2 # If it's empty, skip the next line - StrCpy $RegRootDir "%ProgramData%\Salt Project\Salt" + # when setting values in the registry + ${LogMsg} "Getting registry values for install_dir and root_dir" + ${If} ${RunningX64} + ${StrRep} "$RegInstDir" "$INSTDIR" "$ProgramFiles64" "%PROGRAMFILES%" + ${Else} + ${StrRep} "$RegInstDir" "$INSTDIR" "$ProgramFiles" "%PROGRAMFILES%" + ${EndIf} + ${LogMsg} "install_dir: $RegInstDir" + SetShellVarContext all + ${StrRep} "$RegRootDir" "$RootDir" "$APPDATA" "%PROGRAMDATA%" + ${LogMsg} "root_dir: $RegRootDir" + ${LogMsg} "Writing install_dir and root_dir to the registry" WriteRegExpandStr HKLM "SOFTWARE\Salt Project\Salt" "install_dir" "$RegInstDir" WriteRegExpandStr HKLM "SOFTWARE\Salt Project\Salt" "root_dir" "$RegRootDir" # Puts the nullsoft installer back to its default + ${LogMsg} "Setting 
registry context back to 32-bit" SetRegView 32 # Set it back to the 32 bit portion of the registry # Register the Salt-Minion Service - nsExec::Exec `$INSTDIR\ssm.exe install salt-minion "$INSTDIR\salt-minion.exe" -c """$RootDir\conf""" -l quiet` - nsExec::Exec "$INSTDIR\ssm.exe set salt-minion Description Salt Minion from saltstack.com" - nsExec::Exec "$INSTDIR\ssm.exe set salt-minion Start SERVICE_AUTO_START" - nsExec::Exec "$INSTDIR\ssm.exe set salt-minion AppStopMethodConsole 24000" - nsExec::Exec "$INSTDIR\ssm.exe set salt-minion AppStopMethodWindow 2000" - nsExec::Exec "$INSTDIR\ssm.exe set salt-minion AppRestartDelay 60000" + ${LogMsg} "Registering the salt-minion service" + nsExec::ExecToStack `"$INSTDIR\ssm.exe" install salt-minion "$INSTDIR\salt-minion.exe" -c """$RootDir\conf""" -l quiet` + pop $0 # ExitCode + pop $1 # StdOut + ${IfNot} $0 == 0 + StrCpy $msg "Failed to register the salt minion service.$\n\ + ExitCode: $0$\n\ + StdOut: $1" + ${LogMsg} $msg + MessageBox MB_OK|MB_ICONEXCLAMATION $msg /SD IDOK IDOK + ${LogMsg} "Aborting" + Abort + ${Else} + ${LogMsg} "Setting service description" + nsExec::ExecToStack "$INSTDIR\ssm.exe set salt-minion Description Salt Minion from saltstack.com" + pop $0 # ExitCode + pop $1 # StdOut + ${If} $0 == 0 + ${LogMsg} "Success" + ${Else} + ${LogMsg} "Failed$\r$\nExitCode: $0$\r$\nStdOut: $1" + ${EndIf} + ${LogMsg} "Setting service autostart" + nsExec::ExecToStack "$INSTDIR\ssm.exe set salt-minion Start SERVICE_AUTO_START" + pop $0 # ExitCode + pop $1 # StdOut + ${If} $0 == 0 + ${LogMsg} "Success" + ${Else} + ${LogMsg} "Failed$\r$\nExitCode: $0$\r$\nStdOut: $1" + ${EndIf} + ${LogMsg} "Setting service console stop method" + nsExec::ExecToStack "$INSTDIR\ssm.exe set salt-minion AppStopMethodConsole 24000" + pop $0 # ExitCode + pop $1 # StdOut + ${If} $0 == 0 + ${LogMsg} "Success" + ${Else} + ${LogMsg} "Failed$\r$\nExitCode: $0$\r$\nStdOut: $1" + ${EndIf} + ${LogMsg} "Setting service windows stop method" + 
nsExec::ExecToStack "$INSTDIR\ssm.exe set salt-minion AppStopMethodWindow 2000" + pop $0 # ExitCode + pop $1 # StdOut + ${If} $0 == 0 + ${LogMsg} "Success" + ${Else} + ${LogMsg} "Failed$\r$\nExitCode: $0$\r$\nStdOut: $1" + ${EndIf} + ${LogMsg} "Setting service app restart delay" + nsExec::ExecToStack "$INSTDIR\ssm.exe set salt-minion AppRestartDelay 60000" + pop $0 # ExitCode + pop $1 # StdOut + ${If} $0 == 0 + ${LogMsg} "Success" + ${Else} + ${LogMsg} "Failed$\r$\nExitCode: $0$\r$\nStdOut: $1" + ${EndIf} + ${EndIf} # There is a default minion config laid down in the $INSTDIR directory ${Switch} $ConfigType ${Case} "Existing Config" + ${LogMsg} "Using existing config" # If this is an Existing Config, we don't do anything ${Break} ${Case} "Custom Config" + ${LogMsg} "Using custom config" # If this is a Custom Config, update the custom config Call updateMinionConfig ${Break} ${Case} "Default Config" + ${LogMsg} "Using default config" # If this is the Default Config, we move it and update it StrCpy $switch_overwrite 1 - !insertmacro MoveFolder "$INSTDIR\configs" "$RootDir\conf" "*.*" Call updateMinionConfig ${Break} ${EndSwitch} # Delete the configs directory that came with the installer + ${LogMsg} "Removing configs directory" RMDir /r "$INSTDIR\configs" # Add $INSTDIR in the Path + ${LogMsg} "Adding salt to the path" EnVar::SetHKLM EnVar::AddValue Path "$INSTDIR" + Pop $0 + ${If} $0 == 0 + ${LogMsg} "Success" + ${Else} + # See this table for Error Codes: + # https://github.com/GsNSIS/EnVar#error-codes + ${LogMsg} "Failed. 
Error Code: $0" + ${EndIf} SectionEnd @@ -961,14 +1146,38 @@ Function .onInstSuccess # If StartMinionDelayed is 1, then set the service to start delayed ${If} $StartMinionDelayed == 1 - nsExec::Exec "$INSTDIR\ssm.exe set salt-minion Start SERVICE_DELAYED_AUTO_START" + ${LogMsg} "Setting the salt-minion service to start delayed" + nsExec::ExecToStack "$INSTDIR\ssm.exe set salt-minion Start SERVICE_DELAYED_AUTO_START" + pop $0 # ExitCode + pop $1 # StdOut + ${If} $0 == 0 + ${LogMsg} "Success" + ${Else} + ${LogMsg} "Failed$\r$\nExitCode: $0$\r$\nStdOut: $1" + ${EndIf} ${EndIf} # If start-minion is 1, then start the service ${If} $StartMinion == 1 - nsExec::Exec 'net start salt-minion' + ${LogMsg} "Starting the salt-minion service" + nsExec::ExecToStack "$INSTDIR\ssm.exe start salt-minion" + pop $0 # ExitCode + pop $1 # StdOut + ${If} $0 == 0 + ${LogMsg} "Success" + ${Else} + ${LogMsg} "Failed$\r$\nExitCode: $0$\r$\nStdOut: $1" + ${EndIf} ${EndIf} + ${LogMsg} "Salt installation complete" + + # I don't know of another way to fix this. The installer hangs intermittently + # This will force kill the installer process. This must be the last thing that + # is run. + StrCpy $1 "wmic Path win32_process where $\"name like '$EXEFILE'$\" Call Terminate" + nsExec::Exec $1 + FunctionEnd @@ -976,12 +1185,15 @@ Function un.onInit Call un.parseUninstallerCommandLineSwitches - MessageBox MB_USERICON|MB_YESNO|MB_DEFBUTTON1 \ - "Are you sure you want to completely remove $(^Name) and all of its \ - components?" \ - /SD IDYES IDYES +2 + StrCpy $msg "Are you sure you want to completely remove $(^Name) and all \ + of its components?" 
+ ${LogMsg} $msg + MessageBox MB_USERICON|MB_YESNO|MB_DEFBUTTON1 $msg /SD IDYES IDYES continue_remove + ${LogMsg} "Aborting" Abort + continue_remove: + FunctionEnd @@ -990,8 +1202,17 @@ Section Uninstall Call un.uninstallSalt # Remove $INSTDIR from the Path + ${LogMsg} "Removing salt from the path" EnVar::SetHKLM EnVar::DeleteValue Path "$INSTDIR" + Pop $0 + ${If} $0 == 0 + ${LogMsg} "Success" + ${Else} + # See this table for Error Codes: + # https://github.com/GsNSIS/EnVar#error-codes + ${LogMsg} "Failed. Error Code: $0" + ${EndIf} SectionEnd @@ -1001,57 +1222,64 @@ Function ${un}uninstallSalt # WARNING: Any changes made here need to be reflected in the MSI uninstaller # Make sure we're in the right directory + ${LogMsg} "Detecting INSTDIR" ${If} $INSTDIR == "c:\salt\Scripts" - StrCpy $INSTDIR "C:\salt" + StrCpy $INSTDIR "C:\salt" ${EndIf} # $ProgramFiles is different depending on the CPU Architecture # https://nsis.sourceforge.io/Reference/$PROGRAMFILES # x86 : C:\Program Files # x64 : C:\Program Files (x86) ${If} $INSTDIR == "$ProgramFiles\Salt Project\Salt\Scripts" - StrCpy $INSTDIR "$ProgramFiles\Salt Project\Salt" + StrCpy $INSTDIR "$ProgramFiles\Salt Project\Salt" ${EndIf} # $ProgramFiles64 is the C:\Program Files directory ${If} $INSTDIR == "$ProgramFiles64\Salt Project\Salt\Scripts" - StrCpy $INSTDIR "$ProgramFiles64\Salt Project\Salt" + StrCpy $INSTDIR "$ProgramFiles64\Salt Project\Salt" ${EndIf} + ${LogMsg} "INSTDIR: $INSTDIR" - # Stop and Remove salt-minion service - nsExec::Exec "net stop salt-minion" - nsExec::Exec "sc delete salt-minion" + # Only attempt to remove the services if ssm.exe is present" + ${If} ${FileExists} "$INSTDIR\ssm.exe" - # Stop and remove the salt-master service - nsExec::Exec "net stop salt-master" - nsExec::Exec "sc delete salt-master" + ${LogMsg} "ssm.exe found" - # We need to make sure the service is stopped and removed before deleting - # any files - StrCpy $0 1 # Tries - StrCpy $1 1 # Service Present - loop: - detailPrint 
"Verifying salt-minion deletion: try $0" - nsExec::ExecToStack 'net start | FIND /C /I "salt-minion"' - pop $2 # First on the stack is the return code - pop $1 # Next on the stack is standard out (service present) - ${If} $1 == 1 - ${If} $0 < 5 - IntOp $0 $0 + 1 - Sleep 1000 - goto loop - ${Else} - MessageBox MB_OK|MB_ICONEXCLAMATION \ - "Failed to remove salt-minion service" \ - /SD IDOK - Abort - ${EndIf} + # Stop and Remove salt-minion service + ${LogMsg} "Stopping salt-minion service" + nsExec::ExecToStack "$INSTDIR\ssm.exe stop salt-minion" + pop $0 # ExitCode + pop $1 # StdOut + ${If} $0 == 0 + ${LogMsg} "Success" + ${Else} + ${LogMsg} "Failed$\r$\nExitCode: $0$\r$\nStdOut: $1" ${EndIf} + ${LogMsg} "Removing salt-minion service" + nsExec::ExecToStack "$INSTDIR\ssm.exe remove salt-minion confirm" + pop $0 # ExitCode + pop $1 # StdOut + ${If} $0 == 0 + ${LogMsg} "Success" + ${Else} + ${LogMsg} "Failed$\r$\nExitCode: $0$\r$\nStdOut: $1" + Abort + ${EndIf} + + ${Else} + + ${LogMsg} "ssm.exe not found" + + ${EndIf} + # Remove files + ${LogMsg} "Deleting individual files" Delete "$INSTDIR\multi-minion*" Delete "$INSTDIR\salt*" Delete "$INSTDIR\ssm.exe" Delete "$INSTDIR\uninst.exe" Delete "$INSTDIR\vcredist.exe" + ${LogMsg} "Deleting directories" RMDir /r "$INSTDIR\DLLs" RMDir /r "$INSTDIR\Include" RMDir /r "$INSTDIR\Lib" @@ -1064,37 +1292,24 @@ Function ${un}uninstallSalt # the registry by default. 
We need to look in the 64 bit location on 64 bit # systems ${If} ${RunningX64} + + ${LogMsg} "Removing 64-bit registry items" # https://nsis.sourceforge.io/Docs/Chapter4.html#setregview SetRegView 64 # View the 64 bit portion of the registry - # Get Root Directory from the Registry (64 bit) - ReadRegStr $RootDir HKLM "SOFTWARE\Salt Project\Salt" "root_dir" - - # Remove Registry entries - DeleteRegKey ${PRODUCT_UNINST_ROOT_KEY} "${PRODUCT_UNINST_KEY}" - - # Remove Command Line Registry entries - DeleteRegKey ${PRODUCT_UNINST_ROOT_KEY} "${PRODUCT_CALL_REGKEY}" - DeleteRegKey ${PRODUCT_UNINST_ROOT_KEY} "${PRODUCT_CP_REGKEY}" - DeleteRegKey ${PRODUCT_UNINST_ROOT_KEY} "${PRODUCT_KEY_REGKEY}" - DeleteRegKey ${PRODUCT_UNINST_ROOT_KEY} "${PRODUCT_MASTER_REGKEY}" - DeleteRegKey ${PRODUCT_UNINST_ROOT_KEY} "${PRODUCT_MINION_REGKEY}" - DeleteRegKey ${PRODUCT_UNINST_ROOT_KEY} "${PRODUCT_RUN_REGKEY}" - DeleteRegKey HKLM "SOFTWARE\Salt Project" ${EndIf} - # Remove everything in the 32 bit registry - SetRegView 32 # Set it to 32 bit - - ${If} $RootDir == "" - # Get Root Directory from the Registry (32 bit) - ReadRegStr $RootDir HKLM "SOFTWARE\Salt Project\Salt" "root_dir" - ${EndIf} + ${LogMsg} "Getting RootDir from 64-bit registry" + # Get Root Directory from the Registry (64 bit) + ReadRegStr $RootDir HKLM "SOFTWARE\Salt Project\Salt" "root_dir" + ${LogMsg} "RootDir: $RootDir" # Remove Registry entries + ${LogMsg} "Deleting Add/Remove programs entries" DeleteRegKey ${PRODUCT_UNINST_ROOT_KEY} "${PRODUCT_UNINST_KEY}" # Remove Command Line Registry entries + ${LogMsg} "Deleting Command Line Registry Entries" DeleteRegKey ${PRODUCT_UNINST_ROOT_KEY} "${PRODUCT_CALL_REGKEY}" DeleteRegKey ${PRODUCT_UNINST_ROOT_KEY} "${PRODUCT_CP_REGKEY}" DeleteRegKey ${PRODUCT_UNINST_ROOT_KEY} "${PRODUCT_KEY_REGKEY}" @@ -1105,8 +1320,10 @@ Function ${un}uninstallSalt # SystemDrive is not a built in NSIS constant, so we need to get it from # the environment variables + ${LogMsg} "Getting System Drive" 
ReadEnvStr $0 "SystemDrive" # Get the SystemDrive env var StrCpy $SysDrive "$0\" + ${LogMsg} "SystemDrive: $SysDrive" # Automatically close when finished SetAutoClose true @@ -1118,11 +1335,13 @@ Function ${un}uninstallSalt # directory is also the root_dir which includes the config and pki # directories ${IfNot} $DeleteInstallDir == 1 - MessageBox MB_YESNO|MB_DEFBUTTON2|MB_USERICON \ - "Would you like to completely remove $INSTDIR and all of its contents?" \ - /SD IDNO IDNO finished + StrCpy $msg "Would you like to completely remove $INSTDIR and all \ + of its contents?" + ${LogMsg} $msg + MessageBox MB_YESNO|MB_DEFBUTTON2|MB_USERICON $msg /SD IDNO IDNO finished ${EndIf} + ${LogMsg} "Removing INSTDIR" SetOutPath "$SysDrive" # Can't remove CWD RMDir /r "$INSTDIR" @@ -1133,18 +1352,19 @@ Function ${un}uninstallSalt # and pki directories. These directories will not be removed during # an upgrade. ${IfNot} $DeleteRootDir == 1 - MessageBox MB_YESNO|MB_DEFBUTTON2|MB_USERICON \ - "Would you like to completely remove the entire Salt \ + StrCpy $msg "Would you like to completely remove the entire Salt \ Installation? 
This includes the following:$\n\ - Extra Pip Packages ($INSTDIR\extras-3.##)$\n\ - Minion Config ($RootDir\conf)$\n\ - - Minion PKIs ($RootDir\conf\pki)"\ - /SD IDNO IDNO finished + - Minion PKIs ($RootDir\conf\pki)" + ${LogMsg} $msg + MessageBox MB_YESNO|MB_DEFBUTTON2|MB_USERICON $msg /SD IDNO IDNO finished ${EndIf} # New Method Installation # This makes the $APPDATA variable point to the ProgramData folder instead # of the current user's roaming AppData folder + ${LogMsg} "Setting Shell Context to All Users" SetShellVarContext all # We can always remove the Installation Directory on New Method Installs @@ -1157,6 +1377,7 @@ Function ${un}uninstallSalt ${AndIf} $INSTDIR != $ProgramFiles64 ${AndIf} $INSTDIR != $SysDrive ${AndIf} $INSTDIR != $WinDir + ${LogMsg} "Removing INSTDIR" SetOutPath "$SysDrive" # Can't remove CWD RMDir /r $INSTDIR ${EndIf} @@ -1166,11 +1387,14 @@ Function ${un}uninstallSalt # We want to remove ProgramFiles\Salt Project # Only delete Salt Project directory if it's in Program Files # Otherwise, we can't guess where the user may have installed salt - ${GetParent} $INSTDIR $0 # Get parent directory (Salt Project) - ${If} $0 == "$ProgramFiles\Salt Project" # Make sure it's ProgramFiles - ${OrIf} $0 == "$ProgramFiles64\Salt Project" # Make sure it's Program Files (x86) + ${LogMsg} "Getting InstDir Parent Directory" + ${GetParent} $INSTDIR $R0 # Get parent directory (Salt Project) + ${LogMsg} "Parent: $R0" + ${If} $R0 == "$ProgramFiles\Salt Project" # Make sure it's ProgramFiles + ${OrIf} $R0 == "$ProgramFiles64\Salt Project" # Make sure it's Program Files (x86) + ${LogMsg} "Removing Salt Project directory from Program Files" SetOutPath "$SysDrive" # Can't remove CWD - RMDir /r $0 + RMDir /r $R0 ${EndIf} # If RootDir is still empty, use C:\salt @@ -1181,32 +1405,45 @@ Function ${un}uninstallSalt # Expand any environment variables ExpandEnvStrings $RootDir $RootDir + ${LogMsg} "Removing RootDir: $RootDir" + # Remove the Salt Project directory in 
ProgramData # The Salt Project directory will only ever be in ProgramData # It is not user selectable - ${GetParent} $RootDir $0 # Get parent directory - ${If} $0 == "$APPDATA\Salt Project" # Make sure it's not ProgramData + ${LogMsg} "Getting RootDir Parent Directory" + ${GetParent} $RootDir $R0 # Get parent directory + ${LogMsg} "Parent: $R0" + ${If} $R0 == "$APPDATA\Salt Project" # Make sure it's not ProgramData + ${LogMsg} "Removing Parent Directory from APPDATA" SetOutPath "$SysDrive" # Can't remove CWD - RMDir /r $0 + RMDir /r $R0 ${EndIf} ${EndIf} finished: + ${LogMsg} "Uninstall Complete" FunctionEnd !macroend - - !insertmacro uninstallSalt "" !insertmacro uninstallSalt "un." Function un.onUninstSuccess + HideWindow - MessageBox MB_OK|MB_USERICON \ - "$(^Name) was successfully removed from your computer." \ - /SD IDOK + + StrCpy $msg "$(^Name) was successfully removed from your computer." + ${LogMsg} $msg + MessageBox MB_OK|MB_USERICON $msg /SD IDOK + + # I don't know of another way to fix this. The installer hangs intermittently + # This will force kill the installer process. This must be the last thing that + # is run. 
+ StrCpy $1 "wmic Path win32_process where $\"name like '$EXEFILE'$\" Call Terminate" + nsExec::Exec $1 + FunctionEnd @@ -1228,38 +1465,42 @@ FunctionEnd # # ${Trim} $0 $1 ; Trimmed String, String to Trim #------------------------------------------------------------------------------ -Function Trim +!macro Func_Trim un + Function ${un}Trim - Exch $R1 # Original string - Push $R2 + Exch $R1 # Original string + Push $R2 - Loop: - StrCpy $R2 "$R1" 1 - StrCmp "$R2" " " TrimLeft - StrCmp "$R2" "$\r" TrimLeft - StrCmp "$R2" "$\n" TrimLeft - StrCmp "$R2" "$\t" TrimLeft - GoTo Loop2 - TrimLeft: - StrCpy $R1 "$R1" "" 1 - Goto Loop + Loop: + StrCpy $R2 "$R1" 1 + StrCmp "$R2" " " TrimLeft + StrCmp "$R2" "$\r" TrimLeft + StrCmp "$R2" "$\n" TrimLeft + StrCmp "$R2" "$\t" TrimLeft + GoTo Loop2 + TrimLeft: + StrCpy $R1 "$R1" "" 1 + Goto Loop - Loop2: - StrCpy $R2 "$R1" 1 -1 - StrCmp "$R2" " " TrimRight - StrCmp "$R2" "$\r" TrimRight - StrCmp "$R2" "$\n" TrimRight - StrCmp "$R2" "$\t" TrimRight - GoTo Done - TrimRight: - StrCpy $R1 "$R1" -1 - Goto Loop2 + Loop2: + StrCpy $R2 "$R1" 1 -1 + StrCmp "$R2" " " TrimRight + StrCmp "$R2" "$\r" TrimRight + StrCmp "$R2" "$\n" TrimRight + StrCmp "$R2" "$\t" TrimRight + GoTo Done + TrimRight: + StrCpy $R1 "$R1" -1 + Goto Loop2 - Done: - Pop $R2 - Exch $R1 + Done: + Pop $R2 + Exch $R1 -FunctionEnd + FunctionEnd +!macroend +!insertmacro Func_Trim "" +!insertmacro Func_Trim "un." #------------------------------------------------------------------------------ @@ -1351,53 +1592,6 @@ Function Explode FunctionEnd -#------------------------------------------------------------------------------ -# StrContains -# -# This function does a case sensitive searches for an occurrence of a substring in a string. -# It returns the substring if it is found. -# Otherwise it returns null(""). 
-# Written by kenglish_hi -# Adapted from StrReplace written by dandaman32 -#------------------------------------------------------------------------------ -Function StrContains - - # Initialize variables - Var /GLOBAL STR_HAYSTACK - Var /GLOBAL STR_NEEDLE - Var /GLOBAL STR_CONTAINS_VAR_1 - Var /GLOBAL STR_CONTAINS_VAR_2 - Var /GLOBAL STR_CONTAINS_VAR_3 - Var /GLOBAL STR_CONTAINS_VAR_4 - Var /GLOBAL STR_RETURN_VAR - - Exch $STR_NEEDLE - Exch 1 - Exch $STR_HAYSTACK - # Uncomment to debug - #MessageBox MB_OK 'STR_NEEDLE = $STR_NEEDLE STR_HAYSTACK = $STR_HAYSTACK ' - StrCpy $STR_RETURN_VAR "" - StrCpy $STR_CONTAINS_VAR_1 -1 - StrLen $STR_CONTAINS_VAR_2 $STR_NEEDLE - StrLen $STR_CONTAINS_VAR_4 $STR_HAYSTACK - - loop: - IntOp $STR_CONTAINS_VAR_1 $STR_CONTAINS_VAR_1 + 1 - StrCpy $STR_CONTAINS_VAR_3 $STR_HAYSTACK $STR_CONTAINS_VAR_2 $STR_CONTAINS_VAR_1 - StrCmp $STR_CONTAINS_VAR_3 $STR_NEEDLE found - StrCmp $STR_CONTAINS_VAR_1 $STR_CONTAINS_VAR_4 done - Goto loop - - found: - StrCpy $STR_RETURN_VAR $STR_NEEDLE - Goto done - - done: - Pop $STR_NEEDLE # Prevent "invalid opcode" errors and keep the stack clean - Exch $STR_RETURN_VAR -FunctionEnd - - #------------------------------------------------------------------------------ # UninstallMSI Function # - Uninstalls MSI by product code @@ -1438,6 +1632,8 @@ Function getExistingInstallation # This function also sets the RootDir and INSTDIR variables used by the # installer. + ${LogMsg} "Detecting existing installation" + # Reset ExistingInstallation StrCpy $ExistingInstallation 0 @@ -1445,6 +1641,7 @@ Function getExistingInstallation # Use RunningX64 here to get the Architecture for the system running the # installer. 
# There are 3 scenarios here: + ${LogMsg} "Setting Default InstDir" ${If} ${RunningX64} StrCpy $INSTDIR "$ProgramFiles64\Salt Project\Salt" ${Else} @@ -1457,6 +1654,7 @@ Function getExistingInstallation SetShellVarContext all # Set default location of for salt config + ${LogMsg} "Setting Default RootDir" StrCpy $RootDir "$APPDATA\Salt Project\Salt" # The NSIS installer is a 32bit application and will use the WOW6432Node in @@ -1468,9 +1666,14 @@ Function getExistingInstallation ${EndIf} # Check for existing new method installation from registry + ${LogMsg} "Looking for New Method installation" + # Look for `install_dir` in HKLM\SOFTWARE\Salt Project\Salt + ${LogMsg} "Getting INSTDIR from Registry" ReadRegStr $R0 HKLM "SOFTWARE\Salt Project\Salt" "install_dir" StrCmp $R0 "" checkOldInstallation + + ${LogMsg} "Detected existing installation" StrCpy $ExistingInstallation 1 # Set INSTDIR to the location in the registry @@ -1478,31 +1681,39 @@ Function getExistingInstallation # Expand any environment variables it contains ExpandEnvStrings $INSTDIR $INSTDIR + ${LogMsg} "INSTDIR: $INSTDIR" + # Set RootDir, if defined + ${LogMsg} "Getting RootDir" ReadRegStr $R0 HKLM "SOFTWARE\Salt Project\Salt" "root_dir" StrCmp $R0 "" finished StrCpy $RootDir $R0 # Expand any environment variables it contains ExpandEnvStrings $RootDir $RootDir + ${LogMsg} "RootDir: $RootDir" Goto finished # Check for existing old method installation # Look for `python.exe` in C:\salt\bin checkOldInstallation: + ${LogMsg} "Looking for Old Method installation" IfFileExists "C:\salt\bin\python.exe" 0 newInstallation StrCpy $ExistingInstallation 1 StrCpy $INSTDIR "C:\salt" StrCpy $RootDir "C:\salt" + ${LogMsg} "Found Old Method installation" Goto finished # This is a new installation # Check if custom location was passed via command line newInstallation: + ${LogMsg} "This is a New Installation" ${IfNot} $CustomLocation == "" StrCpy $INSTDIR $CustomLocation ${EndIf} finished: + ${LogMsg} "Finished detecting 
installation type" SetRegView 32 # View the 32 bit portion of the registry FunctionEnd @@ -1510,6 +1721,8 @@ FunctionEnd Function getExistingMinionConfig + ${LogMsg} "Getting existing Minion Config" + # Set Config Found Default Value StrCpy $ExistingConfigFound 0 @@ -1517,11 +1730,15 @@ Function getExistingMinionConfig # Root dir is usually ProgramData\Salt Project\Salt\conf though it may be # C:\salt\conf if Salt was installed the old way + ${LogMsg} "Looking for minion config in $RootDir" IfFileExists "$RootDir\conf\minion" check_owner + ${LogMsg} "Looking for minion config in C:\salt" IfFileExists "C:\salt\conf\minion" old_location confNotFound old_location: + ${LogMsg} "Found config in old location. Updating RootDir" StrCpy $RootDir "C:\salt" + ${LogMsg} "RootDir: $RootDir" check_owner: # We need to verify the owner of the config directory (C:\salt\conf) to @@ -1556,27 +1773,40 @@ Function getExistingMinionConfig # running the install with the silent option (/S) it will backup the # untrusted config directory and continue with the default config. + ${LogMsg} "Validating permissions to config" AccessControl::GetFileOwner /SID "$RootDir\conf" Pop $0 # Check for valid SIDs StrCmp $0 "S-1-5-32-544" correct_owner # Administrators Group (NullSoft) StrCmp $0 "S-1-5-18" correct_owner # Local System (MSI) - MessageBox MB_YESNO \ - "Insecure config found at $RootDir\conf. If you continue, the \ + StrCpy $msg "Insecure config found at $RootDir\conf. If you continue, the \ config directory will be renamed to $RootDir\conf.insecure \ - and the default config will be used. Continue?" \ - /SD IDYES IDYES insecure_config + and the default config will be used. Continue?" 
+ ${LogMsg} $msg + MessageBox MB_YESNO $msg /SD IDYES IDYES insecure_config + ${LogMsg} "Aborting" Abort insecure_config: # Backing up insecure config - Rename "$RootDir\conf" "$RootDir\conf.insecure-${TIME_STAMP}" + ${LogMsg} "Backing up insecure config" + Rename "$RootDir\conf" "$RootDir\conf.insecure-$TimeStamp" Goto confNotFound correct_owner: + ${LogMsg} "Found existing config with correct permissions" StrCpy $ExistingConfigFound 1 + ${LogMsg} "Opening minion config read-only" + ClearErrors FileOpen $0 "$RootDir\conf\minion" r + IfErrors 0 get_config_values + ${LogMsg} "There was an error opening the minion config" + ${LogMsg} "Config values will not be detected" + Goto set_default_values + + get_config_values: + ${LogMsg} "Getting config values from existing config" confLoop: ClearErrors # clear Errors @@ -1616,17 +1846,21 @@ Function getExistingMinionConfig Goto confLoop EndOfFile: - FileClose $0 + FileClose $0 confNotFound: + ${LogMsg} "Config not found" - # Set Default Config Values if not found - ${If} $MasterHost_Cfg == "" - StrCpy $MasterHost_Cfg "salt" - ${EndIf} - ${If} $MinionName_Cfg == "" - StrCpy $MinionName_Cfg "hostname" - ${EndIf} + set_default_values: + # Set Default Config Values if not found + ${If} $MasterHost_Cfg == "" + ${LogMsg} "Setting master host setting to default: salt" + StrCpy $MasterHost_Cfg "salt" + ${EndIf} + ${If} $MinionName_Cfg == "" + ${LogMsg} "Setting minion id setting to default: hostname" + StrCpy $MinionName_Cfg "hostname" + ${EndIf} FunctionEnd @@ -1634,38 +1868,69 @@ FunctionEnd Var cfg_line Var chk_line Var lst_check +Var tgt_file +Var tmp_file Function updateMinionConfig - ClearErrors - FileOpen $0 "$RootDir\conf\minion" "r" # open target file for reading - GetTempFileName $R0 # get new temp file name - FileOpen $1 $R0 "w" # open temp file for writing - StrCpy $ConfigWriteMaster 1 # write the master config value StrCpy $ConfigWriteMinion 1 # write the minion config value + ${If} $MasterHost == "" # if master is 
empty + ${OrIf} $MasterHost == "salt" # or if master is 'salt' + StrCpy $ConfigWriteMaster 0 # no need to write master config + ${EndIf} # close if statement + ${If} $MinionName == "" # if minion is empty + ${OrIf} $MinionName == "hostname" # and if minion is not 'hostname' + StrCpy $ConfigWriteMinion 0 # no need to write minion config + ${EndIf} # close if statement + + ${If} $ConfigWriteMaster == 0 + ${AndIf} $ConfigWriteMinion == 0 + ${LogMsg} "No config values to update. Config will not be updated" + Goto update_minion_config_finished + ${EndIf} + + ${LogMsg} "Updating Minion Config" + + ${LogMsg} "Opening target file: $RootDir\conf\minion" + ClearErrors + FileOpen $tgt_file "$RootDir\conf\minion" r # open target file for reading + ${If} ${Errors} + ${LogMsg} "Target file could not be opened read-only" + ${LogMsg} "Minion config will not be updated" + Goto update_minion_config_finished + ${EndIf} + + GetTempFileName $R0 # get new temp file name + ${LogMsg} "Opening temp file: $R0" + ClearErrors + FileOpen $tmp_file "$R0" w # open temp file for writing + ${If} ${Errors} + ${LogMsg} "Temp file could not be opened for writing" + ${LogMsg} "Minion config will not be updated" + Goto update_minion_config_finished + ${EndIf} + loop: # loop through each line - FileRead $0 $cfg_line # read line from target file - IfErrors done # end if errors are encountered (end of line) + ${LogMsg} "Reading line from target config file" + ClearErrors + FileRead $tgt_file $cfg_line # read line from target file + ${If} ${Errors} + ${LogMsg} "Error: Most likely reached End-Of-File" + Goto done + ${EndIf} loop_after_read: StrCpy $lst_check 0 # list check not performed - ${If} $MasterHost == "" # if master is empty - ${OrIf} $MasterHost == "salt" # or if master is 'salt' - StrCpy $ConfigWriteMaster 0 # no need to write master config - ${EndIf} # close if statement - ${If} $MinionName == "" # if minion is empty - ${OrIf} $MinionName == "hostname" # and if minion is not 'hostname' - StrCpy 
$ConfigWriteMinion 0 # no need to write minion config - ${EndIf} # close if statement - ${If} $ConfigWriteMaster == 1 # if we need to write master config ${StrLoc} $3 $cfg_line "master:" ">" # where is 'master:' in this line ${If} $3 == 0 # is it in the first... ${OrIf} $3 == 1 # or second position (account for comments) + ${LogMsg} "Found master. Updating temp config" + ${Explode} $9 "," $MasterHost # Split the hostname on commas, $9 is the number of items found ${If} $9 == 1 # 1 means only a single master was passed StrCpy $cfg_line "master: $MasterHost$\r$\n" # write the master @@ -1684,7 +1949,7 @@ Function updateMinionConfig # Remove remaining items in list ${While} $lst_check == 0 # while list item found - FileRead $0 $chk_line # read line from target file + FileRead $tgt_file $chk_line # read line from target file IfErrors done # end if errors are encountered (end of line) ${StrLoc} $3 $chk_line " - " ">" # where is 'master:' in this line ${If} $3 == "" # is it in the first... @@ -1703,12 +1968,21 @@ Function updateMinionConfig ${StrLoc} $3 $cfg_line "id:" ">" # where is 'id:' in this line ${If} $3 == 0 # is it in the first... ${OrIf} $3 == 1 # or the second position (account for comments) + ${LogMsg} "Found minion ID. 
Updating temp config" StrCpy $cfg_line "id: $MinionName$\r$\n" # write the minion config setting StrCpy $ConfigWriteMinion 0 # minion value written to config ${EndIf} # close if statement ${EndIf} # close if statement - FileWrite $1 $cfg_line # write changed or unchanged line to temp file + ClearErrors + ${LogMsg} "Writing config line(s) to temp file" + # Enable this line for troubleshooting + # ${LogMsg} "cfg_line: $cfg_line" + FileWrite $tmp_file $cfg_line # write changed or unchanged line to temp file + ${If} ${Errors} + ${LogMsg} "There was an error writing new config line(s) to temp file" + Goto update_minion_config_finished + ${EndIf} ${If} $lst_check == 1 # master not written to the config StrCpy $cfg_line $chk_line @@ -1722,6 +1996,8 @@ Function updateMinionConfig # Does master config still need to be written ${If} $ConfigWriteMaster == 1 # master not written to the config + ${LogMsg} "Master not found in existing config. Appending to the bottom" + ${Explode} $9 "," $MasterHost # split the hostname on commas, $9 is the number of items found ${If} $9 == 1 # 1 means only a single master was passed StrCpy $cfg_line "master: $MasterHost" # write the master @@ -1737,268 +2013,201 @@ Function updateMinionConfig Goto loop_explode_2 # do it again ${EndIf} # close if statement ${EndIf} # close if statement - FileWrite $1 $cfg_line # write changed or unchanged line to temp file + + ClearErrors + ${LogMsg} "Writing master config to temp file" + FileWrite $tmp_file $cfg_line # write changed or unchanged line to temp file + ${If} ${Errors} + ${LogMsg} "There was an error writing master config to the temp file" + ${LogMsg} "cfg_line: $cfg_line" + Goto update_minion_config_finished + ${EndIf} ${EndIf} # close if statement ${If} $ConfigWriteMinion == 1 # minion ID not written to the config + ${LogMsg} "Minion ID not found in existing config. 
Appending to the bottom" StrCpy $cfg_line "$\r$\nid: $MinionName" # write the minion config setting - FileWrite $1 $cfg_line # write changed or unchanged line to temp file + + ClearErrors + ${LogMsg} "Writing minion id to temp config file" + FileWrite $tmp_file $cfg_line # write changed or unchanged line to temp file + ${If} ${Errors} + ${LogMsg} "There was an error writing minion id to temop config file" + ${LogMsg} "cfg_line: $cfg_line" + Goto update_minion_config_finished + ${EndIf} ${EndIf} # close if statement - FileClose $0 # close target file - FileClose $1 # close temp file + ${LogMsg} "Closing config files" + FileClose $tgt_file # close target file + FileClose $tmp_file # close temp file + ${LogMsg} "Deleting target config" Delete "$RootDir\conf\minion" # delete target file + ${LogMsg} "Copying new target config" CopyFiles /SILENT $R0 "$RootDir\conf\minion" # copy temp file to target file + ${LogMsg} "Deleting old temp file" Delete $R0 # delete temp file + update_minion_config_finished: + ${LogMsg} "Update minion config finished" + FunctionEnd Function un.parseUninstallerCommandLineSwitches - # Load the parameters - ${GetParameters} $R0 - - # Display Help - ClearErrors - ${GetOptions} $R0 "/?" $R1 - IfErrors display_un_help_not_found - - # Using a message box here - # I couldn't get the console output to work with the uninstaller - MessageBox MB_OK \ - "Help for Salt Minion Uninstallation\ - $\n\ - $\n==============================================\ - $\n\ - $\n/delete-install-dir$\tDelete the installation directory that contains the\ - $\n$\t$\tconfig and pki directories. Default is to not delete\ - $\n$\t$\tthe installation directory\ - $\n\ - $\n$\t$\tThis applies to old method installations where\ - $\n$\t$\tthe root directory and the installation directory\ - $\n$\t$\tare the same (C:\salt)\ - $\n\ - $\n/delete-root-dir$\tDelete the root directory that contains the config\ - $\n$\t$\tand pki directories. 
Also removes the installation directory\ - $\n$\t$\tincluding the extras directory. Default is to not delete\ - $\n\ - $\n$\t$\tThis applies to new method installations where the\ - $\n$\t$\troot directory is in ProgramData and the installation\ - $\n$\t$\tdirectory is user defined, usually Program Files\ - $\n\ - $\n/S$\t$\tUninstall Salt silently\ - $\n\ - $\n/?$\t$\tDisplay this help screen\ - $\n\ - $\n--------------------------------------------------------------------------------------------\ - $\n\ - $\nExamples:\ - $\n\ - $\n$\tuninst.exe /S\ - $\n\ - $\n$\tuninst.exe /S /delete-root-dir\ - $\n\ - $\n==============================================" - - Abort - - display_un_help_not_found: + ${LogMsg} "Parsing command line parameters for the Uninstaller" # Load the parameters - ${GetParameters} $R0 + ${GetParameters} $cmdLineParams # Uninstaller: Remove Installation Directory + ${LogMsg} "Checking /delete-install-dir" ClearErrors - ${GetOptions} $R0 "/delete-install-dir" $R1 - IfErrors delete_install_dir_not_found - StrCpy $DeleteInstallDir 1 - delete_install_dir_not_found: + ${GetOptions} $cmdLineParams "/delete-install-dir" $R1 + ${If} ${Errors} + ${LogMsg} "/delete-install-dir not found" + ${Else} + ${LogMsg} "Found /delete-install-dir" + StrCpy $DeleteInstallDir 1 + ${EndIf} # Uninstaller: Remove Root Directory + ${LogMsg} "Checking /delete-root-dir" ClearErrors - ${GetOptions} $R0 "/delete-root-dir" $R1 - IfErrors delete_root_dir_not_found - StrCpy $DeleteRootDir 1 - delete_root_dir_not_found: + ${GetOptions} $cmdLineParams "/delete-root-dir" $R1 + ${If} ${Errors} + ${LogMsg} "/delete-root-dir not found" + ${Else} + ${LogMsg} "Found /delete-root-dir" + StrCpy $DeleteRootDir 1 + ${EndIf} FunctionEnd Function parseInstallerCommandLineSwitches + ${LogMsg} "Parsing command line parameters for the Installer" + # Load the parameters - ${GetParameters} $R0 - - # Display Help - ClearErrors - ${GetOptions} $R0 "/?" 
$R1 - IfErrors display_help_not_found - - System::Call 'kernel32::GetStdHandle(i -11)i.r0' - System::Call 'kernel32::AttachConsole(i -1)i.r1' - ${If} $0 = 0 - ${OrIf} $1 = 0 - System::Call 'kernel32::AllocConsole()' - System::Call 'kernel32::GetStdHandle(i -11)i.r0' - ${EndIf} - FileWrite $0 "$\n" - FileWrite $0 "$\n" - FileWrite $0 "Help for Salt Minion installation$\n" - FileWrite $0 "===============================================================================$\n" - FileWrite $0 "$\n" - FileWrite $0 "/minion-name=$\t$\tA string value to set the minion name. Default value is$\n" - FileWrite $0 "$\t$\t$\t'hostname'. Setting the minion name causes the installer$\n" - FileWrite $0 "$\t$\t$\tto use the default config or a custom config if defined$\n" - FileWrite $0 "$\n" - FileWrite $0 "/master=$\t$\tA string value to set the IP address or hostname of the$\n" - FileWrite $0 "$\t$\t$\tmaster. Default value is 'salt'. You may pass a single$\n" - FileWrite $0 "$\t$\t$\tmaster or a comma-separated list of masters. Setting$\n" - FileWrite $0 "$\t$\t$\tthe master will cause the installer to use the default$\n" - FileWrite $0 "$\t$\t$\tconfig or a custom config if defined$\n" - FileWrite $0 "$\n" - FileWrite $0 "/start-minion=$\t$\t1 will start the minion service, 0 will not.$\n" - FileWrite $0 "$\t$\t$\tDefault is 1$\n" - FileWrite $0 "$\n" - FileWrite $0 "/start-minion-delayed$\tSet the minion start type to 'Automatic (Delayed Start)'$\n" - FileWrite $0 "$\n" - FileWrite $0 "/default-config$\t$\tOverwrite the existing config if present with the$\n" - FileWrite $0 "$\t$\t$\tdefault config for salt. Default is to use the existing$\n" - FileWrite $0 "$\t$\t$\tconfig if present. 
If /master and/or /minion-name is$\n" - FileWrite $0 "$\t$\t$\tpassed, those values will be used to update the new$\n" - FileWrite $0 "$\t$\t$\tdefault config$\n" - FileWrite $0 "$\n" - FileWrite $0 "$\t$\t$\tAny existing config will be backed up by appending$\n" - FileWrite $0 "$\t$\t$\ta timestamp and a .bak extension. That includes$\n" - FileWrite $0 "$\t$\t$\tthe minion file and the minion.d directory$\n" - FileWrite $0 "$\n" - FileWrite $0 "/custom-config=$\t$\tA string value specifying the name of a custom config$\n" - FileWrite $0 "$\t$\t$\tfile in the same path as the installer or the full path$\n" - FileWrite $0 "$\t$\t$\tto a custom config file. If /master and/or /minion-name$\n" - FileWrite $0 "$\t$\t$\tis passed, those values will be used to update the new$\n" - FileWrite $0 "$\t$\t$\tcustom config$\n" - FileWrite $0 "$\n" - FileWrite $0 "$\t$\t$\tAny existing config will be backed up by appending$\n" - FileWrite $0 "$\t$\t$\ta timestamp and a .bak extension. That includes$\n" - FileWrite $0 "$\t$\t$\tthe minion file and the minion.d directory$\n" - FileWrite $0 "$\n" - FileWrite $0 "/install-dir=$\t$\tSpecify the installation location for the Salt binaries.$\n" - FileWrite $0 "$\t$\t$\tThis will be ignored for existing installations.$\n" - FileWrite $0 "$\n" - FileWrite $0 "/move-config$\t$\tIf config is found at C:\salt it will be moved to %ProgramData%$\n" - FileWrite $0 "$\n" - FileWrite $0 "/S$\t$\t$\tInstall Salt silently$\n" - FileWrite $0 "$\n" - FileWrite $0 "/?$\t$\t$\tDisplay this help screen$\n" - FileWrite $0 "$\n" - FileWrite $0 "-------------------------------------------------------------------------------$\n" - FileWrite $0 "$\n" - FileWrite $0 "Examples:$\n" - FileWrite $0 "$\n" - FileWrite $0 " $EXEFILE /S$\n" - FileWrite $0 "$\n" - FileWrite $0 " $EXEFILE /S /minion-name=myminion /master=master.mydomain.com /start-minion-delayed$\n" - FileWrite $0 "$\n" - FileWrite $0 " $EXEFILE /S /minion-name=myminion /master=master.mydomain.com 
/install-dir=$\"C:\Software\salt$\"$\n" - FileWrite $0 "$\n" - FileWrite $0 "===============================================================================$\n" - FileWrite $0 "$\n" - System::Free $0 - System::Free $1 - System::Call 'kernel32::FreeConsole()' - - # Give the user back the prompt - !define VK_RETURN 0x0D ; Enter Key - !define KEYEVENTF_EXTENDEDKEY 0x0001 - !define KEYEVENTF_KEYUP 0x0002 - System::Call "user32::keybd_event(i${VK_RETURN}, i0x45, i${KEYEVENTF_EXTENDEDKEY}|0, i0)" - System::Call "user32::keybd_event(i${VK_RETURN}, i0x45, i${KEYEVENTF_EXTENDEDKEY}|${KEYEVENTF_KEYUP}, i0)" - Abort - - display_help_not_found: - - # Set default value for Use Existing Config - StrCpy $ConfigType "Existing Config" + ${GetParameters} $cmdLineParams + ${LogMsg} "Passed: $cmdLineParams" # Check for start-minion switches - # /start-service is to be deprecated, so we must check for both - ${GetOptions} $R0 "/start-service=" $R1 - ${GetOptions} $R0 "/start-minion=" $R2 - - # Service: Start Salt Minion - ${IfNot} $R2 == "" + ${LogMsg} "Checking for /start-minion" + ${GetOptions} $cmdLineParams "/start-minion=" $R1 + ${IfNot} $R1 == "" + ${LogMsg} "Found /start-minion=$R1" # If start-minion was passed something, then set it - StrCpy $StartMinion $R2 - ${ElseIfNot} $R1 == "" - # If start-service was passed something, then set StartMinion to that StrCpy $StartMinion $R1 - MessageBox MB_OK|MB_ICONINFORMATION \ - "`/start-service` is being deprecated. Please use `/start-minion` \ - instead." /SD IDOK ${Else} # Otherwise default to 1 + ${LogMsg} "/start-minion not found. 
Using default" StrCpy $StartMinion 1 ${EndIf} # Service: Minion Startup Type Delayed + ${LogMsg} "Checking for /start-minion-delayed" ClearErrors - ${GetOptions} $R0 "/start-minion-delayed" $R1 - IfErrors start_minion_delayed_not_found - StrCpy $StartMinionDelayed 1 - start_minion_delayed_not_found: + ${GetOptions} $cmdLineParams "/start-minion-delayed" $R1 + ${If} ${Errors} + ${LogMsg} "/start-minion-delayed not found" + ${Else} + ${LogMsg} "Found /start-minion-delayed" + StrCpy $StartMinionDelayed 1 + ${EndIf} + + # Set default value for Use Existing Config + StrCpy $ConfigType "Existing Config" # Minion Config: Master IP/Name # If setting master, we don't want to use existing config - ${GetOptions} $R0 "/master=" $R1 - ${IfNot} $R1 == "" + ${LogMsg} "Checking for /master" + ${GetOptions} $cmdLineParams "/master=" $R1 + ${If} ${Errors} + ${LogMsg} "/master= not found. Using default" + StrCpy $MasterHost "salt" + ${ElseIfNot} $R1 == "" + ${LogMsg} "Found /master=$R1" StrCpy $MasterHost $R1 StrCpy $ConfigType "Default Config" - ${ElseIf} $MasterHost == "" + ${Else} + ${LogMsg} "/master found, but value not passed. Using default value" StrCpy $MasterHost "salt" ${EndIf} # Minion Config: Minion ID # If setting minion id, we don't want to use existing config - ${GetOptions} $R0 "/minion-name=" $R1 - ${IfNot} $R1 == "" + ${LogMsg} "Checking for /minion-name" + ${GetOptions} $cmdLineParams "/minion-name=" $R1 + ${If} ${Errors} + ${LogMsg} "/minion-name= not found. Using default" + StrCpy $MinionName "hostname" + ${ElseIfNot} $R1 == "" + ${LogMsg} "Found /minion-name=$R1" StrCpy $MinionName $R1 StrCpy $ConfigType "Default Config" - ${ElseIf} $MinionName == "" + ${Else} + ${LogMsg} "/minion-name= found, but value not passed. 
Using default" StrCpy $MinionName "hostname" ${EndIf} # Use Default Config + ${LogMsg} "Checking for /default-config" ClearErrors - ${GetOptions} $R0 "/default-config" $R1 - IfErrors default_config_not_found - StrCpy $ConfigType "Default Config" - default_config_not_found: + ${GetOptions} $cmdLineParams "/default-config" $R1 + ${If} ${Errors} + ${LogMsg} "/default-config not found" + ${Else} + ${LogMsg} "Found /default-config" + StrCpy $ConfigType "Default Config" + ${EndIf} # Use Custom Config # Set default value for Use Custom Config - StrCpy $CustomConfig "" + ${LogMsg} "Checking for /custom-config" # Existing config will get a `.bak` extension - ${GetOptions} $R0 "/custom-config=" $R1 - ${IfNot} $R1 == "" - # A Custom Config was passed, set it + ${GetOptions} $cmdLineParams "/custom-config=" $R1 + ${If} ${Errors} + ${LogMsg} "/custom-config= not found" + StrCpy $CustomConfig "" + ${ElseIfNot} $R1 == "" + ${LogMsg} "Found /custom-config=$R1" StrCpy $CustomConfig $R1 StrCpy $ConfigType "Custom Config" + ${Else} + ${LogMsg} "/custom-config= found, but value not passed" + StrCpy $CustomConfig "" ${EndIf} # Set Install Location + ${LogMsg} "Checking for /install-dir" ClearErrors - ${GetOptions} $R0 "/install-dir=" $R1 - ${IfNot} $R1 == "" + ${GetOptions} $cmdLineParams "/install-dir=" $R1 + ${If} ${Errors} + ${LogMsg} "/install-dir= not found" + StrCpy $CustomLocation "" + ${ElseIfNot} $R1 == "" # A Custom Location was passed, set it + ${LogMsg} "Found /install-dir=$R1" StrCpy $CustomLocation $R1 + ${Else} + ${LogMsg} "/install-dir= found, but value not passed" + StrCpy $CustomConfig "" ${EndIf} # Set Move Config Option + ${LogMsg} "Checking for /move-config" ClearErrors - ${GetOptions} $R0 "/move-config" $R1 - IfErrors move_config_not_found - StrCpy $MoveExistingConfig 1 - move_config_not_found: + ${GetOptions} $cmdLineParams "/move-config" $R1 + ${If} ${Errors} + ${LogMsg} "/move-config not found" + StrCpy $MoveExistingConfig 0 + ${Else} + ${LogMsg} "Found 
/move-config" + StrCpy $MoveExistingConfig 1 + ${EndIf} FunctionEnd diff --git a/pkg/windows/nsis/tests/setup.ps1 b/pkg/windows/nsis/tests/setup.ps1 index 2b136f66600..d63b4ed972a 100644 --- a/pkg/windows/nsis/tests/setup.ps1 +++ b/pkg/windows/nsis/tests/setup.ps1 @@ -94,8 +94,7 @@ $prereq_files | ForEach-Object { } } -$binary_files = @("python.exe", "ssm.exe") - +$binary_files = @("python.exe") $binary_files | ForEach-Object { Write-Host "Creating $_`: " -NoNewline Set-Content -Path "$BUILDENV_DIR\$_" -Value "binary" diff --git a/pkg/windows/prep_salt.ps1 b/pkg/windows/prep_salt.ps1 index c35b6e65546..b4d3ee93dea 100644 --- a/pkg/windows/prep_salt.ps1 +++ b/pkg/windows/prep_salt.ps1 @@ -163,7 +163,6 @@ if ( ! (Test-Path -Path "$BUILD_DIR\ssm.exe") ) { Write-Result "Failed" -ForegroundColor Red exit 1 } - Write-Host $SSM_URL } # Copy the multiminion scripts to the Build directory diff --git a/requirements/base.txt b/requirements/base.txt index 34ea8c2f882..4fc9b69ab41 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -1,6 +1,6 @@ --constraint=constraints.txt -Jinja2 +Jinja2>=3.1.5 jmespath msgpack>=1.0.0 ; python_version < '3.13' msgpack>=1.1.0 ; python_version >= '3.13' @@ -18,6 +18,8 @@ packaging>=21.3 looseversion tornado>=6.3.3 aiohttp>=3.9.0 +urllib3>=1.26.18 + croniter>=0.3.0,!=0.3.22; sys_platform != 'win32' # We need contextvars for salt-ssh. 
diff --git a/requirements/static/ci/py3.10/changelog.txt b/requirements/static/ci/py3.10/changelog.txt index 962d840ca4a..6465c9f6ef3 100644 --- a/requirements/static/ci/py3.10/changelog.txt +++ b/requirements/static/ci/py3.10/changelog.txt @@ -13,7 +13,7 @@ click==8.1.3 # towncrier incremental==22.10.0 # via towncrier -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/py3.10/linux.txt # towncrier diff --git a/requirements/static/ci/py3.10/darwin.txt b/requirements/static/ci/py3.10/darwin.txt index 9cda5d1874a..a481db7d484 100644 --- a/requirements/static/ci/py3.10/darwin.txt +++ b/requirements/static/ci/py3.10/darwin.txt @@ -196,7 +196,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.10/darwin.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/base.txt @@ -545,6 +545,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.10/darwin.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.10/docs.txt b/requirements/static/ci/py3.10/docs.txt index 367514ad7f7..5207291100d 100644 --- a/requirements/static/ci/py3.10/docs.txt +++ b/requirements/static/ci/py3.10/docs.txt @@ -62,7 +62,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/py3.10/linux.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/ci/docs.in diff --git a/requirements/static/ci/py3.10/freebsd.txt b/requirements/static/ci/py3.10/freebsd.txt index 5c94b77ec11..73d5a6c7f13 100644 --- a/requirements/static/ci/py3.10/freebsd.txt +++ b/requirements/static/ci/py3.10/freebsd.txt @@ -195,7 +195,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r requirements/base.txt @@ -550,6 +550,7 
@@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.10/freebsd.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.10/linux.txt b/requirements/static/ci/py3.10/linux.txt index e366c4fffdd..ead40c9db13 100644 --- a/requirements/static/ci/py3.10/linux.txt +++ b/requirements/static/ci/py3.10/linux.txt @@ -219,7 +219,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt # -r requirements/base.txt @@ -614,6 +614,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.10/windows.txt b/requirements/static/ci/py3.10/windows.txt index 7a88b60c379..57fe8f9db4a 100644 --- a/requirements/static/ci/py3.10/windows.txt +++ b/requirements/static/ci/py3.10/windows.txt @@ -193,7 +193,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.10/windows.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/base.txt @@ -497,6 +497,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.10/windows.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.11/changelog.txt b/requirements/static/ci/py3.11/changelog.txt index 70831ca5aa8..0ec4b21d43c 100644 --- a/requirements/static/ci/py3.11/changelog.txt +++ b/requirements/static/ci/py3.11/changelog.txt @@ -13,7 +13,7 @@ click==8.1.3 # towncrier incremental==17.5.0 # via towncrier -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/py3.11/linux.txt # towncrier diff --git a/requirements/static/ci/py3.11/darwin.txt b/requirements/static/ci/py3.11/darwin.txt index 
6d59ab753c3..2d604aaec2a 100644 --- a/requirements/static/ci/py3.11/darwin.txt +++ b/requirements/static/ci/py3.11/darwin.txt @@ -189,7 +189,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.11/darwin.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.11/darwin.txt # -r requirements/base.txt @@ -536,6 +536,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.11/docs.txt b/requirements/static/ci/py3.11/docs.txt index bca5bf5fac9..7519292a8f7 100644 --- a/requirements/static/ci/py3.11/docs.txt +++ b/requirements/static/ci/py3.11/docs.txt @@ -62,7 +62,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/py3.11/linux.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/py3.11/linux.txt # -r requirements/static/ci/docs.in diff --git a/requirements/static/ci/py3.11/freebsd.txt b/requirements/static/ci/py3.11/freebsd.txt index 3d710e0e329..1847d605dc0 100644 --- a/requirements/static/ci/py3.11/freebsd.txt +++ b/requirements/static/ci/py3.11/freebsd.txt @@ -188,7 +188,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.11/freebsd.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.11/freebsd.txt # -r requirements/base.txt @@ -542,6 +542,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.11/linux.txt b/requirements/static/ci/py3.11/linux.txt index 805b8beb593..f8f875c20f9 100644 --- a/requirements/static/ci/py3.11/linux.txt +++ b/requirements/static/ci/py3.11/linux.txt @@ -210,7 +210,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt # jaraco.collections 
-jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt # -r requirements/base.txt @@ -604,6 +604,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.11/windows.txt b/requirements/static/ci/py3.11/windows.txt index af9d97d3b72..a71dfc17ed8 100644 --- a/requirements/static/ci/py3.11/windows.txt +++ b/requirements/static/ci/py3.11/windows.txt @@ -186,7 +186,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt # -r requirements/base.txt @@ -488,6 +488,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.12/changelog.txt b/requirements/static/ci/py3.12/changelog.txt index e35cbed1512..e857893819c 100644 --- a/requirements/static/ci/py3.12/changelog.txt +++ b/requirements/static/ci/py3.12/changelog.txt @@ -13,7 +13,7 @@ click==8.1.3 # towncrier incremental==17.5.0 # via towncrier -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/py3.12/linux.txt # towncrier diff --git a/requirements/static/ci/py3.12/cloud.txt b/requirements/static/ci/py3.12/cloud.txt index 7a3b91cde0b..974066792db 100644 --- a/requirements/static/ci/py3.12/cloud.txt +++ b/requirements/static/ci/py3.12/cloud.txt @@ -259,7 +259,7 @@ jaraco.text==3.11.1 # -c requirements/static/ci/../pkg/py3.12/linux.txt # -c requirements/static/ci/py3.12/linux.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.12/linux.txt # -c requirements/static/ci/py3.12/linux.txt @@ -770,6 +770,7 @@ urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.12/linux.txt # -c 
requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.12/darwin.txt b/requirements/static/ci/py3.12/darwin.txt index 59a6d022564..03adac275f1 100644 --- a/requirements/static/ci/py3.12/darwin.txt +++ b/requirements/static/ci/py3.12/darwin.txt @@ -189,7 +189,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.12/darwin.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.12/darwin.txt # -r requirements/base.txt @@ -536,6 +536,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.12/docs.txt b/requirements/static/ci/py3.12/docs.txt index 50af05dac4e..27cce8217ed 100644 --- a/requirements/static/ci/py3.12/docs.txt +++ b/requirements/static/ci/py3.12/docs.txt @@ -111,7 +111,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/py3.12/linux.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/py3.12/linux.txt # -r requirements/base.txt @@ -289,6 +289,7 @@ uc-micro-py==1.0.1 urllib3==1.26.18 # via # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt # requests yarl==1.9.4 # via diff --git a/requirements/static/ci/py3.12/freebsd.txt b/requirements/static/ci/py3.12/freebsd.txt index b9a10a8f045..a74a701cebd 100644 --- a/requirements/static/ci/py3.12/freebsd.txt +++ b/requirements/static/ci/py3.12/freebsd.txt @@ -188,7 +188,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.12/freebsd.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.12/freebsd.txt # -r requirements/base.txt @@ -542,6 +542,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt # botocore # docker # 
kubernetes diff --git a/requirements/static/ci/py3.12/lint.txt b/requirements/static/ci/py3.12/lint.txt index 769708088f7..14aa64ad48c 100644 --- a/requirements/static/ci/py3.12/lint.txt +++ b/requirements/static/ci/py3.12/lint.txt @@ -286,7 +286,7 @@ jaraco.text==3.11.1 # -c requirements/static/ci/../pkg/py3.12/linux.txt # -c requirements/static/ci/py3.12/linux.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.12/linux.txt # -c requirements/static/ci/py3.12/linux.txt @@ -777,6 +777,7 @@ urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.12/linux.txt # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.12/linux.txt b/requirements/static/ci/py3.12/linux.txt index 35ab4dd5118..61c5d62ac3c 100644 --- a/requirements/static/ci/py3.12/linux.txt +++ b/requirements/static/ci/py3.12/linux.txt @@ -210,7 +210,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.12/linux.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.12/linux.txt # -r requirements/base.txt @@ -604,6 +604,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.12/windows.txt b/requirements/static/ci/py3.12/windows.txt index 5e9b05bc70d..9cabfb9f0e7 100644 --- a/requirements/static/ci/py3.12/windows.txt +++ b/requirements/static/ci/py3.12/windows.txt @@ -186,7 +186,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.12/windows.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.12/windows.txt # -r requirements/base.txt @@ -488,6 +488,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/base.txt # botocore 
# docker # kubernetes diff --git a/requirements/static/ci/py3.13/changelog.txt b/requirements/static/ci/py3.13/changelog.txt index 671e7abcf78..7a206c2e0cd 100644 --- a/requirements/static/ci/py3.13/changelog.txt +++ b/requirements/static/ci/py3.13/changelog.txt @@ -13,7 +13,7 @@ click==8.1.7 # towncrier incremental==24.7.2 # via towncrier -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/py3.13/linux.txt # towncrier diff --git a/requirements/static/ci/py3.13/cloud.txt b/requirements/static/ci/py3.13/cloud.txt index 0e97486b1ae..5704d7bfd50 100644 --- a/requirements/static/ci/py3.13/cloud.txt +++ b/requirements/static/ci/py3.13/cloud.txt @@ -246,7 +246,7 @@ jaraco.text==4.0.0 # -c requirements/static/ci/../pkg/py3.13/linux.txt # -c requirements/static/ci/py3.13/linux.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.13/linux.txt # -c requirements/static/ci/py3.13/linux.txt @@ -740,6 +740,7 @@ urllib3==2.2.3 # via # -c requirements/static/ci/../pkg/py3.13/linux.txt # -c requirements/static/ci/py3.13/linux.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.13/darwin.txt b/requirements/static/ci/py3.13/darwin.txt index 53a44f61f05..949c4e0a3d5 100644 --- a/requirements/static/ci/py3.13/darwin.txt +++ b/requirements/static/ci/py3.13/darwin.txt @@ -179,7 +179,7 @@ jaraco.text==4.0.0 # via # -c requirements/static/ci/../pkg/py3.13/darwin.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.13/darwin.txt # -r requirements/base.txt @@ -515,6 +515,7 @@ typing-extensions==4.12.2 urllib3==2.2.3 # via # -c requirements/static/ci/../pkg/py3.13/darwin.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.13/docs.txt b/requirements/static/ci/py3.13/docs.txt index 42fb95ac55b..e4feb90cb31 100644 --- a/requirements/static/ci/py3.13/docs.txt +++ 
b/requirements/static/ci/py3.13/docs.txt @@ -107,7 +107,7 @@ jaraco.text==4.0.0 # via # -c requirements/static/ci/py3.13/linux.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/py3.13/linux.txt # -r requirements/base.txt @@ -274,6 +274,7 @@ uc-micro-py==1.0.3 urllib3==2.2.3 # via # -c requirements/static/ci/py3.13/linux.txt + # -r requirements/base.txt # requests yarl==1.16.0 # via diff --git a/requirements/static/ci/py3.13/freebsd.txt b/requirements/static/ci/py3.13/freebsd.txt index 49d74ac24e7..28480979ac0 100644 --- a/requirements/static/ci/py3.13/freebsd.txt +++ b/requirements/static/ci/py3.13/freebsd.txt @@ -178,7 +178,7 @@ jaraco.text==4.0.0 # via # -c requirements/static/ci/../pkg/py3.13/freebsd.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.13/freebsd.txt # -r requirements/base.txt @@ -519,6 +519,7 @@ typing-extensions==4.12.2 urllib3==2.2.3 # via # -c requirements/static/ci/../pkg/py3.13/freebsd.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.13/lint.txt b/requirements/static/ci/py3.13/lint.txt index bb96c7c4146..8591eef0be0 100644 --- a/requirements/static/ci/py3.13/lint.txt +++ b/requirements/static/ci/py3.13/lint.txt @@ -272,7 +272,7 @@ jaraco.text==4.0.0 # -c requirements/static/ci/../pkg/py3.13/linux.txt # -c requirements/static/ci/py3.13/linux.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.13/linux.txt # -c requirements/static/ci/py3.13/linux.txt @@ -745,6 +745,7 @@ urllib3==2.2.3 # via # -c requirements/static/ci/../pkg/py3.13/linux.txt # -c requirements/static/ci/py3.13/linux.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.13/linux.txt b/requirements/static/ci/py3.13/linux.txt index ea6ee1f0d47..2b2276daa23 100644 --- a/requirements/static/ci/py3.13/linux.txt +++ 
b/requirements/static/ci/py3.13/linux.txt @@ -200,7 +200,7 @@ jaraco.text==4.0.0 # via # -c requirements/static/ci/../pkg/py3.13/linux.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.13/linux.txt # -r requirements/base.txt @@ -579,6 +579,7 @@ typing-extensions==4.12.2 urllib3==2.2.3 # via # -c requirements/static/ci/../pkg/py3.13/linux.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.13/windows.txt b/requirements/static/ci/py3.13/windows.txt index 29be8123ce0..b844482188b 100644 --- a/requirements/static/ci/py3.13/windows.txt +++ b/requirements/static/ci/py3.13/windows.txt @@ -179,7 +179,7 @@ jaraco.text==4.0.0 # via # -c requirements/static/ci/../pkg/py3.13/windows.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.13/windows.txt # -r requirements/base.txt @@ -470,6 +470,7 @@ typing-extensions==4.12.2 urllib3==2.2.3 # via # -c requirements/static/ci/../pkg/py3.13/windows.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.8/freebsd-crypto.txt b/requirements/static/ci/py3.8/freebsd-crypto.txt deleted file mode 100644 index ce772a5d00b..00000000000 --- a/requirements/static/ci/py3.8/freebsd-crypto.txt +++ /dev/null @@ -1,10 +0,0 @@ -# -# This file is autogenerated by pip-compile -# To update, run: -# -# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.8/freebsd-crypto.txt requirements/static/ci/crypto.in -# -m2crypto==0.38.0 - # via -r requirements/static/ci/crypto.in -pycryptodome==3.19.1 - # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.8/linux-crypto.txt b/requirements/static/ci/py3.8/linux-crypto.txt deleted file mode 100644 index 9d91dda3caf..00000000000 --- a/requirements/static/ci/py3.8/linux-crypto.txt +++ /dev/null @@ -1,10 +0,0 @@ -# -# This file is autogenerated by pip-compile -# To update, 
run: -# -# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.8/linux-crypto.txt requirements/static/ci/crypto.in -# -m2crypto==0.38.0 - # via -r requirements/static/ci/crypto.in -pycryptodome==3.19.1 - # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.8/windows-crypto.txt b/requirements/static/ci/py3.8/windows-crypto.txt deleted file mode 100644 index 6f353e6190a..00000000000 --- a/requirements/static/ci/py3.8/windows-crypto.txt +++ /dev/null @@ -1,10 +0,0 @@ -# -# This file is autogenerated by pip-compile -# To update, run: -# -# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.8/windows-crypto.txt requirements/static/ci/crypto.in -# -m2crypto==0.38.0 - # via -r requirements/static/ci/crypto.in -pycryptodome==3.19.1 - # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.9/changelog.txt b/requirements/static/ci/py3.9/changelog.txt index 4003a98c808..deda7e0fd5b 100644 --- a/requirements/static/ci/py3.9/changelog.txt +++ b/requirements/static/ci/py3.9/changelog.txt @@ -13,7 +13,7 @@ click==8.1.3 # towncrier incremental==22.10.0 # via towncrier -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/py3.9/linux.txt # towncrier diff --git a/requirements/static/ci/py3.9/darwin.txt b/requirements/static/ci/py3.9/darwin.txt index 2201b7af000..738327bcc71 100644 --- a/requirements/static/ci/py3.9/darwin.txt +++ b/requirements/static/ci/py3.9/darwin.txt @@ -196,7 +196,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.9/darwin.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/base.txt @@ -546,6 +546,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.9/darwin.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.9/docs.txt b/requirements/static/ci/py3.9/docs.txt index 
bc711d0b691..ff40bab7188 100644 --- a/requirements/static/ci/py3.9/docs.txt +++ b/requirements/static/ci/py3.9/docs.txt @@ -66,7 +66,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/py3.9/linux.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/ci/docs.in diff --git a/requirements/static/ci/py3.9/freebsd.txt b/requirements/static/ci/py3.9/freebsd.txt index e924c90b5c9..6dfcdd258b7 100644 --- a/requirements/static/ci/py3.9/freebsd.txt +++ b/requirements/static/ci/py3.9/freebsd.txt @@ -195,7 +195,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/base.txt @@ -551,6 +551,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.9/freebsd.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.9/linux.txt b/requirements/static/ci/py3.9/linux.txt index 2ccf064cf5b..27db9b4f9b7 100644 --- a/requirements/static/ci/py3.9/linux.txt +++ b/requirements/static/ci/py3.9/linux.txt @@ -214,7 +214,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.9/linux.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.9/linux.txt # -r requirements/base.txt @@ -603,6 +603,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.9/windows.txt b/requirements/static/ci/py3.9/windows.txt index 4c5734fbb6d..16667315053 100644 --- a/requirements/static/ci/py3.9/windows.txt +++ b/requirements/static/ci/py3.9/windows.txt @@ -193,7 +193,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.9/windows.txt # jaraco.collections -jinja2==3.1.4 
+jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/base.txt @@ -499,6 +499,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.9/windows.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/pkg/py3.10/darwin.txt b/requirements/static/pkg/py3.10/darwin.txt index 3697f08a03f..e7c5f4d4838 100644 --- a/requirements/static/pkg/py3.10/darwin.txt +++ b/requirements/static/pkg/py3.10/darwin.txt @@ -63,7 +63,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -137,7 +137,9 @@ typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests yarl==1.9.4 # via aiohttp zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.10/freebsd.txt b/requirements/static/pkg/py3.10/freebsd.txt index 0b5c2360986..e3496ac6b8f 100644 --- a/requirements/static/pkg/py3.10/freebsd.txt +++ b/requirements/static/pkg/py3.10/freebsd.txt @@ -63,7 +63,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -137,7 +137,9 @@ typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests yarl==1.9.4 # via aiohttp zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.10/linux.txt b/requirements/static/pkg/py3.10/linux.txt index c5b6f00d650..4dacffd79d4 100644 --- a/requirements/static/pkg/py3.10/linux.txt +++ b/requirements/static/pkg/py3.10/linux.txt @@ -63,7 +63,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r 
requirements/base.txt @@ -139,7 +139,9 @@ typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests yarl==1.9.4 # via aiohttp zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.10/windows.txt b/requirements/static/pkg/py3.10/windows.txt index 346451a41c6..8df08748676 100644 --- a/requirements/static/pkg/py3.10/windows.txt +++ b/requirements/static/pkg/py3.10/windows.txt @@ -69,7 +69,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -153,7 +153,9 @@ typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests wmi==1.5.1 ; sys_platform == "win32" # via -r requirements/base.txt xmltodict==0.13.0 ; sys_platform == "win32" diff --git a/requirements/static/pkg/py3.11/darwin.txt b/requirements/static/pkg/py3.11/darwin.txt index 9b22a278bc3..d74f1707aef 100644 --- a/requirements/static/pkg/py3.11/darwin.txt +++ b/requirements/static/pkg/py3.11/darwin.txt @@ -61,7 +61,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -135,7 +135,9 @@ typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests yarl==1.9.4 # via aiohttp zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.11/freebsd.txt b/requirements/static/pkg/py3.11/freebsd.txt index 6bb214365a3..4113018000d 100644 --- a/requirements/static/pkg/py3.11/freebsd.txt +++ b/requirements/static/pkg/py3.11/freebsd.txt @@ -61,7 +61,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r 
requirements/base.txt @@ -137,7 +137,9 @@ typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests yarl==1.9.4 # via aiohttp zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.11/linux.txt b/requirements/static/pkg/py3.11/linux.txt index 1fca05825c7..e08e5f08b6d 100644 --- a/requirements/static/pkg/py3.11/linux.txt +++ b/requirements/static/pkg/py3.11/linux.txt @@ -61,7 +61,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -139,7 +139,9 @@ typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests yarl==1.9.4 # via aiohttp zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.11/windows.txt b/requirements/static/pkg/py3.11/windows.txt index f411797b1d6..962937052fd 100644 --- a/requirements/static/pkg/py3.11/windows.txt +++ b/requirements/static/pkg/py3.11/windows.txt @@ -67,7 +67,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -151,7 +151,9 @@ typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests wmi==1.5.1 ; sys_platform == "win32" # via -r requirements/base.txt xmltodict==0.13.0 ; sys_platform == "win32" diff --git a/requirements/static/pkg/py3.12/darwin.txt b/requirements/static/pkg/py3.12/darwin.txt index 4c94e994465..9d1c3f56e97 100644 --- a/requirements/static/pkg/py3.12/darwin.txt +++ b/requirements/static/pkg/py3.12/darwin.txt @@ -61,7 +61,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r 
requirements/base.txt @@ -135,7 +135,9 @@ typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests yarl==1.9.4 # via aiohttp zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.12/freebsd.txt b/requirements/static/pkg/py3.12/freebsd.txt index bd5dfd4c260..cb2f807d896 100644 --- a/requirements/static/pkg/py3.12/freebsd.txt +++ b/requirements/static/pkg/py3.12/freebsd.txt @@ -61,7 +61,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -137,7 +137,9 @@ typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests yarl==1.9.4 # via aiohttp zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.12/linux.txt b/requirements/static/pkg/py3.12/linux.txt index 88501b7da7a..f6915e57f8e 100644 --- a/requirements/static/pkg/py3.12/linux.txt +++ b/requirements/static/pkg/py3.12/linux.txt @@ -61,7 +61,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -139,7 +139,9 @@ typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests yarl==1.9.4 # via aiohttp zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.12/windows.txt b/requirements/static/pkg/py3.12/windows.txt index eab825e335d..444c121c52a 100644 --- a/requirements/static/pkg/py3.12/windows.txt +++ b/requirements/static/pkg/py3.12/windows.txt @@ -67,7 +67,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -151,7 +151,9 @@ typing-extensions==4.8.0 
# pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests wmi==1.5.1 ; sys_platform == "win32" # via -r requirements/base.txt xmltodict==0.13.0 ; sys_platform == "win32" diff --git a/requirements/static/pkg/py3.13/darwin.txt b/requirements/static/pkg/py3.13/darwin.txt index b89fef4abd9..15890c3b69a 100644 --- a/requirements/static/pkg/py3.13/darwin.txt +++ b/requirements/static/pkg/py3.13/darwin.txt @@ -59,7 +59,7 @@ jaraco.functools==4.1.0 # tempora jaraco.text==4.0.0 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -125,7 +125,9 @@ timelib==0.3.0 tornado==6.4.1 # via -r requirements/base.txt urllib3==2.2.3 - # via requests + # via + # -r requirements/base.txt + # requests yarl==1.16.0 # via aiohttp zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.13/freebsd.txt b/requirements/static/pkg/py3.13/freebsd.txt index 83ae6013b9d..4f14458f2c2 100644 --- a/requirements/static/pkg/py3.13/freebsd.txt +++ b/requirements/static/pkg/py3.13/freebsd.txt @@ -59,7 +59,7 @@ jaraco.functools==4.1.0 # tempora jaraco.text==4.0.0 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -125,7 +125,9 @@ timelib==0.3.0 tornado==6.4.1 # via -r requirements/base.txt urllib3==2.2.3 - # via requests + # via + # -r requirements/base.txt + # requests yarl==1.16.0 # via aiohttp zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.13/linux.txt b/requirements/static/pkg/py3.13/linux.txt index 7796f9d5124..29e777f5c0b 100644 --- a/requirements/static/pkg/py3.13/linux.txt +++ b/requirements/static/pkg/py3.13/linux.txt @@ -59,7 +59,7 @@ jaraco.functools==4.1.0 # tempora jaraco.text==4.0.0 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -127,7 +127,9 @@ timelib==0.3.0 
tornado==6.4.1 # via -r requirements/base.txt urllib3==2.2.3 - # via requests + # via + # -r requirements/base.txt + # requests yarl==1.16.0 # via aiohttp zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.13/windows.txt b/requirements/static/pkg/py3.13/windows.txt index 63c9b1c2c42..48ac4ac3df8 100644 --- a/requirements/static/pkg/py3.13/windows.txt +++ b/requirements/static/pkg/py3.13/windows.txt @@ -61,7 +61,7 @@ jaraco.functools==4.1.0 # tempora jaraco.text==4.0.0 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -136,7 +136,9 @@ timelib==0.3.0 tornado==6.4.1 # via -r requirements/base.txt urllib3==2.2.3 - # via requests + # via + # -r requirements/base.txt + # requests wmi==1.5.1 ; sys_platform == "win32" # via -r requirements/base.txt xmltodict==0.14.2 ; sys_platform == "win32" diff --git a/requirements/static/pkg/py3.9/darwin.txt b/requirements/static/pkg/py3.9/darwin.txt index 30d2446ab00..02e0431d1ea 100644 --- a/requirements/static/pkg/py3.9/darwin.txt +++ b/requirements/static/pkg/py3.9/darwin.txt @@ -63,7 +63,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -137,7 +137,9 @@ typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests yarl==1.9.4 # via aiohttp zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.9/freebsd.txt b/requirements/static/pkg/py3.9/freebsd.txt index 958833b7124..9c668de7d94 100644 --- a/requirements/static/pkg/py3.9/freebsd.txt +++ b/requirements/static/pkg/py3.9/freebsd.txt @@ -63,7 +63,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -137,7 +137,9 @@ 
typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests yarl==1.9.4 # via aiohttp zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.9/linux.txt b/requirements/static/pkg/py3.9/linux.txt index 9826a16cc05..1ed06590e18 100644 --- a/requirements/static/pkg/py3.9/linux.txt +++ b/requirements/static/pkg/py3.9/linux.txt @@ -63,7 +63,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -139,7 +139,9 @@ typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests yarl==1.9.4 # via aiohttp zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.9/windows.txt b/requirements/static/pkg/py3.9/windows.txt index fe5e28e20f2..97c3735b18b 100644 --- a/requirements/static/pkg/py3.9/windows.txt +++ b/requirements/static/pkg/py3.9/windows.txt @@ -69,7 +69,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -154,7 +154,9 @@ typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests wmi==1.5.1 ; sys_platform == "win32" # via -r requirements/base.txt xmltodict==0.13.0 ; sys_platform == "win32" diff --git a/salt/_logging/impl.py b/salt/_logging/impl.py index 321ccf794b5..e531459ea68 100644 --- a/salt/_logging/impl.py +++ b/salt/_logging/impl.py @@ -299,6 +299,8 @@ class SaltLoggingClass(LOGGING_LOGGER_CLASS, metaclass=LoggingMixinMeta): except TypeError: # Python < 3.8 - We still need this for salt-ssh since it will use # the system python, and not out onedir. 
+ # stacklevel was introduced in Py 3.8 + # must be running on old OS with Python 3.6 or 3.7 LOGGING_LOGGER_CLASS._log( self, level, diff --git a/salt/grains/core.py b/salt/grains/core.py index cc9b29aa0ce..541406c0f03 100644 --- a/salt/grains/core.py +++ b/salt/grains/core.py @@ -930,6 +930,10 @@ def _virtual(osdata): grains["virtual"] = "container" grains["virtual_subtype"] = "LXC" break + elif "podman" in output: + grains["virtual"] = "container" + grains["virtual_subtype"] = "Podman" + break elif "amazon" in output: grains["virtual"] = "Nitro" grains["virtual_subtype"] = "Amazon EC2" diff --git a/salt/minion.py b/salt/minion.py index ce19e12ffc3..c82d548f368 100644 --- a/salt/minion.py +++ b/salt/minion.py @@ -2505,7 +2505,7 @@ class Minion(MinionBase): else: data["fun"] = "state.highstate" data["arg"] = [] - self._handle_decoded_payload(data) + self.io_loop.add_callback(self._handle_decoded_payload, data) def _refresh_grains_watcher(self, refresh_interval_in_minutes): """ @@ -2526,6 +2526,7 @@ class Minion(MinionBase): } ) + @tornado.gen.coroutine def _fire_master_minion_start(self): include_grains = False if self.opts["start_event_grains"]: @@ -2533,13 +2534,13 @@ class Minion(MinionBase): # Send an event to the master that the minion is live if self.opts["enable_legacy_startup_events"]: # Old style event. Defaults to False in 3001 release. 
- self._fire_master_main( + yield self._fire_master_main( "Minion {} started at {}".format(self.opts["id"], time.asctime()), "minion_start", include_startup_grains=include_grains, ) # send name spaced event - self._fire_master_main( + yield self._fire_master_main( "Minion {} started at {}".format(self.opts["id"], time.asctime()), tagify([self.opts["id"], "start"], "minion"), include_startup_grains=include_grains, @@ -2978,7 +2979,7 @@ class Minion(MinionBase): # make the schedule to use the new 'functions' loader self.schedule.functions = self.functions self.pub_channel.on_recv(self._handle_payload) - self._fire_master_minion_start() + yield self._fire_master_minion_start() log.info("Minion is ready to receive requests!") # update scheduled job to run with the new master addr @@ -3227,7 +3228,7 @@ class Minion(MinionBase): self.setup_scheduler(before_connect=True) self.sync_connect_master() if self.connected: - self._fire_master_minion_start() + self.io_loop.add_callback(self._fire_master_minion_start) log.info("Minion is ready to receive requests!") # Make sure to gracefully handle SIGUSR1 @@ -3270,7 +3271,8 @@ class Minion(MinionBase): "minion is running under an init system." 
) - self._fire_master_main( + self.io_loop.add_callback( + self._fire_master_main, "ping", "minion_ping", timeout_handler=ping_timeout_handler, diff --git a/salt/modules/baredoc.py b/salt/modules/baredoc.py index 7513b546919..1d91ade25a6 100644 --- a/salt/modules/baredoc.py +++ b/salt/modules/baredoc.py @@ -10,7 +10,6 @@ import ast import itertools import logging import os -from typing import Dict, List import salt.utils.doc import salt.utils.files @@ -36,7 +35,7 @@ def _get_module_name(tree, filename: str) -> str: return module_name -def _get_func_aliases(tree) -> Dict: +def _get_func_aliases(tree) -> dict: """ Get __func_alias__ dict for mapping function names """ @@ -54,7 +53,7 @@ def _get_func_aliases(tree) -> Dict: return fun_aliases -def _get_args(function: str) -> Dict: +def _get_args(function: str) -> dict: """ Given a function def, returns arguments and defaults """ @@ -128,7 +127,7 @@ def _parse_module_docs(module_path, mod_name=None): return salt.utils.doc.strip_rst(ret) -def _parse_module_functions(module_py: str, return_type: str) -> Dict: +def _parse_module_functions(module_py: str, return_type: str) -> dict: """ Parse module files for proper module_name and function name, then gather functions and possibly arguments @@ -161,7 +160,7 @@ def _parse_module_functions(module_py: str, return_type: str) -> Dict: return ret -def _get_files(name=False, type="states", return_type="args") -> List: +def _get_files(name=False, type="states", return_type="args") -> list: """ Determine if modules/states directories or files are requested diff --git a/salt/modules/cmdmod.py b/salt/modules/cmdmod.py index fe1d4412d00..4c7fd40d02a 100644 --- a/salt/modules/cmdmod.py +++ b/salt/modules/cmdmod.py @@ -283,7 +283,10 @@ def _prep_powershell_cmd(win_shell, cmd, encoded_cmd): new_cmd.append("-Command") if isinstance(cmd, list): cmd = " ".join(cmd) - new_cmd.append(f"& {cmd.strip()}") + # We need to append $LASTEXITCODE here to return the actual exit code + # from the script. 
Otherwise, it will always return 1 on any non-zero + # exit code failure. Issue: #60884 + new_cmd.append(f"& {cmd.strip()}; exit $LASTEXITCODE") elif encoded_cmd: new_cmd.extend(["-EncodedCommand", f"{cmd}"]) else: @@ -293,10 +296,10 @@ def _prep_powershell_cmd(win_shell, cmd, encoded_cmd): # Commands that are a specific keyword behave differently. They fail if # you add a "&" to the front. Add those here as we find them: - keywords = ["$", "&", ".", "Configuration"] + keywords = ["$", "&", ".", "Configuration", "try"] for keyword in keywords: - if cmd.startswith(keyword): + if cmd.lower().startswith(keyword.lower()): new_cmd.extend(["-Command", f"{cmd.strip()}"]) break else: @@ -455,8 +458,6 @@ def _run( if isinstance(cmd, (list, tuple)): cmd = " ".join(cmd) - return win_runas(cmd, runas, password, cwd) - if runas and salt.utils.platform.is_darwin(): # We need to insert the user simulation into the command itself and not # just run it from the environment on macOS as that method doesn't work @@ -489,7 +490,7 @@ def _run( # hang. 
runas = None - if runas: + if runas and not salt.utils.platform.is_windows(): # Save the original command before munging it try: pwd.getpwnam(runas) @@ -510,7 +511,7 @@ def _run( else: use_sudo = True - if runas or group: + if (runas or group) and not salt.utils.platform.is_windows(): try: # Getting the environment for the runas user # Use markers to thwart any stdout noise @@ -749,90 +750,104 @@ def _run( if not use_vt: # This is where the magic happens - try: + + if runas and salt.utils.platform.is_windows(): + + # We can't use TimedProc with runas on Windows if change_windows_codepage: salt.utils.win_chcp.set_codepage_id(windows_codepage) - try: - proc = salt.utils.timed_subprocess.TimedProc(cmd, **new_kwargs) - except OSError as exc: - msg = "Unable to run command '{}' with the context '{}', reason: {}".format( - cmd if output_loglevel is not None else "REDACTED", - new_kwargs, - exc, - ) - raise CommandExecutionError(msg) - try: - proc.run() - except TimedProcTimeoutError as exc: - ret["stdout"] = str(exc) - ret["stderr"] = "" - ret["retcode"] = None - ret["pid"] = proc.process.pid - # ok return code for timeouts? 
- ret["retcode"] = 1 - return ret - finally: + ret = win_runas(cmd, runas, password, cwd) + if change_windows_codepage: salt.utils.win_chcp.set_codepage_id(previous_windows_codepage) - if output_loglevel != "quiet" and output_encoding is not None: - log.debug( - "Decoding output from command %s using %s encoding", - cmd, - output_encoding, - ) + else: + try: + if change_windows_codepage: + salt.utils.win_chcp.set_codepage_id(windows_codepage) + try: + proc = salt.utils.timed_subprocess.TimedProc(cmd, **new_kwargs) + except OSError as exc: + msg = "Unable to run command '{}' with the context '{}', reason: {}".format( + cmd if output_loglevel is not None else "REDACTED", + new_kwargs, + exc, + ) + raise CommandExecutionError(msg) - try: - out = salt.utils.stringutils.to_unicode( - proc.stdout, encoding=output_encoding - ) - except TypeError: - # stdout is None - out = "" - except UnicodeDecodeError: - out = salt.utils.stringutils.to_unicode( - proc.stdout, encoding=output_encoding, errors="replace" - ) - if output_loglevel != "quiet": - log.error( - "Failed to decode stdout from command %s, non-decodable " - "characters have been replaced", - _log_cmd(cmd), + try: + proc.run() + except TimedProcTimeoutError as exc: + ret["stdout"] = str(exc) + ret["stderr"] = "" + ret["retcode"] = None + ret["pid"] = proc.process.pid + # ok return code for timeouts? 
+ ret["retcode"] = 1 + return ret + finally: + if change_windows_codepage: + salt.utils.win_chcp.set_codepage_id(previous_windows_codepage) + + if output_loglevel != "quiet" and output_encoding is not None: + log.debug( + "Decoding output from command %s using %s encoding", + cmd, + output_encoding, ) - try: - err = salt.utils.stringutils.to_unicode( - proc.stderr, encoding=output_encoding - ) - except TypeError: - # stderr is None - err = "" - except UnicodeDecodeError: - err = salt.utils.stringutils.to_unicode( - proc.stderr, encoding=output_encoding, errors="replace" - ) - if output_loglevel != "quiet": - log.error( - "Failed to decode stderr from command %s, non-decodable " - "characters have been replaced", - _log_cmd(cmd), + try: + out = salt.utils.stringutils.to_unicode( + proc.stdout, encoding=output_encoding ) + except TypeError: + # stdout is None + out = "" + except UnicodeDecodeError: + out = salt.utils.stringutils.to_unicode( + proc.stdout, encoding=output_encoding, errors="replace" + ) + if output_loglevel != "quiet": + log.error( + "Failed to decode stdout from command %s, non-decodable " + "characters have been replaced", + _log_cmd(cmd), + ) + + try: + err = salt.utils.stringutils.to_unicode( + proc.stderr, encoding=output_encoding + ) + except TypeError: + # stderr is None + err = "" + except UnicodeDecodeError: + err = salt.utils.stringutils.to_unicode( + proc.stderr, encoding=output_encoding, errors="replace" + ) + if output_loglevel != "quiet": + log.error( + "Failed to decode stderr from command %s, non-decodable " + "characters have been replaced", + _log_cmd(cmd), + ) + + # Encoded commands dump CLIXML data in stderr. 
It's not an actual error + if encoded_cmd and "CLIXML" in err: + err = "" + if rstrip: + if out is not None: + out = out.rstrip() + if err is not None: + err = err.rstrip() + ret["pid"] = proc.process.pid + ret["retcode"] = proc.process.returncode + ret["stdout"] = out + ret["stderr"] = err - # Encoded commands dump CLIXML data in stderr. It's not an actual error - if encoded_cmd and "CLIXML" in err: - err = "" - if rstrip: - if out is not None: - out = out.rstrip() - if err is not None: - err = err.rstrip() - ret["pid"] = proc.process.pid - ret["retcode"] = proc.process.returncode if ret["retcode"] in success_retcodes: ret["retcode"] = 0 - ret["stdout"] = out - ret["stderr"] = err if any( [stdo in ret["stdout"] for stdo in success_stdout] + [stde in ret["stderr"] for stde in success_stderr] @@ -4096,16 +4111,16 @@ def powershell( # ConvertTo-JSON is only available on PowerShell 3.0 and later psversion = shell_info("powershell")["psversion"] if salt.utils.versions.version_cmp(psversion, "2.0") == 1: - cmd += " | ConvertTo-JSON" + cmd += " | ConvertTo-JSON " if depth is not None: - cmd += f" -Depth {depth}" + cmd += f"-Depth {depth} " # Put the whole command inside a try / catch block # Some errors in PowerShell are not "Terminating Errors" and will not be # caught in a try/catch block. For example, the `Get-WmiObject` command will # often return a "Non Terminating Error". To fix this, make sure # `-ErrorAction Stop` is set in the powershell command - cmd = "try {" + cmd + '} catch { "{}" }' + cmd = "try { " + cmd + ' } catch { "{}" }' if encode_cmd: # Convert the cmd to UTF-16LE without a BOM and base64 encode. 
@@ -4117,7 +4132,7 @@ def powershell( cmd = salt.utils.stringutils.to_str(cmd) encoded_cmd = True else: - cmd = f"{{{cmd}}}" + cmd = f"{{ {cmd} }}" encoded_cmd = False # Retrieve the response, while overriding shell with 'powershell' diff --git a/salt/modules/win_file.py b/salt/modules/win_file.py index e3de699d625..0bcc2ac9163 100644 --- a/salt/modules/win_file.py +++ b/salt/modules/win_file.py @@ -497,13 +497,14 @@ def get_group(path, follow_symlinks=True): def uid_to_user(uid): """ - Convert a uid to a user name + Convert a User ID (uid) to a username Args: uid (str): The user id to lookup Returns: - str: The name of the user + str: The name of the user. The ``uid`` will be returned if there is no + corresponding username CLI Example: diff --git a/salt/modules/win_pkg.py b/salt/modules/win_pkg.py index 8b834bbd33e..9d1ee62bd8c 100644 --- a/salt/modules/win_pkg.py +++ b/salt/modules/win_pkg.py @@ -45,6 +45,7 @@ import re import sys import time import urllib.parse +from fnmatch import fnmatch from functools import cmp_to_key import salt.fileserver @@ -275,6 +276,11 @@ def list_available(*names, **kwargs): return_dict_always (bool): Default ``False`` dict when a single package name is queried. + reverse_sort (bool): + Sort the versions for latest to oldest + + .. 
versionadded:: 3007.2 + Returns: dict: The package name with its available versions @@ -298,12 +304,15 @@ def list_available(*names, **kwargs): return_dict_always = salt.utils.data.is_true( kwargs.get("return_dict_always", False) ) + reverse_sort = salt.utils.data.is_true(kwargs.get("reverse_sort", False)) if len(names) == 1 and not return_dict_always: pkginfo = _get_package_info(names[0], saltenv=saltenv) if not pkginfo: return "" versions = sorted( - list(pkginfo.keys()), key=cmp_to_key(_reverse_cmp_pkg_versions) + list(pkginfo.keys()), + key=cmp_to_key(_reverse_cmp_pkg_versions), + reverse=reverse_sort, ) else: versions = {} @@ -314,11 +323,82 @@ def list_available(*names, **kwargs): verlist = sorted( list(pkginfo.keys()) if pkginfo else [], key=cmp_to_key(_reverse_cmp_pkg_versions), + reverse=reverse_sort, ) versions[name] = verlist return versions +def list_repo_pkgs(*args, saltenv="base", **kwargs): + """ + .. versionadded:: 3007.2 + + This function was added to match a similar function in Linux. It will + return all available packages. Optionally, package names (and name globs) + can be passed and the results will be filtered to packages matching those + names. + + This function can be helpful in discovering the version or repo to specify + in a :mod:`pkg.installed ` state. + + The return data will be a dictionary mapping package names to a list of + version numbers, ordered from newest to oldest. For example: + + .. code-block:: python + + { + 'bash': ['4.3-14ubuntu1.1', + '4.3-14ubuntu1'], + 'nginx': ['1.10.0-0ubuntu0.16.04.4', + '1.9.15-0ubuntu1'] + } + + CLI Examples: + + .. 
code-block:: bash + + salt '*' pkg.list_repo_pkgs + salt '*' pkg.list_repo_pkgs foo bar baz + """ + + # Get all the repo data + pkgs = get_repo_data(saltenv=saltenv).get("repo", {}) + + # Generate a list of packages and their available versions + repo_pkgs = {} + for pkg in pkgs: + repo_pkgs.update( + { + pkg: sorted( + list(pkgs[pkg].keys()), + key=cmp_to_key(_reverse_cmp_pkg_versions), + reverse=True, + ) + } + ) + + # If no args passed, just return everything + if not args: + return repo_pkgs + + # Loop through the args and return info for each specified package + ret = {} + for arg in args: + if "=" in arg: + pkg_name, pkg_version = arg.split("=") + else: + pkg_name = arg + pkg_version = "" + for pkg in repo_pkgs: + if fnmatch(pkg, pkg_name): + if pkg_version and pkg_version in repo_pkgs[pkg]: + ret.setdefault(pkg, []).append(pkg_version) + else: + ret.setdefault(pkg, []).extend(repo_pkgs[pkg]) + + return ret + + def version(*names, **kwargs): """ Returns a string representing the package version or an empty string if not diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py index 9b2ec22b420..5df06607782 100644 --- a/salt/modules/yumpkg.py +++ b/salt/modules/yumpkg.py @@ -15,6 +15,9 @@ Support for YUM/DNF .. versionadded:: 3003 Support for ``tdnf`` on Photon OS. +.. versionadded:: 3006.10 + Support for ``dnf5``` on Fedora 41 + .. 
versionadded:: 3007.0 Support for ``dnf5``` on Fedora 39 """ @@ -1034,7 +1037,7 @@ def list_upgrades(refresh=True, **kwargs): cmd = ["--quiet"] cmd.extend(options) - cmd.extend(["list", "upgrades" if _yum() in ("dnf", "dnf5") else "updates"]) + cmd.extend(["list", "--upgrades" if _yum() in ("dnf", "dnf5") else "updates"]) out = _call_yum(cmd, ignore_retcode=True) if out["retcode"] != 0 and "Error:" in out: return {} @@ -1058,7 +1061,7 @@ def list_downloaded(**kwargs): salt '*' pkg.list_downloaded """ - CACHE_DIR = os.path.join("/var/cache/", _yum()) + CACHE_DIR = os.path.join("/var/cache", _yum()) ret = {} for root, dirnames, filenames in salt.utils.path.os_walk(CACHE_DIR): @@ -1428,8 +1431,8 @@ def install( 'version': '', 'arch': ''}}} """ - if (version := kwargs.get("version")) is not None: - kwargs["version"] = str(version) + if kwargs.get("version") is not None: + kwargs["version"] = str(kwargs["version"]) options = _get_options(**kwargs) if salt.utils.data.is_true(refresh): @@ -1987,7 +1990,7 @@ def upgrade( salt '*' pkg.upgrade security=True exclude='kernel*' """ if _yum() in ("dnf", "dnf5") and not obsoletes: - # for dnf we can just disable obsoletes + # for dnf[5] we can just disable obsoletes _setopt = [ opt for opt in salt.utils.args.split_input(kwargs.pop("setopt", [])) @@ -2079,7 +2082,7 @@ def remove(name=None, pkgs=None, **kwargs): # pylint: disable=W0613 On minions running systemd>=205, `systemd-run(1)`_ is now used to isolate commands which modify installed packages from the ``salt-minion`` daemon's control group. This is done to keep systemd - from killing any yum/dnf commands spawned by Salt when the + from killing any yum/dnf[5] commands spawned by Salt when the ``salt-minion`` service is restarted. (see ``KillMode`` in the `systemd.kill(5)`_ manpage for more information). 
If desired, usage of `systemd-run(1)`_ can be suppressed by setting a :mod:`config option @@ -2183,7 +2186,7 @@ def purge(name=None, pkgs=None, **kwargs): # pylint: disable=W0613 On minions running systemd>=205, `systemd-run(1)`_ is now used to isolate commands which modify installed packages from the ``salt-minion`` daemon's control group. This is done to keep systemd - from killing any yum/dnf commands spawned by Salt when the + from killing any yum/dnf[5] commands spawned by Salt when the ``salt-minion`` service is restarted. (see ``KillMode`` in the `systemd.kill(5)`_ manpage for more information). If desired, usage of `systemd-run(1)`_ can be suppressed by setting a :mod:`config option @@ -3329,12 +3332,12 @@ def download(*packages, **kwargs): .. versionadded:: 2015.5.0 Download packages to the local disk. Requires ``yumdownloader`` from - ``yum-utils`` package. + ``yum-utils`` or ``dnf-utils`` package. .. note:: - ``yum-utils`` will already be installed on the minion if the package - was installed from the Fedora / EPEL repositories. + ``yum-utils`` or ``dnf-utils`` will already be installed on the minion + if the package was installed from the EPEL / Fedora repositories. CLI Example: @@ -3349,7 +3352,7 @@ def download(*packages, **kwargs): if not packages: raise SaltInvocationError("No packages were specified") - CACHE_DIR = "/var/cache/yum/packages" + CACHE_DIR = os.path.join("/var/cache", _yum(), "packages") if not os.path.exists(CACHE_DIR): os.makedirs(CACHE_DIR) cached_pkgs = os.listdir(CACHE_DIR) @@ -3530,12 +3533,17 @@ def services_need_restart(**kwargs): salt '*' pkg.services_need_restart """ - if _yum() != "dnf": - raise CommandExecutionError("dnf is required to list outdated services.") + if _yum() not in ("dnf", "dnf5"): + raise CommandExecutionError( + "dnf or dnf5 is required to list outdated services." 
+ ) if not salt.utils.systemd.booted(__context__): raise CommandExecutionError("systemd is required to list outdated services.") - cmd = ["dnf", "--quiet", "needs-restarting"] + if _yum() == "dnf5": + cmd = ["dnf5", "--quiet", "needs-restarting"] + else: + cmd = ["dnf", "--quiet", "needs-restarting"] dnf_output = __salt__["cmd.run_stdout"](cmd, python_shell=False) if not dnf_output: return [] diff --git a/salt/platform/win.py b/salt/platform/win.py index ec63dbe319f..a521fdd974c 100644 --- a/salt/platform/win.py +++ b/salt/platform/win.py @@ -185,7 +185,12 @@ class HANDLE(wintypes.HANDLE): def Close(self, CloseHandle=kernel32.CloseHandle): if self and not getattr(self, "closed", False): - CloseHandle(self.Detach()) + try: + CloseHandle(self.Detach()) + except OSError: + # Suppress the error when there is no handle (WinError 6) + if ctypes.get_last_error() == 6: + pass __del__ = Close diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index 46b127bfcf7..91067fc41a3 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -492,9 +492,7 @@ class GitProvider: self._cache_basename = "_" if self.id.startswith("__env__"): try: - self._cache_basename = self.get_checkout_target().replace( - "/", "-" - ) # replace '/' with '-' to not cause trouble with file-system + self._cache_basename = self.get_checkout_target() except AttributeError: log.critical( @@ -2801,6 +2799,33 @@ class GitBase: try: # Find and place fetch_request file for all the other branches for this repo repo_work_hash = os.path.split(repo.get_salt_working_dir())[0] + branches = [ + os.path.relpath(path, repo_work_hash) + for (path, subdirs, files) in os.walk(repo_work_hash) + if not subdirs + ] + + for branch in branches: + # Don't place fetch request in current branch being updated + if branch == repo.get_cache_basename(): + continue + branch_salt_dir = salt.utils.path.join(repo_work_hash, branch) + fetch_path = salt.utils.path.join( + branch_salt_dir, "fetch_request" + ) + if 
os.path.isdir(branch_salt_dir): + try: + with salt.utils.files.fopen(fetch_path, "w"): + pass + except OSError as exc: # pylint: disable=broad-except + log.error( + "Failed to make fetch request: %s %s", + fetch_path, + exc, + exc_info=True, + ) + else: + log.error("Failed to make fetch request: %s", fetch_path) for branch in os.listdir(repo_work_hash): # Don't place fetch request in current branch being updated if branch == repo.get_cache_basename(): diff --git a/salt/utils/http.py b/salt/utils/http.py index 67fc05ce469..fd296788f46 100644 --- a/salt/utils/http.py +++ b/salt/utils/http.py @@ -646,7 +646,7 @@ def query( decode_body=decode_body, ) return ret - except (socket.herror, OSError, socket.timeout, socket.gaierror) as exc: + except (socket.herror, OSError, TimeoutError, socket.gaierror) as exc: if status is True: ret["status"] = 0 ret["error"] = str(exc) diff --git a/salt/utils/reactor.py b/salt/utils/reactor.py index 5b9138636b2..10ba329637e 100644 --- a/salt/utils/reactor.py +++ b/salt/utils/reactor.py @@ -473,22 +473,42 @@ class ReactWrap: """ Wrap RunnerClient for executing :ref:`runner modules ` """ + # pylint: disable=unsupported-membership-test,unsupported-assignment-operation + if "runner" not in self.client_cache: + log.debug("reactor edge case: re-populating client_cache for runner") + low = {"state": "runner"} + self.populate_client_cache(low) return self.pool.fire_async(self.client_cache["runner"].low, args=(fun, kwargs)) def wheel(self, fun, **kwargs): """ Wrap Wheel to enable executing :ref:`wheel modules ` """ + # pylint: disable=unsupported-membership-test,unsupported-assignment-operation + if "wheel" not in self.client_cache: + log.debug("reactor edge case: re-populating client_cache for wheel") + low = {"state": "wheel"} + self.populate_client_cache(low) return self.pool.fire_async(self.client_cache["wheel"].low, args=(fun, kwargs)) def local(self, fun, tgt, **kwargs): """ Wrap LocalClient for running :ref:`execution modules ` """ + # 
pylint: disable=unsupported-membership-test,unsupported-assignment-operation + if "local" not in self.client_cache: + log.debug("reactor edge case: re-populating client_cache for local") + low = {"state": "local"} + self.populate_client_cache(low) self.client_cache["local"].cmd_async(tgt, fun, **kwargs) def caller(self, fun, **kwargs): """ Wrap LocalCaller to execute remote exec functions locally on the Minion """ + # pylint: disable=unsupported-membership-test,unsupported-assignment-operation + if "caller" not in self.client_cache: + log.debug("reactor edge case: re-populating client_cache for caller") + low = {"state": "caller"} + self.populate_client_cache(low) self.client_cache["caller"].cmd(fun, *kwargs["arg"], **kwargs["kwarg"]) diff --git a/salt/utils/win_dacl.py b/salt/utils/win_dacl.py index 346dac41de5..70779588262 100644 --- a/salt/utils/win_dacl.py +++ b/salt/utils/win_dacl.py @@ -125,7 +125,7 @@ should match what you see when you look at the properties for an object. - subfolders_only: Applies to all containers beneath this object - files_only: Applies to all file objects beneath this object - .. NOTE:: + .. note:: 'applies to' properties can only be modified on directories. Files will always be ``this_folder_only``. @@ -883,10 +883,7 @@ def dacl(obj_name=None, obj_type="file"): """ # Get the principal from the sid (object sid) sid = win32security.ConvertSidToStringSid(ace[2]) - try: - principal = get_name(sid) - except CommandExecutionError: - principal = sid + principal = get_name(sid) # Get the ace type ace_type = self.ace_type[ace[0][0]] @@ -1194,14 +1191,17 @@ def get_name(principal): principal (str): Find the Normalized name based on this. Can be a PySID object, a SID - string, or a user name in any capitalization. + string, or a username in any capitalization. .. note:: - Searching based on the user name can be slow on hosts connected + Searching based on the username can be slow on hosts connected to large Active Directory domains. 
Returns: - str: The name that corresponds to the passed principal + str: The username that corresponds to the passed principal. If there is + no corresponding username, the string SID will be returned. + Capability SIDs will return ``None``. + Usage: @@ -1246,7 +1246,7 @@ def get_name(principal): name = f"NT Service\\{name}" return name - except (pywintypes.error, TypeError) as exc: + except pywintypes.error as exc: # Microsoft introduced the concept of Capability SIDs in Windows 8 # https://docs.microsoft.com/en-us/windows/security/identity-protection/access-control/security-identifiers#capability-sids # https://support.microsoft.com/en-us/help/4502539/some-sids-do-not-resolve-into-friendly-names @@ -1254,11 +1254,27 @@ def get_name(principal): # These types of SIDs do not resolve, so we'll just ignore them for this # All capability SIDs begin with `S-1-15-3`, so we'll only throw an # error when the sid does not begin with `S-1-15-3` - if not str_sid.startswith("S-1-15-3"): - message = f'Error resolving "{principal}"' - if type(exc) == pywintypes.error: - win_error = win32api.FormatMessage(exc.winerror).rstrip("\n") - message = f"{message}: {win_error}" + # 1332: No mapping between account names and security IDs was done + if exc.winerror == 1332: + # Capability SID, return None + if str_sid.startswith("S-1-15-3"): + log.debug("Name mapping not available for capability SID: %s", str_sid) + return None + + # User does not exist on the system or is on a disconnected domain + # Return the SID + else: + log.debug( + "Could not resolve SID: %s\nThe user has either been removed " + "from the system or is a domain user and the system is not " + "connected to the domain", + str_sid, + ) + return str_sid + + # Some other unknown error + else: + message = f'Error resolving "{principal}: {exc.strerror}"' log.exception(message) raise CommandExecutionError(message, exc) @@ -2242,13 +2258,19 @@ def _check_perms(obj_name, obj_type, new_perms, access_mode, ret, test_mode=Fals 
cur_perms = get_permissions(obj_name=obj_name, obj_type=obj_type) changes = {} for user in new_perms: - applies_to_text = "" # Check that user exists: - try: - user_name = get_name(principal=user) - except CommandExecutionError: + user_name = get_name(principal=user) + # username will be the SID if there is no corresponding username + if user_name == get_sid_string(principal=user): ret["comment"].append( - '{} Perms: User "{}" missing from Target System'.format( + "{} Perms: Could not find a corresponding username for: {}".format( + access_mode.capitalize(), user + ) + ) + continue + if user_name is None: + ret["comment"].append( + "{} Perms: Skipping Capability SID: {}".format( access_mode.capitalize(), user ) ) @@ -2471,7 +2493,7 @@ def check_perms( log.debug("Resetting permissions for %s", obj_name) cur_perms = get_permissions(obj_name=obj_name, obj_type=obj_type) for user_name in cur_perms["Not Inherited"]: - # case insensitive dictionary search + # case-insensitive dictionary search if user_name not in {get_name(k) for k in (grant_perms or {})}: if "grant" in cur_perms["Not Inherited"][user_name]: ret["changes"].setdefault("remove_perms", {}) @@ -2489,7 +2511,7 @@ def check_perms( ret["changes"]["remove_perms"].update( {user_name: cur_perms["Not Inherited"][user_name]} ) - # case insensitive dictionary search + # case-insensitive dictionary search if user_name not in {get_name(k) for k in (deny_perms or {})}: if "deny" in cur_perms["Not Inherited"][user_name]: ret["changes"].setdefault("remove_perms", {}) @@ -2541,7 +2563,7 @@ def check_perms( log.debug("Resetting permissions for %s", obj_name) cur_perms = get_permissions(obj_name=obj_name, obj_type=obj_type) for user_name in cur_perms["Not Inherited"]: - # case insensitive dictionary search + # case-insensitive dictionary search if user_name not in {get_name(k) for k in (grant_perms or {})}: if "grant" in cur_perms["Not Inherited"][user_name]: rm_permissions( @@ -2550,7 +2572,7 @@ def check_perms( 
ace_type="grant", obj_type=obj_type, ) - # case insensitive dictionary search + # case-insensitive dictionary search if user_name not in {get_name(k) for k in (deny_perms or {})}: if "deny" in cur_perms["Not Inherited"][user_name]: rm_permissions( @@ -2582,14 +2604,9 @@ def _set_perms(obj_dacl, obj_type, new_perms, cur_perms, access_mode): ret = {} for user in new_perms: # Check that user exists: - try: - user_name = get_name(user) - except CommandExecutionError: - log.debug( - '%s Perms: User "%s" missing from Target System', - access_mode.capitalize(), - user, - ) + user_name = get_name(user) + # We want to skip unmapped usernames + if user_name == get_sid_string(user): continue # Get applies_to diff --git a/salt/utils/win_runas.py b/salt/utils/win_runas.py index fc8c9c82be5..aa2df51dbfc 100644 --- a/salt/utils/win_runas.py +++ b/salt/utils/win_runas.py @@ -187,8 +187,10 @@ def runas(cmdLine, username, password=None, cwd=None): | win32process.CREATE_SUSPENDED ) + flags = win32con.STARTF_USESTDHANDLES + flags |= win32con.STARTF_USESHOWWINDOW startup_info = salt.platform.win.STARTUPINFO( - dwFlags=win32con.STARTF_USESTDHANDLES, + dwFlags=flags, hStdInput=stdin_read.handle, hStdOutput=stdout_write.handle, hStdError=stderr_write.handle, @@ -197,6 +199,9 @@ def runas(cmdLine, username, password=None, cwd=None): # Create the environment for the user env = create_env(user_token, False) + if "&&" in cmdLine: + cmdLine = f'cmd /c "{cmdLine}"' + hProcess = None try: # Start the process in a suspended state. 
@@ -286,13 +291,18 @@ def runas_unpriv(cmd, username, password, cwd=None): dupin = salt.platform.win.DuplicateHandle(srchandle=stdin, inherit=True) # Get startup info structure + flags = win32con.STARTF_USESTDHANDLES + flags |= win32con.STARTF_USESHOWWINDOW startup_info = salt.platform.win.STARTUPINFO( - dwFlags=win32con.STARTF_USESTDHANDLES, + dwFlags=flags, hStdInput=dupin, hStdOutput=c2pwrite, hStdError=errwrite, ) + if "&&" in cmd: + cmd = f'cmd /c "{cmd}"' + try: # Run command and return process info structure process_info = salt.platform.win.CreateProcessWithLogonW( diff --git a/tests/pytests/functional/modules/cmd/test_run_win.py b/tests/pytests/functional/modules/cmd/test_run_win.py new file mode 100644 index 00000000000..cf41eb50280 --- /dev/null +++ b/tests/pytests/functional/modules/cmd/test_run_win.py @@ -0,0 +1,50 @@ +import pytest + +pytestmark = [ + pytest.mark.core_test, + pytest.mark.windows_whitelisted, +] + + +@pytest.fixture(scope="module") +def account(): + with pytest.helpers.create_account() as _account: + yield _account + + +@pytest.mark.skip_unless_on_windows(reason="Minion is not Windows") +@pytest.mark.parametrize( + "exit_code, return_code, result", + [ + (300, 0, True), + (299, 299, False), + ], +) +def test_windows_script_exitcode(modules, state_tree, exit_code, return_code, result): + ret = modules.state.single( + "cmd.run", name=f"cmd.exe /c exit {exit_code}", success_retcodes=[2, 44, 300] + ) + assert ret.result is result + assert ret.filtered["changes"]["retcode"] == return_code + + +@pytest.mark.skip_unless_on_windows(reason="Minion is not Windows") +@pytest.mark.parametrize( + "exit_code, return_code, result", + [ + (300, 0, True), + (299, 299, False), + ], +) +def test_windows_script_exitcode_runas( + modules, state_tree, exit_code, return_code, result, account +): + ret = modules.state.single( + "cmd.run", + name=f"cmd.exe /c exit {exit_code}", + success_retcodes=[2, 44, 300], + runas=account.username, + 
password=account.password, + ) + assert ret.result is result + assert ret.filtered["changes"]["retcode"] == return_code diff --git a/tests/pytests/functional/modules/cmd/test_script.py b/tests/pytests/functional/modules/cmd/test_script.py index dcdd632fa70..9cd8fa85e08 100644 --- a/tests/pytests/functional/modules/cmd/test_script.py +++ b/tests/pytests/functional/modules/cmd/test_script.py @@ -13,6 +13,17 @@ def cmd(modules): return modules.cmd +@pytest.fixture(scope="module") +def exitcode_script(state_tree): + exit_code = 12345 + script_contents = f""" + Write-Host "Expected exit code: {exit_code}" + exit {exit_code} + """ + with pytest.helpers.temp_file("exit_code.ps1", script_contents, state_tree): + yield exit_code + + @pytest.fixture(params=["powershell", "pwsh"]) def shell(request): """ @@ -85,3 +96,9 @@ def test_windows_script_args_powershell_runas(cmd, shell, account, issue_56195): ) assert ret["stdout"] == password + + +@pytest.mark.skip_unless_on_windows(reason="Minion is not Windows") +def test_windows_script_exitcode(cmd, shell, exitcode_script): + ret = cmd.script("salt://exit_code.ps1", shell=shell, saltenv="base") + assert ret["retcode"] == exitcode_script diff --git a/tests/pytests/functional/modules/test_win_pkg.py b/tests/pytests/functional/modules/test_win_pkg.py index 6bcfaa9bd84..1b8bb35ca83 100644 --- a/tests/pytests/functional/modules/test_win_pkg.py +++ b/tests/pytests/functional/modules/test_win_pkg.py @@ -13,9 +13,26 @@ def pkg_def_contents(state_tree): my-software: '1.0.1': full_name: 'My Software' - installer: 'C:\files\mysoftware.msi' + installer: 'C:\files\mysoftware101.msi' install_flags: '/qn /norestart' - uninstaller: 'C:\files\mysoftware.msi' + uninstaller: 'C:\files\mysoftware101.msi' + uninstall_flags: '/qn /norestart' + msiexec: True + reboot: False + '1.0.2': + full_name: 'My Software' + installer: 'C:\files\mysoftware102.msi' + install_flags: '/qn /norestart' + uninstaller: 'C:\files\mysoftware102.msi' + uninstall_flags: '/qn 
/norestart' + msiexec: True + reboot: False + your-software: + '1.0.0': + full_name: 'Your Software' + installer: 'C:\files\yoursoftware101.msi' + install_flags: '/qn /norestart' + uninstaller: 'C:\files\yoursoftware101.msi' uninstall_flags: '/qn /norestart' msiexec: True reboot: False @@ -27,9 +44,48 @@ def pkg(modules): yield modules.pkg -def test_refresh_db(pkg, pkg_def_contents, state_tree, minion_opts): +@pytest.fixture(scope="function") +def repo(pkg, state_tree, pkg_def_contents): assert len(pkg.get_package_info("my-software")) == 0 repo_dir = state_tree / "winrepo_ng" with pytest.helpers.temp_file("my-software.sls", pkg_def_contents, repo_dir): pkg.refresh_db() - assert len(pkg.get_package_info("my-software")) == 1 + + +def test_refresh_db(pkg, repo): + assert len(pkg.get_package_info("my-software")) == 2 + assert len(pkg.get_package_info("your-software")) == 1 + + +@pytest.mark.parametrize( + "as_dict, reverse, expected", + [ + (False, False, ["1.0.1", "1.0.2"]), + (False, True, ["1.0.2", "1.0.1"]), + (True, False, {"my-software": ["1.0.1", "1.0.2"]}), + (True, True, {"my-software": ["1.0.2", "1.0.1"]}), + ], +) +def test_list_available(pkg, repo, as_dict, reverse, expected): + result = pkg.list_available( + "my-software", return_dict_always=as_dict, reverse_sort=reverse + ) + assert result == expected + + +@pytest.mark.parametrize( + "pkg_name, expected", + [ + ("my-software", {"my-software": ["1.0.2", "1.0.1"]}), + ("my-software=1.0.1", {"my-software": ["1.0.1"]}), + ("my-soft*", {"my-software": ["1.0.2", "1.0.1"]}), + ("your-software", {"your-software": ["1.0.0"]}), + (None, {"my-software": ["1.0.2", "1.0.1"], "your-software": ["1.0.0"]}), + ], +) +def test_list_repo_pkgs(pkg, repo, pkg_name, expected): + if pkg_name is None: + result = pkg.list_repo_pkgs() + else: + result = pkg.list_repo_pkgs(pkg_name) + assert result == expected diff --git a/tests/pytests/functional/states/test_pkg.py b/tests/pytests/functional/states/test_pkg.py index 
d179eaa4ca0..31f85ef1133 100644 --- a/tests/pytests/functional/states/test_pkg.py +++ b/tests/pytests/functional/states/test_pkg.py @@ -58,7 +58,7 @@ def refresh_keys(grains, modules): def PKG_TARGETS(grains): _PKG_TARGETS = ["figlet", "sl"] if grains["os"] == "Windows": - _PKG_TARGETS = ["npp_x64", "winrar"] + _PKG_TARGETS = ["npp_x64", "putty"] elif grains["os"] == "Amazon": if grains["osfinger"] == "Amazon Linux-2023": _PKG_TARGETS = ["lynx", "gnuplot-minimal"] @@ -225,6 +225,29 @@ def install_7zip(modules): assert "22.01.00.0" not in versions +@pytest.fixture(scope="module") +def pkg_def_contents(state_tree): + return r""" + my-software: + '1.0.1': + full_name: 'My Software' + installer: 'C:\files\mysoftware101.msi' + install_flags: '/qn /norestart' + uninstaller: 'C:\files\mysoftware101.msi' + uninstall_flags: '/qn /norestart' + msiexec: True + reboot: False + '1.0.2': + full_name: 'My Software' + installer: 'C:\files\mysoftware102.msi' + install_flags: '/qn /norestart' + uninstaller: 'C:\files\mysoftware102.msi' + uninstall_flags: '/qn /norestart' + msiexec: True + reboot: False + """ + + @pytest.mark.requires_salt_modules("pkg.version") @pytest.mark.requires_salt_states("pkg.installed", "pkg.removed") @pytest.mark.slow_test @@ -1126,3 +1149,14 @@ def test_pkg_removed_with_version_multiple(install_7zip, modules, states): assert ret.result is True current = modules.pkg.version("7zip") assert "22.01.00.0" in current + + +@pytest.mark.skip_unless_on_windows() +def test_pkg_latest_test_true(states, modules, state_tree, pkg_def_contents): + repo_dir = state_tree / "winrepo_ng" + with pytest.helpers.temp_file("my-software.sls", pkg_def_contents, repo_dir): + modules.pkg.refresh_db() + assert len(modules.pkg.get_package_info("my-software")) == 2 + result = states.pkg.latest("my-software", test=True) + expected = {"my-software": {"new": "1.0.2", "old": ""}} + assert result.changes == expected diff --git a/tests/pytests/functional/utils/test_win_runas.py 
b/tests/pytests/functional/utils/test_win_runas.py new file mode 100644 index 00000000000..b6bdabec375 --- /dev/null +++ b/tests/pytests/functional/utils/test_win_runas.py @@ -0,0 +1,56 @@ +""" +Test the win_runas util +""" + +import pytest + +import salt.utils.win_runas as win_runas + +pytestmark = [ + pytest.mark.windows_whitelisted, + pytest.mark.skip_unless_on_windows, +] + + +@pytest.fixture +def user(): + with pytest.helpers.create_account() as account: + yield account + + +@pytest.mark.parametrize( + "cmd, expected", + [ + ("hostname && whoami", "username"), + ("hostname && echo foo", "foo"), + ("hostname && python --version", "Python"), + ], +) +def test_compound_runas(user, cmd, expected): + if expected == "username": + expected = user.username + result = win_runas.runas( + cmdLine=cmd, + username=user.username, + password=user.password, + ) + assert expected in result["stdout"] + + +@pytest.mark.parametrize( + "cmd, expected", + [ + ("hostname && whoami", "username"), + ("hostname && echo foo", "foo"), + ("hostname && python --version", "Python"), + ], +) +def test_compound_runas_unpriv(user, cmd, expected): + if expected == "username": + expected = user.username + result = win_runas.runas_unpriv( + cmd=cmd, + username=user.username, + password=user.password, + ) + assert expected in result["stdout"] diff --git a/tests/pytests/functional/utils/win_dacl/test_get_name.py b/tests/pytests/functional/utils/win_dacl/test_get_name.py index f35c1336ec4..3f3f1e33901 100644 --- a/tests/pytests/functional/utils/win_dacl/test_get_name.py +++ b/tests/pytests/functional/utils/win_dacl/test_get_name.py @@ -9,9 +9,11 @@ import pytest import salt.exceptions import salt.utils.win_dacl +from tests.support.mock import patch # Third-party libs try: + import pywintypes import win32security HAS_WIN32 = True @@ -84,12 +86,22 @@ def test_get_name_capability_sid(): assert salt.utils.win_dacl.get_name(sid_obj) is None -def test_get_name_error(): +def test_get_name_unmapped_sid(): 
""" - Test get_name with an un mapped SID, should throw a CommandExecutionError + Test get_name with an un mapped SID, should return the passed sid """ test_sid = "S-1-2-3-4" sid_obj = win32security.ConvertStringSidToSid(test_sid) - with pytest.raises(salt.exceptions.CommandExecutionError) as exc: - salt.utils.win_dacl.get_name(sid_obj) - assert "No mapping between account names" in exc.value.message + assert salt.utils.win_dacl.get_name(sid_obj) == test_sid + + +def test_get_name_error(): + """ + Test get_name with an unexpected error, should throw a CommandExecutionError + """ + test_sid = "S-1-2-3-4" + sid_obj = win32security.ConvertStringSidToSid(test_sid) + with patch("win32security.LookupAccountSid", side_effect=pywintypes.error): + with pytest.raises(salt.exceptions.CommandExecutionError) as exc: + salt.utils.win_dacl.get_name(sid_obj) + assert "Error resolving" in exc.value.message diff --git a/tests/pytests/integration/minion/conftest.py b/tests/pytests/integration/minion/conftest.py new file mode 100644 index 00000000000..ee64a9ffca4 --- /dev/null +++ b/tests/pytests/integration/minion/conftest.py @@ -0,0 +1,103 @@ +import os +import shutil +import subprocess + +import pytest + +import salt.utils.platform +from tests.conftest import FIPS_TESTRUN + + +@pytest.fixture +def salt_master_1(request, salt_factories): + config_defaults = { + "open_mode": True, + "transport": request.config.getoption("--transport"), + } + config_overrides = { + "interface": "127.0.0.1", + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), + } + + factory = salt_factories.salt_master_daemon( + "master-1", + defaults=config_defaults, + overrides=config_overrides, + extra_cli_arguments_after_first_start_failure=["--log-level=info"], + ) + with factory.started(start_timeout=120): + yield factory + + +@pytest.fixture +def salt_master_2(salt_factories, salt_master_1): + if salt.utils.platform.is_darwin() or 
salt.utils.platform.is_freebsd(): + subprocess.check_output(["ifconfig", "lo0", "alias", "127.0.0.2", "up"]) + + config_defaults = { + "open_mode": True, + "transport": salt_master_1.config["transport"], + } + config_overrides = { + "interface": "127.0.0.2", + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), + } + + # Use the same ports for both masters, they are binding to different interfaces + for key in ( + "ret_port", + "publish_port", + ): + config_overrides[key] = salt_master_1.config[key] + factory = salt_factories.salt_master_daemon( + "master-2", + defaults=config_defaults, + overrides=config_overrides, + extra_cli_arguments_after_first_start_failure=["--log-level=info"], + ) + + # The secondary salt master depends on the primarily salt master fixture + # because we need to clone the keys + for keyfile in ("master.pem", "master.pub"): + shutil.copyfile( + os.path.join(salt_master_1.config["pki_dir"], keyfile), + os.path.join(factory.config["pki_dir"], keyfile), + ) + with factory.started(start_timeout=120): + yield factory + + +@pytest.fixture +def salt_minion_1(salt_master_1, salt_master_2): + config_defaults = { + "transport": salt_master_1.config["transport"], + } + + master_1_port = salt_master_1.config["ret_port"] + master_1_addr = salt_master_1.config["interface"] + master_2_port = salt_master_2.config["ret_port"] + master_2_addr = salt_master_2.config["interface"] + config_overrides = { + "master": [ + f"{master_1_addr}:{master_1_port}", + f"{master_2_addr}:{master_2_port}", + ], + "test.foo": "baz", + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + } + factory = salt_master_1.salt_minion_daemon( + "minion-1", + defaults=config_defaults, + overrides=config_overrides, + extra_cli_arguments_after_first_start_failure=["--log-level=info"], + ) + 
with factory.started(start_timeout=120): + yield factory diff --git a/tests/pytests/integration/minion/test_job_return.py b/tests/pytests/integration/minion/test_job_return.py index c91748597dc..e4a76ae897c 100644 --- a/tests/pytests/integration/minion/test_job_return.py +++ b/tests/pytests/integration/minion/test_job_return.py @@ -1,107 +1,5 @@ -import os -import shutil -import subprocess - import pytest -import salt.utils.platform -from tests.conftest import FIPS_TESTRUN - - -@pytest.fixture -def salt_master_1(request, salt_factories): - config_defaults = { - "open_mode": True, - "transport": request.config.getoption("--transport"), - } - config_overrides = { - "interface": "127.0.0.1", - "fips_mode": FIPS_TESTRUN, - "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" - ), - } - - factory = salt_factories.salt_master_daemon( - "master-1", - defaults=config_defaults, - overrides=config_overrides, - extra_cli_arguments_after_first_start_failure=["--log-level=info"], - ) - with factory.started(start_timeout=120): - yield factory - - -@pytest.fixture -def salt_master_2(salt_factories, salt_master_1): - if salt.utils.platform.is_darwin() or salt.utils.platform.is_freebsd(): - subprocess.check_output(["ifconfig", "lo0", "alias", "127.0.0.2", "up"]) - - config_defaults = { - "open_mode": True, - "transport": salt_master_1.config["transport"], - } - config_overrides = { - "interface": "127.0.0.2", - "fips_mode": FIPS_TESTRUN, - "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" - ), - } - - # Use the same ports for both masters, they are binding to different interfaces - for key in ( - "ret_port", - "publish_port", - ): - config_overrides[key] = salt_master_1.config[key] - factory = salt_factories.salt_master_daemon( - "master-2", - defaults=config_defaults, - overrides=config_overrides, - extra_cli_arguments_after_first_start_failure=["--log-level=info"], - ) - - # The secondary salt master depends 
on the primarily salt master fixture - # because we need to clone the keys - for keyfile in ("master.pem", "master.pub"): - shutil.copyfile( - os.path.join(salt_master_1.config["pki_dir"], keyfile), - os.path.join(factory.config["pki_dir"], keyfile), - ) - with factory.started(start_timeout=120): - yield factory - - -@pytest.fixture -def salt_minion_1(salt_master_1, salt_master_2): - config_defaults = { - "transport": salt_master_1.config["transport"], - } - - master_1_port = salt_master_1.config["ret_port"] - master_1_addr = salt_master_1.config["interface"] - master_2_port = salt_master_2.config["ret_port"] - master_2_addr = salt_master_2.config["interface"] - config_overrides = { - "master": [ - f"{master_1_addr}:{master_1_port}", - f"{master_2_addr}:{master_2_port}", - ], - "test.foo": "baz", - "fips_mode": FIPS_TESTRUN, - "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", - "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", - } - factory = salt_master_1.salt_minion_daemon( - "minion-1", - defaults=config_defaults, - overrides=config_overrides, - extra_cli_arguments_after_first_start_failure=["--log-level=info"], - ) - with factory.started(start_timeout=120): - yield factory - @pytest.mark.timeout_unless_on_windows(360) def test_job_return(salt_master_1, salt_master_2, salt_minion_1): diff --git a/tests/pytests/integration/minion/test_start_event.py b/tests/pytests/integration/minion/test_start_event.py new file mode 100644 index 00000000000..b6f1406e701 --- /dev/null +++ b/tests/pytests/integration/minion/test_start_event.py @@ -0,0 +1,23 @@ +import time + +import pytest + + +@pytest.fixture +def start(): + return time.time() + + +def test_minion_start_event( + start, event_listener, salt_master_1, salt_master_2, salt_minion_1 +): + start_events = event_listener.wait_for_events( + [ + (salt_master_1.id, f"salt/minion/{salt_minion_1.id}/start"), + (salt_master_2.id, f"salt/minion/{salt_minion_1.id}/start"), + ], + 
timeout=60, + after_time=start, + ) + assert not start_events.missed + assert len(start_events.matches) == 2 diff --git a/tests/pytests/integration/minion/test_startup_states.py b/tests/pytests/integration/minion/test_startup_states.py new file mode 100644 index 00000000000..d3bc2204161 --- /dev/null +++ b/tests/pytests/integration/minion/test_startup_states.py @@ -0,0 +1,114 @@ +"""Test minion configuration option startup_states. + +There are four valid values for this option, which are validated by checking the jobs +executed after minion start. +""" + +import pytest + + +@pytest.fixture +def salt_minion_startup_states_empty_string(salt_master, salt_minion_id): + config_overrides = { + "startup_states": "", + } + factory = salt_master.salt_minion_daemon( + f"{salt_minion_id}-empty-string", + overrides=config_overrides, + ) + with factory.started(): + yield factory + + +@pytest.fixture +def salt_minion_startup_states_highstate(salt_master, salt_minion_id): + config_overrides = { + "startup_states": "highstate", + } + factory = salt_master.salt_minion_daemon( + f"{salt_minion_id}-highstate", + overrides=config_overrides, + ) + with factory.started(): + yield factory + + +@pytest.fixture +def salt_minion_startup_states_sls(salt_master, salt_minion_id): + config_overrides = {"startup_states": "sls", "sls_list": ["example-sls"]} + factory = salt_master.salt_minion_daemon( + f"{salt_minion_id}-sls", + overrides=config_overrides, + ) + with factory.started(): + yield factory + + +@pytest.fixture +def salt_minion_startup_states_top(salt_master, salt_minion_id): + config_overrides = {"startup_states": "top", "top_file": "example-top.sls"} + factory = salt_master.salt_minion_daemon( + f"{salt_minion_id}-top", + overrides=config_overrides, + ) + with factory.started(): + yield factory + + +def test_startup_states_empty_string( + salt_run_cli, salt_minion_startup_states_empty_string +): + # Get jobs for this minion + ret = salt_run_cli.run( + "jobs.list_jobs", 
f"search_target={salt_minion_startup_states_empty_string.id}" + ) + # Check no job was run + assert len(ret.data.keys()) == 0 + + +def test_startup_states_highstate(salt_run_cli, salt_minion_startup_states_highstate): + with salt_minion_startup_states_highstate: + # Get jobs for this minion + ret = salt_run_cli.run( + "jobs.list_jobs", f"search_target={salt_minion_startup_states_highstate.id}" + ) + # Check there is exactly one job + assert len(ret.data.keys()) == 1 + # Check that job executes state.highstate + job_ret = next(iter(ret.data.values())) + assert "Function" in job_ret + assert job_ret["Function"] == "state.highstate" + assert "Arguments" in job_ret + assert job_ret["Arguments"] == [] + + +def test_startup_states_sls(salt_run_cli, salt_minion_startup_states_sls): + with salt_minion_startup_states_sls: + # Get jobs for this minion + ret = salt_run_cli.run( + "jobs.list_jobs", f"search_target={salt_minion_startup_states_sls.id}" + ) + # Check there is exactly one job + assert len(ret.data.keys()) == 1 + # Check that job executes state.sls + job_ret = next(iter(ret.data.values())) + assert "Function" in job_ret + assert job_ret["Function"] == "state.sls" + assert "Arguments" in job_ret + assert job_ret["Arguments"] == [["example-sls"]] + + +def test_startup_states_top(salt_run_cli, salt_minion_startup_states_top): + with salt_minion_startup_states_top: + # Get jobs for this minion + ret = salt_run_cli.run( + "jobs.list_jobs", f"search_target={salt_minion_startup_states_top.id}" + ) + # Check there is exactly one job + assert len(ret.data.keys()) == 1 + # Check that job executes state.top + job_ret = next(iter(ret.data.values())) + assert "Function" in job_ret + assert job_ret["Function"] == "state.top" + assert "Arguments" in job_ret + assert job_ret["Arguments"] == ["example-top.sls"] diff --git a/tests/pytests/unit/grains/test_core.py b/tests/pytests/unit/grains/test_core.py index 3015a2a30cb..b5d2d30b936 100644 --- 
a/tests/pytests/unit/grains/test_core.py +++ b/tests/pytests/unit/grains/test_core.py @@ -1877,6 +1877,37 @@ def test_lxc_virtual_with_virt_what(): assert ret["virtual_subtype"] == "LXC" +@pytest.mark.skip_on_windows +def test_podman_virtual_with_systemd_detect_virt(): + """ + Test if virtual grains are parsed correctly in Podman using systemd-detect-virt. + """ + + def _which_side_effect(path): + if path == "systemd-detect-virt": + return "/usr/bin/systemd-detect-virt" + return None + + with patch.object( + salt.utils.platform, "is_windows", MagicMock(return_value=False) + ), patch.object( + salt.utils.path, + "which", + MagicMock(return_value=True, side_effect=_which_side_effect), + ), patch.dict( + core.__salt__, + { + "cmd.run_all": MagicMock( + return_value={"pid": 78, "retcode": 0, "stderr": "", "stdout": "podman"} + ) + }, + ): + osdata = {"kernel": "test"} + ret = core._virtual(osdata) + assert ret["virtual"] == "container" + assert ret["virtual_subtype"] == "Podman" + + @pytest.mark.skip_on_windows def test_container_inside_virtual_machine(): """ diff --git a/tests/pytests/unit/modules/test_cmdmod.py b/tests/pytests/unit/modules/test_cmdmod.py index e1f2a604cd1..5501d8a0517 100644 --- a/tests/pytests/unit/modules/test_cmdmod.py +++ b/tests/pytests/unit/modules/test_cmdmod.py @@ -24,6 +24,11 @@ from salt.exceptions import CommandExecutionError from tests.support.mock import MagicMock, Mock, MockTimedProc, mock_open, patch from tests.support.runtests import RUNTIME_VARS +pytestmark = [ + pytest.mark.core_test, + pytest.mark.windows_whitelisted, +] + DEFAULT_SHELL = "foo/bar" MOCK_SHELL_FILE = "# List of acceptable shells\n\n/bin/bash\n" @@ -1052,6 +1057,7 @@ def test_runas_env_sudo_group(bundled): ) +@pytest.mark.skip_unless_on_windows def test_prep_powershell_cmd_no_powershell(): with pytest.raises(CommandExecutionError): cmdmod._prep_powershell_cmd( @@ -1064,8 +1070,10 @@ def test_prep_powershell_cmd_no_powershell(): [ ("Write-Host foo", "& Write-Host 
foo"), ("$PSVersionTable", "$PSVersionTable"), + ("try {this} catch {that}", "try {this} catch {that}"), ], ) +@pytest.mark.skip_unless_on_windows def test_prep_powershell_cmd(cmd, parsed): """ Tests _prep_powershell_cmd returns correct cmd @@ -1089,6 +1097,7 @@ def test_prep_powershell_cmd(cmd, parsed): assert ret == expected +@pytest.mark.skip_unless_on_windows def test_prep_powershell_cmd_encoded(): """ Tests _prep_powershell_cmd returns correct cmd when encoded_cmd=True @@ -1114,6 +1123,7 @@ def test_prep_powershell_cmd_encoded(): assert ret == expected +@pytest.mark.skip_unless_on_windows def test_prep_powershell_cmd_script(): """ Tests _prep_powershell_cmd returns correct cmd when called from cmd.script @@ -1133,7 +1143,7 @@ def test_prep_powershell_cmd_script(): "-ExecutionPolicy", "Bypass", "-Command", - f"& {script}", + f"& {script}; exit $LASTEXITCODE", ] assert ret == expected @@ -1147,6 +1157,7 @@ def test_prep_powershell_cmd_script(): ('{"foo": "bar"}', '{"foo": "bar"}'), # Should leave unchanged ], ) +@pytest.mark.skip_unless_on_windows def test_prep_powershell_json(text, expected): """ Make sure the output is valid json diff --git a/tests/pytests/unit/modules/test_yumpkg.py b/tests/pytests/unit/modules/test_yumpkg.py index 179f3113d55..04fa56986eb 100644 --- a/tests/pytests/unit/modules/test_yumpkg.py +++ b/tests/pytests/unit/modules/test_yumpkg.py @@ -874,7 +874,7 @@ def test_list_upgrades_dnf(): "--enablerepo=good", "--branch=foo", "list", - "upgrades", + "--upgrades", ], env={}, output_loglevel="trace", @@ -899,7 +899,7 @@ def test_list_upgrades_dnf(): "--enablerepo=good", "--branch=foo", "list", - "upgrades", + "--upgrades", ], env={}, output_loglevel="trace", @@ -926,7 +926,7 @@ def test_list_downloaded(): mock_walk = MagicMock( return_value=[ ( - "/var/cache/yum", + os.path.join("/var/cache", yumpkg._yum()), [], ["pkg1-3.1-16.1.x86_64.rpm", "pkg2-1.2-13.2.x86_64.rpm"], ) @@ -955,7 +955,9 @@ def test_list_downloaded(): "3.1": { 
"creation_date_time": "2023-10-05T14:01:22", "creation_date_time_t": 1696536082, - "path": "/var/cache/yum/pkg1-3.1-16.1.x86_64.rpm", + "path": os.path.join( + "/var/cache", yumpkg._yum(), "pkg1-3.1-16.1.x86_64.rpm" + ), "size": 75701688, }, }, @@ -963,7 +965,9 @@ def test_list_downloaded(): "1.2": { "creation_date_time": "2023-10-05T14:01:22", "creation_date_time_t": 1696536082, - "path": "/var/cache/yum/pkg2-1.2-13.2.x86_64.rpm", + "path": os.path.join( + "/var/cache", yumpkg._yum(), "pkg2-1.2-13.2.x86_64.rpm" + ), "size": 75701688, }, }, @@ -1150,11 +1154,12 @@ def test_download(): patch_salt = patch.dict(yumpkg.__salt__, dict_salt) with patch_which, patch_exists, patch_makedirs, patch_listdir, patch_salt: result = yumpkg.download("spongebob") - cmd = ["yumdownloader", "-q", "--destdir=/var/cache/yum/packages", "spongebob"] + cache_dir = os.path.join("/var/cache", yumpkg._yum(), "packages") + cmd = ["yumdownloader", "-q", f"--destdir={cache_dir}", "spongebob"] mock_run.assert_called_once_with( cmd, output_loglevel="trace", python_shell=False ) - expected = {"spongebob": "/var/cache/yum/packages/spongebob-1.2.rpm"} + expected = {"spongebob": f"{cache_dir}/spongebob-1.2.rpm"} assert result == expected @@ -1171,10 +1176,11 @@ def test_download_failed(): patch_salt = patch.dict(yumpkg.__salt__, dict_salt) with patch_which, patch_exists, patch_listdir, patch_unlink, patch_salt: result = yumpkg.download("spongebob", "patrick") + cache_dir = os.path.join("/var/cache", yumpkg._yum(), "packages") cmd = [ "yumdownloader", "-q", - "--destdir=/var/cache/yum/packages", + f"--destdir={cache_dir}", "spongebob", "patrick", ] @@ -1183,7 +1189,7 @@ def test_download_failed(): ) expected = { "_error": "The following package(s) failed to download: patrick", - "spongebob": "/var/cache/yum/packages/spongebob-1.2.rpm", + "spongebob": f"{cache_dir}/spongebob-1.2.rpm", } assert result == expected @@ -1207,11 +1213,12 @@ def test_download_to_purge(): patch_salt = 
patch.dict(yumpkg.__salt__, dict_salt) with patch_which, patch_exists, patch_listdir, patch_unlink, patch_salt: result = yumpkg.download("spongebob") - cmd = ["yumdownloader", "-q", "--destdir=/var/cache/yum/packages", "spongebob"] + cache_dir = os.path.join("/var/cache", yumpkg._yum(), "packages") + cmd = ["yumdownloader", "-q", f"--destdir={cache_dir}", "spongebob"] mock_run.assert_called_once_with( cmd, output_loglevel="trace", python_shell=False ) - expected = {"spongebob": "/var/cache/yum/packages/spongebob-1.2.rpm"} + expected = {"spongebob": f"{cache_dir}/spongebob-1.2.rpm"} assert result == expected @@ -3163,6 +3170,15 @@ def test_services_need_restart_no_dnf_output(): assert yumpkg.services_need_restart() == [] +def test_services_need_restart_no_dnf5_output(): + patch_yum = patch("salt.modules.yumpkg._yum", Mock(return_value="dnf5")) + patch_booted = patch("salt.utils.systemd.booted", Mock(return_value=True)) + mock_run_stdout = MagicMock(return_value="") + patch_run_stdout = patch.dict(yumpkg.__salt__, {"cmd.run_stdout": mock_run_stdout}) + with patch_yum, patch_booted, patch_run_stdout: + assert yumpkg.services_need_restart() == [] + + def test_61003_pkg_should_not_fail_when_target_not_in_old_pkgs(): patch_list_pkgs = patch( "salt.modules.yumpkg.list_pkgs", return_value={}, autospec=True diff --git a/tests/pytests/unit/test_minion.py b/tests/pytests/unit/test_minion.py index 85dac2a0ae7..c3605291370 100644 --- a/tests/pytests/unit/test_minion.py +++ b/tests/pytests/unit/test_minion.py @@ -515,7 +515,7 @@ async def test_process_count_max(minion_opts, io_loop): @pytest.mark.slow_test -def test_beacons_before_connect(minion_opts): +async def test_beacons_before_connect(minion_opts): """ Tests that the 'beacons_before_connect' option causes the beacons to be initialized before connect. 
""" @@ -535,7 +535,7 @@ def test_beacons_before_connect(minion_opts): try: try: - minion.tune_in(start=True) + await minion.tune_in(start=True) except RuntimeError: pass @@ -547,7 +547,7 @@ def test_beacons_before_connect(minion_opts): @pytest.mark.slow_test -def test_scheduler_before_connect(minion_opts): +async def test_scheduler_before_connect(minion_opts): """ Tests that the 'scheduler_before_connect' option causes the scheduler to be initialized before connect. """ @@ -566,7 +566,7 @@ def test_scheduler_before_connect(minion_opts): minion = salt.minion.Minion(minion_opts, io_loop=io_loop) try: try: - minion.tune_in(start=True) + await minion.tune_in(start=True) except RuntimeError: pass @@ -636,7 +636,7 @@ def test_minion_module_refresh_beacons_refresh(minion_opts): @pytest.mark.slow_test -def test_when_ping_interval_is_set_the_callback_should_be_added_to_periodic_callbacks( +async def test_when_ping_interval_is_set_the_callback_should_be_added_to_periodic_callbacks( minion_opts, ): with patch("salt.minion.Minion.ctx", MagicMock(return_value={})), patch( diff --git a/tests/unit/utils/test_reactor.py b/tests/pytests/unit/utils/test_reactor2.py similarity index 59% rename from tests/unit/utils/test_reactor.py rename to tests/pytests/unit/utils/test_reactor2.py index 4123bdfb9b2..0e1c32d950f 100644 --- a/tests/unit/utils/test_reactor.py +++ b/tests/pytests/unit/utils/test_reactor2.py @@ -4,14 +4,17 @@ import logging import os import textwrap +import pytest + import salt.loader +import salt.template import salt.utils.data import salt.utils.files import salt.utils.reactor as reactor import salt.utils.yaml -from tests.support.mixins import AdaptedConfigurationTestCaseMixin from tests.support.mock import MagicMock, Mock, mock_open, patch -from tests.support.unit import TestCase + +log = logging.getLogger(__name__) REACTOR_CONFIG = """\ reactor: @@ -143,9 +146,6 @@ SLS = { } LOW_CHUNKS = { - # Note that the "name" value in the chunk has been overwritten by the - # 
"name" argument in the SLS. This is one reason why the new schema was - # needed. "old_runner": [ { "state": "runner", @@ -183,7 +183,7 @@ LOW_CHUNKS = { ], "old_cmd": [ { - "state": "local", # 'cmd' should be aliased to 'local' + "state": "local", "__id__": "install_zsh", "name": "install_zsh", "__sls__": "/srv/reactor/old_cmd.sls", @@ -379,173 +379,207 @@ WRAPPER_CALLS = { "new_caller": {"args": ("file.touch",), "kwargs": {"name": "/tmp/foo"}}, } -log = logging.getLogger(__name__) + +# ----------------------------------------------------------------------------- +# FIXTURES +# ----------------------------------------------------------------------------- +@pytest.fixture +def react_master_opts(master_opts): + opts = { + # Minimal stand-in for a real master config + "file_roots": {"base": []}, + "renderer": "jinja|yaml", + } + master_opts.update(opts) + # Optionally parse the reactor config for convenience + reactor_config = salt.utils.yaml.safe_load(REACTOR_CONFIG) + master_opts.update(reactor_config) + return master_opts -class TestReactor(TestCase, AdaptedConfigurationTestCaseMixin): +@pytest.fixture +def test_reactor(react_master_opts): """ - Tests for constructing the low chunks to be executed via the Reactor + Create a Reactor instance for testing """ - - @classmethod - def setUpClass(cls): - """ - Load the reactor config for mocking - """ - cls.opts = cls.get_temp_config("master") - reactor_config = salt.utils.yaml.safe_load(REACTOR_CONFIG) - cls.opts.update(reactor_config) - cls.reactor = reactor.Reactor(cls.opts) - cls.reaction_map = salt.utils.data.repack_dictlist(reactor_config["reactor"]) - renderers = salt.loader.render(cls.opts, {}) - cls.render_pipe = [(renderers[x], "") for x in ("jinja", "yaml")] - - @classmethod - def tearDownClass(cls): - del cls.opts - del cls.reactor - del cls.render_pipe - - def test_list_reactors(self): - """ - Ensure that list_reactors() returns the correct list of reactor SLS - files for each tag. 
- """ - for schema in ("old", "new"): - for rtype in REACTOR_DATA: - tag = "_".join((schema, rtype)) - self.assertEqual( - self.reactor.list_reactors(tag), self.reaction_map[tag] - ) - - def test_reactions(self): - """ - Ensure that the correct reactions are built from the configured SLS - files and tag data. - """ - for schema in ("old", "new"): - for rtype in REACTOR_DATA: - tag = "_".join((schema, rtype)) - log.debug("test_reactions: processing %s", tag) - reactors = self.reactor.list_reactors(tag) - log.debug("test_reactions: %s reactors: %s", tag, reactors) - # No globbing in our example SLS, and the files don't actually - # exist, so mock glob.glob to just return back the path passed - # to it. - with patch.object(glob, "glob", MagicMock(side_effect=lambda x: [x])): - # The below four mocks are all so that - # salt.template.compile_template() will read the templates - # we've mocked up in the SLS global variable above. - with patch.object(os.path, "isfile", MagicMock(return_value=True)): - with patch.object( - salt.utils.files, "is_empty", MagicMock(return_value=False) - ): - with patch.object( - codecs, "open", mock_open(read_data=SLS[reactors[0]]) - ): - with patch.object( - salt.template, - "template_shebang", - MagicMock(return_value=self.render_pipe), - ): - reactions = self.reactor.reactions( - tag, - REACTOR_DATA[rtype], - reactors, - ) - log.debug( - "test_reactions: %s reactions: %s", - tag, - reactions, - ) - self.assertEqual(reactions, LOW_CHUNKS[tag]) + return reactor.Reactor(react_master_opts) -class TestReactWrap(TestCase, AdaptedConfigurationTestCaseMixin): +@pytest.fixture +def reaction_map(react_master_opts): """ - Tests that we are formulating the wrapper calls properly + Reaction map from the configured reactor """ + return salt.utils.data.repack_dictlist(react_master_opts["reactor"]) - @classmethod - def setUpClass(cls): - cls.wrap = reactor.ReactWrap(cls.get_temp_config("master")) - @classmethod - def tearDownClass(cls): - del cls.wrap 
+@pytest.fixture +def render_pipe(react_master_opts): + """ + Render pipeline + """ + renderers = salt.loader.render(react_master_opts, {}) + return [(renderers[x], "") for x in ("jinja", "yaml")] - def test_runner(self): - """ - Test runner reactions using both the old and new config schema - """ - for schema in ("old", "new"): - tag = "_".join((schema, "runner")) - chunk = LOW_CHUNKS[tag][0] - thread_pool = Mock() - thread_pool.fire_async = Mock() - with patch.object(self.wrap, "pool", thread_pool): - self.wrap.run(chunk) - thread_pool.fire_async.assert_called_with( - self.wrap.client_cache["runner"].low, args=WRAPPER_CALLS[tag] - ) - def test_wheel(self): - """ - Test wheel reactions using both the old and new config schema - """ - for schema in ("old", "new"): - tag = "_".join((schema, "wheel")) - chunk = LOW_CHUNKS[tag][0] - thread_pool = Mock() - thread_pool.fire_async = Mock() - with patch.object(self.wrap, "pool", thread_pool): - self.wrap.run(chunk) - thread_pool.fire_async.assert_called_with( - self.wrap.client_cache["wheel"].low, args=WRAPPER_CALLS[tag] - ) +# ----------------------------------------------------------------------------- +# TESTS for Reactor building the low chunks +# ----------------------------------------------------------------------------- +@pytest.mark.parametrize("schema", ["old", "new"]) +@pytest.mark.parametrize("rtype", list(REACTOR_DATA.keys())) +def test_reactor_reactions(schema, rtype, test_reactor, render_pipe): + """ + Ensure correct reactions are built from the configured SLS files and tag data. 
+ """ + tag = f"{schema}_{rtype}" + reactors_list = test_reactor.list_reactors(tag) - def test_local(self): - """ - Test local reactions using both the old and new config schema - """ - for schema in ("old", "new"): - tag = "_".join((schema, "local")) - chunk = LOW_CHUNKS[tag][0] - client_cache = {"local": Mock()} - client_cache["local"].cmd_async = Mock() - with patch.object(self.wrap, "client_cache", client_cache): - self.wrap.run(chunk) - client_cache["local"].cmd_async.assert_called_with( - *WRAPPER_CALLS[tag]["args"], **WRAPPER_CALLS[tag]["kwargs"] - ) + # Patch out globbing since these SLS files don't actually exist on disk + with patch.object(glob, "glob", MagicMock(side_effect=lambda x: [x])): + with patch.object(os.path, "isfile", MagicMock(return_value=True)): + with patch.object( + salt.utils.files, "is_empty", MagicMock(return_value=False) + ): + with patch.object( + codecs, "open", mock_open(read_data=SLS[reactors_list[0]]) + ): + with patch.object( + salt.template, + "template_shebang", + MagicMock(return_value=render_pipe), + ): + reactions = test_reactor.reactions( + tag, REACTOR_DATA[rtype], reactors_list + ) + assert reactions == LOW_CHUNKS[tag], f"Reactions did not match for tag: {tag}" - def test_cmd(self): - """ - Test cmd reactions (alias for 'local') using both the old and new - config schema - """ - for schema in ("old", "new"): - tag = "_".join((schema, "cmd")) - chunk = LOW_CHUNKS[tag][0] - client_cache = {"local": Mock()} - client_cache["local"].cmd_async = Mock() - with patch.object(self.wrap, "client_cache", client_cache): - self.wrap.run(chunk) - client_cache["local"].cmd_async.assert_called_with( - *WRAPPER_CALLS[tag]["args"], **WRAPPER_CALLS[tag]["kwargs"] - ) - def test_caller(self): - """ - Test caller reactions using both the old and new config schema - """ - for schema in ("old", "new"): - tag = "_".join((schema, "caller")) - chunk = LOW_CHUNKS[tag][0] - client_cache = {"caller": Mock()} - client_cache["caller"].cmd = Mock() - 
with patch.object(self.wrap, "client_cache", client_cache): - self.wrap.run(chunk) - client_cache["caller"].cmd.assert_called_with( - *WRAPPER_CALLS[tag]["args"], **WRAPPER_CALLS[tag]["kwargs"] - ) +def test_list_reactors(test_reactor, reaction_map): + """ + Ensure list_reactors() returns the correct list of reactor SLS files for each tag. + """ + for schema in ("old", "new"): + for rtype in REACTOR_DATA: + tag = f"{schema}_{rtype}" + assert test_reactor.list_reactors(tag) == reaction_map[tag] + + +# ----------------------------------------------------------------------------- +# FIXTURE for Reactor Wrap +# ----------------------------------------------------------------------------- +@pytest.fixture +def react_wrap(react_master_opts): + """ + Create a ReactWrap instance + """ + return reactor.ReactWrap(react_master_opts) + + +# ----------------------------------------------------------------------------- +# TESTS for ReactWrap +# ----------------------------------------------------------------------------- +@pytest.mark.parametrize("schema", ["old", "new"]) +def test_runner(schema, react_wrap): + """ + Test runner reactions using both the old and new config schema + """ + tag = f"{schema}_runner" + chunk = LOW_CHUNKS[tag][0] + thread_pool = Mock() + thread_pool.fire_async = Mock() + with patch.object(react_wrap, "pool", thread_pool): + react_wrap.run(chunk) + thread_pool.fire_async.assert_called_with( + react_wrap.client_cache["runner"].low, + args=WRAPPER_CALLS[tag], + ) + + +@pytest.mark.parametrize("schema", ["old", "new"]) +def test_wheel(schema, react_wrap): + """ + Test wheel reactions using both the old and new config schema + """ + tag = f"{schema}_wheel" + chunk = LOW_CHUNKS[tag][0] + thread_pool = Mock() + thread_pool.fire_async = Mock() + with patch.object(react_wrap, "pool", thread_pool): + react_wrap.run(chunk) + thread_pool.fire_async.assert_called_with( + react_wrap.client_cache["wheel"].low, + args=WRAPPER_CALLS[tag], + ) + + 
+@pytest.mark.parametrize("schema", ["old", "new"]) +def test_local(schema, react_wrap): + """ + Test local reactions using both the old and new config schema + """ + tag = f"{schema}_local" + chunk = LOW_CHUNKS[tag][0] + client_cache = {"local": Mock()} + client_cache["local"].cmd_async = Mock() + with patch.object(react_wrap, "client_cache", client_cache): + react_wrap.run(chunk) + client_cache["local"].cmd_async.assert_called_with( + *WRAPPER_CALLS[tag]["args"], **WRAPPER_CALLS[tag]["kwargs"] + ) + + +@pytest.mark.parametrize("schema", ["old", "new"]) +def test_cmd(schema, react_wrap): + """ + Test cmd reactions (alias for 'local') using both the old and new config schema + """ + tag = f"{schema}_cmd" + chunk = LOW_CHUNKS[tag][0] + client_cache = {"local": Mock()} + client_cache["local"].cmd_async = Mock() + with patch.object(react_wrap, "client_cache", client_cache): + react_wrap.run(chunk) + client_cache["local"].cmd_async.assert_called_with( + *WRAPPER_CALLS[tag]["args"], **WRAPPER_CALLS[tag]["kwargs"] + ) + + +@pytest.mark.parametrize("schema", ["old", "new"]) +def test_caller(schema, react_wrap): + """ + Test caller reactions using both the old and new config schema + """ + tag = f"{schema}_caller" + chunk = LOW_CHUNKS[tag][0] + client_cache = {"caller": Mock()} + client_cache["caller"].cmd = Mock() + with patch.object(react_wrap, "client_cache", client_cache): + react_wrap.run(chunk) + client_cache["caller"].cmd.assert_called_with( + *WRAPPER_CALLS[tag]["args"], **WRAPPER_CALLS[tag]["kwargs"] + ) + + +@pytest.mark.parametrize("file_client", ["runner", "wheel"]) +def test_client_cache_missing_key(file_client, react_wrap): + """ + Test client_cache file_client missing, gets repopulated + """ + client_cache = {} + tag = f"new_{file_client}" + chunk = LOW_CHUNKS[tag][0] + with patch.object(react_wrap, "client_cache", client_cache): + if f"{file_client}" == "runner": + react_wrap.runner(chunk) + elif f"{file_client}" == "wheel": + react_wrap.wheel(chunk) + 
else: + # catch need for new check + assert f"{file_client}" == "bad parameterization" + + file_client_key = None + for key in react_wrap.client_cache.keys(): + if key == f"{file_client}": + file_client_key = key + + assert file_client_key == f"{file_client}" diff --git a/tests/support/pkg.py b/tests/support/pkg.py index 6c67f948540..1adbc38013c 100644 --- a/tests/support/pkg.py +++ b/tests/support/pkg.py @@ -8,7 +8,7 @@ import re import shutil import textwrap import time -from typing import TYPE_CHECKING, Dict, List +from typing import TYPE_CHECKING import attr import distro @@ -50,7 +50,7 @@ class SaltPkgInstall: ssm_bin: pathlib.Path = attr.ib(default=None) bin_dir: pathlib.Path = attr.ib(default=None) install_dir: pathlib.Path = attr.ib(init=False) - binary_paths: Dict[str, List[pathlib.Path]] = attr.ib(init=False) + binary_paths: dict[str, list[pathlib.Path]] = attr.ib(init=False) config_path: str = attr.ib(init=False) conf_dir: pathlib.Path = attr.ib() @@ -79,8 +79,8 @@ class SaltPkgInstall: pkg_mngr: str = attr.ib(init=False) rm_pkg: str = attr.ib(init=False) dbg_pkg: str = attr.ib(init=False) - salt_pkgs: List[str] = attr.ib(init=False) - pkgs: List[str] = attr.ib(factory=list) + salt_pkgs: list[str] = attr.ib(init=False) + pkgs: list[str] = attr.ib(factory=list) file_ext: bool = attr.ib(default=None) relenv: bool = attr.ib(default=True) @@ -1564,7 +1564,7 @@ class ApiRequest: account: TestAccount = attr.ib(repr=False) session: requests.Session = attr.ib(init=False, repr=False) api_uri: str = attr.ib(init=False) - auth_data: Dict[str, str] = attr.ib(init=False) + auth_data: dict[str, str] = attr.ib(init=False) @session.default def _default_session(self): diff --git a/tests/support/pytest/helpers.py b/tests/support/pytest/helpers.py index e7f38aafaa3..5ffbc537e4b 100644 --- a/tests/support/pytest/helpers.py +++ b/tests/support/pytest/helpers.py @@ -333,8 +333,9 @@ class TestAccount: if salt.utils.platform.is_windows(): log.debug("Configuring system account: 
%s", self) ret = self.sminion.functions.user.update( - self.username, password_never_expires=True + self.username, expired=False, password_never_expires=True ) + assert ret is True if salt.utils.platform.is_darwin() or salt.utils.platform.is_windows(): password = self.password else: diff --git a/tools/__init__.py b/tools/__init__.py index af50a06ef47..9fc43adbf12 100644 --- a/tools/__init__.py +++ b/tools/__init__.py @@ -31,6 +31,7 @@ RELEASE_VENV_CONFIG = VirtualEnvPipConfig( ptscripts.set_default_config(DEFAULT_REQS_CONFIG) ptscripts.register_tools_module("tools.changelog") ptscripts.register_tools_module("tools.ci") +ptscripts.register_tools_module("tools.container") ptscripts.register_tools_module("tools.docs") ptscripts.register_tools_module("tools.gh") ptscripts.register_tools_module("tools.pkg") diff --git a/tools/container.py b/tools/container.py new file mode 100644 index 00000000000..43c8c11dbf4 --- /dev/null +++ b/tools/container.py @@ -0,0 +1,108 @@ +import os + +from ptscripts import Context, command_group + +cmd = command_group(name="container", help="Container Commands", description=__doc__) + + +def has_network(ctx, name): + p = ctx.run("docker", "network", "ls", capture=True) + return name in p.stdout.decode() + + +def create_network(ctx, name): + p = ctx.run( + "docker", + "network", + "create", + "-o", + "com.docker.network.driver.mtu=1500", + "--ipv6", + "--subnet", + "2001:db8::/64", + name, + ) + if p.returncode != 0: + raise RuntimeError(f"docker network create returned {p.returncode}") + + +@cmd.command( + name="create", + arguments={ + "image": {"help": "Name the container image to use."}, + "name": {"help": "Name the container being created.", "default": ""}, + }, +) +def create(ctx: Context, image: str, name: str = ""): + onci = "GITHUB_WORKFLOW" in os.environ + workdir = "/salt" + home = "/root" + network = "ip6net" + if not onci and not has_network(ctx, network): + ctx.info(f"Creating docker network: {network}") + create_network(ctx, 
network) + if onci: + workdir = "/__w/salt/salt" + home = "/github/home" + env = { + "HOME": home, + "SKIP_REQUIREMENTS_INSTALL": "1", + "PRINT_TEST_SELECTION": "0", + "PRINT_TEST_PLAN_ONLY": "0", + "PRINT_SYSTEM_INFO": "0", + "RERUN_FAILURES": "0", + "SKIP_INITIAL_ONEDIR_FAILURES": "1", + "SKIP_INITIAL_GH_ACTIONS_FAILURES": "1", + "RAISE_DEPRECATIONS_RUNTIME_ERRORS": "1", + "LANG": "en_US.UTF-8", + "SHELL": "/bin/bash", + } + for var in [ + "PIP_INDEX_URL", + "PIP_EXTRA_INDEX_URL", + "PIP_TRUSTED_HOST", + "PIP_DISABLE_PIP_VERSION_CHECK", + "SALT_TRANSPORT", + # Are both of these really needed? + "GITHUB_ACTIONS", + "GITHUB_ACTIONS_PIPELINE", + "CI", + "SKIP_CODE_COVERAGE", + "COVERAGE_CONTEXT", + "RERUN_FAILURES", + "COLUMNS", + ]: + if var in os.environ: + env[var] = os.environ[var] + cmd = [ + "/usr/bin/docker", + "create", + f"--name={name}", + "--privileged", + f"--workdir={workdir}", + "-v", + "/tmp/:/var/lib/docker", + ] + for key in env: + cmd.extend(["-e", f"{key}={env[key]}"]) + if onci: + cmd.extend(["-v", "/home/runner/work:/__w"]) + else: + cmd.extend(["-v", f"{os.getcwd()}:/salt"]) + cmd.extend(["--network", network]) + if name: + cmd.extend(["--name", name]) + cmd.extend( + [ + "--entrypoint", + "/usr/lib/systemd/systemd", + image, + "--systemd", + "--unit", + "rescue.target", + ], + ) + ctx.info(f"command is: {cmd}") + ret = ctx.run(*cmd, capture=True, check=False) + if ret.returncode != 0: + ctx.warn(ret.stderr.decode())