diff --git a/.github/actions/build-onedir-salt/action.yml b/.github/actions/build-onedir-salt/action.yml
index 0e3888cada6..abac7a8d9e0 100644
--- a/.github/actions/build-onedir-salt/action.yml
+++ b/.github/actions/build-onedir-salt/action.yml
@@ -29,21 +29,13 @@ runs:
 
   steps:
 
-    - name: Download Cached Deps Onedir Package Directory
-      id: onedir-bare-cache
-      uses: ./.github/actions/cache
+    - name: Install Salt Packaging Dependencies into Relenv Onedir
+      uses: ./.github/actions/build-onedir-deps
       with:
-        path: artifacts/${{ inputs.package-name }}
-        key: >
-          ${{ inputs.cache-prefix }}|${{ inputs.python-version }}|deps|${{ inputs.platform }}|${{ inputs.arch }}|${{ inputs.package-name }}|${{
-            hashFiles(
-              format('{0}/.relenv/**/*.xz', github.workspace),
-              'requirements/static/pkg/*/*.txt',
-              '.github/actions/build-onedir-deps/action.yml',
-              '.github/workflows/build-deps-onedir-*.yml',
-              'cicd/shared-gh-workflows-context.yml'
-            )
-          }}
+        platform: ${{ inputs.platform }}
+        arch: ${{ inputs.arch }}
+        python-version: "${{ inputs.python-version }}"
+        cache-prefix: ${{ inputs.cache-prefix }}|relenv|${{ inputs.salt-version }}
 
     - name: Download Source Tarball
      uses: actions/download-artifact@v4
diff --git a/.github/actions/ssh-tunnel/README.md b/.github/actions/ssh-tunnel/README.md
index f6f03e5b2d5..b88b4e233f6 100644
--- a/.github/actions/ssh-tunnel/README.md
+++ b/.github/actions/ssh-tunnel/README.md
@@ -92,3 +92,9 @@ OkZFOjhCOjI3OjFDOjFBOkJEOjUxOjQ2OjE4OjBBOjhFOjVBOjI1OjQzOjQzOjZGOkRBXHJcbmE9c2V0
 dXA6YWN0aXZlXHJcbiIsICJ0eXBlIjogImFuc3dlciJ9
 -- Message received --
 ```
+
+SSH to the forwarded local port.
+
+```
+ssh -o StrictHostKeyChecking=no -o TCPKeepAlive=no -vv -p 5222 runner@localhost
+```
diff --git a/.github/actions/ssh-tunnel/rtcforward.py b/.github/actions/ssh-tunnel/rtcforward.py
index a0972d300db..460bd5c8488 100644
--- a/.github/actions/ssh-tunnel/rtcforward.py
+++ b/.github/actions/ssh-tunnel/rtcforward.py
@@ -6,6 +6,7 @@ import io
 import json
 import logging
 import os
+import signal
 import sys
 import textwrap
 import time
@@ -77,6 +78,42 @@ def print_pastable(data, message="offer"):
     sys.stdout.flush()
 
 
+async def read_from_stdin():
+    loop = asyncio.get_event_loop()
+    line = await loop.run_in_executor(
+        None, input, "-- Please enter a message from remote party --\n"
+    )
+    data = line
+    while line:
+        try:
+            line = await loop.run_in_executor(None, input)
+        except EOFError:
+            break
+        data += line
+    print("-- Message received --")
+    return data
+
+
+class Channels:
+    def __init__(self, channels=None):
+        if channels is None:
+            channels = []
+        self.channels = channels
+
+    def add(self, channel):
+        self.channels.append(channel)
+
+    def close(self):
+        for channel in self.channels:
+            channel.close()
+
+
+class ProxyConnection:
+    def __init__(self, pc, channel):
+        self.pc = pc
+        self.channel = channel
+
+
 class ProxyClient:
 
     def __init__(self, args, channel):
@@ -219,29 +256,11 @@ class ProxyServer:
             log.exception("WTF")
 
 
-class ProxyConnection:
-    def __init__(self, pc, channel):
-        self.pc = pc
-        self.channel = channel
-
-
-async def read_from_stdin():
-    loop = asyncio.get_event_loop()
-    line = await loop.run_in_executor(
-        None, input, "-- Please enter a message from remote party --\n"
-    )
-    data = line
-    while line:
-        try:
-            line = await loop.run_in_executor(None, input)
-        except EOFError:
-            break
-        data += line
-    print("-- Message received --")
-    return data
-
-
-async def run_answer(pc, args):
+async def run_answer(stop, pc, args):
     """
     Top level offer answer server.
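+
+    The remote party's offer is pasted in via stdin and the generated
+    answer is printed back in the same pastable form; the coroutine then
+    idles until the stop event is set.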
""" @@ -270,11 +285,11 @@ async def run_answer(pc, args): elif obj is BYE: print("Exiting") - while True: + while not stop.is_set(): await asyncio.sleep(0.3) -async def run_offer(pc, args): +async def run_offer(stop, pc, args): """ Top level offer server this will estabilsh a data channel and start a tcp server on the port provided. New connections to the server will start the @@ -324,10 +339,14 @@ async def run_offer(pc, args): elif obj is BYE: print("Exiting") - while True: + while not stop.is_set(): await asyncio.sleep(0.3) +async def signal_handler(stop, pc): + stop.set() + + if __name__ == "__main__": if sys.platform == "win32": asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) @@ -343,16 +362,22 @@ if __name__ == "__main__": logging.basicConfig(level=logging.DEBUG) else: logging.basicConfig(level=logging.INFO) - + stop = asyncio.Event() pc = RTCPeerConnection() if args.role == "offer": - coro = run_offer(pc, args) + coro = run_offer(stop, pc, args) else: - coro = run_answer(pc, args) + coro = run_answer(stop, pc, args) # run event loop loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) + for signame in ("SIGINT", "SIGTERM"): + loop.add_signal_handler( + getattr(signal, signame), + lambda: asyncio.create_task(signal_handler(stop, pc)), + ) + try: loop.run_until_complete(coro) except KeyboardInterrupt: diff --git a/.github/workflows/build-deps-onedir.yml b/.github/workflows/build-deps-onedir.yml deleted file mode 100644 index a5f4f4fc6f6..00000000000 --- a/.github/workflows/build-deps-onedir.yml +++ /dev/null @@ -1,192 +0,0 @@ ---- -name: Build Packaging Dependencies Onedir - -on: - workflow_call: - inputs: - salt-version: - type: string - required: true - description: The Salt version to set prior to building packages. 
- cache-seed: - required: true - type: string - description: Seed used to invalidate caches - relenv-version: - required: true - type: string - description: The version of relenv to use - python-version: - required: true - type: string - description: The version of python to use with relenv - matrix: - required: true - type: string - description: Json job matrix config - linux_arm_runner: - required: true - type: string - description: Json job matrix config - -env: - RELENV_DATA: "${{ github.workspace }}/.relenv" - COLUMNS: 190 - AWS_MAX_ATTEMPTS: "10" - AWS_RETRY_MODE: "adaptive" - PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }} - PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }} - PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }} - PIP_DISABLE_PIP_VERSION_CHECK: "1" - -jobs: - - build-deps-linux: - name: Linux - if: ${{ toJSON(fromJSON(inputs.matrix)['linux']) != '[]' }} - runs-on: - - ${{ matrix.arch == 'x86_64' && 'ubuntu-24.04' || inputs.linux_arm_runner }} - strategy: - fail-fast: false - matrix: - include: ${{ fromJSON(inputs.matrix)['linux'] }} - env: - USE_S3_CACHE: 'false' - steps: - - - name: "Throttle Builds" - shell: bash - run: | - t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t" - - - uses: actions/checkout@v4 - - - uses: actions/setup-python@v5 - with: - python-version: '3.10' - - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - with: - cache-prefix: ${{ inputs.cache-seed }}|build-deps|linux|${{ matrix.arch }} - - - name: Setup Relenv - id: setup-relenv - uses: ./.github/actions/setup-relenv - with: - platform: linux - arch: ${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }} - version: ${{ inputs.relenv-version }} - cache-seed: ${{ inputs.cache-seed }} - python-version: ${{ inputs.python-version }} - - - name: Install Salt Packaging Dependencies into Relenv Onedir - uses: ./.github/actions/build-onedir-deps - with: - platform: linux - arch: ${{ matrix.arch }} - python-version: "${{ inputs.python-version }}" - cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }} - - build-deps-macos: - name: macOS - if: ${{ toJSON(fromJSON(inputs.matrix)['macos']) != '[]' }} - strategy: - fail-fast: false - max-parallel: 2 - matrix: - include: ${{ fromJSON(inputs.matrix)['macos'] }} - runs-on: - - ${{ matrix.arch == 'arm64' && 'macos-14' || 'macos-13' }} - env: - USE_S3_CACHE: 'false' - PIP_INDEX_URL: https://pypi.org/simple - steps: - - - name: "Check cores" - shell: bash - run: sysctl -n hw.ncpu - - - name: "Throttle Builds" - shell: bash - run: | - t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t" - - - uses: actions/checkout@v4 - - - name: Set up Python 3.10 - uses: actions/setup-python@v5 - with: - python-version: "3.10" - - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - with: - cache-prefix: ${{ inputs.cache-seed }}|build-deps|macos - - - name: Setup Relenv - id: setup-relenv - uses: ./.github/actions/setup-relenv - with: - platform: macos - arch: ${{ matrix.arch }} - version: ${{ inputs.relenv-version }} - cache-seed: ${{ inputs.cache-seed }} - python-version: ${{ inputs.python-version }} - - - name: Install Salt Packaging Dependencies into Relenv Onedir - uses: ./.github/actions/build-onedir-deps - with: - platform: macos - arch: ${{ matrix.arch }} - python-version: "${{ inputs.python-version }}" - cache-prefix: ${{ 
inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }} - - build-deps-windows: - name: Windows - if: ${{ toJSON(fromJSON(inputs.matrix)['windows']) != '[]' }} - strategy: - fail-fast: false - max-parallel: 2 - matrix: - include: ${{ fromJSON(inputs.matrix)['windows'] }} - runs-on: windows-latest - env: - USE_S3_CACHE: 'false' - PIP_INDEX_URL: https://pypi.org/simple - steps: - - - name: "Throttle Builds" - shell: bash - run: | - t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t" - - - uses: actions/checkout@v4 - - - name: Set up Python 3.10 - uses: actions/setup-python@v5 - with: - python-version: "3.10" - - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - with: - cache-prefix: ${{ inputs.cache-seed }}|build-deps|windows|${{ matrix.arch }} - - - name: Setup Relenv - id: setup-relenv - uses: ./.github/actions/setup-relenv - with: - platform: windows - arch: ${{ matrix.arch }} - version: ${{ inputs.relenv-version }} - cache-seed: ${{ inputs.cache-seed }} - python-version: ${{ inputs.python-version }} - - - name: Install Salt Packaging Dependencies into Relenv Onedir - uses: ./.github/actions/build-onedir-deps - with: - platform: windows - arch: ${{ matrix.arch }} - python-version: "${{ inputs.python-version }}" - cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }} diff --git a/.github/workflows/build-salt-onedir.yml b/.github/workflows/build-salt-onedir.yml index 5c3078b2c96..39c5578eb0b 100644 --- a/.github/workflows/build-salt-onedir.yml +++ b/.github/workflows/build-salt-onedir.yml @@ -108,7 +108,7 @@ jobs: - ${{ matrix.arch == 'arm64' && 'macos-14' || 'macos-13' }} env: PIP_INDEX_URL: https://pypi.org/simple - + USE_S3_CACHE: 'false' steps: - name: "Check cores" shell: bash @@ -125,16 +125,6 @@ jobs: with: python-version: "3.10" - - name: Setup Relenv - id: setup-relenv - uses: ./.github/actions/setup-relenv - with: - platform: macos - arch: ${{ matrix.arch }} - version: ${{ inputs.relenv-version }} - cache-seed: ${{ inputs.cache-seed }} - python-version: ${{ inputs.python-version }} - - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts with: @@ -146,6 +136,16 @@ jobs: with: salt-version: "${{ inputs.salt-version }}" + - name: Setup Relenv + id: setup-relenv + uses: ./.github/actions/setup-relenv + with: + platform: macos + arch: ${{ matrix.arch }} + version: ${{ inputs.relenv-version }} + cache-seed: ${{ inputs.cache-seed }} + python-version: ${{ inputs.python-version }} + - name: Install Salt into Relenv Onedir uses: ./.github/actions/build-onedir-salt with: @@ -166,7 +166,7 @@ jobs: runs-on: windows-latest env: PIP_INDEX_URL: https://pypi.org/simple - + USE_S3_CACHE: 'false' steps: - name: "Throttle Builds" @@ -181,16 +181,6 @@ jobs: with: python-version: "3.10" - - name: Setup Relenv - id: setup-relenv - uses: ./.github/actions/setup-relenv - with: - platform: windows - arch: ${{ matrix.arch }} - version: ${{ inputs.relenv-version }} - cache-seed: ${{ inputs.cache-seed }} - python-version: ${{ inputs.python-version }} - - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts with: @@ -202,6 +192,16 @@ jobs: with: salt-version: "${{ inputs.salt-version }}" + - name: Setup Relenv + id: setup-relenv + uses: ./.github/actions/setup-relenv + with: + platform: windows + arch: ${{ matrix.arch }} + version: ${{ inputs.relenv-version }} + cache-seed: ${{ 
inputs.cache-seed }} + python-version: ${{ inputs.python-version }} + - name: Install Salt into Relenv Onedir uses: ./.github/actions/build-onedir-salt with: diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index db499c2bee5..24555cd88dc 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -239,7 +239,7 @@ jobs: lint: name: Lint - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['lint'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['lint'] }} uses: ./.github/workflows/lint-action.yml needs: - prepare-workflow @@ -257,7 +257,7 @@ jobs: name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}" runs-on: - ubuntu-22.04 - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['prepare-release'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['prepare-release'] }} needs: - prepare-workflow steps: @@ -379,7 +379,7 @@ jobs: build-docs: name: Documentation - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-docs'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-docs'] }} needs: - prepare-workflow - build-source-tarball @@ -390,7 +390,7 @@ jobs: build-source-tarball: name: Build Source Tarball - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-source-tarball'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-source-tarball'] }} needs: - prepare-workflow - prepare-release @@ -419,33 +419,18 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - build-deps-onedir: - name: Build Onedir Dependencies - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-deps-onedir'] }} - needs: - - prepare-workflow - uses: ./.github/workflows/build-deps-onedir.yml - with: - cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.18.0" - python-version: "3.10.15" - matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} - linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} - build-salt-onedir: name: Build Salt Onedir - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-salt-onedir'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-salt-onedir'] }} needs: - prepare-workflow - - build-deps-onedir - build-source-tarball uses: ./.github/workflows/build-salt-onedir.yml with: cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.18.0" - python-version: "3.10.15" + relenv-version: "0.18.1" + python-version: "3.10.16" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -459,8 +444,8 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - relenv-version: "0.18.0" - python-version: "3.10.15" + relenv-version: "0.18.1" + python-version: "3.10.16" source: "onedir" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -476,7 +461,7 @@ jobs: 
nox-version: 2022.8.7 python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16 nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -493,7 +478,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" nox-version: 2022.8.7 python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['pkg-test-matrix']) }} @@ -511,7 +496,7 @@ jobs: python-version: "3.10" testrun: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['testrun']) }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] }} workflow-slug: ci default-timeout: 180 @@ -520,7 +505,7 @@ jobs: combine-all-code-coverage: name: Combine Code Coverage - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] == false }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] == false }} runs-on: ubuntu-22.04 env: PIP_INDEX_URL: https://pypi.org/simple @@ -650,7 +635,7 @@ jobs: # This step is just so we can make github require this step, to pass checks # on a pull request instead of requiring all name: Set the ${{ github.workflow }} Pipeline Exit Status - if: always() + if: ${{ !cancelled() && always() }} runs-on: ubuntu-22.04 needs: - prepare-workflow @@ -658,7 +643,6 @@ jobs: - lint - nsis-tests - build-docs - - build-deps-onedir - build-salt-onedir - combine-all-code-coverage - build-ci-deps diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 072d652318e..232626a6e21 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -296,7 +296,7 @@ jobs: lint: name: Lint - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['lint'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['lint'] }} uses: ./.github/workflows/lint-action.yml needs: - prepare-workflow @@ -314,7 +314,7 @@ jobs: name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}" runs-on: - ubuntu-22.04 - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['prepare-release'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['prepare-release'] }} needs: - prepare-workflow steps: @@ -436,7 +436,7 @@ jobs: build-docs: name: Documentation - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-docs'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-docs'] }} needs: - prepare-workflow - build-source-tarball @@ -447,7 +447,7 @@ jobs: build-source-tarball: name: Build Source Tarball - if: ${{ 
fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-source-tarball'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-source-tarball'] }} needs: - prepare-workflow - prepare-release @@ -476,33 +476,18 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - build-deps-onedir: - name: Build Onedir Dependencies - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-deps-onedir'] }} - needs: - - prepare-workflow - uses: ./.github/workflows/build-deps-onedir.yml - with: - cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.18.0" - python-version: "3.10.15" - matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} - linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} - build-salt-onedir: name: Build Salt Onedir - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-salt-onedir'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-salt-onedir'] }} needs: - prepare-workflow - - build-deps-onedir - build-source-tarball uses: ./.github/workflows/build-salt-onedir.yml with: cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.18.0" - python-version: "3.10.15" + relenv-version: "0.18.1" + python-version: "3.10.16" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -516,8 +501,8 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - relenv-version: "0.18.0" - python-version: "3.10.15" + relenv-version: "0.18.1" + python-version: "3.10.16" source: "onedir" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -536,8 +521,8 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - relenv-version: "0.18.0" - python-version: "3.10.15" + relenv-version: "0.18.1" + python-version: "3.10.16" source: "src" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -557,7 +542,7 @@ jobs: nox-version: 2022.8.7 python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16 nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -574,7 +559,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" nox-version: 2022.8.7 python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16 skip-code-coverage: true testing-releases: ${{ 
needs.prepare-workflow.outputs.testing-releases }} matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['pkg-test-matrix']) }} @@ -592,7 +577,7 @@ jobs: python-version: "3.10" testrun: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['testrun']) }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16 skip-code-coverage: true workflow-slug: nightly default-timeout: 360 @@ -602,7 +587,7 @@ jobs: # This step is just so we can make github require this step, to pass checks # on a pull request instead of requiring all name: Set the ${{ github.workflow }} Pipeline Exit Status - if: always() + if: ${{ !cancelled() && always() }} runs-on: ubuntu-22.04 environment: nightly needs: @@ -613,7 +598,6 @@ jobs: - lint - nsis-tests - build-docs - - build-deps-onedir - build-salt-onedir - build-pkgs-src - build-ci-deps diff --git a/.github/workflows/pre-commit-action.yml b/.github/workflows/pre-commit-action.yml index 26c119aa69a..b6c2478bc24 100644 --- a/.github/workflows/pre-commit-action.yml +++ b/.github/workflows/pre-commit-action.yml @@ -48,14 +48,14 @@ jobs: cache-seed: ${{ inputs.cache-seed }} - name: Check ALL Files On Branch - if: github.event_name != 'pull_request' + if: ${{ !cancelled() && github.event_name != 'pull_request' }} env: SKIP: lint-salt,lint-tests,remove-import-headers,pyupgrade run: | pre-commit run --show-diff-on-failure --color=always --all-files - name: Check Changed Files On PR - if: github.event_name == 'pull_request' && fromJSON(inputs.changed-files)['repo'] + if: ${{ !cancelled() && github.event_name == 'pull_request' && fromJSON(inputs.changed-files)['repo'] }} env: SKIP: lint-salt,lint-tests GH_ACTIONS_ANNOTATE: "1" @@ -63,6 +63,6 @@ jobs: pre-commit run --show-diff-on-failure --color=always --files ${{ join(fromJSON(inputs.changed-files)['repo_files'], ' ') }} - name: Check Docs On Deleted Files - if: github.event_name == 'pull_request' && fromJSON(inputs.changed-files)['deleted'] + if: ${{ !cancelled() && github.event_name == 'pull_request' && fromJSON(inputs.changed-files)['deleted'] }} run: | pre-commit run --show-diff-on-failure --color=always check-docs --files ${{ join(fromJSON(inputs.changed-files)['deleted_files'], ' ') }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 794ecb486db..71ebbef5835 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -186,7 +186,7 @@ jobs: nox-version: 2022.8.7 python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16 nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -422,7 +422,7 @@ jobs: # This step is just so we can make github require this step, to pass checks # on a pull request instead of requiring all name: Set the ${{ github.workflow }} Pipeline Exit Status - if: always() + if: ${{ !cancelled() && always() }} runs-on: ubuntu-22.04 needs: - check-requirements diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index 135e7a3f995..6f8ad5c8655 100644 --- 
a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -286,7 +286,7 @@ jobs: lint: name: Lint - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['lint'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['lint'] }} uses: ./.github/workflows/lint-action.yml needs: - prepare-workflow @@ -304,7 +304,7 @@ jobs: name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}" runs-on: - ubuntu-22.04 - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['prepare-release'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['prepare-release'] }} needs: - prepare-workflow steps: @@ -426,7 +426,7 @@ jobs: build-docs: name: Documentation - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-docs'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-docs'] }} needs: - prepare-workflow - build-source-tarball @@ -437,7 +437,7 @@ jobs: build-source-tarball: name: Build Source Tarball - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-source-tarball'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-source-tarball'] }} needs: - prepare-workflow - prepare-release @@ -466,33 +466,18 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - build-deps-onedir: - name: Build Onedir Dependencies - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-deps-onedir'] }} - needs: - - prepare-workflow - uses: ./.github/workflows/build-deps-onedir.yml - with: - cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.18.0" - python-version: "3.10.15" - matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} - linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} - build-salt-onedir: name: Build Salt Onedir - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-salt-onedir'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-salt-onedir'] }} needs: - prepare-workflow - - build-deps-onedir - build-source-tarball uses: ./.github/workflows/build-salt-onedir.yml with: cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.18.0" - python-version: "3.10.15" + relenv-version: "0.18.1" + python-version: "3.10.16" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -506,8 +491,8 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - relenv-version: "0.18.0" - python-version: "3.10.15" + relenv-version: "0.18.1" + python-version: "3.10.16" source: "onedir" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -523,7 +508,7 @@ jobs: nox-version: 2022.8.7 python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed 
}}|3.10.16 nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -540,7 +525,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" nox-version: 2022.8.7 python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16 skip-code-coverage: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['pkg-test-matrix']) }} @@ -558,7 +543,7 @@ jobs: python-version: "3.10" testrun: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['testrun']) }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16 skip-code-coverage: true workflow-slug: scheduled default-timeout: 360 @@ -568,7 +553,7 @@ jobs: # This step is just so we can make github require this step, to pass checks # on a pull request instead of requiring all name: Set the ${{ github.workflow }} Pipeline Exit Status - if: always() + if: ${{ !cancelled() && always() }} runs-on: ubuntu-22.04 needs: - workflow-requirements @@ -578,7 +563,6 @@ jobs: - lint - nsis-tests - build-docs - - build-deps-onedir - build-salt-onedir - build-ci-deps - test-packages diff --git a/.github/workflows/ssh-debug.yml b/.github/workflows/ssh-debug.yml index 212e31c3e57..992f6b6a7b2 100644 --- a/.github/workflows/ssh-debug.yml +++ b/.github/workflows/ssh-debug.yml @@ -33,6 +33,22 @@ jobs: - name: Checkout Source Code uses: actions/checkout@v4 + - name: Set up Python 3.10 + uses: actions/setup-python@v5 + with: + python-version: "3.10" + + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ssh-debug + + - name: Install Nox + run: | + python3 -m pip install 'nox==2022.8.7' + env: + PIP_INDEX_URL: https://pypi.org/simple + - uses: ./.github/actions/ssh-tunnel with: public_key: ${{ inputs.public_key }} diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 4668cf0cc79..16875879b5e 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -278,7 +278,7 @@ jobs: lint: name: Lint - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['lint'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['lint'] }} uses: ./.github/workflows/lint-action.yml needs: - prepare-workflow @@ -296,7 +296,7 @@ jobs: name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}" runs-on: - ubuntu-22.04 - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['prepare-release'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['prepare-release'] }} needs: - prepare-workflow steps: @@ -419,7 +419,7 @@ jobs: build-docs: name: Documentation - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-docs'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-docs'] }} needs: - prepare-workflow - build-source-tarball @@ -430,7 +430,7 @@ jobs: build-source-tarball: name: Build Source Tarball - if: ${{ 
fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-source-tarball'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-source-tarball'] }} needs: - prepare-workflow - prepare-release @@ -459,33 +459,18 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - build-deps-onedir: - name: Build Onedir Dependencies - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-deps-onedir'] }} - needs: - - prepare-workflow - uses: ./.github/workflows/build-deps-onedir.yml - with: - cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.18.0" - python-version: "3.10.15" - matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} - linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} - build-salt-onedir: name: Build Salt Onedir - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-salt-onedir'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-salt-onedir'] }} needs: - prepare-workflow - - build-deps-onedir - build-source-tarball uses: ./.github/workflows/build-salt-onedir.yml with: cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.18.0" - python-version: "3.10.15" + relenv-version: "0.18.1" + python-version: "3.10.16" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -499,8 +484,8 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - relenv-version: "0.18.0" - python-version: "3.10.15" + relenv-version: "0.18.1" + python-version: "3.10.16" source: "onedir" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -519,8 +504,8 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - relenv-version: "0.18.0" - python-version: "3.10.15" + relenv-version: "0.18.1" + python-version: "3.10.16" source: "src" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -540,7 +525,7 @@ jobs: nox-version: 2022.8.7 python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16 nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}" matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} @@ -557,7 +542,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" nox-version: 2022.8.7 python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16 skip-code-coverage: true testing-releases: ${{ 
needs.prepare-workflow.outputs.testing-releases }} matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['pkg-test-matrix']) }} @@ -575,7 +560,7 @@ jobs: python-version: "3.10" testrun: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['testrun']) }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16 skip-code-coverage: true workflow-slug: staging default-timeout: 180 @@ -616,7 +601,7 @@ jobs: publish-pypi: name: Publish to PyPi(test) - if: ${{ inputs.skip-test-pypi-publish != true && github.event.repository.fork != true }} + if: ${{ !cancelled() && inputs.skip-test-pypi-publish != true && github.event.repository.fork != true }} needs: - prepare-workflow - upload-release-artifacts @@ -670,11 +655,10 @@ jobs: draft-release: name: Draft Github Release - if: | - always() && (needs.test.result == 'success' || needs.test.result == 'skipped') && + if: ${{ !cancelled() && (needs.test.result == 'success' || needs.test.result == 'skipped') && (needs.test-packages.result == 'success' || needs.test-packages.result == 'skipped') && needs.prepare-workflow.result == 'success' && needs.build-salt-onedir.result == 'success' && - needs.build-pkgs-onedir.result == 'success' && needs.pre-commit.result == 'success' + needs.build-pkgs-onedir.result == 'success' && needs.pre-commit.result == 'success' }} needs: - prepare-workflow - pre-commit @@ -695,7 +679,7 @@ jobs: # This step is just so we can make github require this step, to pass checks # on a pull request instead of requiring all name: Set the ${{ github.workflow }} Pipeline Exit Status - if: always() + if: ${{ !cancelled() && always() }} runs-on: ubuntu-22.04 needs: - check-requirements @@ -704,7 +688,6 @@ jobs: - lint - nsis-tests - build-docs - - build-deps-onedir - build-salt-onedir - build-pkgs-src - upload-release-artifacts diff --git a/.github/workflows/templates/ci.yml.jinja b/.github/workflows/templates/ci.yml.jinja index 208d74f63c1..e139f1d054d 100644 --- a/.github/workflows/templates/ci.yml.jinja +++ b/.github/workflows/templates/ci.yml.jinja @@ -29,7 +29,7 @@ lint: <%- do conclusion_needs.append('lint') %> name: Lint - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }} uses: ./.github/workflows/lint-action.yml needs: - prepare-workflow @@ -58,7 +58,7 @@ name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}" runs-on: - ubuntu-22.04 - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }} needs: - prepare-workflow steps: @@ -190,7 +190,7 @@ <{ job_name }>: <%- do conclusion_needs.append(job_name) %> name: Documentation - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }} needs: - prepare-workflow - build-source-tarball @@ -207,7 +207,7 @@ <{ job_name }>: name: Build Source Tarball - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }} needs: - prepare-workflow - prepare-release @@ -238,37 +238,15 @@ <%- endif 
%> - - <%- set job_name = "build-deps-onedir" %> - <%- if includes.get(job_name, True) %> - - <{ job_name }>: - <%- do conclusion_needs.append(job_name) %> - name: Build Onedir Dependencies - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }} - needs: - - prepare-workflow - uses: ./.github/workflows/build-deps-onedir.yml - with: - cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "<{ relenv_version }>" - python-version: "<{ python_version }>" - matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }} - linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }} - - <%- endif %> - <%- set job_name = "build-salt-onedir" %> <%- if includes.get(job_name, True) %> <{ job_name }>: <%- do conclusion_needs.append(job_name) %> name: Build Salt Onedir - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }} needs: - prepare-workflow - - build-deps-onedir - build-source-tarball uses: ./.github/workflows/build-salt-onedir.yml with: @@ -306,7 +284,7 @@ combine-all-code-coverage: <%- do conclusion_needs.append("combine-all-code-coverage") %> name: Combine Code Coverage - if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] == false }} + if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] == false }} runs-on: ubuntu-22.04 env: PIP_INDEX_URL: https://pypi.org/simple diff --git a/.github/workflows/templates/layout.yml.jinja b/.github/workflows/templates/layout.yml.jinja index 93dd27dc389..213ac2c9cb4 100644 --- a/.github/workflows/templates/layout.yml.jinja +++ b/.github/workflows/templates/layout.yml.jinja @@ -319,7 +319,7 @@ jobs: # This step is just so we can make github require this step, to pass checks # on a pull request instead of requiring all name: Set the ${{ github.workflow }} Pipeline Exit Status - if: always() + if: ${{ !cancelled() && always() }} runs-on: ubuntu-22.04 <%- if workflow_slug == "nightly" %> environment: <{ workflow_slug }> diff --git a/.github/workflows/templates/staging.yml.jinja b/.github/workflows/templates/staging.yml.jinja index c823da809da..2c0b9e0c446 100644 --- a/.github/workflows/templates/staging.yml.jinja +++ b/.github/workflows/templates/staging.yml.jinja @@ -116,7 +116,7 @@ on: publish-pypi: <%- do conclusion_needs.append('publish-pypi') %> name: Publish to PyPi(test) - if: ${{ inputs.skip-test-pypi-publish != true && github.event.repository.fork != true }} + if: ${{ !cancelled() && inputs.skip-test-pypi-publish != true && github.event.repository.fork != true }} needs: - prepare-workflow - upload-release-artifacts @@ -178,11 +178,10 @@ on: draft-release: name: Draft Github Release - if: | - always() && (needs.test.result == 'success' || needs.test.result == 'skipped') && + if: ${{ !cancelled() && (needs.test.result == 'success' || needs.test.result == 'skipped') && (needs.test-packages.result == 'success' || needs.test-packages.result == 'skipped') && needs.prepare-workflow.result == 'success' && needs.build-salt-onedir.result == 'success' && - needs.build-pkgs-onedir.result == 'success' && needs.pre-commit.result == 'success' + needs.build-pkgs-onedir.result == 'success' && needs.pre-commit.result == 'success' }} needs: - prepare-workflow - pre-commit diff --git 
a/.github/workflows/test-action.yml b/.github/workflows/test-action.yml
index 6ac1fdccba2..7bef19bf189 100644
--- a/.github/workflows/test-action.yml
+++ b/.github/workflows/test-action.yml
@@ -71,7 +71,7 @@ jobs:
   test-linux:
     name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }} ${{ matrix.transport }}${{ matrix.fips && '(fips)' || '' }}${{ matrix.test-group && ' ' || '' }}${{ matrix.test-group && matrix.test-group || '' }}
     runs-on: ${{ matrix.arch == 'x86_64' && 'ubuntu-24.04' || inputs.linux_arm_runner }}
-    if: toJSON(fromJSON(inputs.matrix)['linux-x86_64']) != '[]'
+    if: ${{ !cancelled() && toJSON(fromJSON(inputs.matrix)['linux-x86_64']) != '[]' }}
     # Full test runs. Each chunk should never take more than 2 hours.
     # Partial test runs(no chunk parallelization), 6 Hours
     timeout-minutes: ${{ fromJSON(inputs.testrun)['type'] == 'full' && inputs.default-timeout || 360 }}
@@ -80,10 +80,6 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
         include: ${{ fromJSON(inputs.matrix)['linux-x86_64'] }}
     steps:
-      - name: Set up Python ${{ inputs.python-version }}
-        uses: actions/setup-python@v5
-        with:
-          python-version: "${{ inputs.python-version }}"
 
       - name: "Throttle Builds"
         shell: bash
@@ -98,6 +94,16 @@
       - name: Checkout Source Code
         uses: actions/checkout@v4
 
+      - name: Setup Python ${{ inputs.python-version }}
+        uses: actions/setup-python@v5
+        with:
+          python-version: "${{ inputs.python-version }}"
+
+      - name: Setup Python Tools Scripts
+        uses: ./.github/actions/setup-python-tools-scripts
+        with:
+          cache-prefix: ${{ inputs.cache-prefix }}
+
       - name: Free Disk Space Before Build
         run: |
           echo "Disk space before cleanup:"
@@ -154,39 +160,31 @@
           free -h
 
       - name: "Create container ${{ matrix.container }}"
+        env:
+          GITHUB_ACTIONS: true
+          CI: true
+          SKIP_REQUIREMENTS_INSTALL: 1
+          PRINT_TEST_SELECTION: 0
+          PRINT_TEST_PLAN_ONLY: 0
+          PRINT_SYSTEM_INFO: 0
+          RERUN_FAILURES: 1
+          GITHUB_ACTIONS_PIPELINE: 1
+          SKIP_INITIAL_ONEDIR_FAILURES: 1
+          SKIP_INITIAL_GH_ACTIONS_FAILURES: 1
+          SKIP_CODE_COVERAGE: ${{ inputs.skip-code-coverage && '1' || '0' }}
+          COVERAGE_CONTEXT: ${{ matrix.slug }}
+          COLUMNS: 190
+          PIP_INDEX_URL: "${{ vars.PIP_INDEX_URL }}"
+          PIP_TRUSTED_HOST: "${{ vars.PIP_TRUSTED_HOST }}"
+          PIP_EXTRA_INDEX_URL: "${{ vars.PIP_EXTRA_INDEX_URL }}"
+          PIP_DISABLE_PIP_VERSION_CHECK: 1
+          RAISE_DEPRECATIONS_RUNTIME_ERRORS: 1
+          SALT_TRANSPORT: ${{ matrix.transport }}
         run: |
-        /usr/bin/docker \
-          create --name ${{ github.run_id }}_salt-test \
-          --workdir /__w/salt/salt \
-          --privileged \
-          --ulimit="nofile=262144:262144" \
-          -e "HOME=/github/home" \
-          -e GITHUB_ACTIONS=true \
-          -e CI=true \
-          -e SKIP_REQUIREMENTS_INSTALL=1 \
-          -e PRINT_TEST_SELECTION=0 \
-          -e PRINT_TEST_PLAN_ONLY=0 \
-          -e PRINT_SYSTEM_INFO=0 \
-          -e RERUN_FAILURES=1 \
-          -e GITHUB_ACTIONS_PIPELINE=1 \
-          -e SKIP_INITIAL_ONEDIR_FAILURES=1 \
-          -e SKIP_INITIAL_GH_ACTIONS_FAILURES=1 \
-          -e SKIP_CODE_COVERAGE=${{ inputs.skip-code-coverage && '1' || '0' }} \
-          -e CONVERAGE_CONTEXT=${{ matrix.slug }} \
-          -e COLUMNS=190 \
-          -e PIP_INDEX_URL=${{ vars.PIP_INDEX_URL }} \
-          -e PIP_TRUSTED_HOST=${{ vars.PIP_TRUSTED_HOST }} \
-          -e PIP_EXTRA_INDEX_URL=${{ vars.PIP_EXTRA_INDEX_URL }} \
-          -e PIP_DISABLE_PIP_VERSION_CHECK="1" \
-          -e RAISE_DEPRECATIONS_RUNTIME_ERRORS="1" \
-          -e SALT_TRANSPORT=${{ matrix.transport }} \
-          -e LANG="en_US.UTF-8" \
-          -e SHELL=/bin/bash \
-          -v "/home/runner/work":"/__w" \
-          -v "/tmp/":"/var/lib/docker" \
-          --entrypoint "/usr/lib/systemd/systemd" \
-          ${{ matrix.container }} \
-          --systemd --unit rescue.target
+          tools container create ${{ matrix.container }} --name ${{ github.run_id }}_salt-test
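+          # NOTE: the step-level env block above is exported to this tools
+          # invocation, which is assumed to forward it into the container it
+          # creates (replacing the old explicit `docker create -e` flags).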
 
       - name: "Start container ${{ matrix.container }}"
         run: |
@@ -387,7 +382,7 @@ jobs:
   test-linux-arm64:
     name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }} ${{ matrix.transport }}${{ matrix.fips && '(fips)' || '' }}${{ matrix.test-group && ' ' || '' }}${{ matrix.test-group && matrix.test-group || '' }}
     runs-on: ${{ matrix.arch == 'x86_64' && 'ubuntu-22.04' || inputs.linux_arm_runner }}
-    if: toJSON(fromJSON(inputs.matrix)['linux-arm64']) != '[]'
+    if: ${{ !cancelled() && toJSON(fromJSON(inputs.matrix)['linux-arm64']) != '[]' }}
     # Full test runs. Each chunk should never take more than 2 hours.
     # Partial test runs(no chunk parallelization), 6 Hours
     timeout-minutes: ${{ fromJSON(inputs.testrun)['type'] == 'full' && inputs.default-timeout || 360 }}
@@ -396,10 +391,6 @@ jobs:
     strategy:
       fail-fast: false
      matrix:
         include: ${{ fromJSON(inputs.matrix)['linux-arm64'] }}
     steps:
-      - name: Set up Python ${{ inputs.python-version }}
-        uses: actions/setup-python@v5
-        with:
-          python-version: "${{ inputs.python-version }}"
 
       - name: "Throttle Builds"
         shell: bash
@@ -411,9 +402,20 @@
       run: |
         echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
 
+
       - name: Checkout Source Code
         uses: actions/checkout@v4
 
+      - name: Setup Python ${{ inputs.python-version }}
+        uses: actions/setup-python@v5
+        with:
+          python-version: "${{ inputs.python-version }}"
+
+      - name: Setup Python Tools Scripts
+        uses: ./.github/actions/setup-python-tools-scripts
+        with:
+          cache-prefix: ${{ inputs.cache-prefix }}
+
       - name: Free Disk Space Before Build
         run: |
           echo "Disk space before cleanup:"
@@ -470,39 +477,28 @@
           free -h
 
       - name: "Create container ${{ matrix.container }}"
+        env:
+          GITHUB_ACTIONS: true
+          CI: true
+          SKIP_REQUIREMENTS_INSTALL: 1
+          PRINT_TEST_SELECTION: 0
+          PRINT_TEST_PLAN_ONLY: 0
+          PRINT_SYSTEM_INFO: 0
+          RERUN_FAILURES: 1
+          GITHUB_ACTIONS_PIPELINE: 1
+          SKIP_INITIAL_ONEDIR_FAILURES: 1
+          SKIP_INITIAL_GH_ACTIONS_FAILURES: 1
+          SKIP_CODE_COVERAGE: ${{ inputs.skip-code-coverage && '1' || '0' }}
+          COVERAGE_CONTEXT: ${{ matrix.slug }}
+          COLUMNS: 190
+          PIP_INDEX_URL: "${{ vars.PIP_INDEX_URL }}"
+          PIP_TRUSTED_HOST: "${{ vars.PIP_TRUSTED_HOST }}"
+          PIP_EXTRA_INDEX_URL: "${{ vars.PIP_EXTRA_INDEX_URL }}"
+          PIP_DISABLE_PIP_VERSION_CHECK: 1
+          RAISE_DEPRECATIONS_RUNTIME_ERRORS: 1
+          SALT_TRANSPORT: ${{ matrix.transport }}
         run: |
-        /usr/bin/docker \
-          create --name ${{ github.run_id }}_salt-test \
-          --workdir /__w/salt/salt \
-          --privileged \
-          --ulimit="nofile=262144:262144" \
-          -e "HOME=/github/home" \
-          -e GITHUB_ACTIONS=true \
-          -e CI=true \
-          -e SKIP_REQUIREMENTS_INSTALL=1 \
-          -e PRINT_TEST_SELECTION=0 \
-          -e PRINT_TEST_PLAN_ONLY=0 \
-          -e PRINT_SYSTEM_INFO=0 \
-          -e RERUN_FAILURES=1 \
-          -e GITHUB_ACTIONS_PIPELINE=1 \
-          -e SKIP_INITIAL_ONEDIR_FAILURES=1 \
-          -e SKIP_INITIAL_GH_ACTIONS_FAILURES=1 \
-          -e SKIP_CODE_COVERAGE=${{ inputs.skip-code-coverage && '1' || '0' }} \
-          -e CONVERAGE_CONTEXT=${{ matrix.slug }} \
-          -e COLUMNS=190 \
-          -e PIP_INDEX_URL=${{ vars.PIP_INDEX_URL }} \
-          -e PIP_TRUSTED_HOST=${{ vars.PIP_TRUSTED_HOST }} \
-          -e PIP_EXTRA_INDEX_URL=${{ vars.PIP_EXTRA_INDEX_URL }} \
-          -e PIP_DISABLE_PIP_VERSION_CHECK="1" \
-          -e RAISE_DEPRECATIONS_RUNTIME_ERRORS="1" \
-          -e SALT_TRANSPORT=${{ matrix.transport }} \
-          -e LANG="en_US.UTF-8" \
-          -e SHELL=/bin/bash \
-          -v "/home/runner/work":"/__w" \
-          -v "/tmp/":"/var/lib/docker" \
-          --entrypoint "/usr/lib/systemd/systemd" \
-          ${{ matrix.container }} \
-          
--systemd --unit rescue.target + tools container create ${{ matrix.container }} --name ${{ github.run_id }}_salt-test - name: "Start container ${{ matrix.container }}" run: | @@ -705,7 +701,7 @@ jobs: runs-on: ${{ matrix.runner }} # Full test runs. Each chunk should never take more than 2 hours. # Partial test runs(no chunk parallelization), 6 Hours - if: toJSON(fromJSON(inputs.matrix)['macos']) != '[]' + if: ${{ !cancelled() && toJSON(fromJSON(inputs.matrix)['macos']) != '[]' }} timeout-minutes: ${{ fromJSON(inputs.testrun)['type'] == 'full' && inputs.default-timeout || 360 }} strategy: fail-fast: false @@ -726,6 +722,11 @@ jobs: - name: Checkout Source Code uses: actions/checkout@v4 + - name: Setup Python ${{ inputs.python-version }} + uses: actions/setup-python@v5 + with: + python-version: "${{ inputs.python-version }}" + - name: Setup Salt Version run: | echo "${{ inputs.salt-version }}" > salt/_version.txt @@ -751,12 +752,6 @@ jobs: uses: actions/download-artifact@v4 with: name: nox-macos-${{ matrix.arch }}-${{ inputs.nox-session }} - - - name: Set up Python ${{ inputs.python-version }} - uses: actions/setup-python@v5 - with: - python-version: "${{ inputs.python-version }}" - - name: Install Nox run: | python3 -m pip install 'nox==${{ inputs.nox-version }}' @@ -983,7 +978,7 @@ jobs: test-windows: name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }} ${{ matrix.transport }}${{ matrix.test-group && ' ' || '' }}${{ matrix.test-group && matrix.test-group || '' }} - if: toJSON(fromJSON(inputs.matrix)['windows']) != '[]' + if: ${{ !cancelled() && toJSON(fromJSON(inputs.matrix)['windows']) != '[]' }} runs-on: ${{ matrix.slug }} # Full test runs. Each chunk should never take more than 2 hours. # Partial test runs(no chunk parallelization), 6 Hours @@ -1031,7 +1026,7 @@ jobs: run: | echo true - - name: Set up Python ${{ inputs.python-version }} + - name: Setup Python ${{ inputs.python-version }} uses: actions/setup-python@v5 with: python-version: "${{ inputs.python-version }}" @@ -1349,7 +1344,7 @@ jobs: run: | tree -a artifacts - - name: Set up Python ${{ inputs.python-version }} + - name: Setup Python ${{ inputs.python-version }} uses: actions/setup-python@v5 with: python-version: "${{ inputs.python-version }}" diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index 76cad098ca2..6508987dcbd 100644 --- a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -65,7 +65,7 @@ jobs: test-linux: name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }} runs-on: ${{ matrix.arch == 'x86_64' && 'ubuntu-24.04' || inputs.linux_arm_runner }} - if: ${{ toJSON(fromJSON(inputs.matrix)['linux']) != '[]' }} + if: ${{ !cancelled() && toJSON(fromJSON(inputs.matrix)['linux']) != '[]' }} timeout-minutes: 120 # 2 Hours - More than this and something is wrong strategy: fail-fast: false @@ -86,6 +86,16 @@ jobs: - name: Checkout Source Code uses: actions/checkout@v4 + - name: Set up Python ${{ inputs.python-version }} + uses: actions/setup-python@v5 + with: + python-version: "${{ inputs.python-version }}" + + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} + - name: Download Packages uses: actions/download-artifact@v4 with: @@ -105,11 +115,6 @@ jobs: cd artifacts tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz - - name: Set up Python ${{ inputs.python-version }} 
- uses: actions/setup-python@v5 - with: - python-version: "${{ inputs.python-version }}" - - name: Install Nox run: | python3 -m pip install 'nox==${{ inputs.nox-version }}' @@ -135,7 +140,7 @@ jobs: - name: "Create container ${{ matrix.container }}" run: | - /usr/bin/docker create --name ${{ github.run_id }}_salt-test-pkg --workdir /__w/salt/salt --privileged -e "HOME=/github/home" -e GITHUB_ACTIONS=true -e CI=true -v "/var/run/docker.sock":"/var/run/docker.sock" -v "/home/runner/work":"/__w" -v "/home/runner/work/_temp":"/__w/_temp" -v "/home/runner/work/_actions":"/__w/_actions" -v "/opt/hostedtoolcache":"/__t" -v "/home/runner/work/_temp/_github_home":"/github/home" -v "/home/runner/work/_temp/_github_workflow":"/github/workflow" --entrypoint "/usr/lib/systemd/systemd" ${{ matrix.container }} --systemd --unit rescue.target + tools container create ${{ matrix.container }} --name ${{ github.run_id }}_salt-test-pkg - name: "Start container ${{ matrix.container }}" run: | @@ -145,31 +150,21 @@ jobs: run: | docker exec ${{ github.run_id}}_salt-test-pkg python3 -m nox --force-color -e decompress-dependencies -- linux ${{ matrix.arch }} - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - with: - cache-prefix: ${{ inputs.cache-prefix }} - - name: List Free Space run: | df -h || true - name: Show System Info - env: - SKIP_REQUIREMENTS_INSTALL: "1" - PRINT_SYSTEM_INFO_ONLY: "1" run: | - docker exec ${{ github.run_id }}_salt-test-pkg python3 -m nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }} + docker exec \ + -e SKIP_REQUIREMENTS_INSTALL=1 \ + -e PRINT_SYSTEM_INFO_ONLY=1 \ + ${{ github.run_id }}_salt-test-pkg python3 -m nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }} - name: Run Package Tests - env: - SKIP_REQUIREMENTS_INSTALL: "1" - RERUN_FAILURES: "1" - GITHUB_ACTIONS_PIPELINE: "1" - SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" - COVERAGE_CONTEXT: ${{ matrix.slug }} run: | - /usr/bin/docker exec ${{ github.run_id }}_salt-test-pkg \ + docker exec \ + ${{ github.run_id }}_salt-test-pkg \ python3 -m nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }} \ ${{ matrix.version && format('--prev-version={0}', matrix.version) || ''}} @@ -197,7 +192,7 @@ jobs: test-macos: name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }} runs-on: ${{ matrix.runner }} - if: ${{ toJSON(fromJSON(inputs.matrix)['macos']) != '[]' }} + if: ${{ !cancelled() && toJSON(fromJSON(inputs.matrix)['macos']) != '[]' }} timeout-minutes: 150 # 2 & 1/2 Hours - More than this and something is wrong (MacOS needs a little more time) strategy: fail-fast: false @@ -325,7 +320,7 @@ jobs: name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }} runs-on: ${{ matrix.slug }} timeout-minutes: 120 # 2 Hours - More than this and something is wrong - if: ${{ toJSON(fromJSON(inputs.matrix)['windows']) != '[]' }} + if: ${{ !cancelled() && toJSON(fromJSON(inputs.matrix)['windows']) != '[]' }} strategy: fail-fast: false matrix: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0d7814706b9..f280eb381a6 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -30,7 +30,7 @@ repos: # case-insensitive filesystem like MacOS HFS+ or Windows FAT. - id: check-json # Attempts to load all json files to verify syntax. - id: check-symlinks # Checks for symlinks which do not point to anything. - - id: debug-statements # Check for debugger imports and py37+ breakpoint() calls in python source. 
+ - id: debug-statements # Check for debugger imports and breakpoint() calls in python source. exclude: > (?x)^( templates/.* @@ -145,21 +145,6 @@ repos: rev: "4.8" hooks: - - id: pip-tools-compile - alias: compile-pkg-linux-3.8-zmq-requirements - name: Linux Packaging Py3.8 ZeroMQ Requirements - files: ^requirements/((base|zeromq|crypto)\.txt|static/pkg/(linux\.in|py3\.8/linux\.txt))$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.8 - - --platform=linux - - --include=requirements/base.txt - - --include=requirements/zeromq.txt - - --no-emit-index-url - - requirements/static/pkg/linux.in - - id: pip-tools-compile alias: compile-pkg-linux-3.9-zmq-requirements name: Linux Packaging Py3.9 ZeroMQ Requirements @@ -220,22 +205,6 @@ repos: - --no-emit-index-url - requirements/static/pkg/linux.in - - - id: pip-tools-compile - alias: compile-pkg-freebsd-3.8-zmq-requirements - name: FreeBSD Packaging Py3.8 ZeroMQ Requirements - files: ^requirements/((base|zeromq|crypto)\.txt|static/pkg/(freebsd\.in|py3\.8/freebsd\.txt))$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.8 - - --platform=freebsd - - --include=requirements/base.txt - - --include=requirements/zeromq.txt - - --no-emit-index-url - - requirements/static/pkg/freebsd.in - - id: pip-tools-compile alias: compile-pkg-freebsd-3.9-zmq-requirements name: FreeBSD Packaging Py3.9 ZeroMQ Requirements @@ -352,20 +321,6 @@ repos: - --no-emit-index-url - requirements/static/pkg/darwin.in - - id: pip-tools-compile - alias: compile-pkg-windows-3.8-zmq-requirements - name: Windows Packaging Py3.8 ZeroMQ Requirements - files: ^requirements/((base|zeromq|crypto|windows)\.txt|static/pkg/(windows\.in|py3\.8/windows\.txt))$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.8 - - --platform=windows - - --include=requirements/windows.txt - - --no-emit-index-url - - requirements/static/pkg/windows.in - - id: pip-tools-compile alias: compile-pkg-windows-3.9-zmq-requirements name: Windows Packaging Py3.9 ZeroMQ Requirements @@ -425,23 +380,6 @@ repos: # <---- Packaging Requirements ------------------------------------------------------------------------------------- # ----- CI Requirements -------------------------------------------------------------------------------------------> - - id: pip-tools-compile - alias: compile-ci-linux-3.8-zmq-requirements - name: Linux CI Py3.8 ZeroMQ Requirements - files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(linux\.in|common\.in)|py3\.8/linux\.txt))$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.8 - - --platform=linux - - --include=requirements/base.txt - - --include=requirements/zeromq.txt - - --include=requirements/pytest.txt - - --include=requirements/static/pkg/linux.in - - --include=requirements/static/ci/common.in - - --no-emit-index-url - - requirements/static/ci/linux.in - id: pip-tools-compile alias: compile-ci-linux-3.9-zmq-requirements @@ -515,20 +453,6 @@ repos: - --no-emit-index-url - requirements/static/ci/linux.in - - id: pip-tools-compile - alias: compile-ci-linux-crypto-3.8-requirements - name: Linux CI Py3.8 Crypto Requirements - files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.8/linux-crypto\.txt))$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.8 - - --platform=linux - - --out-prefix=linux - - --no-emit-index-url - - requirements/static/ci/crypto.in - - id: pip-tools-compile alias: compile-ci-linux-crypto-3.9-requirements 
name: Linux CI Py3.9 Crypto Requirements
@@ -585,25 +509,6 @@ - --no-emit-index-url - requirements/static/ci/crypto.in - - - id: pip-tools-compile - alias: compile-ci-freebsd-3.8-zmq-requirements - name: FreeBSD CI Py3.8 ZeroMQ Requirements - files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/((freebsd|common)\.in|py3\.8/freebsd\.txt)))$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.8 - - --platform=freebsd - - --include=requirements/base.txt - - --include=requirements/zeromq.txt - - --include=requirements/pytest.txt - - --include=requirements/static/pkg/freebsd.in - - --include=requirements/static/ci/common.in - - --no-emit-index-url - - requirements/static/ci/freebsd.in - - id: pip-tools-compile alias: compile-ci-freebsd-3.9-zmq-requirements name: FreeBSD CI Py3.9 ZeroMQ Requirements
@@ -676,23 +581,10 @@ - --no-emit-index-url - requirements/static/ci/freebsd.in - - id: pip-tools-compile - alias: compile-ci-freebsd-crypto-3.8-requirements - name: FreeBSD CI Py3.8 Crypto Requirements - files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.8/freebsd-crypto\.txt))$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.8 - - --platform=freebsd - - --out-prefix=freebsd - - --no-emit-index-url - - requirements/static/ci/crypto.in - - id: pip-tools-compile alias: compile-ci-freebsd-crypto-3.9-requirements name: FreeBSD CI Py3.9 Crypto Requirements files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.9/freebsd-crypto\.txt))$ pass_filenames: false args:
@@ -871,23 +763,6 @@ - --no-emit-index-url - requirements/static/ci/crypto.in - - id: pip-tools-compile - alias: compile-ci-windows-3.8-zmq-requirements - name: Windows CI Py3.8 ZeroMQ Requirements - files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/((windows|common)\.in|py3\.8/windows\.txt)))$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.8 - - --platform=windows - - --include=requirements/windows.txt - - --include=requirements/pytest.txt - - --include=requirements/static/pkg/windows.in - - --include=requirements/static/ci/common.in - - --no-emit-index-url - - requirements/static/ci/windows.in - - id: pip-tools-compile alias: compile-ci-windows-3.9-zmq-requirements name: Windows CI Py3.9 ZeroMQ Requirements
@@ -956,20 +831,6 @@ - --no-emit-index-url - requirements/static/ci/windows.in - - id: pip-tools-compile - alias: compile-ci-windows-crypto-3.8-requirements - name: Windows CI Py3.8 Crypto Requirements - files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.8/windows-crypto\.txt))$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.8 - - --platform=windows - - --out-prefix=windows - - --no-emit-index-url - - requirements/static/ci/crypto.in - - id: pip-tools-compile alias: compile-ci-windows-crypto-3.9-requirements name: Windows CI Py3.9 Crypto Requirements
@@ -1029,18 +890,6 @@ - --no-emit-index-url - requirements/static/ci/crypto.in # <---- CI Requirements -------------------------------------------------------------------------------------------- # ----- Cloud CI Requirements -------------------------------------------------------------------------------------> - - id: pip-tools-compile - alias: compile-ci-cloud-3.8-requirements - name: Cloud CI Py3.8 Requirements - files: ^requirements/((base|zeromq|pytest)\.txt|static/(pkg/linux\.in|ci/((cloud|common)\.in|py3\.8/cloud\.txt)))$ - pass_filenames: false - args: - - -v - -
--build-isolation - - --py-version=3.8 - - --no-emit-index-url - - requirements/static/ci/cloud.in - - id: pip-tools-compile alias: compile-ci-cloud-3.9-requirements name: Cloud CI Py3.9 Requirements @@ -1096,19 +945,6 @@ repos: # <---- Cloud CI Requirements -------------------------------------------------------------------------------------- # ----- Doc CI Requirements ---------------------------------------------------------------------------------------> - - id: pip-tools-compile - alias: compile-doc-requirements - name: Docs CI Py3.8 Requirements - files: ^requirements/((base|zeromq|pytest)\.txt|static/ci/(docs|common|linux)\.in|static/pkg/linux\.in|static/pkg/.*/linux\.txt)$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.8 - - --platform=linux - - --no-emit-index-url - - requirements/static/ci/docs.in - - id: pip-tools-compile alias: compile-doc-requirements name: Docs CI Py3.9 Requirements @@ -1167,19 +1003,6 @@ repos: # ----- Lint CI Requirements --------------------------------------------------------------------------------------> - - id: pip-tools-compile - alias: compile-ci-lint-3.8-requirements - name: Lint CI Py3.8 Requirements - files: ^requirements/((base|zeromq)\.txt|static/(pkg/linux\.in|ci/(linux\.in|common\.in|lint\.in|py3\.8/linux\.txt)))$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.8 - - --platform=linux - - --no-emit-index-url - - requirements/static/ci/lint.in - - id: pip-tools-compile alias: compile-ci-lint-3.9-requirements name: Lint CI Py3.9 Requirements @@ -1240,19 +1063,6 @@ repos: # <---- Lint CI Requirements --------------------------------------------------------------------------------------- # ----- Changelog -------------------------------------------------------------------------------------------------> - - id: pip-tools-compile - alias: compile-ci-changelog-3.8-requirements - name: Changelog CI Py3.8 Requirements - files: ^requirements/static/ci/(changelog\.in|py3\.8/(changelog|linux)\.txt)$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.8 - - --platform=linux - - --no-emit-index-url - - requirements/static/ci/changelog.in - - id: pip-tools-compile alias: compile-ci-changelog-3.9-requirements name: Changelog CI Py3.9 Requirements @@ -1401,8 +1211,8 @@ repos: rev: v3.15.1 hooks: - id: pyupgrade - name: Upgrade code for Py3.8+ - args: [--py38-plus, --keep-mock] + name: Upgrade code to Py3.10+ + args: [--py310-plus, --keep-mock] exclude: > (?x)^( salt/client/ssh/ssh_py_shim.py diff --git a/changelog/44736.fixed.md b/changelog/44736.fixed.md new file mode 100644 index 00000000000..eee06decc06 --- /dev/null +++ b/changelog/44736.fixed.md @@ -0,0 +1,2 @@ +Commands on Windows are now prefixed with ``cmd /c`` so that compound +commands (commands separated by ``&&``) run properly when using ``runas`` diff --git a/changelog/59977.fixed.md b/changelog/59977.fixed.md new file mode 100644 index 00000000000..9069a8d621b --- /dev/null +++ b/changelog/59977.fixed.md @@ -0,0 +1,2 @@ +Fixed an issue on Windows where checking success_retcodes when using the +runas parameter would fail. 
Now success_retcodes are checked correctly.
diff --git a/changelog/60884.fixed.md b/changelog/60884.fixed.md new file mode 100644 index 00000000000..85f074e7b67 --- /dev/null +++ b/changelog/60884.fixed.md @@ -0,0 +1,2 @@ +Fix an issue with cmd.script on Windows so that the exit code from a script is +passed through to the retcode of the state.
diff --git a/changelog/61416.fixed.md b/changelog/61416.fixed.md new file mode 100644 index 00000000000..58dd7a27783 --- /dev/null +++ b/changelog/61416.fixed.md @@ -0,0 +1 @@ +Ensure file clients for runner, wheel, local and caller are available from the client_cache if called upon.
diff --git a/changelog/66592.fixed.md b/changelog/66592.fixed.md new file mode 100644 index 00000000000..228e35292b0 --- /dev/null +++ b/changelog/66592.fixed.md @@ -0,0 +1 @@ +Fix the minion config option ``startup_states``.
diff --git a/changelog/66637.fixed.md b/changelog/66637.fixed.md new file mode 100644 index 00000000000..12b6759245f --- /dev/null +++ b/changelog/66637.fixed.md @@ -0,0 +1,4 @@ +Fixes an issue when getting account names using the get_name function in the +win_dacl.py salt util. Capability SIDs now return ``None``. SIDs for deleted +accounts return the SID, as do SIDs for domain accounts when the system is not +connected to the domain.
diff --git a/changelog/66932.fixed.md b/changelog/66932.fixed.md new file mode 100644 index 00000000000..6209b057fed --- /dev/null +++ b/changelog/66932.fixed.md @@ -0,0 +1 @@ +Ensure minion start event coroutines are run.
diff --git a/changelog/67057.fixed.md b/changelog/67057.fixed.md new file mode 100644 index 00000000000..2f719be4cde --- /dev/null +++ b/changelog/67057.fixed.md @@ -0,0 +1 @@ +Added support for dnf5 (backported from 3007) and updated for the command syntax changes dnf5 has introduced since 2023.
diff --git a/changelog/67091.fixed.md b/changelog/67091.fixed.md new file mode 100644 index 00000000000..8b4ef596813 --- /dev/null +++ b/changelog/67091.fixed.md @@ -0,0 +1 @@ +Fix the yumpkg module for Python < 3.8.
diff --git a/changelog/67177.fixed.md b/changelog/67177.fixed.md new file mode 100644 index 00000000000..e91c8faead8 --- /dev/null +++ b/changelog/67177.fixed.md @@ -0,0 +1 @@ +Added dnf5 support, with tests, to services_need_restart for yum packages.
diff --git a/changelog/67184.removed.md b/changelog/67184.removed.md new file mode 100644 index 00000000000..a088d8bcfbd --- /dev/null +++ b/changelog/67184.removed.md @@ -0,0 +1 @@ +Removed the dependency on the bsdmainutils package for Debian and Ubuntu.
diff --git a/changelog/67722.fixed.md b/changelog/67722.fixed.md new file mode 100644 index 00000000000..19d49f3f55d --- /dev/null +++ b/changelog/67722.fixed.md @@ -0,0 +1 @@ +Use os.walk to traverse git branches, and no longer replace the slash '/' in git branch names.
diff --git a/changelog/67729.deprecated.md b/changelog/67729.deprecated.md new file mode 100644 index 00000000000..9f736524b67 --- /dev/null +++ b/changelog/67729.deprecated.md @@ -0,0 +1 @@ +Removed support for end-of-life Python 3.7 and 3.8 from pre-commit and requirements.
diff --git a/changelog/67733.fixed.md b/changelog/67733.fixed.md new file mode 100644 index 00000000000..242f65ec762 --- /dev/null +++ b/changelog/67733.fixed.md @@ -0,0 +1 @@ +Set the correct virtual grain in systemd-based Podman containers.
diff --git a/changelog/67743.fixed.md b/changelog/67743.fixed.md new file mode 100644 index 00000000000..2e926595677 --- /dev/null +++ b/changelog/67743.fixed.md @@ -0,0 +1 @@ +Corrected the ``--upgrades`` option for dnf[5] in the ``list_upgrades`` function.
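Several of the entries above (67057, 67177, 67743) deal with dnf5's changed command-line syntax. As a minimal sketch of the kind of dispatch this implies, here is an illustrative helper; the function name `upgrades_args` is an assumption for this example and is not Salt's actual API:

```
def upgrades_args(pkg_mgr: str) -> list[str]:
    # dnf and dnf5 now expect "--upgrades" as an option to "list",
    # while classic yum uses the "updates" subcommand (see 67743).
    if pkg_mgr in ("dnf", "dnf5"):
        return ["list", "--upgrades"]
    return ["list", "updates"]


assert upgrades_args("dnf5") == ["list", "--upgrades"]
assert upgrades_args("yum") == ["list", "updates"]
```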
diff --git a/changelog/67769.fixed.md b/changelog/67769.fixed.md new file mode 100644 index 00000000000..bd7da12a7f0 --- /dev/null +++ b/changelog/67769.fixed.md @@ -0,0 +1 @@ +Corrected the ``--downloadonly`` option for ``dnf5 install``.
diff --git a/changelog/67792.fixed.md b/changelog/67792.fixed.md new file mode 100644 index 00000000000..ca00fc31ccf --- /dev/null +++ b/changelog/67792.fixed.md @@ -0,0 +1,2 @@ +Upgrade relenv to 0.18.1, which includes Python 3.10.16 and OpenSSL 3.2.4. +OpenSSL 3.2.4 fixes CVE-2024-12797 and CVE-2024-13176.
diff --git a/changelog/67794.fixed.md b/changelog/67794.fixed.md new file mode 100644 index 00000000000..0805655b1dc --- /dev/null +++ b/changelog/67794.fixed.md @@ -0,0 +1,2 @@ +Update Jinja2 to 3.1.5 to address advisories GHSA-q2x7-8rv6-6q7h and GHSA-gmj6-6f8f-6699. +Update urllib3 to 1.26.18 to address advisory GHSA-34jh-p97f-mpxf.
diff --git a/cicd/shared-gh-workflows-context.yml b/cicd/shared-gh-workflows-context.yml index d9a08d10f46..9fa346fe303 100644 --- a/cicd/shared-gh-workflows-context.yml +++ b/cicd/shared-gh-workflows-context.yml @@ -1,6 +1,6 @@ nox_version: "2022.8.7" -python_version: "3.10.15" -relenv_version: "0.18.0" +python_version: "3.10.16" +relenv_version: "0.18.1" release_branches: - "3006.x" - "3007.x"
diff --git a/pkg/debian/control b/pkg/debian/control index c08d99d5e23..4f1a2b96352 100644 --- a/pkg/debian/control +++ b/pkg/debian/control @@ -79,8 +79,7 @@ Package: salt-minion Architecture: amd64 arm64 Replaces: salt-common (<= 3006.4) Breaks: salt-common (<= 3006.4) -Depends: bsdmainutils, - dctrl-tools, +Depends: dctrl-tools, salt-common (= ${source:Version}), ${misc:Depends} Recommends: debconf-utils, dmidecode, net-tools
diff --git a/requirements/base.txt b/requirements/base.txt index fe9df1324e1..1a033a014fb 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -1,6 +1,6 @@ --constraint=constraints.txt -Jinja2 +Jinja2>=3.1.5 jmespath msgpack>=1.0.0 PyYAML @@ -16,6 +16,8 @@ packaging>=21.3 looseversion tornado>=6.3.3 aiohttp>=3.9.0 +urllib3>=1.26.18 + croniter>=0.3.0,!=0.3.22; sys_platform != 'win32' # We need contextvars for salt-ssh.
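The ``Jinja2>=3.1.5`` and ``urllib3>=1.26.18`` floors added to requirements/base.txt above are security pins. A quick, illustrative way to confirm an environment satisfies them, using ``packaging`` (already a base requirement here); the floor values come from this patch, the check itself is just a sketch:

```
from importlib.metadata import version

from packaging.version import Version

# Security floors introduced in requirements/base.txt above.
FLOORS = {"Jinja2": "3.1.5", "urllib3": "1.26.18"}

for name, minimum in FLOORS.items():
    installed = Version(version(name))
    assert installed >= Version(minimum), f"{name} {installed} < {minimum}"
print("all security floors satisfied")
```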
diff --git a/requirements/static/ci/py3.10/changelog.txt b/requirements/static/ci/py3.10/changelog.txt index 962d840ca4a..6465c9f6ef3 100644 --- a/requirements/static/ci/py3.10/changelog.txt +++ b/requirements/static/ci/py3.10/changelog.txt @@ -13,7 +13,7 @@ click==8.1.3 # towncrier incremental==22.10.0 # via towncrier -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/py3.10/linux.txt # towncrier diff --git a/requirements/static/ci/py3.10/darwin.txt b/requirements/static/ci/py3.10/darwin.txt index 40242f1837e..0798070f5bd 100644 --- a/requirements/static/ci/py3.10/darwin.txt +++ b/requirements/static/ci/py3.10/darwin.txt @@ -196,7 +196,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.10/darwin.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/base.txt @@ -540,6 +540,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.10/darwin.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.10/docs.txt b/requirements/static/ci/py3.10/docs.txt index 367514ad7f7..5207291100d 100644 --- a/requirements/static/ci/py3.10/docs.txt +++ b/requirements/static/ci/py3.10/docs.txt @@ -62,7 +62,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/py3.10/linux.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/ci/docs.in diff --git a/requirements/static/ci/py3.10/freebsd.txt b/requirements/static/ci/py3.10/freebsd.txt index 61df09a7d5f..ceb82b39b8a 100644 --- a/requirements/static/ci/py3.10/freebsd.txt +++ b/requirements/static/ci/py3.10/freebsd.txt @@ -195,7 +195,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r requirements/base.txt @@ -545,6 +545,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.10/freebsd.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.10/linux.txt b/requirements/static/ci/py3.10/linux.txt index f544b863b77..9678615134b 100644 --- a/requirements/static/ci/py3.10/linux.txt +++ b/requirements/static/ci/py3.10/linux.txt @@ -219,7 +219,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt # -r requirements/base.txt @@ -609,6 +609,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.10/windows.txt b/requirements/static/ci/py3.10/windows.txt index 262f507d77a..9fff8af5b39 100644 --- a/requirements/static/ci/py3.10/windows.txt +++ b/requirements/static/ci/py3.10/windows.txt @@ -193,7 +193,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.10/windows.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/base.txt @@ -492,6 +492,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.10/windows.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.11/changelog.txt 
b/requirements/static/ci/py3.11/changelog.txt index 70831ca5aa8..0ec4b21d43c 100644 --- a/requirements/static/ci/py3.11/changelog.txt +++ b/requirements/static/ci/py3.11/changelog.txt @@ -13,7 +13,7 @@ click==8.1.3 # towncrier incremental==17.5.0 # via towncrier -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/py3.11/linux.txt # towncrier diff --git a/requirements/static/ci/py3.11/darwin.txt b/requirements/static/ci/py3.11/darwin.txt index e26cde42667..8f9a4a0b2a4 100644 --- a/requirements/static/ci/py3.11/darwin.txt +++ b/requirements/static/ci/py3.11/darwin.txt @@ -189,7 +189,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.11/darwin.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.11/darwin.txt # -r requirements/base.txt @@ -531,6 +531,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.11/docs.txt b/requirements/static/ci/py3.11/docs.txt index bca5bf5fac9..7519292a8f7 100644 --- a/requirements/static/ci/py3.11/docs.txt +++ b/requirements/static/ci/py3.11/docs.txt @@ -62,7 +62,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/py3.11/linux.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/py3.11/linux.txt # -r requirements/static/ci/docs.in diff --git a/requirements/static/ci/py3.11/freebsd.txt b/requirements/static/ci/py3.11/freebsd.txt index ea00b8c225e..24dab4c9f2a 100644 --- a/requirements/static/ci/py3.11/freebsd.txt +++ b/requirements/static/ci/py3.11/freebsd.txt @@ -188,7 +188,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.11/freebsd.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.11/freebsd.txt # -r requirements/base.txt @@ -537,6 +537,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.11/linux.txt b/requirements/static/ci/py3.11/linux.txt index 4fc21c945fb..43053f9d66a 100644 --- a/requirements/static/ci/py3.11/linux.txt +++ b/requirements/static/ci/py3.11/linux.txt @@ -210,7 +210,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt # -r requirements/base.txt @@ -599,6 +599,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.11/windows.txt b/requirements/static/ci/py3.11/windows.txt index ff15720264a..c1dd692e22c 100644 --- a/requirements/static/ci/py3.11/windows.txt +++ b/requirements/static/ci/py3.11/windows.txt @@ -186,7 +186,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt # -r requirements/base.txt @@ -483,6 +483,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.12/changelog.txt b/requirements/static/ci/py3.12/changelog.txt index e35cbed1512..e857893819c 100644 --- 
a/requirements/static/ci/py3.12/changelog.txt +++ b/requirements/static/ci/py3.12/changelog.txt @@ -13,7 +13,7 @@ click==8.1.3 # towncrier incremental==17.5.0 # via towncrier -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/py3.12/linux.txt # towncrier diff --git a/requirements/static/ci/py3.12/cloud.txt b/requirements/static/ci/py3.12/cloud.txt index fd7728e7fcc..aa3a9aeda28 100644 --- a/requirements/static/ci/py3.12/cloud.txt +++ b/requirements/static/ci/py3.12/cloud.txt @@ -259,7 +259,7 @@ jaraco.text==3.11.1 # -c requirements/static/ci/../pkg/py3.12/linux.txt # -c requirements/static/ci/py3.12/linux.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.12/linux.txt # -c requirements/static/ci/py3.12/linux.txt @@ -764,6 +764,7 @@ urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.12/linux.txt # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.12/darwin.txt b/requirements/static/ci/py3.12/darwin.txt index 8d8ec9c504f..3ab9c9defdd 100644 --- a/requirements/static/ci/py3.12/darwin.txt +++ b/requirements/static/ci/py3.12/darwin.txt @@ -189,7 +189,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.12/darwin.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.12/darwin.txt # -r requirements/base.txt @@ -531,6 +531,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.12/docs.txt b/requirements/static/ci/py3.12/docs.txt index f1b146882ed..ea402482746 100644 --- a/requirements/static/ci/py3.12/docs.txt +++ b/requirements/static/ci/py3.12/docs.txt @@ -111,7 +111,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/py3.12/linux.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/py3.12/linux.txt # -r requirements/base.txt @@ -284,6 +284,7 @@ uc-micro-py==1.0.1 urllib3==1.26.18 # via # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt # requests yarl==1.9.4 # via diff --git a/requirements/static/ci/py3.12/freebsd.txt b/requirements/static/ci/py3.12/freebsd.txt index f151e877d33..92cc880afc3 100644 --- a/requirements/static/ci/py3.12/freebsd.txt +++ b/requirements/static/ci/py3.12/freebsd.txt @@ -188,7 +188,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.12/freebsd.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.12/freebsd.txt # -r requirements/base.txt @@ -537,6 +537,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.12/lint.txt b/requirements/static/ci/py3.12/lint.txt index 715314c3629..1ee116b9577 100644 --- a/requirements/static/ci/py3.12/lint.txt +++ b/requirements/static/ci/py3.12/lint.txt @@ -286,7 +286,7 @@ jaraco.text==3.11.1 # -c requirements/static/ci/../pkg/py3.12/linux.txt # -c requirements/static/ci/py3.12/linux.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.12/linux.txt # -c requirements/static/ci/py3.12/linux.txt @@ -771,6 +771,7 @@ urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.12/linux.txt # -c requirements/static/ci/py3.12/linux.txt + # -r 
requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.12/linux.txt b/requirements/static/ci/py3.12/linux.txt index 9eeaef8b397..4a2f9f7bb23 100644 --- a/requirements/static/ci/py3.12/linux.txt +++ b/requirements/static/ci/py3.12/linux.txt @@ -210,7 +210,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.12/linux.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.12/linux.txt # -r requirements/base.txt @@ -599,6 +599,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.12/windows.txt b/requirements/static/ci/py3.12/windows.txt index f1deb4869ce..f06c6d7bd4b 100644 --- a/requirements/static/ci/py3.12/windows.txt +++ b/requirements/static/ci/py3.12/windows.txt @@ -186,7 +186,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.12/windows.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.12/windows.txt # -r requirements/base.txt @@ -483,6 +483,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.8/freebsd-crypto.txt b/requirements/static/ci/py3.8/freebsd-crypto.txt deleted file mode 100644 index ce772a5d00b..00000000000 --- a/requirements/static/ci/py3.8/freebsd-crypto.txt +++ /dev/null @@ -1,10 +0,0 @@ -# -# This file is autogenerated by pip-compile -# To update, run: -# -# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.8/freebsd-crypto.txt requirements/static/ci/crypto.in -# -m2crypto==0.38.0 - # via -r requirements/static/ci/crypto.in -pycryptodome==3.19.1 - # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.8/linux-crypto.txt b/requirements/static/ci/py3.8/linux-crypto.txt deleted file mode 100644 index 9d91dda3caf..00000000000 --- a/requirements/static/ci/py3.8/linux-crypto.txt +++ /dev/null @@ -1,10 +0,0 @@ -# -# This file is autogenerated by pip-compile -# To update, run: -# -# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.8/linux-crypto.txt requirements/static/ci/crypto.in -# -m2crypto==0.38.0 - # via -r requirements/static/ci/crypto.in -pycryptodome==3.19.1 - # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.8/windows-crypto.txt b/requirements/static/ci/py3.8/windows-crypto.txt deleted file mode 100644 index 6f353e6190a..00000000000 --- a/requirements/static/ci/py3.8/windows-crypto.txt +++ /dev/null @@ -1,10 +0,0 @@ -# -# This file is autogenerated by pip-compile -# To update, run: -# -# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.8/windows-crypto.txt requirements/static/ci/crypto.in -# -m2crypto==0.38.0 - # via -r requirements/static/ci/crypto.in -pycryptodome==3.19.1 - # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.9/changelog.txt b/requirements/static/ci/py3.9/changelog.txt index 4003a98c808..deda7e0fd5b 100644 --- a/requirements/static/ci/py3.9/changelog.txt +++ b/requirements/static/ci/py3.9/changelog.txt @@ -13,7 +13,7 @@ click==8.1.3 # towncrier incremental==22.10.0 # via towncrier -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/py3.9/linux.txt # towncrier diff --git a/requirements/static/ci/py3.9/darwin.txt 
b/requirements/static/ci/py3.9/darwin.txt index 28c6cfb4278..6b39a7baad9 100644 --- a/requirements/static/ci/py3.9/darwin.txt +++ b/requirements/static/ci/py3.9/darwin.txt @@ -196,7 +196,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.9/darwin.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/base.txt @@ -541,6 +541,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.9/darwin.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.9/docs.txt b/requirements/static/ci/py3.9/docs.txt index bc711d0b691..ff40bab7188 100644 --- a/requirements/static/ci/py3.9/docs.txt +++ b/requirements/static/ci/py3.9/docs.txt @@ -66,7 +66,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/py3.9/linux.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/ci/docs.in diff --git a/requirements/static/ci/py3.9/freebsd.txt b/requirements/static/ci/py3.9/freebsd.txt index d9debff52c9..f090389cdb7 100644 --- a/requirements/static/ci/py3.9/freebsd.txt +++ b/requirements/static/ci/py3.9/freebsd.txt @@ -195,7 +195,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/base.txt @@ -546,6 +546,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.9/freebsd.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.9/linux.txt b/requirements/static/ci/py3.9/linux.txt index 249ea7f574a..283539d368b 100644 --- a/requirements/static/ci/py3.9/linux.txt +++ b/requirements/static/ci/py3.9/linux.txt @@ -214,7 +214,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.9/linux.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.9/linux.txt # -r requirements/base.txt @@ -598,6 +598,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/ci/py3.9/windows.txt b/requirements/static/ci/py3.9/windows.txt index ff210ca14b2..9f8628a30d0 100644 --- a/requirements/static/ci/py3.9/windows.txt +++ b/requirements/static/ci/py3.9/windows.txt @@ -193,7 +193,7 @@ jaraco.text==3.11.1 # via # -c requirements/static/ci/../pkg/py3.9/windows.txt # jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/base.txt @@ -494,6 +494,7 @@ typing-extensions==4.8.0 urllib3==1.26.18 # via # -c requirements/static/ci/../pkg/py3.9/windows.txt + # -r requirements/base.txt # botocore # docker # kubernetes diff --git a/requirements/static/pkg/py3.10/darwin.txt b/requirements/static/pkg/py3.10/darwin.txt index 595d3a7cbd9..0dbb6ca834a 100644 --- a/requirements/static/pkg/py3.10/darwin.txt +++ b/requirements/static/pkg/py3.10/darwin.txt @@ -63,7 +63,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -133,7 +133,9 @@ typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests 
yarl==1.9.4 # via aiohttp zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.10/freebsd.txt b/requirements/static/pkg/py3.10/freebsd.txt index 7b924ca00a5..2a241621f15 100644 --- a/requirements/static/pkg/py3.10/freebsd.txt +++ b/requirements/static/pkg/py3.10/freebsd.txt @@ -63,7 +63,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -133,7 +133,9 @@ typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests yarl==1.9.4 # via aiohttp zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.10/linux.txt b/requirements/static/pkg/py3.10/linux.txt index 49cd88ed3c3..65ad39df2ed 100644 --- a/requirements/static/pkg/py3.10/linux.txt +++ b/requirements/static/pkg/py3.10/linux.txt @@ -63,7 +63,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -135,7 +135,9 @@ typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests yarl==1.9.4 # via aiohttp zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.10/windows.txt b/requirements/static/pkg/py3.10/windows.txt index 37cdd9826a5..7f1287b8964 100644 --- a/requirements/static/pkg/py3.10/windows.txt +++ b/requirements/static/pkg/py3.10/windows.txt @@ -69,7 +69,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -149,7 +149,9 @@ typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests wmi==1.5.1 ; sys_platform == "win32" # via -r requirements/base.txt xmltodict==0.13.0 ; sys_platform == "win32" diff --git a/requirements/static/pkg/py3.11/darwin.txt b/requirements/static/pkg/py3.11/darwin.txt index aa873046d75..04432490f63 100644 --- a/requirements/static/pkg/py3.11/darwin.txt +++ b/requirements/static/pkg/py3.11/darwin.txt @@ -61,7 +61,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -131,7 +131,9 @@ typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests yarl==1.9.4 # via aiohttp zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.11/freebsd.txt b/requirements/static/pkg/py3.11/freebsd.txt index 98912ee2a55..6ddc03cef40 100644 --- a/requirements/static/pkg/py3.11/freebsd.txt +++ b/requirements/static/pkg/py3.11/freebsd.txt @@ -61,7 +61,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -133,7 +133,9 @@ typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests yarl==1.9.4 # via aiohttp zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.11/linux.txt b/requirements/static/pkg/py3.11/linux.txt index a54387e807c..857bf24de9b 100644 --- a/requirements/static/pkg/py3.11/linux.txt +++ 
b/requirements/static/pkg/py3.11/linux.txt @@ -61,7 +61,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -135,7 +135,9 @@ typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests yarl==1.9.4 # via aiohttp zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.11/windows.txt b/requirements/static/pkg/py3.11/windows.txt index e9c7b06e320..244a3e7ce04 100644 --- a/requirements/static/pkg/py3.11/windows.txt +++ b/requirements/static/pkg/py3.11/windows.txt @@ -67,7 +67,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -147,7 +147,9 @@ typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests wmi==1.5.1 ; sys_platform == "win32" # via -r requirements/base.txt xmltodict==0.13.0 ; sys_platform == "win32" diff --git a/requirements/static/pkg/py3.12/darwin.txt b/requirements/static/pkg/py3.12/darwin.txt index ebce1ec3b82..938615e7783 100644 --- a/requirements/static/pkg/py3.12/darwin.txt +++ b/requirements/static/pkg/py3.12/darwin.txt @@ -61,7 +61,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -131,7 +131,9 @@ typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests yarl==1.9.4 # via aiohttp zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.12/freebsd.txt b/requirements/static/pkg/py3.12/freebsd.txt index c741fd71784..401f2086b6c 100644 --- a/requirements/static/pkg/py3.12/freebsd.txt +++ b/requirements/static/pkg/py3.12/freebsd.txt @@ -61,7 +61,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -133,7 +133,9 @@ typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests yarl==1.9.4 # via aiohttp zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.12/linux.txt b/requirements/static/pkg/py3.12/linux.txt index a66ffb814be..df385c2ab41 100644 --- a/requirements/static/pkg/py3.12/linux.txt +++ b/requirements/static/pkg/py3.12/linux.txt @@ -61,7 +61,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -135,7 +135,9 @@ typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests yarl==1.9.4 # via aiohttp zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.12/windows.txt b/requirements/static/pkg/py3.12/windows.txt index 79c33f9258f..a43c9deaf31 100644 --- a/requirements/static/pkg/py3.12/windows.txt +++ b/requirements/static/pkg/py3.12/windows.txt @@ -67,7 +67,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -147,7 +147,9 @@ 
typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests wmi==1.5.1 ; sys_platform == "win32" # via -r requirements/base.txt xmltodict==0.13.0 ; sys_platform == "win32" diff --git a/requirements/static/pkg/py3.9/darwin.txt b/requirements/static/pkg/py3.9/darwin.txt index 5f547422f75..70bc01d852c 100644 --- a/requirements/static/pkg/py3.9/darwin.txt +++ b/requirements/static/pkg/py3.9/darwin.txt @@ -63,7 +63,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -133,7 +133,9 @@ typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests yarl==1.9.4 # via aiohttp zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.9/freebsd.txt b/requirements/static/pkg/py3.9/freebsd.txt index c3dd38187b7..425798a05b8 100644 --- a/requirements/static/pkg/py3.9/freebsd.txt +++ b/requirements/static/pkg/py3.9/freebsd.txt @@ -63,7 +63,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -133,7 +133,9 @@ typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests yarl==1.9.4 # via aiohttp zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.9/linux.txt b/requirements/static/pkg/py3.9/linux.txt index d73ec84b915..de4eefcc5c4 100644 --- a/requirements/static/pkg/py3.9/linux.txt +++ b/requirements/static/pkg/py3.9/linux.txt @@ -63,7 +63,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -135,7 +135,9 @@ typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests yarl==1.9.4 # via aiohttp zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.9/windows.txt b/requirements/static/pkg/py3.9/windows.txt index c3aad36e727..32e8d5a3556 100644 --- a/requirements/static/pkg/py3.9/windows.txt +++ b/requirements/static/pkg/py3.9/windows.txt @@ -69,7 +69,7 @@ jaraco.functools==3.7.0 # tempora jaraco.text==3.11.1 # via jaraco.collections -jinja2==3.1.4 +jinja2==3.1.6 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt @@ -150,7 +150,9 @@ typing-extensions==4.8.0 # pydantic # pydantic-core urllib3==1.26.18 - # via requests + # via + # -r requirements/base.txt + # requests wmi==1.5.1 ; sys_platform == "win32" # via -r requirements/base.txt xmltodict==0.13.0 ; sys_platform == "win32" diff --git a/salt/_logging/impl.py b/salt/_logging/impl.py index 321ccf794b5..e531459ea68 100644 --- a/salt/_logging/impl.py +++ b/salt/_logging/impl.py @@ -299,6 +299,8 @@ class SaltLoggingClass(LOGGING_LOGGER_CLASS, metaclass=LoggingMixinMeta): except TypeError: # Python < 3.8 - We still need this for salt-ssh since it will use # the system python, and not out onedir. 
+ # stacklevel was introduced in Py 3.8 + # must be running on old OS with Python 3.6 or 3.7 LOGGING_LOGGER_CLASS._log( self, level, diff --git a/salt/grains/core.py b/salt/grains/core.py index 2538dd8c64c..84f0d2ac433 100644 --- a/salt/grains/core.py +++ b/salt/grains/core.py @@ -930,6 +930,10 @@ def _virtual(osdata): grains["virtual"] = "container" grains["virtual_subtype"] = "LXC" break + elif "podman" in output: + grains["virtual"] = "container" + grains["virtual_subtype"] = "Podman" + break elif "amazon" in output: grains["virtual"] = "Nitro" grains["virtual_subtype"] = "Amazon EC2" diff --git a/salt/minion.py b/salt/minion.py index 53beadb79a7..56ddcbb08c5 100644 --- a/salt/minion.py +++ b/salt/minion.py @@ -2508,7 +2508,7 @@ class Minion(MinionBase): else: data["fun"] = "state.highstate" data["arg"] = [] - self._handle_decoded_payload(data) + self.io_loop.add_callback(self._handle_decoded_payload, data) def _refresh_grains_watcher(self, refresh_interval_in_minutes): """ @@ -2529,6 +2529,7 @@ class Minion(MinionBase): } ) + @salt.ext.tornado.gen.coroutine def _fire_master_minion_start(self): include_grains = False if self.opts["start_event_grains"]: @@ -2536,13 +2537,13 @@ class Minion(MinionBase): # Send an event to the master that the minion is live if self.opts["enable_legacy_startup_events"]: # Old style event. Defaults to False in 3001 release. - self._fire_master_main( + yield self._fire_master_main( "Minion {} started at {}".format(self.opts["id"], time.asctime()), "minion_start", include_startup_grains=include_grains, ) # send name spaced event - self._fire_master_main( + yield self._fire_master_main( "Minion {} started at {}".format(self.opts["id"], time.asctime()), tagify([self.opts["id"], "start"], "minion"), include_startup_grains=include_grains, @@ -2981,7 +2982,7 @@ class Minion(MinionBase): # make the schedule to use the new 'functions' loader self.schedule.functions = self.functions self.pub_channel.on_recv(self._handle_payload) - self._fire_master_minion_start() + yield self._fire_master_minion_start() log.info("Minion is ready to receive requests!") # update scheduled job to run with the new master addr @@ -3230,7 +3231,7 @@ class Minion(MinionBase): self.setup_scheduler(before_connect=True) self.sync_connect_master() if self.connected: - self._fire_master_minion_start() + self.io_loop.add_callback(self._fire_master_minion_start) log.info("Minion is ready to receive requests!") # Make sure to gracefully handle SIGUSR1 @@ -3273,7 +3274,8 @@ class Minion(MinionBase): "minion is running under an init system." 
) - self._fire_master_main( + self.io_loop.add_callback( + self._fire_master_main, "ping", "minion_ping", timeout_handler=ping_timeout_handler, diff --git a/salt/modules/baredoc.py b/salt/modules/baredoc.py index 7513b546919..1d91ade25a6 100644 --- a/salt/modules/baredoc.py +++ b/salt/modules/baredoc.py @@ -10,7 +10,6 @@ import ast import itertools import logging import os -from typing import Dict, List import salt.utils.doc import salt.utils.files @@ -36,7 +35,7 @@ def _get_module_name(tree, filename: str) -> str: return module_name -def _get_func_aliases(tree) -> Dict: +def _get_func_aliases(tree) -> dict: """ Get __func_alias__ dict for mapping function names """ @@ -54,7 +53,7 @@ def _get_func_aliases(tree) -> Dict: return fun_aliases -def _get_args(function: str) -> Dict: +def _get_args(function: str) -> dict: """ Given a function def, returns arguments and defaults """ @@ -128,7 +127,7 @@ def _parse_module_docs(module_path, mod_name=None): return salt.utils.doc.strip_rst(ret) -def _parse_module_functions(module_py: str, return_type: str) -> Dict: +def _parse_module_functions(module_py: str, return_type: str) -> dict: """ Parse module files for proper module_name and function name, then gather functions and possibly arguments @@ -161,7 +160,7 @@ def _parse_module_functions(module_py: str, return_type: str) -> Dict: return ret -def _get_files(name=False, type="states", return_type="args") -> List: +def _get_files(name=False, type="states", return_type="args") -> list: """ Determine if modules/states directories or files are requested diff --git a/salt/modules/cmdmod.py b/salt/modules/cmdmod.py index fe1d4412d00..4c7fd40d02a 100644 --- a/salt/modules/cmdmod.py +++ b/salt/modules/cmdmod.py @@ -283,7 +283,10 @@ def _prep_powershell_cmd(win_shell, cmd, encoded_cmd): new_cmd.append("-Command") if isinstance(cmd, list): cmd = " ".join(cmd) - new_cmd.append(f"& {cmd.strip()}") + # We need to append $LASTEXITCODE here to return the actual exit code + # from the script. Otherwise, it will always return 1 on any non-zero + # exit code failure. Issue: #60884 + new_cmd.append(f"& {cmd.strip()}; exit $LASTEXITCODE") elif encoded_cmd: new_cmd.extend(["-EncodedCommand", f"{cmd}"]) else: @@ -293,10 +296,10 @@ def _prep_powershell_cmd(win_shell, cmd, encoded_cmd): # Commands that are a specific keyword behave differently. They fail if # you add a "&" to the front. Add those here as we find them: - keywords = ["$", "&", ".", "Configuration"] + keywords = ["$", "&", ".", "Configuration", "try"] for keyword in keywords: - if cmd.startswith(keyword): + if cmd.lower().startswith(keyword.lower()): new_cmd.extend(["-Command", f"{cmd.strip()}"]) break else: @@ -455,8 +458,6 @@ def _run( if isinstance(cmd, (list, tuple)): cmd = " ".join(cmd) - return win_runas(cmd, runas, password, cwd) - if runas and salt.utils.platform.is_darwin(): # We need to insert the user simulation into the command itself and not # just run it from the environment on macOS as that method doesn't work @@ -489,7 +490,7 @@ def _run( # hang. 
runas = None - if runas: + if runas and not salt.utils.platform.is_windows(): # Save the original command before munging it try: pwd.getpwnam(runas) @@ -510,7 +511,7 @@ def _run( else: use_sudo = True - if runas or group: + if (runas or group) and not salt.utils.platform.is_windows(): try: # Getting the environment for the runas user # Use markers to thwart any stdout noise @@ -749,90 +750,104 @@ def _run( if not use_vt: # This is where the magic happens - try: + + if runas and salt.utils.platform.is_windows(): + + # We can't use TimedProc with runas on Windows if change_windows_codepage: salt.utils.win_chcp.set_codepage_id(windows_codepage) - try: - proc = salt.utils.timed_subprocess.TimedProc(cmd, **new_kwargs) - except OSError as exc: - msg = "Unable to run command '{}' with the context '{}', reason: {}".format( - cmd if output_loglevel is not None else "REDACTED", - new_kwargs, - exc, - ) - raise CommandExecutionError(msg) - try: - proc.run() - except TimedProcTimeoutError as exc: - ret["stdout"] = str(exc) - ret["stderr"] = "" - ret["retcode"] = None - ret["pid"] = proc.process.pid - # ok return code for timeouts? - ret["retcode"] = 1 - return ret - finally: + ret = win_runas(cmd, runas, password, cwd) + if change_windows_codepage: salt.utils.win_chcp.set_codepage_id(previous_windows_codepage) - if output_loglevel != "quiet" and output_encoding is not None: - log.debug( - "Decoding output from command %s using %s encoding", - cmd, - output_encoding, - ) + else: + try: + if change_windows_codepage: + salt.utils.win_chcp.set_codepage_id(windows_codepage) + try: + proc = salt.utils.timed_subprocess.TimedProc(cmd, **new_kwargs) + except OSError as exc: + msg = "Unable to run command '{}' with the context '{}', reason: {}".format( + cmd if output_loglevel is not None else "REDACTED", + new_kwargs, + exc, + ) + raise CommandExecutionError(msg) - try: - out = salt.utils.stringutils.to_unicode( - proc.stdout, encoding=output_encoding - ) - except TypeError: - # stdout is None - out = "" - except UnicodeDecodeError: - out = salt.utils.stringutils.to_unicode( - proc.stdout, encoding=output_encoding, errors="replace" - ) - if output_loglevel != "quiet": - log.error( - "Failed to decode stdout from command %s, non-decodable " - "characters have been replaced", - _log_cmd(cmd), + try: + proc.run() + except TimedProcTimeoutError as exc: + ret["stdout"] = str(exc) + ret["stderr"] = "" + ret["retcode"] = None + ret["pid"] = proc.process.pid + # ok return code for timeouts? 
+ ret["retcode"] = 1 + return ret + finally: + if change_windows_codepage: + salt.utils.win_chcp.set_codepage_id(previous_windows_codepage) + + if output_loglevel != "quiet" and output_encoding is not None: + log.debug( + "Decoding output from command %s using %s encoding", + cmd, + output_encoding, ) - try: - err = salt.utils.stringutils.to_unicode( - proc.stderr, encoding=output_encoding - ) - except TypeError: - # stderr is None - err = "" - except UnicodeDecodeError: - err = salt.utils.stringutils.to_unicode( - proc.stderr, encoding=output_encoding, errors="replace" - ) - if output_loglevel != "quiet": - log.error( - "Failed to decode stderr from command %s, non-decodable " - "characters have been replaced", - _log_cmd(cmd), + try: + out = salt.utils.stringutils.to_unicode( + proc.stdout, encoding=output_encoding ) + except TypeError: + # stdout is None + out = "" + except UnicodeDecodeError: + out = salt.utils.stringutils.to_unicode( + proc.stdout, encoding=output_encoding, errors="replace" + ) + if output_loglevel != "quiet": + log.error( + "Failed to decode stdout from command %s, non-decodable " + "characters have been replaced", + _log_cmd(cmd), + ) + + try: + err = salt.utils.stringutils.to_unicode( + proc.stderr, encoding=output_encoding + ) + except TypeError: + # stderr is None + err = "" + except UnicodeDecodeError: + err = salt.utils.stringutils.to_unicode( + proc.stderr, encoding=output_encoding, errors="replace" + ) + if output_loglevel != "quiet": + log.error( + "Failed to decode stderr from command %s, non-decodable " + "characters have been replaced", + _log_cmd(cmd), + ) + + # Encoded commands dump CLIXML data in stderr. It's not an actual error + if encoded_cmd and "CLIXML" in err: + err = "" + if rstrip: + if out is not None: + out = out.rstrip() + if err is not None: + err = err.rstrip() + ret["pid"] = proc.process.pid + ret["retcode"] = proc.process.returncode + ret["stdout"] = out + ret["stderr"] = err - # Encoded commands dump CLIXML data in stderr. It's not an actual error - if encoded_cmd and "CLIXML" in err: - err = "" - if rstrip: - if out is not None: - out = out.rstrip() - if err is not None: - err = err.rstrip() - ret["pid"] = proc.process.pid - ret["retcode"] = proc.process.returncode if ret["retcode"] in success_retcodes: ret["retcode"] = 0 - ret["stdout"] = out - ret["stderr"] = err if any( [stdo in ret["stdout"] for stdo in success_stdout] + [stde in ret["stderr"] for stde in success_stderr] @@ -4096,16 +4111,16 @@ def powershell( # ConvertTo-JSON is only available on PowerShell 3.0 and later psversion = shell_info("powershell")["psversion"] if salt.utils.versions.version_cmp(psversion, "2.0") == 1: - cmd += " | ConvertTo-JSON" + cmd += " | ConvertTo-JSON " if depth is not None: - cmd += f" -Depth {depth}" + cmd += f"-Depth {depth} " # Put the whole command inside a try / catch block # Some errors in PowerShell are not "Terminating Errors" and will not be # caught in a try/catch block. For example, the `Get-WmiObject` command will # often return a "Non Terminating Error". To fix this, make sure # `-ErrorAction Stop` is set in the powershell command - cmd = "try {" + cmd + '} catch { "{}" }' + cmd = "try { " + cmd + ' } catch { "{}" }' if encode_cmd: # Convert the cmd to UTF-16LE without a BOM and base64 encode. 
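The two cmdmod.py changes above work together: ``_prep_powershell_cmd`` appends ``; exit $LASTEXITCODE`` so the script's own exit code survives, and ``powershell()`` wraps the command in a try/catch so non-terminating errors are caught. A standalone sketch of the combined shape, assuming plain ``subprocess`` rather than Salt's TimedProc machinery (the function name is illustrative):

```
import subprocess


def run_powershell(snippet: str) -> int:
    # Illustrative only. Wrap in try/catch so non-terminating errors are
    # caught, and append $LASTEXITCODE so the real exit code propagates
    # instead of a generic failure (see issue #60884).
    wrapped = "try { " + snippet + ' } catch { "{}" }; exit $LASTEXITCODE'
    proc = subprocess.run(
        ["powershell", "-NonInteractive", "-NoProfile", "-Command", wrapped],
        check=False,
    )
    return proc.returncode
```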
@@ -4117,7 +4132,7 @@ cmd = salt.utils.stringutils.to_str(cmd) encoded_cmd = True else: - cmd = f"{{{cmd}}}" + cmd = f"{{ {cmd} }}" encoded_cmd = False # Retrieve the response, while overriding shell with 'powershell'
diff --git a/salt/modules/win_file.py b/salt/modules/win_file.py index e3de699d625..0bcc2ac9163 100644 --- a/salt/modules/win_file.py +++ b/salt/modules/win_file.py @@ -497,13 +497,14 @@ def get_group(path, follow_symlinks=True): def uid_to_user(uid): """ - Convert a uid to a user name + Convert a User ID (uid) to a username Args: uid (str): The user id to lookup Returns: - str: The name of the user + str: The name of the user. The ``uid`` will be returned if there is no + corresponding username CLI Example:
diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py index 7c5c017fc89..da275d4a55b 100644 --- a/salt/modules/yumpkg.py +++ b/salt/modules/yumpkg.py @@ -15,6 +15,9 @@ Support for YUM/DNF .. versionadded:: 3003 Support for ``tdnf`` on Photon OS. +.. versionadded:: 3006.10 + Support for ``dnf5`` on Fedora 41 + .. versionadded:: 3007.0 Support for ``dnf5``` on Fedora 39 """
@@ -1034,7 +1037,7 @@ def list_upgrades(refresh=True, **kwargs): cmd = ["--quiet"] cmd.extend(options) - cmd.extend(["list", "upgrades" if _yum() in ("dnf", "dnf5") else "updates"]) + cmd.extend(["list", "--upgrades" if _yum() in ("dnf", "dnf5") else "updates"]) out = _call_yum(cmd, ignore_retcode=True) if out["retcode"] != 0 and "Error:" in out: return {}
@@ -1058,7 +1061,7 @@ def list_downloaded(**kwargs): salt '*' pkg.list_downloaded """ - CACHE_DIR = os.path.join("/var/cache/", _yum()) + CACHE_DIR = os.path.join("/var/cache", _yum()) ret = {} for root, dirnames, filenames in salt.utils.path.os_walk(CACHE_DIR):
@@ -1428,8 +1431,8 @@ def install( 'version': '', 'arch': ''}}} """ - if (version := kwargs.get("version")) is not None: - kwargs["version"] = str(version) + if kwargs.get("version") is not None: + kwargs["version"] = str(kwargs["version"]) options = _get_options(**kwargs) if salt.utils.data.is_true(refresh):
@@ -1987,7 +1990,7 @@ def upgrade( salt '*' pkg.upgrade security=True exclude='kernel*' """ if _yum() in ("dnf", "dnf5") and not obsoletes: - # for dnf we can just disable obsoletes + # for dnf[5] we can just disable obsoletes _setopt = [ opt for opt in salt.utils.args.split_input(kwargs.pop("setopt", []))
@@ -2079,7 +2082,7 @@ def remove(name=None, pkgs=None, **kwargs): # pylint: disable=W0613 On minions running systemd>=205, `systemd-run(1)`_ is now used to isolate commands which modify installed packages from the ``salt-minion`` daemon's control group. This is done to keep systemd - from killing any yum/dnf commands spawned by Salt when the + from killing any yum/dnf[5] commands spawned by Salt when the ``salt-minion`` service is restarted. (see ``KillMode`` in the `systemd.kill(5)`_ manpage for more information). If desired, usage of `systemd-run(1)`_ can be suppressed by setting a :mod:`config option
@@ -2183,7 +2186,7 @@ def purge(name=None, pkgs=None, **kwargs): # pylint: disable=W0613 On minions running systemd>=205, `systemd-run(1)`_ is now used to isolate commands which modify installed packages from the ``salt-minion`` daemon's control group. This is done to keep systemd - from killing any yum/dnf commands spawned by Salt when the + from killing any yum/dnf[5] commands spawned by Salt when the ``salt-minion`` service is restarted. (see ``KillMode`` in the `systemd.kill(5)`_ manpage for more information).
If desired, usage of `systemd-run(1)`_ can be suppressed by setting a :mod:`config option @@ -3324,12 +3327,12 @@ def download(*packages, **kwargs): .. versionadded:: 2015.5.0 Download packages to the local disk. Requires ``yumdownloader`` from - ``yum-utils`` package. + ``yum-utils`` or ``dnf-utils`` package. .. note:: - ``yum-utils`` will already be installed on the minion if the package - was installed from the Fedora / EPEL repositories. + ``yum-utils`` or ``dnf-utils`` will already be installed on the minion + if the package was installed from the EPEL / Fedora repositories. CLI Example: @@ -3344,7 +3347,7 @@ def download(*packages, **kwargs): if not packages: raise SaltInvocationError("No packages were specified") - CACHE_DIR = "/var/cache/yum/packages" + CACHE_DIR = os.path.join("/var/cache", _yum(), "packages") if not os.path.exists(CACHE_DIR): os.makedirs(CACHE_DIR) cached_pkgs = os.listdir(CACHE_DIR) @@ -3525,12 +3528,17 @@ def services_need_restart(**kwargs): salt '*' pkg.services_need_restart """ - if _yum() != "dnf": - raise CommandExecutionError("dnf is required to list outdated services.") + if _yum() not in ("dnf", "dnf5"): + raise CommandExecutionError( + "dnf or dnf5 is required to list outdated services." + ) if not salt.utils.systemd.booted(__context__): raise CommandExecutionError("systemd is required to list outdated services.") - cmd = ["dnf", "--quiet", "needs-restarting"] + if _yum() == "dnf5": + cmd = ["dnf5", "--quiet", "needs-restarting"] + else: + cmd = ["dnf", "--quiet", "needs-restarting"] dnf_output = __salt__["cmd.run_stdout"](cmd, python_shell=False) if not dnf_output: return [] diff --git a/salt/platform/win.py b/salt/platform/win.py index ec63dbe319f..a521fdd974c 100644 --- a/salt/platform/win.py +++ b/salt/platform/win.py @@ -185,7 +185,12 @@ class HANDLE(wintypes.HANDLE): def Close(self, CloseHandle=kernel32.CloseHandle): if self and not getattr(self, "closed", False): - CloseHandle(self.Detach()) + try: + CloseHandle(self.Detach()) + except OSError: + # Suppress the error when there is no handle (WinError 6) + if ctypes.get_last_error() == 6: + pass __del__ = Close diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index 46b127bfcf7..91067fc41a3 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -492,9 +492,7 @@ class GitProvider: self._cache_basename = "_" if self.id.startswith("__env__"): try: - self._cache_basename = self.get_checkout_target().replace( - "/", "-" - ) # replace '/' with '-' to not cause trouble with file-system + self._cache_basename = self.get_checkout_target() except AttributeError: log.critical( @@ -2801,6 +2799,33 @@ class GitBase: try: # Find and place fetch_request file for all the other branches for this repo repo_work_hash = os.path.split(repo.get_salt_working_dir())[0] + branches = [ + os.path.relpath(path, repo_work_hash) + for (path, subdirs, files) in os.walk(repo_work_hash) + if not subdirs + ] + + for branch in branches: + # Don't place fetch request in current branch being updated + if branch == repo.get_cache_basename(): + continue + branch_salt_dir = salt.utils.path.join(repo_work_hash, branch) + fetch_path = salt.utils.path.join( + branch_salt_dir, "fetch_request" + ) + if os.path.isdir(branch_salt_dir): + try: + with salt.utils.files.fopen(fetch_path, "w"): + pass + except OSError as exc: # pylint: disable=broad-except + log.error( + "Failed to make fetch request: %s %s", + fetch_path, + exc, + exc_info=True, + ) + else: + log.error("Failed to make fetch request: %s", fetch_path) for branch in 
os.listdir(repo_work_hash): # Don't place fetch request in current branch being updated if branch == repo.get_cache_basename(): diff --git a/salt/utils/http.py b/salt/utils/http.py index 67fc05ce469..fd296788f46 100644 --- a/salt/utils/http.py +++ b/salt/utils/http.py @@ -646,7 +646,7 @@ def query( decode_body=decode_body, ) return ret - except (socket.herror, OSError, socket.timeout, socket.gaierror) as exc: + except (socket.herror, OSError, TimeoutError, socket.gaierror) as exc: if status is True: ret["status"] = 0 ret["error"] = str(exc) diff --git a/salt/utils/reactor.py b/salt/utils/reactor.py index 0229738ec3c..cdfc927a35d 100644 --- a/salt/utils/reactor.py +++ b/salt/utils/reactor.py @@ -464,22 +464,42 @@ class ReactWrap: """ Wrap RunnerClient for executing :ref:`runner modules ` """ + # pylint: disable=unsupported-membership-test,unsupported-assignment-operation + if "runner" not in self.client_cache: + log.debug("reactor edge case: re-populating client_cache for runner") + low = {"state": "runner"} + self.populate_client_cache(low) return self.pool.fire_async(self.client_cache["runner"].low, args=(fun, kwargs)) def wheel(self, fun, **kwargs): """ Wrap Wheel to enable executing :ref:`wheel modules ` """ + # pylint: disable=unsupported-membership-test,unsupported-assignment-operation + if "wheel" not in self.client_cache: + log.debug("reactor edge case: re-populating client_cache for wheel") + low = {"state": "wheel"} + self.populate_client_cache(low) return self.pool.fire_async(self.client_cache["wheel"].low, args=(fun, kwargs)) def local(self, fun, tgt, **kwargs): """ Wrap LocalClient for running :ref:`execution modules ` """ + # pylint: disable=unsupported-membership-test,unsupported-assignment-operation + if "local" not in self.client_cache: + log.debug("reactor edge case: re-populating client_cache for local") + low = {"state": "local"} + self.populate_client_cache(low) self.client_cache["local"].cmd_async(tgt, fun, **kwargs) def caller(self, fun, **kwargs): """ Wrap LocalCaller to execute remote exec functions locally on the Minion """ + # pylint: disable=unsupported-membership-test,unsupported-assignment-operation + if "caller" not in self.client_cache: + log.debug("reactor edge case: re-populating client_cache for caller") + low = {"state": "caller"} + self.populate_client_cache(low) self.client_cache["caller"].cmd(fun, *kwargs["arg"], **kwargs["kwarg"]) diff --git a/salt/utils/win_dacl.py b/salt/utils/win_dacl.py index 346dac41de5..70779588262 100644 --- a/salt/utils/win_dacl.py +++ b/salt/utils/win_dacl.py @@ -125,7 +125,7 @@ should match what you see when you look at the properties for an object. - subfolders_only: Applies to all containers beneath this object - files_only: Applies to all file objects beneath this object - .. NOTE:: + .. note:: 'applies to' properties can only be modified on directories. Files will always be ``this_folder_only``. @@ -883,10 +883,7 @@ def dacl(obj_name=None, obj_type="file"): """ # Get the principal from the sid (object sid) sid = win32security.ConvertSidToStringSid(ace[2]) - try: - principal = get_name(sid) - except CommandExecutionError: - principal = sid + principal = get_name(sid) # Get the ace type ace_type = self.ace_type[ace[0][0]] @@ -1194,14 +1191,17 @@ def get_name(principal): principal (str): Find the Normalized name based on this. Can be a PySID object, a SID - string, or a user name in any capitalization. + string, or a username in any capitalization. .. 
note::
-            Searching based on the user name can be slow on hosts connected
+            Searching based on the username can be slow on hosts connected
             to large Active Directory domains.
 
     Returns:
-        str: The name that corresponds to the passed principal
+        str: The username that corresponds to the passed principal. If there is
+            no corresponding username, the string SID will be returned.
+            Capability SIDs will return ``None``.
+
     Usage:
 
@@ -1246,7 +1246,7 @@ def get_name(principal):
             name = f"NT Service\\{name}"
 
         return name
-    except (pywintypes.error, TypeError) as exc:
+    except pywintypes.error as exc:
         # Microsoft introduced the concept of Capability SIDs in Windows 8
         # https://docs.microsoft.com/en-us/windows/security/identity-protection/access-control/security-identifiers#capability-sids
         # https://support.microsoft.com/en-us/help/4502539/some-sids-do-not-resolve-into-friendly-names
         # These types of SIDs do not resolve, so we'll just ignore them for this
         # All capability SIDs begin with `S-1-15-3`, so we'll only throw an
         # error when the sid does not begin with `S-1-15-3`
-        if not str_sid.startswith("S-1-15-3"):
-            message = f'Error resolving "{principal}"'
-            if type(exc) == pywintypes.error:
-                win_error = win32api.FormatMessage(exc.winerror).rstrip("\n")
-                message = f"{message}: {win_error}"
+        # 1332: No mapping between account names and security IDs was done
+        if exc.winerror == 1332:
+            # Capability SID, return None
+            if str_sid.startswith("S-1-15-3"):
+                log.debug("Name mapping not available for capability SID: %s", str_sid)
+                return None
+
+            # User does not exist on the system or is on a disconnected domain
+            # Return the SID
+            else:
+                log.debug(
+                    "Could not resolve SID: %s\nThe user has either been removed "
+                    "from the system or is a domain user and the system is not "
+                    "connected to the domain",
+                    str_sid,
+                )
+                return str_sid
+
+        # Some other unknown error
+        else:
+            message = f'Error resolving "{principal}": {exc.strerror}'
             log.exception(message)
             raise CommandExecutionError(message, exc)
@@ -2242,13 +2258,19 @@ def _check_perms(obj_name, obj_type, new_perms, access_mode, ret, test_mode=Fals
     cur_perms = get_permissions(obj_name=obj_name, obj_type=obj_type)
     changes = {}
     for user in new_perms:
-        applies_to_text = ""
         # Check that user exists:
-        try:
-            user_name = get_name(principal=user)
-        except CommandExecutionError:
+        user_name = get_name(principal=user)
+        # username will be the SID if there is no corresponding username
+        if user_name == get_sid_string(principal=user):
             ret["comment"].append(
-                '{} Perms: User "{}" missing from Target System'.format(
+                "{} Perms: Could not find a corresponding username for: {}".format(
+                    access_mode.capitalize(), user
+                )
+            )
+            continue
+        if user_name is None:
+            ret["comment"].append(
+                "{} Perms: Skipping Capability SID: {}".format(
                     access_mode.capitalize(), user
                 )
             )
@@ -2471,7 +2493,7 @@ def check_perms(
             log.debug("Resetting permissions for %s", obj_name)
             cur_perms = get_permissions(obj_name=obj_name, obj_type=obj_type)
             for user_name in cur_perms["Not Inherited"]:
-                # case insensitive dictionary search
+                # case-insensitive dictionary search
                 if user_name not in {get_name(k) for k in (grant_perms or {})}:
                     if "grant" in cur_perms["Not Inherited"][user_name]:
                         ret["changes"].setdefault("remove_perms", {})
@@ -2489,7 +2511,7 @@
                         ret["changes"]["remove_perms"].update(
                             {user_name: cur_perms["Not Inherited"][user_name]}
                         )
-                # case insensitive dictionary search
+                # case-insensitive dictionary search
                 if
user_name not in {get_name(k) for k in (deny_perms or {})}: if "deny" in cur_perms["Not Inherited"][user_name]: ret["changes"].setdefault("remove_perms", {}) @@ -2541,7 +2563,7 @@ def check_perms( log.debug("Resetting permissions for %s", obj_name) cur_perms = get_permissions(obj_name=obj_name, obj_type=obj_type) for user_name in cur_perms["Not Inherited"]: - # case insensitive dictionary search + # case-insensitive dictionary search if user_name not in {get_name(k) for k in (grant_perms or {})}: if "grant" in cur_perms["Not Inherited"][user_name]: rm_permissions( @@ -2550,7 +2572,7 @@ def check_perms( ace_type="grant", obj_type=obj_type, ) - # case insensitive dictionary search + # case-insensitive dictionary search if user_name not in {get_name(k) for k in (deny_perms or {})}: if "deny" in cur_perms["Not Inherited"][user_name]: rm_permissions( @@ -2582,14 +2604,9 @@ def _set_perms(obj_dacl, obj_type, new_perms, cur_perms, access_mode): ret = {} for user in new_perms: # Check that user exists: - try: - user_name = get_name(user) - except CommandExecutionError: - log.debug( - '%s Perms: User "%s" missing from Target System', - access_mode.capitalize(), - user, - ) + user_name = get_name(user) + # We want to skip unmapped usernames + if user_name == get_sid_string(user): continue # Get applies_to diff --git a/salt/utils/win_runas.py b/salt/utils/win_runas.py index fc8c9c82be5..aa2df51dbfc 100644 --- a/salt/utils/win_runas.py +++ b/salt/utils/win_runas.py @@ -187,8 +187,10 @@ def runas(cmdLine, username, password=None, cwd=None): | win32process.CREATE_SUSPENDED ) + flags = win32con.STARTF_USESTDHANDLES + flags |= win32con.STARTF_USESHOWWINDOW startup_info = salt.platform.win.STARTUPINFO( - dwFlags=win32con.STARTF_USESTDHANDLES, + dwFlags=flags, hStdInput=stdin_read.handle, hStdOutput=stdout_write.handle, hStdError=stderr_write.handle, @@ -197,6 +199,9 @@ def runas(cmdLine, username, password=None, cwd=None): # Create the environment for the user env = create_env(user_token, False) + if "&&" in cmdLine: + cmdLine = f'cmd /c "{cmdLine}"' + hProcess = None try: # Start the process in a suspended state. 
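Both runas hunks (here and in `runas_unpriv` below) add the same guard: a command line containing `&&` is wrapped in `cmd /c "..."`, because the CreateProcess* family executes the raw command line without a shell, so `&&` would be passed through as literal arguments instead of chaining commands. A minimal sketch of that guard (`wrap_compound` is an invented name, not a Salt function):

```python
# Minimal sketch of the compound-command guard added in these hunks;
# wrap_compound is an invented name, not a Salt function.
def wrap_compound(cmd_line):
    # "&&" is interpreted by cmd.exe, not by the Win32 process loader,
    # so compound command lines must be routed through the shell.
    if "&&" in cmd_line:
        return f'cmd /c "{cmd_line}"'
    return cmd_line


assert wrap_compound("hostname && whoami") == 'cmd /c "hostname && whoami"'
assert wrap_compound("whoami") == "whoami"  # simple commands pass through
```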
@@ -286,13 +291,18 @@ def runas_unpriv(cmd, username, password, cwd=None): dupin = salt.platform.win.DuplicateHandle(srchandle=stdin, inherit=True) # Get startup info structure + flags = win32con.STARTF_USESTDHANDLES + flags |= win32con.STARTF_USESHOWWINDOW startup_info = salt.platform.win.STARTUPINFO( - dwFlags=win32con.STARTF_USESTDHANDLES, + dwFlags=flags, hStdInput=dupin, hStdOutput=c2pwrite, hStdError=errwrite, ) + if "&&" in cmd: + cmd = f'cmd /c "{cmd}"' + try: # Run command and return process info structure process_info = salt.platform.win.CreateProcessWithLogonW( diff --git a/tests/pytests/functional/modules/cmd/test_run_win.py b/tests/pytests/functional/modules/cmd/test_run_win.py new file mode 100644 index 00000000000..cf41eb50280 --- /dev/null +++ b/tests/pytests/functional/modules/cmd/test_run_win.py @@ -0,0 +1,50 @@ +import pytest + +pytestmark = [ + pytest.mark.core_test, + pytest.mark.windows_whitelisted, +] + + +@pytest.fixture(scope="module") +def account(): + with pytest.helpers.create_account() as _account: + yield _account + + +@pytest.mark.skip_unless_on_windows(reason="Minion is not Windows") +@pytest.mark.parametrize( + "exit_code, return_code, result", + [ + (300, 0, True), + (299, 299, False), + ], +) +def test_windows_script_exitcode(modules, state_tree, exit_code, return_code, result): + ret = modules.state.single( + "cmd.run", name=f"cmd.exe /c exit {exit_code}", success_retcodes=[2, 44, 300] + ) + assert ret.result is result + assert ret.filtered["changes"]["retcode"] == return_code + + +@pytest.mark.skip_unless_on_windows(reason="Minion is not Windows") +@pytest.mark.parametrize( + "exit_code, return_code, result", + [ + (300, 0, True), + (299, 299, False), + ], +) +def test_windows_script_exitcode_runas( + modules, state_tree, exit_code, return_code, result, account +): + ret = modules.state.single( + "cmd.run", + name=f"cmd.exe /c exit {exit_code}", + success_retcodes=[2, 44, 300], + runas=account.username, + password=account.password, + ) + assert ret.result is result + assert ret.filtered["changes"]["retcode"] == return_code diff --git a/tests/pytests/functional/modules/cmd/test_script.py b/tests/pytests/functional/modules/cmd/test_script.py index dcdd632fa70..9cd8fa85e08 100644 --- a/tests/pytests/functional/modules/cmd/test_script.py +++ b/tests/pytests/functional/modules/cmd/test_script.py @@ -13,6 +13,17 @@ def cmd(modules): return modules.cmd +@pytest.fixture(scope="module") +def exitcode_script(state_tree): + exit_code = 12345 + script_contents = f""" + Write-Host "Expected exit code: {exit_code}" + exit {exit_code} + """ + with pytest.helpers.temp_file("exit_code.ps1", script_contents, state_tree): + yield exit_code + + @pytest.fixture(params=["powershell", "pwsh"]) def shell(request): """ @@ -85,3 +96,9 @@ def test_windows_script_args_powershell_runas(cmd, shell, account, issue_56195): ) assert ret["stdout"] == password + + +@pytest.mark.skip_unless_on_windows(reason="Minion is not Windows") +def test_windows_script_exitcode(cmd, shell, exitcode_script): + ret = cmd.script("salt://exit_code.ps1", shell=shell, saltenv="base") + assert ret["retcode"] == exitcode_script diff --git a/tests/pytests/functional/utils/test_win_runas.py b/tests/pytests/functional/utils/test_win_runas.py new file mode 100644 index 00000000000..b6bdabec375 --- /dev/null +++ b/tests/pytests/functional/utils/test_win_runas.py @@ -0,0 +1,56 @@ +""" +Test the win_runas util +""" + +import pytest + +import salt.utils.win_runas as win_runas + +pytestmark = [ + 
pytest.mark.windows_whitelisted,
+    pytest.mark.skip_unless_on_windows,
+]
+
+
+@pytest.fixture
+def user():
+    with pytest.helpers.create_account() as account:
+        yield account
+
+
+@pytest.mark.parametrize(
+    "cmd, expected",
+    [
+        ("hostname && whoami", "username"),
+        ("hostname && echo foo", "foo"),
+        ("hostname && python --version", "Python"),
+    ],
+)
+def test_compound_runas(user, cmd, expected):
+    if expected == "username":
+        expected = user.username
+    result = win_runas.runas(
+        cmdLine=cmd,
+        username=user.username,
+        password=user.password,
+    )
+    assert expected in result["stdout"]
+
+
+@pytest.mark.parametrize(
+    "cmd, expected",
+    [
+        ("hostname && whoami", "username"),
+        ("hostname && echo foo", "foo"),
+        ("hostname && python --version", "Python"),
+    ],
+)
+def test_compound_runas_unpriv(user, cmd, expected):
+    if expected == "username":
+        expected = user.username
+    result = win_runas.runas_unpriv(
+        cmd=cmd,
+        username=user.username,
+        password=user.password,
+    )
+    assert expected in result["stdout"]
diff --git a/tests/pytests/functional/utils/win_dacl/test_get_name.py b/tests/pytests/functional/utils/win_dacl/test_get_name.py
index f35c1336ec4..3f3f1e33901 100644
--- a/tests/pytests/functional/utils/win_dacl/test_get_name.py
+++ b/tests/pytests/functional/utils/win_dacl/test_get_name.py
@@ -9,9 +9,11 @@ import pytest
 
 import salt.exceptions
 import salt.utils.win_dacl
+from tests.support.mock import patch
 
 # Third-party libs
 try:
+    import pywintypes
     import win32security
 
     HAS_WIN32 = True
@@ -84,12 +86,22 @@ def test_get_name_capability_sid():
     assert salt.utils.win_dacl.get_name(sid_obj) is None
 
 
-def test_get_name_error():
+def test_get_name_unmapped_sid():
     """
-    Test get_name with an un mapped SID, should throw a CommandExecutionError
+    Test get_name with an unmapped SID; it should return the passed SID
     """
     test_sid = "S-1-2-3-4"
     sid_obj = win32security.ConvertStringSidToSid(test_sid)
-    with pytest.raises(salt.exceptions.CommandExecutionError) as exc:
-        salt.utils.win_dacl.get_name(sid_obj)
-    assert "No mapping between account names" in exc.value.message
+    assert salt.utils.win_dacl.get_name(sid_obj) == test_sid
+
+
+def test_get_name_error():
+    """
+    Test get_name with an unexpected error; it should raise a CommandExecutionError
+    """
+    test_sid = "S-1-2-3-4"
+    sid_obj = win32security.ConvertStringSidToSid(test_sid)
+    with patch("win32security.LookupAccountSid", side_effect=pywintypes.error):
+        with pytest.raises(salt.exceptions.CommandExecutionError) as exc:
+            salt.utils.win_dacl.get_name(sid_obj)
+    assert "Error resolving" in exc.value.message
diff --git a/tests/pytests/integration/minion/conftest.py b/tests/pytests/integration/minion/conftest.py
new file mode 100644
index 00000000000..ee64a9ffca4
--- /dev/null
+++ b/tests/pytests/integration/minion/conftest.py
@@ -0,0 +1,103 @@
+import os
+import shutil
+import subprocess
+
+import pytest
+
+import salt.utils.platform
+from tests.conftest import FIPS_TESTRUN
+
+
+@pytest.fixture
+def salt_master_1(request, salt_factories):
+    config_defaults = {
+        "open_mode": True,
+        "transport": request.config.getoption("--transport"),
+    }
+    config_overrides = {
+        "interface": "127.0.0.1",
+        "fips_mode": FIPS_TESTRUN,
+        "publish_signing_algorithm": (
+            "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1"
+        ),
+    }
+
+    factory = salt_factories.salt_master_daemon(
+        "master-1",
+        defaults=config_defaults,
+        overrides=config_overrides,
+        extra_cli_arguments_after_first_start_failure=["--log-level=info"],
+    )
+    with factory.started(start_timeout=120):
+        yield factory
+
+
+@pytest.fixture
+def salt_master_2(salt_factories, salt_master_1):
+    if salt.utils.platform.is_darwin() or salt.utils.platform.is_freebsd():
+        subprocess.check_output(["ifconfig", "lo0", "alias", "127.0.0.2", "up"])
+
+    config_defaults = {
+        "open_mode": True,
+        "transport": salt_master_1.config["transport"],
+    }
+    config_overrides = {
+        "interface": "127.0.0.2",
+        "fips_mode": FIPS_TESTRUN,
+        "publish_signing_algorithm": (
+            "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1"
+        ),
+    }
+
+    # Use the same ports for both masters; they bind to different interfaces
+    for key in (
+        "ret_port",
+        "publish_port",
+    ):
+        config_overrides[key] = salt_master_1.config[key]
+    factory = salt_factories.salt_master_daemon(
+        "master-2",
+        defaults=config_defaults,
+        overrides=config_overrides,
+        extra_cli_arguments_after_first_start_failure=["--log-level=info"],
+    )
+
+    # The secondary salt master depends on the primary salt master fixture
+    # because we need to clone the keys
+    for keyfile in ("master.pem", "master.pub"):
+        shutil.copyfile(
+            os.path.join(salt_master_1.config["pki_dir"], keyfile),
+            os.path.join(factory.config["pki_dir"], keyfile),
+        )
+    with factory.started(start_timeout=120):
+        yield factory
+
+
+@pytest.fixture
+def salt_minion_1(salt_master_1, salt_master_2):
+    config_defaults = {
+        "transport": salt_master_1.config["transport"],
+    }
+
+    master_1_port = salt_master_1.config["ret_port"]
+    master_1_addr = salt_master_1.config["interface"]
+    master_2_port = salt_master_2.config["ret_port"]
+    master_2_addr = salt_master_2.config["interface"]
+    config_overrides = {
+        "master": [
+            f"{master_1_addr}:{master_1_port}",
+            f"{master_2_addr}:{master_2_port}",
+        ],
+        "test.foo": "baz",
+        "fips_mode": FIPS_TESTRUN,
+        "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1",
+        "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1",
+    }
+    factory = salt_master_1.salt_minion_daemon(
+        "minion-1",
+        defaults=config_defaults,
+        overrides=config_overrides,
+        extra_cli_arguments_after_first_start_failure=["--log-level=info"],
+    )
+    with factory.started(start_timeout=120):
+        yield factory
diff --git a/tests/pytests/integration/minion/test_job_return.py b/tests/pytests/integration/minion/test_job_return.py
index c91748597dc..e4a76ae897c 100644
--- a/tests/pytests/integration/minion/test_job_return.py
+++ b/tests/pytests/integration/minion/test_job_return.py
@@ -1,107 +1,5 @@
-import os
-import shutil
-import subprocess
-
 import pytest
 
-import salt.utils.platform
-from tests.conftest import FIPS_TESTRUN
-
-
-@pytest.fixture
-def salt_master_1(request, salt_factories):
-    config_defaults = {
-        "open_mode": True,
-        "transport": request.config.getoption("--transport"),
-    }
-    config_overrides = {
-        "interface": "127.0.0.1",
-        "fips_mode": FIPS_TESTRUN,
-        "publish_signing_algorithm": (
-            "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1"
-        ),
-    }
-
-    factory = salt_factories.salt_master_daemon(
-        "master-1",
-        defaults=config_defaults,
-        overrides=config_overrides,
-        extra_cli_arguments_after_first_start_failure=["--log-level=info"],
-    )
-    with factory.started(start_timeout=120):
-        yield factory
-
-
-@pytest.fixture
-def salt_master_2(salt_factories, salt_master_1):
-    if salt.utils.platform.is_darwin() or salt.utils.platform.is_freebsd():
-        subprocess.check_output(["ifconfig", "lo0", "alias", "127.0.0.2", "up"])
-
-    config_defaults = {
-        "open_mode": True,
-        "transport": salt_master_1.config["transport"],
-    }
-    config_overrides = {
"interface": "127.0.0.2", - "fips_mode": FIPS_TESTRUN, - "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" - ), - } - - # Use the same ports for both masters, they are binding to different interfaces - for key in ( - "ret_port", - "publish_port", - ): - config_overrides[key] = salt_master_1.config[key] - factory = salt_factories.salt_master_daemon( - "master-2", - defaults=config_defaults, - overrides=config_overrides, - extra_cli_arguments_after_first_start_failure=["--log-level=info"], - ) - - # The secondary salt master depends on the primarily salt master fixture - # because we need to clone the keys - for keyfile in ("master.pem", "master.pub"): - shutil.copyfile( - os.path.join(salt_master_1.config["pki_dir"], keyfile), - os.path.join(factory.config["pki_dir"], keyfile), - ) - with factory.started(start_timeout=120): - yield factory - - -@pytest.fixture -def salt_minion_1(salt_master_1, salt_master_2): - config_defaults = { - "transport": salt_master_1.config["transport"], - } - - master_1_port = salt_master_1.config["ret_port"] - master_1_addr = salt_master_1.config["interface"] - master_2_port = salt_master_2.config["ret_port"] - master_2_addr = salt_master_2.config["interface"] - config_overrides = { - "master": [ - f"{master_1_addr}:{master_1_port}", - f"{master_2_addr}:{master_2_port}", - ], - "test.foo": "baz", - "fips_mode": FIPS_TESTRUN, - "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", - "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", - } - factory = salt_master_1.salt_minion_daemon( - "minion-1", - defaults=config_defaults, - overrides=config_overrides, - extra_cli_arguments_after_first_start_failure=["--log-level=info"], - ) - with factory.started(start_timeout=120): - yield factory - @pytest.mark.timeout_unless_on_windows(360) def test_job_return(salt_master_1, salt_master_2, salt_minion_1): diff --git a/tests/pytests/integration/minion/test_start_event.py b/tests/pytests/integration/minion/test_start_event.py new file mode 100644 index 00000000000..b6f1406e701 --- /dev/null +++ b/tests/pytests/integration/minion/test_start_event.py @@ -0,0 +1,23 @@ +import time + +import pytest + + +@pytest.fixture +def start(): + return time.time() + + +def test_minion_start_event( + start, event_listener, salt_master_1, salt_master_2, salt_minion_1 +): + start_events = event_listener.wait_for_events( + [ + (salt_master_1.id, f"salt/minion/{salt_minion_1.id}/start"), + (salt_master_2.id, f"salt/minion/{salt_minion_1.id}/start"), + ], + timeout=60, + after_time=start, + ) + assert not start_events.missed + assert len(start_events.matches) == 2 diff --git a/tests/pytests/integration/minion/test_startup_states.py b/tests/pytests/integration/minion/test_startup_states.py new file mode 100644 index 00000000000..d3bc2204161 --- /dev/null +++ b/tests/pytests/integration/minion/test_startup_states.py @@ -0,0 +1,114 @@ +"""Test minion configuration option startup_states. + +There are four valid values for this option, which are validated by checking the jobs +executed after minion start. 
+""" + +import pytest + + +@pytest.fixture +def salt_minion_startup_states_empty_string(salt_master, salt_minion_id): + config_overrides = { + "startup_states": "", + } + factory = salt_master.salt_minion_daemon( + f"{salt_minion_id}-empty-string", + overrides=config_overrides, + ) + with factory.started(): + yield factory + + +@pytest.fixture +def salt_minion_startup_states_highstate(salt_master, salt_minion_id): + config_overrides = { + "startup_states": "highstate", + } + factory = salt_master.salt_minion_daemon( + f"{salt_minion_id}-highstate", + overrides=config_overrides, + ) + with factory.started(): + yield factory + + +@pytest.fixture +def salt_minion_startup_states_sls(salt_master, salt_minion_id): + config_overrides = {"startup_states": "sls", "sls_list": ["example-sls"]} + factory = salt_master.salt_minion_daemon( + f"{salt_minion_id}-sls", + overrides=config_overrides, + ) + with factory.started(): + yield factory + + +@pytest.fixture +def salt_minion_startup_states_top(salt_master, salt_minion_id): + config_overrides = {"startup_states": "top", "top_file": "example-top.sls"} + factory = salt_master.salt_minion_daemon( + f"{salt_minion_id}-top", + overrides=config_overrides, + ) + with factory.started(): + yield factory + + +def test_startup_states_empty_string( + salt_run_cli, salt_minion_startup_states_empty_string +): + # Get jobs for this minion + ret = salt_run_cli.run( + "jobs.list_jobs", f"search_target={salt_minion_startup_states_empty_string.id}" + ) + # Check no job was run + assert len(ret.data.keys()) == 0 + + +def test_startup_states_highstate(salt_run_cli, salt_minion_startup_states_highstate): + with salt_minion_startup_states_highstate: + # Get jobs for this minion + ret = salt_run_cli.run( + "jobs.list_jobs", f"search_target={salt_minion_startup_states_highstate.id}" + ) + # Check there is exactly one job + assert len(ret.data.keys()) == 1 + # Check that job executes state.highstate + job_ret = next(iter(ret.data.values())) + assert "Function" in job_ret + assert job_ret["Function"] == "state.highstate" + assert "Arguments" in job_ret + assert job_ret["Arguments"] == [] + + +def test_startup_states_sls(salt_run_cli, salt_minion_startup_states_sls): + with salt_minion_startup_states_sls: + # Get jobs for this minion + ret = salt_run_cli.run( + "jobs.list_jobs", f"search_target={salt_minion_startup_states_sls.id}" + ) + # Check there is exactly one job + assert len(ret.data.keys()) == 1 + # Check that job executes state.sls + job_ret = next(iter(ret.data.values())) + assert "Function" in job_ret + assert job_ret["Function"] == "state.sls" + assert "Arguments" in job_ret + assert job_ret["Arguments"] == [["example-sls"]] + + +def test_startup_states_top(salt_run_cli, salt_minion_startup_states_top): + with salt_minion_startup_states_top: + # Get jobs for this minion + ret = salt_run_cli.run( + "jobs.list_jobs", f"search_target={salt_minion_startup_states_top.id}" + ) + # Check there is exactly one job + assert len(ret.data.keys()) == 1 + # Check that job executes state.top + job_ret = next(iter(ret.data.values())) + assert "Function" in job_ret + assert job_ret["Function"] == "state.top" + assert "Arguments" in job_ret + assert job_ret["Arguments"] == ["example-top.sls"] diff --git a/tests/pytests/unit/grains/test_core.py b/tests/pytests/unit/grains/test_core.py index 023f6110b64..df6ddaeb5a6 100644 --- a/tests/pytests/unit/grains/test_core.py +++ b/tests/pytests/unit/grains/test_core.py @@ -1847,6 +1847,37 @@ def test_lxc_virtual_with_virt_what(): assert 
ret["virtual_subtype"] == "LXC" +@pytest.mark.skip_on_windows +def test_podman_virtual_with_systemd_detect_virt(): + """ + Test if virtual grains are parsed correctly in Podman using systemd-detect-virt. + """ + + def _which_side_effect(path): + if path == "systemd-detect-virt": + return "/usr/bin/systemd-detect-virt" + return None + + with patch.object( + salt.utils.platform, "is_windows", MagicMock(return_value=False) + ), patch.object( + salt.utils.path, + "which", + MagicMock(return_value=True, side_effect=_which_side_effect), + ), patch.dict( + core.__salt__, + { + "cmd.run_all": MagicMock( + return_value={"pid": 78, "retcode": 0, "stderr": "", "stdout": "podman"} + ) + }, + ): + osdata = {"kernel": "test"} + ret = core._virtual(osdata) + assert ret["virtual"] == "container" + assert ret["virtual_subtype"] == "Podman" + + @pytest.mark.skip_on_windows def test_container_inside_virtual_machine(): """ diff --git a/tests/pytests/unit/modules/test_cmdmod.py b/tests/pytests/unit/modules/test_cmdmod.py index e1f2a604cd1..5501d8a0517 100644 --- a/tests/pytests/unit/modules/test_cmdmod.py +++ b/tests/pytests/unit/modules/test_cmdmod.py @@ -24,6 +24,11 @@ from salt.exceptions import CommandExecutionError from tests.support.mock import MagicMock, Mock, MockTimedProc, mock_open, patch from tests.support.runtests import RUNTIME_VARS +pytestmark = [ + pytest.mark.core_test, + pytest.mark.windows_whitelisted, +] + DEFAULT_SHELL = "foo/bar" MOCK_SHELL_FILE = "# List of acceptable shells\n\n/bin/bash\n" @@ -1052,6 +1057,7 @@ def test_runas_env_sudo_group(bundled): ) +@pytest.mark.skip_unless_on_windows def test_prep_powershell_cmd_no_powershell(): with pytest.raises(CommandExecutionError): cmdmod._prep_powershell_cmd( @@ -1064,8 +1070,10 @@ def test_prep_powershell_cmd_no_powershell(): [ ("Write-Host foo", "& Write-Host foo"), ("$PSVersionTable", "$PSVersionTable"), + ("try {this} catch {that}", "try {this} catch {that}"), ], ) +@pytest.mark.skip_unless_on_windows def test_prep_powershell_cmd(cmd, parsed): """ Tests _prep_powershell_cmd returns correct cmd @@ -1089,6 +1097,7 @@ def test_prep_powershell_cmd(cmd, parsed): assert ret == expected +@pytest.mark.skip_unless_on_windows def test_prep_powershell_cmd_encoded(): """ Tests _prep_powershell_cmd returns correct cmd when encoded_cmd=True @@ -1114,6 +1123,7 @@ def test_prep_powershell_cmd_encoded(): assert ret == expected +@pytest.mark.skip_unless_on_windows def test_prep_powershell_cmd_script(): """ Tests _prep_powershell_cmd returns correct cmd when called from cmd.script @@ -1133,7 +1143,7 @@ def test_prep_powershell_cmd_script(): "-ExecutionPolicy", "Bypass", "-Command", - f"& {script}", + f"& {script}; exit $LASTEXITCODE", ] assert ret == expected @@ -1147,6 +1157,7 @@ def test_prep_powershell_cmd_script(): ('{"foo": "bar"}', '{"foo": "bar"}'), # Should leave unchanged ], ) +@pytest.mark.skip_unless_on_windows def test_prep_powershell_json(text, expected): """ Make sure the output is valid json diff --git a/tests/pytests/unit/modules/test_yumpkg.py b/tests/pytests/unit/modules/test_yumpkg.py index 179f3113d55..04fa56986eb 100644 --- a/tests/pytests/unit/modules/test_yumpkg.py +++ b/tests/pytests/unit/modules/test_yumpkg.py @@ -874,7 +874,7 @@ def test_list_upgrades_dnf(): "--enablerepo=good", "--branch=foo", "list", - "upgrades", + "--upgrades", ], env={}, output_loglevel="trace", @@ -899,7 +899,7 @@ def test_list_upgrades_dnf(): "--enablerepo=good", "--branch=foo", "list", - "upgrades", + "--upgrades", ], env={}, output_loglevel="trace", @@ 
-926,7 +926,7 @@ def test_list_downloaded(): mock_walk = MagicMock( return_value=[ ( - "/var/cache/yum", + os.path.join("/var/cache", yumpkg._yum()), [], ["pkg1-3.1-16.1.x86_64.rpm", "pkg2-1.2-13.2.x86_64.rpm"], ) @@ -955,7 +955,9 @@ def test_list_downloaded(): "3.1": { "creation_date_time": "2023-10-05T14:01:22", "creation_date_time_t": 1696536082, - "path": "/var/cache/yum/pkg1-3.1-16.1.x86_64.rpm", + "path": os.path.join( + "/var/cache", yumpkg._yum(), "pkg1-3.1-16.1.x86_64.rpm" + ), "size": 75701688, }, }, @@ -963,7 +965,9 @@ def test_list_downloaded(): "1.2": { "creation_date_time": "2023-10-05T14:01:22", "creation_date_time_t": 1696536082, - "path": "/var/cache/yum/pkg2-1.2-13.2.x86_64.rpm", + "path": os.path.join( + "/var/cache", yumpkg._yum(), "pkg2-1.2-13.2.x86_64.rpm" + ), "size": 75701688, }, }, @@ -1150,11 +1154,12 @@ def test_download(): patch_salt = patch.dict(yumpkg.__salt__, dict_salt) with patch_which, patch_exists, patch_makedirs, patch_listdir, patch_salt: result = yumpkg.download("spongebob") - cmd = ["yumdownloader", "-q", "--destdir=/var/cache/yum/packages", "spongebob"] + cache_dir = os.path.join("/var/cache", yumpkg._yum(), "packages") + cmd = ["yumdownloader", "-q", f"--destdir={cache_dir}", "spongebob"] mock_run.assert_called_once_with( cmd, output_loglevel="trace", python_shell=False ) - expected = {"spongebob": "/var/cache/yum/packages/spongebob-1.2.rpm"} + expected = {"spongebob": f"{cache_dir}/spongebob-1.2.rpm"} assert result == expected @@ -1171,10 +1176,11 @@ def test_download_failed(): patch_salt = patch.dict(yumpkg.__salt__, dict_salt) with patch_which, patch_exists, patch_listdir, patch_unlink, patch_salt: result = yumpkg.download("spongebob", "patrick") + cache_dir = os.path.join("/var/cache", yumpkg._yum(), "packages") cmd = [ "yumdownloader", "-q", - "--destdir=/var/cache/yum/packages", + f"--destdir={cache_dir}", "spongebob", "patrick", ] @@ -1183,7 +1189,7 @@ def test_download_failed(): ) expected = { "_error": "The following package(s) failed to download: patrick", - "spongebob": "/var/cache/yum/packages/spongebob-1.2.rpm", + "spongebob": f"{cache_dir}/spongebob-1.2.rpm", } assert result == expected @@ -1207,11 +1213,12 @@ def test_download_to_purge(): patch_salt = patch.dict(yumpkg.__salt__, dict_salt) with patch_which, patch_exists, patch_listdir, patch_unlink, patch_salt: result = yumpkg.download("spongebob") - cmd = ["yumdownloader", "-q", "--destdir=/var/cache/yum/packages", "spongebob"] + cache_dir = os.path.join("/var/cache", yumpkg._yum(), "packages") + cmd = ["yumdownloader", "-q", f"--destdir={cache_dir}", "spongebob"] mock_run.assert_called_once_with( cmd, output_loglevel="trace", python_shell=False ) - expected = {"spongebob": "/var/cache/yum/packages/spongebob-1.2.rpm"} + expected = {"spongebob": f"{cache_dir}/spongebob-1.2.rpm"} assert result == expected @@ -3163,6 +3170,15 @@ def test_services_need_restart_no_dnf_output(): assert yumpkg.services_need_restart() == [] +def test_services_need_restart_no_dnf5_output(): + patch_yum = patch("salt.modules.yumpkg._yum", Mock(return_value="dnf5")) + patch_booted = patch("salt.utils.systemd.booted", Mock(return_value=True)) + mock_run_stdout = MagicMock(return_value="") + patch_run_stdout = patch.dict(yumpkg.__salt__, {"cmd.run_stdout": mock_run_stdout}) + with patch_yum, patch_booted, patch_run_stdout: + assert yumpkg.services_need_restart() == [] + + def test_61003_pkg_should_not_fail_when_target_not_in_old_pkgs(): patch_list_pkgs = patch( "salt.modules.yumpkg.list_pkgs", return_value={}, 
autospec=True diff --git a/tests/pytests/unit/test_minion.py b/tests/pytests/unit/test_minion.py index 85dac2a0ae7..c3605291370 100644 --- a/tests/pytests/unit/test_minion.py +++ b/tests/pytests/unit/test_minion.py @@ -515,7 +515,7 @@ async def test_process_count_max(minion_opts, io_loop): @pytest.mark.slow_test -def test_beacons_before_connect(minion_opts): +async def test_beacons_before_connect(minion_opts): """ Tests that the 'beacons_before_connect' option causes the beacons to be initialized before connect. """ @@ -535,7 +535,7 @@ def test_beacons_before_connect(minion_opts): try: try: - minion.tune_in(start=True) + await minion.tune_in(start=True) except RuntimeError: pass @@ -547,7 +547,7 @@ def test_beacons_before_connect(minion_opts): @pytest.mark.slow_test -def test_scheduler_before_connect(minion_opts): +async def test_scheduler_before_connect(minion_opts): """ Tests that the 'scheduler_before_connect' option causes the scheduler to be initialized before connect. """ @@ -566,7 +566,7 @@ def test_scheduler_before_connect(minion_opts): minion = salt.minion.Minion(minion_opts, io_loop=io_loop) try: try: - minion.tune_in(start=True) + await minion.tune_in(start=True) except RuntimeError: pass @@ -636,7 +636,7 @@ def test_minion_module_refresh_beacons_refresh(minion_opts): @pytest.mark.slow_test -def test_when_ping_interval_is_set_the_callback_should_be_added_to_periodic_callbacks( +async def test_when_ping_interval_is_set_the_callback_should_be_added_to_periodic_callbacks( minion_opts, ): with patch("salt.minion.Minion.ctx", MagicMock(return_value={})), patch( diff --git a/tests/unit/utils/test_reactor.py b/tests/pytests/unit/utils/test_reactor2.py similarity index 59% rename from tests/unit/utils/test_reactor.py rename to tests/pytests/unit/utils/test_reactor2.py index 4123bdfb9b2..0e1c32d950f 100644 --- a/tests/unit/utils/test_reactor.py +++ b/tests/pytests/unit/utils/test_reactor2.py @@ -4,14 +4,17 @@ import logging import os import textwrap +import pytest + import salt.loader +import salt.template import salt.utils.data import salt.utils.files import salt.utils.reactor as reactor import salt.utils.yaml -from tests.support.mixins import AdaptedConfigurationTestCaseMixin from tests.support.mock import MagicMock, Mock, mock_open, patch -from tests.support.unit import TestCase + +log = logging.getLogger(__name__) REACTOR_CONFIG = """\ reactor: @@ -143,9 +146,6 @@ SLS = { } LOW_CHUNKS = { - # Note that the "name" value in the chunk has been overwritten by the - # "name" argument in the SLS. This is one reason why the new schema was - # needed. 
"old_runner": [ { "state": "runner", @@ -183,7 +183,7 @@ LOW_CHUNKS = { ], "old_cmd": [ { - "state": "local", # 'cmd' should be aliased to 'local' + "state": "local", "__id__": "install_zsh", "name": "install_zsh", "__sls__": "/srv/reactor/old_cmd.sls", @@ -379,173 +379,207 @@ WRAPPER_CALLS = { "new_caller": {"args": ("file.touch",), "kwargs": {"name": "/tmp/foo"}}, } -log = logging.getLogger(__name__) + +# ----------------------------------------------------------------------------- +# FIXTURES +# ----------------------------------------------------------------------------- +@pytest.fixture +def react_master_opts(master_opts): + opts = { + # Minimal stand-in for a real master config + "file_roots": {"base": []}, + "renderer": "jinja|yaml", + } + master_opts.update(opts) + # Optionally parse the reactor config for convenience + reactor_config = salt.utils.yaml.safe_load(REACTOR_CONFIG) + master_opts.update(reactor_config) + return master_opts -class TestReactor(TestCase, AdaptedConfigurationTestCaseMixin): +@pytest.fixture +def test_reactor(react_master_opts): """ - Tests for constructing the low chunks to be executed via the Reactor + Create a Reactor instance for testing """ - - @classmethod - def setUpClass(cls): - """ - Load the reactor config for mocking - """ - cls.opts = cls.get_temp_config("master") - reactor_config = salt.utils.yaml.safe_load(REACTOR_CONFIG) - cls.opts.update(reactor_config) - cls.reactor = reactor.Reactor(cls.opts) - cls.reaction_map = salt.utils.data.repack_dictlist(reactor_config["reactor"]) - renderers = salt.loader.render(cls.opts, {}) - cls.render_pipe = [(renderers[x], "") for x in ("jinja", "yaml")] - - @classmethod - def tearDownClass(cls): - del cls.opts - del cls.reactor - del cls.render_pipe - - def test_list_reactors(self): - """ - Ensure that list_reactors() returns the correct list of reactor SLS - files for each tag. - """ - for schema in ("old", "new"): - for rtype in REACTOR_DATA: - tag = "_".join((schema, rtype)) - self.assertEqual( - self.reactor.list_reactors(tag), self.reaction_map[tag] - ) - - def test_reactions(self): - """ - Ensure that the correct reactions are built from the configured SLS - files and tag data. - """ - for schema in ("old", "new"): - for rtype in REACTOR_DATA: - tag = "_".join((schema, rtype)) - log.debug("test_reactions: processing %s", tag) - reactors = self.reactor.list_reactors(tag) - log.debug("test_reactions: %s reactors: %s", tag, reactors) - # No globbing in our example SLS, and the files don't actually - # exist, so mock glob.glob to just return back the path passed - # to it. - with patch.object(glob, "glob", MagicMock(side_effect=lambda x: [x])): - # The below four mocks are all so that - # salt.template.compile_template() will read the templates - # we've mocked up in the SLS global variable above. 
- with patch.object(os.path, "isfile", MagicMock(return_value=True)): - with patch.object( - salt.utils.files, "is_empty", MagicMock(return_value=False) - ): - with patch.object( - codecs, "open", mock_open(read_data=SLS[reactors[0]]) - ): - with patch.object( - salt.template, - "template_shebang", - MagicMock(return_value=self.render_pipe), - ): - reactions = self.reactor.reactions( - tag, - REACTOR_DATA[rtype], - reactors, - ) - log.debug( - "test_reactions: %s reactions: %s", - tag, - reactions, - ) - self.assertEqual(reactions, LOW_CHUNKS[tag]) + return reactor.Reactor(react_master_opts) -class TestReactWrap(TestCase, AdaptedConfigurationTestCaseMixin): +@pytest.fixture +def reaction_map(react_master_opts): """ - Tests that we are formulating the wrapper calls properly + Reaction map from the configured reactor """ + return salt.utils.data.repack_dictlist(react_master_opts["reactor"]) - @classmethod - def setUpClass(cls): - cls.wrap = reactor.ReactWrap(cls.get_temp_config("master")) - @classmethod - def tearDownClass(cls): - del cls.wrap +@pytest.fixture +def render_pipe(react_master_opts): + """ + Render pipeline + """ + renderers = salt.loader.render(react_master_opts, {}) + return [(renderers[x], "") for x in ("jinja", "yaml")] - def test_runner(self): - """ - Test runner reactions using both the old and new config schema - """ - for schema in ("old", "new"): - tag = "_".join((schema, "runner")) - chunk = LOW_CHUNKS[tag][0] - thread_pool = Mock() - thread_pool.fire_async = Mock() - with patch.object(self.wrap, "pool", thread_pool): - self.wrap.run(chunk) - thread_pool.fire_async.assert_called_with( - self.wrap.client_cache["runner"].low, args=WRAPPER_CALLS[tag] - ) - def test_wheel(self): - """ - Test wheel reactions using both the old and new config schema - """ - for schema in ("old", "new"): - tag = "_".join((schema, "wheel")) - chunk = LOW_CHUNKS[tag][0] - thread_pool = Mock() - thread_pool.fire_async = Mock() - with patch.object(self.wrap, "pool", thread_pool): - self.wrap.run(chunk) - thread_pool.fire_async.assert_called_with( - self.wrap.client_cache["wheel"].low, args=WRAPPER_CALLS[tag] - ) +# ----------------------------------------------------------------------------- +# TESTS for Reactor building the low chunks +# ----------------------------------------------------------------------------- +@pytest.mark.parametrize("schema", ["old", "new"]) +@pytest.mark.parametrize("rtype", list(REACTOR_DATA.keys())) +def test_reactor_reactions(schema, rtype, test_reactor, render_pipe): + """ + Ensure correct reactions are built from the configured SLS files and tag data. 
+ """ + tag = f"{schema}_{rtype}" + reactors_list = test_reactor.list_reactors(tag) - def test_local(self): - """ - Test local reactions using both the old and new config schema - """ - for schema in ("old", "new"): - tag = "_".join((schema, "local")) - chunk = LOW_CHUNKS[tag][0] - client_cache = {"local": Mock()} - client_cache["local"].cmd_async = Mock() - with patch.object(self.wrap, "client_cache", client_cache): - self.wrap.run(chunk) - client_cache["local"].cmd_async.assert_called_with( - *WRAPPER_CALLS[tag]["args"], **WRAPPER_CALLS[tag]["kwargs"] - ) + # Patch out globbing since these SLS files don't actually exist on disk + with patch.object(glob, "glob", MagicMock(side_effect=lambda x: [x])): + with patch.object(os.path, "isfile", MagicMock(return_value=True)): + with patch.object( + salt.utils.files, "is_empty", MagicMock(return_value=False) + ): + with patch.object( + codecs, "open", mock_open(read_data=SLS[reactors_list[0]]) + ): + with patch.object( + salt.template, + "template_shebang", + MagicMock(return_value=render_pipe), + ): + reactions = test_reactor.reactions( + tag, REACTOR_DATA[rtype], reactors_list + ) + assert reactions == LOW_CHUNKS[tag], f"Reactions did not match for tag: {tag}" - def test_cmd(self): - """ - Test cmd reactions (alias for 'local') using both the old and new - config schema - """ - for schema in ("old", "new"): - tag = "_".join((schema, "cmd")) - chunk = LOW_CHUNKS[tag][0] - client_cache = {"local": Mock()} - client_cache["local"].cmd_async = Mock() - with patch.object(self.wrap, "client_cache", client_cache): - self.wrap.run(chunk) - client_cache["local"].cmd_async.assert_called_with( - *WRAPPER_CALLS[tag]["args"], **WRAPPER_CALLS[tag]["kwargs"] - ) - def test_caller(self): - """ - Test caller reactions using both the old and new config schema - """ - for schema in ("old", "new"): - tag = "_".join((schema, "caller")) - chunk = LOW_CHUNKS[tag][0] - client_cache = {"caller": Mock()} - client_cache["caller"].cmd = Mock() - with patch.object(self.wrap, "client_cache", client_cache): - self.wrap.run(chunk) - client_cache["caller"].cmd.assert_called_with( - *WRAPPER_CALLS[tag]["args"], **WRAPPER_CALLS[tag]["kwargs"] - ) +def test_list_reactors(test_reactor, reaction_map): + """ + Ensure list_reactors() returns the correct list of reactor SLS files for each tag. 
+ """ + for schema in ("old", "new"): + for rtype in REACTOR_DATA: + tag = f"{schema}_{rtype}" + assert test_reactor.list_reactors(tag) == reaction_map[tag] + + +# ----------------------------------------------------------------------------- +# FIXTURE for Reactor Wrap +# ----------------------------------------------------------------------------- +@pytest.fixture +def react_wrap(react_master_opts): + """ + Create a ReactWrap instance + """ + return reactor.ReactWrap(react_master_opts) + + +# ----------------------------------------------------------------------------- +# TESTS for ReactWrap +# ----------------------------------------------------------------------------- +@pytest.mark.parametrize("schema", ["old", "new"]) +def test_runner(schema, react_wrap): + """ + Test runner reactions using both the old and new config schema + """ + tag = f"{schema}_runner" + chunk = LOW_CHUNKS[tag][0] + thread_pool = Mock() + thread_pool.fire_async = Mock() + with patch.object(react_wrap, "pool", thread_pool): + react_wrap.run(chunk) + thread_pool.fire_async.assert_called_with( + react_wrap.client_cache["runner"].low, + args=WRAPPER_CALLS[tag], + ) + + +@pytest.mark.parametrize("schema", ["old", "new"]) +def test_wheel(schema, react_wrap): + """ + Test wheel reactions using both the old and new config schema + """ + tag = f"{schema}_wheel" + chunk = LOW_CHUNKS[tag][0] + thread_pool = Mock() + thread_pool.fire_async = Mock() + with patch.object(react_wrap, "pool", thread_pool): + react_wrap.run(chunk) + thread_pool.fire_async.assert_called_with( + react_wrap.client_cache["wheel"].low, + args=WRAPPER_CALLS[tag], + ) + + +@pytest.mark.parametrize("schema", ["old", "new"]) +def test_local(schema, react_wrap): + """ + Test local reactions using both the old and new config schema + """ + tag = f"{schema}_local" + chunk = LOW_CHUNKS[tag][0] + client_cache = {"local": Mock()} + client_cache["local"].cmd_async = Mock() + with patch.object(react_wrap, "client_cache", client_cache): + react_wrap.run(chunk) + client_cache["local"].cmd_async.assert_called_with( + *WRAPPER_CALLS[tag]["args"], **WRAPPER_CALLS[tag]["kwargs"] + ) + + +@pytest.mark.parametrize("schema", ["old", "new"]) +def test_cmd(schema, react_wrap): + """ + Test cmd reactions (alias for 'local') using both the old and new config schema + """ + tag = f"{schema}_cmd" + chunk = LOW_CHUNKS[tag][0] + client_cache = {"local": Mock()} + client_cache["local"].cmd_async = Mock() + with patch.object(react_wrap, "client_cache", client_cache): + react_wrap.run(chunk) + client_cache["local"].cmd_async.assert_called_with( + *WRAPPER_CALLS[tag]["args"], **WRAPPER_CALLS[tag]["kwargs"] + ) + + +@pytest.mark.parametrize("schema", ["old", "new"]) +def test_caller(schema, react_wrap): + """ + Test caller reactions using both the old and new config schema + """ + tag = f"{schema}_caller" + chunk = LOW_CHUNKS[tag][0] + client_cache = {"caller": Mock()} + client_cache["caller"].cmd = Mock() + with patch.object(react_wrap, "client_cache", client_cache): + react_wrap.run(chunk) + client_cache["caller"].cmd.assert_called_with( + *WRAPPER_CALLS[tag]["args"], **WRAPPER_CALLS[tag]["kwargs"] + ) + + +@pytest.mark.parametrize("file_client", ["runner", "wheel"]) +def test_client_cache_missing_key(file_client, react_wrap): + """ + Test client_cache file_client missing, gets repopulated + """ + client_cache = {} + tag = f"new_{file_client}" + chunk = LOW_CHUNKS[tag][0] + with patch.object(react_wrap, "client_cache", client_cache): + if f"{file_client}" == "runner": + 
react_wrap.runner(chunk) + elif f"{file_client}" == "wheel": + react_wrap.wheel(chunk) + else: + # catch need for new check + assert f"{file_client}" == "bad parameterization" + + file_client_key = None + for key in react_wrap.client_cache.keys(): + if key == f"{file_client}": + file_client_key = key + + assert file_client_key == f"{file_client}" diff --git a/tests/support/pkg.py b/tests/support/pkg.py index 4508de22281..662f17d1977 100644 --- a/tests/support/pkg.py +++ b/tests/support/pkg.py @@ -8,7 +8,7 @@ import re import shutil import textwrap import time -from typing import TYPE_CHECKING, Dict, List +from typing import TYPE_CHECKING import attr import distro @@ -50,7 +50,7 @@ class SaltPkgInstall: ssm_bin: pathlib.Path = attr.ib(default=None) bin_dir: pathlib.Path = attr.ib(default=None) install_dir: pathlib.Path = attr.ib(init=False) - binary_paths: Dict[str, List[pathlib.Path]] = attr.ib(init=False) + binary_paths: dict[str, list[pathlib.Path]] = attr.ib(init=False) config_path: str = attr.ib(init=False) conf_dir: pathlib.Path = attr.ib() @@ -79,8 +79,8 @@ class SaltPkgInstall: pkg_mngr: str = attr.ib(init=False) rm_pkg: str = attr.ib(init=False) dbg_pkg: str = attr.ib(init=False) - salt_pkgs: List[str] = attr.ib(init=False) - pkgs: List[str] = attr.ib(factory=list) + salt_pkgs: list[str] = attr.ib(init=False) + pkgs: list[str] = attr.ib(factory=list) file_ext: bool = attr.ib(default=None) relenv: bool = attr.ib(default=True) @@ -1566,7 +1566,7 @@ class ApiRequest: account: TestAccount = attr.ib(repr=False) session: requests.Session = attr.ib(init=False, repr=False) api_uri: str = attr.ib(init=False) - auth_data: Dict[str, str] = attr.ib(init=False) + auth_data: dict[str, str] = attr.ib(init=False) @session.default def _default_session(self): diff --git a/tests/support/pytest/helpers.py b/tests/support/pytest/helpers.py index 9026dc6003a..6ad730a1459 100644 --- a/tests/support/pytest/helpers.py +++ b/tests/support/pytest/helpers.py @@ -333,8 +333,9 @@ class TestAccount: if salt.utils.platform.is_windows(): log.debug("Configuring system account: %s", self) ret = self.sminion.functions.user.update( - self.username, password_never_expires=True + self.username, expired=False, password_never_expires=True ) + assert ret is True if salt.utils.platform.is_darwin() or salt.utils.platform.is_windows(): password = self.password else: diff --git a/tools/__init__.py b/tools/__init__.py index af50a06ef47..9fc43adbf12 100644 --- a/tools/__init__.py +++ b/tools/__init__.py @@ -31,6 +31,7 @@ RELEASE_VENV_CONFIG = VirtualEnvPipConfig( ptscripts.set_default_config(DEFAULT_REQS_CONFIG) ptscripts.register_tools_module("tools.changelog") ptscripts.register_tools_module("tools.ci") +ptscripts.register_tools_module("tools.container") ptscripts.register_tools_module("tools.docs") ptscripts.register_tools_module("tools.gh") ptscripts.register_tools_module("tools.pkg") diff --git a/tools/container.py b/tools/container.py new file mode 100644 index 00000000000..43c8c11dbf4 --- /dev/null +++ b/tools/container.py @@ -0,0 +1,108 @@ +import os + +from ptscripts import Context, command_group + +cmd = command_group(name="container", help="Container Commands", description=__doc__) + + +def has_network(ctx, name): + p = ctx.run("docker", "network", "ls", capture=True) + return name in p.stdout.decode() + + +def create_network(ctx, name): + p = ctx.run( + "docker", + "network", + "create", + "-o", + "com.docker.network.driver.mtu=1500", + "--ipv6", + "--subnet", + "2001:db8::/64", + name, + ) + if p.returncode != 0: + 
raise RuntimeError(f"docker network create returned {p.returncode}")
+
+
+@cmd.command(
+    name="create",
+    arguments={
+        "image": {"help": "The container image to use."},
+        "name": {"help": "Name for the container being created.", "default": ""},
+    },
+)
+def create(ctx: Context, image: str, name: str = ""):
+    onci = "GITHUB_WORKFLOW" in os.environ
+    workdir = "/salt"
+    home = "/root"
+    network = "ip6net"
+    if not onci and not has_network(ctx, network):
+        ctx.info(f"Creating docker network: {network}")
+        create_network(ctx, network)
+    if onci:
+        workdir = "/__w/salt/salt"
+        home = "/github/home"
+    env = {
+        "HOME": home,
+        "SKIP_REQUIREMENTS_INSTALL": "1",
+        "PRINT_TEST_SELECTION": "0",
+        "PRINT_TEST_PLAN_ONLY": "0",
+        "PRINT_SYSTEM_INFO": "0",
+        "RERUN_FAILURES": "0",
+        "SKIP_INITIAL_ONEDIR_FAILURES": "1",
+        "SKIP_INITIAL_GH_ACTIONS_FAILURES": "1",
+        "RAISE_DEPRECATIONS_RUNTIME_ERRORS": "1",
+        "LANG": "en_US.UTF-8",
+        "SHELL": "/bin/bash",
+    }
+    for var in [
+        "PIP_INDEX_URL",
+        "PIP_EXTRA_INDEX_URL",
+        "PIP_TRUSTED_HOST",
+        "PIP_DISABLE_PIP_VERSION_CHECK",
+        "SALT_TRANSPORT",
+        # Are both of these really needed?
+        "GITHUB_ACTIONS",
+        "GITHUB_ACTIONS_PIPELINE",
+        "CI",
+        "SKIP_CODE_COVERAGE",
+        "COVERAGE_CONTEXT",
+        "RERUN_FAILURES",
+        "COLUMNS",
+    ]:
+        if var in os.environ:
+            env[var] = os.environ[var]
+    cmd = [
+        "/usr/bin/docker",
+        "create",
+        "--privileged",
+        f"--workdir={workdir}",
+        "-v",
+        "/tmp/:/var/lib/docker",
+    ]
+    for key in env:
+        cmd.extend(["-e", f"{key}={env[key]}"])
+    if onci:
+        cmd.extend(["-v", "/home/runner/work:/__w"])
+    else:
+        cmd.extend(["-v", f"{os.getcwd()}:/salt"])
+    cmd.extend(["--network", network])
+    # Only pass --name when a name was given; an unconditional
+    # f"--name={name}" entry would duplicate this flag and pass an
+    # empty value when no name is supplied.
+    if name:
+        cmd.extend(["--name", name])
+    cmd.extend(
+        [
+            "--entrypoint",
+            "/usr/lib/systemd/systemd",
+            image,
+            "--systemd",
+            "--unit",
+            "rescue.target",
+        ],
+    )
+    ctx.info(f"command is: {cmd}")
+    ret = ctx.run(*cmd, capture=True, check=False)
+    if ret.returncode != 0:
+        ctx.warn(ret.stderr.decode())
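For local experimentation outside ptscripts, the network helpers above reduce to a couple of subprocess calls. A standalone sketch mirroring `has_network()`/`create_network()`, assuming only that the docker CLI is on PATH:

```python
# Standalone sketch mirroring has_network()/create_network() above,
# runnable without ptscripts; assumes the docker CLI is on PATH.
import subprocess


def has_network(name):
    # "docker network ls" lists all networks; a substring check is enough
    # for the well-known name used here.
    out = subprocess.run(["docker", "network", "ls"], capture_output=True, check=True)
    return name in out.stdout.decode()


def create_network(name):
    # Same options as create_network() above: fixed MTU plus an IPv6 subnet.
    subprocess.run(
        ["docker", "network", "create",
         "-o", "com.docker.network.driver.mtu=1500",
         "--ipv6", "--subnet", "2001:db8::/64", name],
        check=True,
    )


if not has_network("ip6net"):
    create_network("ip6net")
```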