diff --git a/.github/actions/cached-virtualenv/action.yml b/.github/actions/cached-virtualenv/action.yml index 23ac4a410ff..7620e52c399 100644 --- a/.github/actions/cached-virtualenv/action.yml +++ b/.github/actions/cached-virtualenv/action.yml @@ -42,19 +42,29 @@ runs: run: | echo "cache-key=${{ inputs.cache-seed }}|${{ runner.os }}|${{ runner.arch }}|cached-venv|${{ steps.get-python-version.outputs.version }}|${{ inputs.name }}" >> "${GITHUB_OUTPUT}" + - name: Define VirtualEnv path + shell: bash + id: virtualenv-path + run: | + cd ${{ github.workspace }} > /dev/null 2>&1 || true + VENVS_PATH=$(echo ".venvs/py${{ steps.get-python-version.outputs.version }}" | python3 -c 'import sys, pathlib; sys.stdout.write(pathlib.Path.cwd().joinpath(sys.stdin.read()).as_posix())') + echo "venvs-path=$VENVS_PATH" | tee -a "$GITHUB_OUTPUT" + VENV_PATH=$(echo ".venvs/py${{ steps.get-python-version.outputs.version }}/${{ inputs.name }}" | python3 -c 'import sys, pathlib; sys.stdout.write(pathlib.Path.cwd().joinpath(sys.stdin.read()).as_posix())') + echo "venv-path=$VENV_PATH" | tee -a "$GITHUB_OUTPUT" + - name: Cache VirtualEnv id: cache-virtualenv uses: actions/cache@v3 with: key: ${{ steps.setup-cache-key.outputs.cache-key }} - path: ${{ github.workspace }}/.venvs/py${{ steps.get-python-version.outputs.version }}/${{ inputs.name }} + path: ${{ steps.virtualenv-path.outputs.venv-path }} - name: Create Virtualenv shell: bash if: ${{ steps.cache-virtualenv.outputs.cache-hit != 'true' }} run: | - mkdir -p ${{ github.workspace }}/.venvs/py${{ steps.get-python-version.outputs.version }} - python3 -m venv --upgrade ${{ github.workspace }}/.venvs/py${{ steps.get-python-version.outputs.version }}/${{ inputs.name }} + mkdir -p ${{ steps.virtualenv-path.outputs.venvs-path }} + python3 -m venv --upgrade ${{ steps.virtualenv-path.outputs.venv-path }} - name: Define python executable output shell: bash @@ -62,10 +72,22 @@ runs: run: | shopt -s nocasematch if [[ "${{ runner.os }}" =~ "win" ]]; 
then - BIN_DIR="${{ github.workspace }}/.venvs/py${{ steps.get-python-version.outputs.version }}/${{ inputs.name }}/Scripts" + BIN_DIR="${{ steps.virtualenv-path.outputs.venv-path }}/Scripts" + PY_EXE="$BIN_DIR/python.exe" else - BIN_DIR="${{ github.workspace }}/.venvs/py${{ steps.get-python-version.outputs.version }}/${{ inputs.name }}/bin" + BIN_DIR="${{ steps.virtualenv-path.outputs.venv-path }}/bin" + PY_EXE="$BIN_DIR/python3" + if [ ! -f "$PY_EXE" ]; then + echo "The '${PY_EXE}' binary does not exist. Setting it to '$BIN_DIR/python' ..." + PY_EXE="$BIN_DIR/python" + fi + if [ ! -f "$PY_EXE" ]; then + echo "The '${PY_EXE}' binary does not exist. Showing the tree output for '${BIN_DIR}' ..." + tree -a "$BIN_DIR" + exit 1 + fi fi shopt -u nocasematch - echo "python-executable=$BIN_DIR/python" >> "${GITHUB_OUTPUT}" - echo "${BIN_DIR}" >> "${GITHUB_PATH}" + $PY_EXE --version + echo "python-executable=$PY_EXE" | tee -a "${GITHUB_OUTPUT}" + echo "${BIN_DIR}" | tee -a "${GITHUB_PATH}" diff --git a/.github/actions/get-python-version/action.yml b/.github/actions/get-python-version/action.yml index e64d285bca5..f2b045f7ca7 100644 --- a/.github/actions/get-python-version/action.yml +++ b/.github/actions/get-python-version/action.yml @@ -13,6 +13,8 @@ outputs: value: ${{ steps.get-python-version.outputs.version }} full-version: value: ${{ steps.get-python-version.outputs.full-version }} + version-sha256sum: + value: ${{ steps.get-python-version.outputs.version-sha256sum }} runs: @@ -20,12 +22,24 @@ runs: steps: + - name: Install System Packages + if: ${{ runner.os == 'macOS' }} + shell: bash + run: | + brew install coreutils + - name: Get Python Version id: get-python-version shell: bash run: | + echo "Python Binary: ${{ inputs.python-binary }}" echo "binary=${{ inputs.python-binary }}" >> "$GITHUB_OUTPUT" PY_VERSION=$(${{ inputs.python-binary }} -c "import sys; sys.stdout.write('{}.{}'.format(*sys.version_info))") + echo "PY_VERSION=$PY_VERSION" echo "version=$PY_VERSION" 
>> "$GITHUB_OUTPUT" PY_FULL_VERSION=$(${{ inputs.python-binary }} -c "import sys; sys.stdout.write('{}.{}.{}'.format(*sys.version_info))") + echo "PY_FULL_VERSION=$PY_FULL_VERSION" echo "full-version=$PY_FULL_VERSION" >> "$GITHUB_OUTPUT" + VERSION_SHA256SUM=$(${{ inputs.python-binary }} --version --version | sha256sum | cut -d ' ' -f 1) + echo "VERSION_SHA256SUM=$VERSION_SHA256SUM" + echo "version-sha256sum=$VERSION_SHA256SUM" >> "$GITHUB_OUTPUT" diff --git a/.github/actions/setup-python-tools-scripts/action.yml b/.github/actions/setup-python-tools-scripts/action.yml index 72bcf3b1d37..eec3c4e4e96 100644 --- a/.github/actions/setup-python-tools-scripts/action.yml +++ b/.github/actions/setup-python-tools-scripts/action.yml @@ -3,6 +3,10 @@ name: setup-python-tools-scripts description: Setup 'python-tools-scripts' inputs: + cache-prefix: + required: true + type: string + description: Seed used to invalidate caches cwd: type: string description: The directory the salt checkout is located in @@ -29,15 +33,38 @@ runs: with: python-binary: python3 + - name: Define Cache Hash + id: venv-hash + shell: bash + run: | + VENV_NAME_HASH=$(echo "${{ inputs.cache-prefix }}|${{ github.workflow }}|${{ + steps.get-python-version.outputs.version-sha256sum }}|${{ + hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}" | sha256sum | cut -d ' ' -f 1) + echo "TOOLS_VIRTUALENV_CACHE_SEED=$VENV_NAME_HASH" | tee -a "${GITHUB_ENV}" + echo "venv-hash=$VENV_NAME_HASH" | tee -a "${GITHUB_OUTPUT}" + + - uses: ./.github/actions/cached-virtualenv + id: tools-virtualenv + with: + name: tools.${{ steps.venv-hash.outputs.venv-hash }} + cache-seed: tools|${{ steps.venv-hash.outputs.venv-hash }} + + - name: Restore Python Tools Virtualenvs Cache + uses: actions/cache@v3 + with: + path: ${{ inputs.cwd }}/.tools-venvs + key: ${{ inputs.cache-prefix }}|${{ steps.venv-hash.outputs.venv-hash }} + - name: Install 'python-tools-scripts' shell: bash working-directory: ${{ inputs.cwd }} run: | - (python3 -m 
pip install --help | grep break-system-packages > /dev/null 2>&1) && exitcode=0 || exitcode=1 + PYTHON_EXE=${{ steps.tools-virtualenv.outputs.python-executable }} + (${PYTHON_EXE} -m pip install --help | grep break-system-packages > /dev/null 2>&1) && exitcode=0 || exitcode=1 if [ $exitcode -eq 0 ]; then - python3 -m pip install --break-system-packages -r requirements/static/ci/py${{ steps.get-python-version.outputs.version }}/tools.txt + ${PYTHON_EXE} -m pip install --break-system-packages -r requirements/static/ci/py${{ steps.get-python-version.outputs.version }}/tools.txt else - python3 -m pip install -r requirements/static/ci/py${{ steps.get-python-version.outputs.version }}/tools.txt + ${PYTHON_EXE} -m pip install -r requirements/static/ci/py${{ steps.get-python-version.outputs.version }}/tools.txt fi - name: Get 'python-tools-scripts' Version @@ -45,5 +72,7 @@ runs: shell: bash working-directory: ${{ inputs.cwd }} run: | - VERSION=$(tools --version) + # The first time `tools` runs with newer virtual enviroments we need to disregard the output + tools --debug --version + VERSION=$(tools --version | tail -n 1) echo "version=$VERSION" >> "${GITHUB_OUTPUT}" diff --git a/.github/workflows/build-deb-packages.yml b/.github/workflows/build-deb-packages.yml index 31cc710ed3f..4d7bbdcc824 100644 --- a/.github/workflows/build-deb-packages.yml +++ b/.github/workflows/build-deb-packages.yml @@ -20,6 +20,10 @@ on: required: true type: string description: The backend to build the packages with + cache-prefix: + required: true + type: string + description: Seed used to invalidate caches env: COLUMNS: 190 @@ -75,6 +79,7 @@ jobs: uses: ./.github/actions/setup-python-tools-scripts with: cwd: pkgs/checkout/ + cache-prefix: ${{ inputs.cache-prefix }} - name: Setup Salt Version id: setup-salt-version diff --git a/.github/workflows/build-deps-onedir-linux.yml b/.github/workflows/build-deps-onedir-linux.yml index ebd686defdf..8d149c46261 100644 --- 
a/.github/workflows/build-deps-onedir-linux.yml +++ b/.github/workflows/build-deps-onedir-linux.yml @@ -59,6 +59,11 @@ jobs: - uses: actions/checkout@v4 + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-seed }}-build-deps-linux-${{ matrix.arch }} + - name: Setup Relenv id: setup-relenv uses: ./.github/actions/setup-relenv @@ -69,9 +74,6 @@ jobs: cache-seed: ${{ inputs.cache-seed }} python-version: ${{ inputs.python-version }} - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - - name: Install Salt Packaging Dependencies into Relenv Onedir uses: ./.github/actions/build-onedir-deps with: diff --git a/.github/workflows/build-deps-onedir-macos.yml b/.github/workflows/build-deps-onedir-macos.yml index 033a650d0a4..02cf21c5365 100644 --- a/.github/workflows/build-deps-onedir-macos.yml +++ b/.github/workflows/build-deps-onedir-macos.yml @@ -61,6 +61,11 @@ jobs: with: python-version: "3.10" + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-seed }}-build-deps-macos + - name: Setup Relenv id: setup-relenv uses: ./.github/actions/setup-relenv @@ -71,9 +76,6 @@ jobs: cache-seed: ${{ inputs.cache-seed }} python-version: ${{ inputs.python-version }} - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - - name: Install Salt Packaging Dependencies into Relenv Onedir uses: ./.github/actions/build-onedir-deps with: diff --git a/.github/workflows/build-deps-onedir-windows.yml b/.github/workflows/build-deps-onedir-windows.yml index bb7538a6ef0..af741e06224 100644 --- a/.github/workflows/build-deps-onedir-windows.yml +++ b/.github/workflows/build-deps-onedir-windows.yml @@ -62,6 +62,11 @@ jobs: with: python-version: "3.10" + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ 
inputs.cache-seed }}-build-deps-windows-${{ matrix.arch }} + - name: Setup Relenv id: setup-relenv uses: ./.github/actions/setup-relenv @@ -72,9 +77,6 @@ jobs: cache-seed: ${{ inputs.cache-seed }} python-version: ${{ inputs.python-version }} - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - - name: Install Salt Packaging Dependencies into Relenv Onedir uses: ./.github/actions/build-onedir-deps with: diff --git a/.github/workflows/build-docs.yml b/.github/workflows/build-docs.yml index eef8243169f..fea955d9d66 100644 --- a/.github/workflows/build-docs.yml +++ b/.github/workflows/build-docs.yml @@ -56,6 +56,8 @@ jobs: - name: Setup Python Tools Scripts id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-seed }} - name: Configure Git if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} diff --git a/.github/workflows/build-macos-packages.yml b/.github/workflows/build-macos-packages.yml index 440aefba715..67044951b5a 100644 --- a/.github/workflows/build-macos-packages.yml +++ b/.github/workflows/build-macos-packages.yml @@ -28,6 +28,10 @@ on: required: true type: string description: The backend to build the packages with + cache-prefix: + required: true + type: string + description: Seed used to invalidate caches env: COLUMNS: 190 @@ -81,6 +85,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} - name: Setup Salt Version id: setup-salt-version diff --git a/.github/workflows/build-rpm-packages.yml b/.github/workflows/build-rpm-packages.yml index dfd62c10e8e..1b2103700c9 100644 --- a/.github/workflows/build-rpm-packages.yml +++ b/.github/workflows/build-rpm-packages.yml @@ -20,6 +20,10 @@ on: required: true type: string description: The backend to build the packages with + cache-prefix: + required: true + type: string + description: Seed used to invalidate caches 
env: COLUMNS: 190 @@ -64,6 +68,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} - name: Setup Salt Version id: setup-salt-version diff --git a/.github/workflows/build-windows-packages.yml b/.github/workflows/build-windows-packages.yml index 821d33c60d4..d8c28b96f45 100644 --- a/.github/workflows/build-windows-packages.yml +++ b/.github/workflows/build-windows-packages.yml @@ -28,6 +28,10 @@ on: required: true type: string description: The backend to build the packages with + cache-prefix: + required: true + type: string + description: Seed used to invalidate caches env: COLUMNS: 190 @@ -92,6 +96,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} - name: Setup Salt Version id: setup-salt-version diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1e387d3940e..661c4533e6c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -16,7 +16,7 @@ on: env: COLUMNS: 190 - CACHE_SEED: SEED-3 # Bump the number to invalidate all caches + CACHE_SEED: SEED-5 # Bump the number to invalidate all caches RELENV_DATA: "${{ github.workspace }}/.relenv" permissions: @@ -137,6 +137,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ env.CACHE_SEED }} - name: Pretty Print The GH Actions Event run: @@ -275,28 +277,15 @@ jobs: steps: - uses: actions/checkout@v4 - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version + - name: Set up Python 3.10 + uses: actions/setup-python@v4 with: - python-binary: python3 + python-version: "3.10" - name: Setup Python Tools Scripts - id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts - - - name: Cache Python Tools Docs Virtualenv - uses: actions/cache@v3 with: - path: .tools-venvs/docs - 
key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|docs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/docs.txt') }} - - - name: Cache Python Tools Changelog Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/changelog - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|changelog|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/changelog.txt') }} - + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-changelog - name: Setup Salt Version id: setup-salt-version @@ -381,6 +370,7 @@ jobs: if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} env: SKIP: lint-salt,lint-tests + PRE_COMMIT_COLOR: always run: | # Run it twice so that pre-commit can fix anything that can be automatically fixed. 
git commit -am "Release v${{ needs.prepare-workflow.outputs.salt-version }}" || \ @@ -427,15 +417,10 @@ jobs: with: python-version: "3.10" - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version - with: - python-binary: python3 - - name: Setup Python Tools Scripts - id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-build - name: Setup Salt Version id: setup-salt-version @@ -443,12 +428,6 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - - name: Cache Python Tools Build Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/build - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|build|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/build.txt') }} - - name: Build Source Tarball uses: ./.github/actions/build-source-tarball with: @@ -465,7 +444,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.2" python-version: "3.10.13" build-deps-onedir-windows: @@ -479,7 +458,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.2" python-version: "3.10.13" build-deps-onedir-macos: @@ -493,7 +472,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.2" python-version: "3.10.13" build-salt-onedir-linux: @@ -509,7 +488,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.2" python-version: "3.10.13" build-salt-onedir-windows: @@ -525,7 +504,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.2" python-version: "3.10.13" build-salt-onedir-macos: @@ -541,7 +520,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.2" python-version: "3.10.13" build-rpm-pkgs-onedir: @@ -553,7 +532,8 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -566,7 +546,8 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: 
"0.14.2" python-version: "3.10.13" source: "src" @@ -579,7 +560,8 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -592,7 +574,8 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -605,7 +588,8 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -618,7 +602,8 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -631,7 +616,8 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -644,7 +630,8 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -835,6 +822,40 @@ jobs: salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + amazonlinux-2023-ci-deps: + name: Amazon Linux 2023 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + amazonlinux-2023-arm64-ci-deps: + name: Amazon Linux 2023 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + archlinux-lts-ci-deps: name: Arch Linux LTS Deps if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -1316,6 +1337,72 @@ jobs: skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + amazonlinux-2-arm64-pkg-tests: + name: Amazon Linux 2 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - 
amazonlinux-2-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2023-pkg-tests: + name: Amazon Linux 2023 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2023-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2023-arm64-pkg-tests: + name: Amazon Linux 2023 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2023-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: 
aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + centos-7-pkg-tests: name: CentOS 7 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -1382,6 +1469,28 @@ jobs: skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + centosstream-9-arm64-pkg-tests: + name: CentOS Stream 9 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - centosstream-9-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: centosstream-9-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + debian-10-pkg-tests: name: Debian 10 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -1557,6 +1666,7 @@ jobs: skip-code-coverage: ${{ 
fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true photonos-4-arm64-pkg-tests: name: Photon OS 4 Arm64 Package Test @@ -1579,6 +1689,7 @@ jobs: skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true photonos-5-pkg-tests: name: Photon OS 5 Package Test @@ -1601,6 +1712,7 @@ jobs: skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true photonos-5-arm64-pkg-tests: name: Photon OS 5 Arm64 Package Test @@ -1623,6 +1735,7 @@ jobs: skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true ubuntu-2004-pkg-tests: name: Ubuntu 20.04 Package Test @@ -1901,7 +2014,7 @@ jobs: platform: windows arch: amd64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -1923,7 +2036,7 @@ jobs: platform: windows arch: amd64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -1945,7 +2058,7 @@ jobs: platform: windows 
arch: amd64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -1967,7 +2080,7 @@ jobs: platform: darwin arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -1989,7 +2102,7 @@ jobs: platform: darwin arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2011,7 +2124,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2033,7 +2146,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2055,7 +2168,73 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ 
fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 + + amazonlinux-2-arm64: + name: Amazon Linux 2 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 + + amazonlinux-2023: + name: Amazon Linux 2023 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2023-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 + + amazonlinux-2023-arm64: + name: Amazon Linux 2023 Arm64 Test + if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2023-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2077,7 +2256,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2099,7 +2278,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2121,7 +2300,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2143,7 +2322,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2165,7 +2344,7 @@ jobs: platform: linux arch: x86_64 nox-version: 
2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2187,7 +2366,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2209,7 +2388,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2231,7 +2410,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2253,7 +2432,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2275,7 +2454,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2297,7 +2476,7 @@ jobs: platform: linux arch: x86_64 nox-version: 
2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2319,7 +2498,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2341,7 +2520,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2363,7 +2542,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2385,7 +2564,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2393,6 +2572,7 @@ jobs: skip-junit-reports: ${{ github.event_name == 'pull_request' }} workflow-slug: ci default-timeout: 180 + fips: true photonos-4-arm64: name: Photon OS 4 Arm64 Test @@ -2407,7 +2587,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: 
"${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2415,6 +2595,7 @@ jobs: skip-junit-reports: ${{ github.event_name == 'pull_request' }} workflow-slug: ci default-timeout: 180 + fips: true photonos-5: name: Photon OS 5 Test @@ -2429,7 +2610,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2437,6 +2618,7 @@ jobs: skip-junit-reports: ${{ github.event_name == 'pull_request' }} workflow-slug: ci default-timeout: 180 + fips: true photonos-5-arm64: name: Photon OS 5 Arm64 Test @@ -2451,7 +2633,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2459,6 +2641,7 @@ jobs: skip-junit-reports: ${{ github.event_name == 'pull_request' }} workflow-slug: ci default-timeout: 180 + fips: true ubuntu-2004: name: Ubuntu 20.04 Test @@ -2473,7 +2656,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2495,7 +2678,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed 
}}|3.10.13 @@ -2517,7 +2700,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2539,7 +2722,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2565,6 +2748,8 @@ jobs: - almalinux-9-arm64-ci-deps - amazonlinux-2-ci-deps - amazonlinux-2-arm64-ci-deps + - amazonlinux-2023-ci-deps + - amazonlinux-2023-arm64-ci-deps - archlinux-lts-ci-deps - centos-7-ci-deps - centos-7-arm64-ci-deps @@ -2600,6 +2785,9 @@ jobs: - almalinux-8 - almalinux-9 - amazonlinux-2 + - amazonlinux-2-arm64 + - amazonlinux-2023 + - amazonlinux-2023-arm64 - archlinux-lts - centos-7 - centosstream-8 @@ -2634,6 +2822,8 @@ jobs: - name: Setup Python Tools Scripts id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-coverage - name: Install Nox run: | @@ -2758,6 +2948,8 @@ jobs: - almalinux-9-arm64-ci-deps - amazonlinux-2-ci-deps - amazonlinux-2-arm64-ci-deps + - amazonlinux-2023-ci-deps + - amazonlinux-2023-arm64-ci-deps - archlinux-lts-ci-deps - centos-7-ci-deps - centos-7-arm64-ci-deps @@ -2793,6 +2985,9 @@ jobs: - almalinux-8 - almalinux-9 - amazonlinux-2 + - amazonlinux-2-arm64 + - amazonlinux-2023 + - amazonlinux-2023-arm64 - archlinux-lts - centos-7 - centosstream-8 @@ -2816,9 +3011,13 @@ jobs: - ubuntu-2204 - ubuntu-2204-arm64 - amazonlinux-2-pkg-tests + - amazonlinux-2-arm64-pkg-tests + - amazonlinux-2023-pkg-tests + - amazonlinux-2023-arm64-pkg-tests - centos-7-pkg-tests - 
centosstream-8-pkg-tests - centosstream-9-pkg-tests + - centosstream-9-arm64-pkg-tests - debian-10-pkg-tests - debian-11-pkg-tests - debian-11-arm64-pkg-tests diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index efe7ba6ada2..32dbb61d22b 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -22,7 +22,7 @@ on: env: COLUMNS: 190 - CACHE_SEED: SEED-3 # Bump the number to invalidate all caches + CACHE_SEED: SEED-5 # Bump the number to invalidate all caches RELENV_DATA: "${{ github.workspace }}/.relenv" permissions: @@ -181,6 +181,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ env.CACHE_SEED }} - name: Pretty Print The GH Actions Event run: @@ -319,28 +321,15 @@ jobs: steps: - uses: actions/checkout@v4 - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version + - name: Set up Python 3.10 + uses: actions/setup-python@v4 with: - python-binary: python3 + python-version: "3.10" - name: Setup Python Tools Scripts - id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts - - - name: Cache Python Tools Docs Virtualenv - uses: actions/cache@v3 with: - path: .tools-venvs/docs - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|docs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/docs.txt') }} - - - name: Cache Python Tools Changelog Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/changelog - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|changelog|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/changelog.txt') }} - + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-changelog - name: 
Setup Salt Version id: setup-salt-version @@ -425,6 +414,7 @@ jobs: if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} env: SKIP: lint-salt,lint-tests + PRE_COMMIT_COLOR: always run: | # Run it twice so that pre-commit can fix anything that can be automatically fixed. git commit -am "Release v${{ needs.prepare-workflow.outputs.salt-version }}" || \ @@ -471,15 +461,10 @@ jobs: with: python-version: "3.10" - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version - with: - python-binary: python3 - - name: Setup Python Tools Scripts - id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-build - name: Setup Salt Version id: setup-salt-version @@ -487,12 +472,6 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - - name: Cache Python Tools Build Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/build - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|build|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/build.txt') }} - - name: Build Source Tarball uses: ./.github/actions/build-source-tarball with: @@ -509,7 +488,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.2" python-version: "3.10.13" build-deps-onedir-windows: @@ -523,7 +502,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ 
fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.2" python-version: "3.10.13" build-deps-onedir-macos: @@ -537,7 +516,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.2" python-version: "3.10.13" build-salt-onedir-linux: @@ -553,7 +532,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.2" python-version: "3.10.13" build-salt-onedir-windows: @@ -569,7 +548,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.2" python-version: "3.10.13" build-salt-onedir-macos: @@ -585,7 +564,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.2" python-version: "3.10.13" build-rpm-pkgs-onedir: @@ -597,7 +576,8 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" 
python-version: "3.10.13" source: "onedir" @@ -610,7 +590,8 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -623,7 +604,8 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -636,7 +618,8 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -649,7 +632,8 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" environment: nightly @@ -665,7 +649,8 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" environment: nightly @@ -681,7 +666,8 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" environment: nightly @@ -697,7 +683,8 @@ jobs: 
uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" environment: nightly @@ -891,6 +878,40 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + amazonlinux-2023-ci-deps: + name: Amazon Linux 2023 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + amazonlinux-2023-arm64-ci-deps: + name: Amazon Linux 2023 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + archlinux-lts-ci-deps: name: Arch Linux LTS Deps if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -1372,6 +1393,72 @@ jobs: skip-junit-reports: false testing-releases: ${{ 
needs.prepare-workflow.outputs.testing-releases }} + amazonlinux-2-arm64-pkg-tests: + name: Amazon Linux 2 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2023-pkg-tests: + name: Amazon Linux 2023 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2023-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2023-arm64-pkg-tests: + name: Amazon Linux 2023 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2023-arm64-ci-deps + uses: 
./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + centos-7-pkg-tests: name: CentOS 7 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -1438,6 +1525,28 @@ jobs: skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + centosstream-9-arm64-pkg-tests: + name: CentOS Stream 9 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - centosstream-9-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: centosstream-9-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + debian-10-pkg-tests: name: Debian 10 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -1613,6 +1722,7 @@ jobs: skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true photonos-4-arm64-pkg-tests: name: 
Photon OS 4 Arm64 Package Test @@ -1635,6 +1745,7 @@ jobs: skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true photonos-5-pkg-tests: name: Photon OS 5 Package Test @@ -1657,6 +1768,7 @@ jobs: skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true photonos-5-arm64-pkg-tests: name: Photon OS 5 Arm64 Package Test @@ -1679,6 +1791,7 @@ jobs: skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true ubuntu-2004-pkg-tests: name: Ubuntu 20.04 Package Test @@ -1957,7 +2070,7 @@ jobs: platform: windows arch: amd64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -1979,7 +2092,7 @@ jobs: platform: windows arch: amd64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2001,7 +2114,7 @@ jobs: platform: windows arch: amd64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2023,7 +2136,7 @@ jobs: platform: darwin arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2045,7 +2158,7 @@ jobs: platform: darwin arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2067,7 +2180,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2089,7 +2202,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2111,7 +2224,73 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 + + amazonlinux-2-arm64: + name: Amazon Linux 2 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ 
needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 + + amazonlinux-2023: + name: Amazon Linux 2023 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2023-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 + + amazonlinux-2023-arm64: + name: Amazon Linux 2023 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2023-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2133,7 +2312,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2155,7 +2334,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2177,7 +2356,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2199,7 +2378,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2221,7 +2400,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2243,7 +2422,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2265,7 +2444,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2287,7 +2466,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2309,7 +2488,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2331,7 +2510,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2353,7 +2532,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2375,7 +2554,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2397,7 +2576,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2419,7 +2598,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2441,7 +2620,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2449,6 +2628,7 @@ jobs: skip-junit-reports: false workflow-slug: nightly default-timeout: 360 + fips: true photonos-4-arm64: name: Photon OS 4 Arm64 Test @@ -2463,7 +2643,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2471,6 +2651,7 @@ jobs: skip-junit-reports: false workflow-slug: nightly default-timeout: 360 + fips: true photonos-5: name: Photon OS 5 Test @@ -2485,7 +2666,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2493,6 +2674,7 @@ jobs: skip-junit-reports: false workflow-slug: nightly default-timeout: 360 + fips: true photonos-5-arm64: name: Photon OS 5 Arm64 Test @@ -2507,7 +2689,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: 
${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2515,6 +2697,7 @@ jobs: skip-junit-reports: false workflow-slug: nightly default-timeout: 360 + fips: true ubuntu-2004: name: Ubuntu 20.04 Test @@ -2529,7 +2712,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2551,7 +2734,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2573,7 +2756,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2595,7 +2778,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2621,6 +2804,8 @@ jobs: - almalinux-9-arm64-ci-deps - amazonlinux-2-ci-deps - amazonlinux-2-arm64-ci-deps + - amazonlinux-2023-ci-deps + - amazonlinux-2023-arm64-ci-deps - archlinux-lts-ci-deps - centos-7-ci-deps - centos-7-arm64-ci-deps @@ -2656,6 +2841,9 @@ jobs: - almalinux-8 - almalinux-9 - amazonlinux-2 + - 
amazonlinux-2-arm64 + - amazonlinux-2023 + - amazonlinux-2023-arm64 - archlinux-lts - centos-7 - centosstream-8 @@ -2690,6 +2878,8 @@ jobs: - name: Setup Python Tools Scripts id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-coverage - name: Install Nox run: | @@ -2801,7 +2991,9 @@ jobs: - uses: actions/checkout@v4 - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -2944,6 +3136,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -3027,11 +3221,31 @@ jobs: - pkg-type: rpm distro: amazon version: "2" + arch: arm64 + - pkg-type: rpm + distro: amazon + version: "2" + arch: aarch64 + - pkg-type: rpm + distro: amazon + version: "2023" + arch: x86_64 + - pkg-type: rpm + distro: amazon + version: "2023" + arch: arm64 + - pkg-type: rpm + distro: amazon + version: "2023" arch: aarch64 - pkg-type: rpm distro: redhat version: "7" arch: x86_64 + - pkg-type: rpm + distro: redhat + version: "7" + arch: arm64 - pkg-type: rpm distro: redhat version: "7" @@ -3040,6 +3254,10 @@ jobs: distro: redhat version: "8" arch: x86_64 + - pkg-type: rpm + distro: redhat + version: "8" + arch: arm64 - pkg-type: rpm distro: redhat version: "8" @@ -3048,6 +3266,10 @@ jobs: distro: redhat version: "9" arch: x86_64 + - pkg-type: rpm + distro: redhat + version: "9" + arch: arm64 - pkg-type: rpm distro: redhat version: "9" @@ -3056,6 +3278,10 @@ jobs: distro: fedora version: "36" arch: x86_64 + - pkg-type: rpm + distro: fedora + version: "36" + arch: arm64 - pkg-type: rpm distro: fedora version: 
"36" @@ -3064,6 +3290,10 @@ jobs: distro: fedora version: "37" arch: x86_64 + - pkg-type: rpm + distro: fedora + version: "37" + arch: arm64 - pkg-type: rpm distro: fedora version: "37" @@ -3072,6 +3302,10 @@ jobs: distro: fedora version: "38" arch: x86_64 + - pkg-type: rpm + distro: fedora + version: "38" + arch: arm64 - pkg-type: rpm distro: fedora version: "38" @@ -3080,6 +3314,10 @@ jobs: distro: photon version: "3" arch: x86_64 + - pkg-type: rpm + distro: photon + version: "3" + arch: arm64 - pkg-type: rpm distro: photon version: "3" @@ -3088,6 +3326,10 @@ jobs: distro: photon version: "4" arch: x86_64 + - pkg-type: rpm + distro: photon + version: "4" + arch: arm64 - pkg-type: rpm distro: photon version: "4" @@ -3096,6 +3338,10 @@ jobs: distro: photon version: "5" arch: x86_64 + - pkg-type: rpm + distro: photon + version: "5" + arch: arm64 - pkg-type: rpm distro: photon version: "5" @@ -3111,6 +3357,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -3121,7 +3369,7 @@ jobs: - name: Download RPM Packages uses: actions/download-artifact@v3 with: - name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-${{ matrix.arch }}-rpm + name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}-rpm path: artifacts/pkgs/incoming - name: Setup GnuPG @@ -3197,6 +3445,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -3297,6 +3547,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot 
Environment run: | @@ -3381,6 +3633,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -3505,6 +3759,8 @@ jobs: - almalinux-9-arm64-ci-deps - amazonlinux-2-ci-deps - amazonlinux-2-arm64-ci-deps + - amazonlinux-2023-ci-deps + - amazonlinux-2023-arm64-ci-deps - archlinux-lts-ci-deps - centos-7-ci-deps - centos-7-arm64-ci-deps @@ -3540,6 +3796,9 @@ jobs: - almalinux-8 - almalinux-9 - amazonlinux-2 + - amazonlinux-2-arm64 + - amazonlinux-2023 + - amazonlinux-2023-arm64 - archlinux-lts - centos-7 - centosstream-8 @@ -3574,6 +3833,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Download Repository Artifact uses: actions/download-artifact@v3 @@ -3624,9 +3885,13 @@ jobs: - combine-all-code-coverage - publish-repositories - amazonlinux-2-pkg-tests + - amazonlinux-2-arm64-pkg-tests + - amazonlinux-2023-pkg-tests + - amazonlinux-2023-arm64-pkg-tests - centos-7-pkg-tests - centosstream-8-pkg-tests - centosstream-9-pkg-tests + - centosstream-9-arm64-pkg-tests - debian-10-pkg-tests - debian-11-pkg-tests - debian-11-arm64-pkg-tests diff --git a/.github/workflows/pre-commit-action.yml b/.github/workflows/pre-commit-action.yml index 9429c45ad1c..820874f849c 100644 --- a/.github/workflows/pre-commit-action.yml +++ b/.github/workflows/pre-commit-action.yml @@ -26,6 +26,9 @@ jobs: container: image: ghcr.io/saltstack/salt-ci-containers/python:3.10 + env: + PRE_COMMIT_COLOR: always + steps: - name: Install System Deps diff --git a/.github/workflows/release-upload-virustotal.yml b/.github/workflows/release-upload-virustotal.yml index d47d6ce6a6f..da13d83ca80 100644 --- a/.github/workflows/release-upload-virustotal.yml +++ b/.github/workflows/release-upload-virustotal.yml @@ 
-43,6 +43,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: virus-total - name: Upload to VirusTotal env: diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index c4330c3ef74..18248f6031a 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -21,7 +21,7 @@ on: env: COLUMNS: 190 - CACHE_SEED: SEED-3 # Bump the number to invalidate all caches + CACHE_SEED: SEED-5 # Bump the number to invalidate all caches RELENV_DATA: "${{ github.workspace }}/.relenv" permissions: @@ -70,6 +70,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ env.CACHE_SEED }} - name: Pretty Print The GH Actions Event run: @@ -142,6 +144,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -337,6 +341,38 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + amazonlinux-2023-ci-deps: + name: Amazon Linux 2023 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + amazonlinux-2023-arm64-ci-deps: + name: Amazon Linux 2023 Arm64 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + 
python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + archlinux-lts-ci-deps: name: Arch Linux LTS Deps needs: @@ -792,6 +828,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Backup Previous Releases id: backup @@ -822,6 +860,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Publish Release Repository env: @@ -842,6 +882,8 @@ jobs: - almalinux-9-ci-deps - amazonlinux-2-arm64-ci-deps - amazonlinux-2-ci-deps + - amazonlinux-2023-arm64-ci-deps + - amazonlinux-2023-ci-deps - centos-7-arm64-ci-deps - centos-7-ci-deps - centosstream-8-arm64-ci-deps @@ -904,6 +946,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Setup GnuPG run: | @@ -1007,6 +1051,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Setup GnuPG run: | @@ -1068,6 +1114,8 @@ jobs: - almalinux-9-arm64-ci-deps - amazonlinux-2-ci-deps - amazonlinux-2-arm64-ci-deps + - amazonlinux-2023-ci-deps + - amazonlinux-2023-arm64-ci-deps - archlinux-lts-ci-deps - centos-7-ci-deps - centos-7-arm64-ci-deps diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index f8cf3d33d49..523c3369201 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -12,7 +12,7 @@ on: env: COLUMNS: 190 - CACHE_SEED: SEED-3 # Bump the number to invalidate all caches + CACHE_SEED: SEED-5 # Bump the number to invalidate all caches RELENV_DATA: "${{ github.workspace }}/.relenv" 
permissions: @@ -171,6 +171,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ env.CACHE_SEED }} - name: Pretty Print The GH Actions Event run: @@ -309,28 +311,15 @@ jobs: steps: - uses: actions/checkout@v4 - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version + - name: Set up Python 3.10 + uses: actions/setup-python@v4 with: - python-binary: python3 + python-version: "3.10" - name: Setup Python Tools Scripts - id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts - - - name: Cache Python Tools Docs Virtualenv - uses: actions/cache@v3 with: - path: .tools-venvs/docs - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|docs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/docs.txt') }} - - - name: Cache Python Tools Changelog Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/changelog - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|changelog|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/changelog.txt') }} - + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-changelog - name: Setup Salt Version id: setup-salt-version @@ -415,6 +404,7 @@ jobs: if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} env: SKIP: lint-salt,lint-tests + PRE_COMMIT_COLOR: always run: | # Run it twice so that pre-commit can fix anything that can be automatically fixed. 
git commit -am "Release v${{ needs.prepare-workflow.outputs.salt-version }}" || \ @@ -461,15 +451,10 @@ jobs: with: python-version: "3.10" - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version - with: - python-binary: python3 - - name: Setup Python Tools Scripts - id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-build - name: Setup Salt Version id: setup-salt-version @@ -477,12 +462,6 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - - name: Cache Python Tools Build Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/build - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|build|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/build.txt') }} - - name: Build Source Tarball uses: ./.github/actions/build-source-tarball with: @@ -499,7 +478,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.2" python-version: "3.10.13" build-deps-onedir-windows: @@ -513,7 +492,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.2" python-version: "3.10.13" build-deps-onedir-macos: @@ -527,7 +506,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.2" python-version: "3.10.13" build-salt-onedir-linux: @@ -543,7 +522,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.2" python-version: "3.10.13" build-salt-onedir-windows: @@ -559,7 +538,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.2" python-version: "3.10.13" build-salt-onedir-macos: @@ -575,7 +554,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.2" python-version: "3.10.13" build-rpm-pkgs-onedir: @@ -587,7 +566,8 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -600,7 +580,8 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: 
"0.14.2" python-version: "3.10.13" source: "src" @@ -613,7 +594,8 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -626,7 +608,8 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -639,7 +622,8 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -652,7 +636,8 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -665,7 +650,8 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -678,7 +664,8 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -869,6 +856,40 @@ jobs: salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + amazonlinux-2023-ci-deps: + name: Amazon Linux 2023 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + amazonlinux-2023-arm64-ci-deps: + name: Amazon Linux 2023 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + archlinux-lts-ci-deps: name: Arch Linux LTS Deps if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -1350,6 +1371,72 @@ jobs: skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + amazonlinux-2-arm64-pkg-tests: + name: Amazon Linux 2 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2-arm64-ci-deps + uses: 
./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2023-pkg-tests: + name: Amazon Linux 2023 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2023-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2023-arm64-pkg-tests: + name: Amazon Linux 2023 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2023-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + 
skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + centos-7-pkg-tests: name: CentOS 7 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -1416,6 +1503,28 @@ jobs: skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + centosstream-9-arm64-pkg-tests: + name: CentOS Stream 9 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - centosstream-9-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: centosstream-9-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + debian-10-pkg-tests: name: Debian 10 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -1591,6 +1700,7 @@ jobs: skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true photonos-4-arm64-pkg-tests: name: Photon OS 4 Arm64 Package Test @@ -1613,6 +1723,7 @@ jobs: skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true photonos-5-pkg-tests: name: Photon OS 5 Package Test @@ -1635,6 +1746,7 @@ jobs: skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ 
needs.prepare-workflow.outputs.testing-releases }} + fips: true photonos-5-arm64-pkg-tests: name: Photon OS 5 Arm64 Package Test @@ -1657,6 +1769,7 @@ jobs: skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true ubuntu-2004-pkg-tests: name: Ubuntu 20.04 Package Test @@ -1935,7 +2048,7 @@ jobs: platform: windows arch: amd64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -1957,7 +2070,7 @@ jobs: platform: windows arch: amd64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -1979,7 +2092,7 @@ jobs: platform: windows arch: amd64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2001,7 +2114,7 @@ jobs: platform: darwin arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2023,7 +2136,7 @@ jobs: platform: darwin arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2045,7 +2158,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2067,7 +2180,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2089,7 +2202,73 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 + + amazonlinux-2-arm64: + name: Amazon Linux 2 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 + + amazonlinux-2023: + name: Amazon Linux 2023 Test + if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2023-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 + + amazonlinux-2023-arm64: + name: Amazon Linux 2023 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2023-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2111,7 +2290,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2133,7 +2312,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2155,7 +2334,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2177,7 +2356,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2199,7 +2378,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2221,7 +2400,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2243,7 +2422,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2265,7 +2444,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2287,7 +2466,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2309,7 +2488,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2331,7 +2510,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2353,7 +2532,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2375,7 +2554,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2397,7 +2576,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2419,7 +2598,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2427,6 +2606,7 @@ jobs: skip-junit-reports: false workflow-slug: scheduled default-timeout: 360 + fips: true photonos-4-arm64: name: Photon OS 4 Arm64 Test @@ -2441,7 +2621,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2449,6 +2629,7 @@ jobs: skip-junit-reports: false workflow-slug: scheduled default-timeout: 360 + fips: true photonos-5: name: Photon OS 5 Test @@ -2463,7 +2644,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2471,6 +2652,7 @@ jobs: skip-junit-reports: false workflow-slug: scheduled default-timeout: 360 + fips: true photonos-5-arm64: name: Photon OS 5 Arm64 Test @@ -2485,7 +2667,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2493,6 +2675,7 @@ jobs: skip-junit-reports: false workflow-slug: scheduled default-timeout: 360 + fips: true ubuntu-2004: name: Ubuntu 20.04 
Test @@ -2507,7 +2690,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2529,7 +2712,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2551,7 +2734,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2573,7 +2756,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2599,6 +2782,8 @@ jobs: - almalinux-9-arm64-ci-deps - amazonlinux-2-ci-deps - amazonlinux-2-arm64-ci-deps + - amazonlinux-2023-ci-deps + - amazonlinux-2023-arm64-ci-deps - archlinux-lts-ci-deps - centos-7-ci-deps - centos-7-arm64-ci-deps @@ -2634,6 +2819,9 @@ jobs: - almalinux-8 - almalinux-9 - amazonlinux-2 + - amazonlinux-2-arm64 + - amazonlinux-2023 + - amazonlinux-2023-arm64 - archlinux-lts - centos-7 - centosstream-8 @@ -2668,6 +2856,8 @@ jobs: - name: Setup Python Tools Scripts id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-coverage - name: 
Install Nox run: | @@ -2794,6 +2984,8 @@ jobs: - almalinux-9-arm64-ci-deps - amazonlinux-2-ci-deps - amazonlinux-2-arm64-ci-deps + - amazonlinux-2023-ci-deps + - amazonlinux-2023-arm64-ci-deps - archlinux-lts-ci-deps - centos-7-ci-deps - centos-7-arm64-ci-deps @@ -2829,6 +3021,9 @@ jobs: - almalinux-8 - almalinux-9 - amazonlinux-2 + - amazonlinux-2-arm64 + - amazonlinux-2023 + - amazonlinux-2023-arm64 - archlinux-lts - centos-7 - centosstream-8 @@ -2852,9 +3047,13 @@ jobs: - ubuntu-2204 - ubuntu-2204-arm64 - amazonlinux-2-pkg-tests + - amazonlinux-2-arm64-pkg-tests + - amazonlinux-2023-pkg-tests + - amazonlinux-2023-arm64-pkg-tests - centos-7-pkg-tests - centosstream-8-pkg-tests - centosstream-9-pkg-tests + - centosstream-9-arm64-pkg-tests - debian-10-pkg-tests - debian-11-pkg-tests - debian-11-arm64-pkg-tests diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 45aa6f6d6b8..ac5fff00bf0 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -37,7 +37,7 @@ on: env: COLUMNS: 190 - CACHE_SEED: SEED-3 # Bump the number to invalidate all caches + CACHE_SEED: SEED-5 # Bump the number to invalidate all caches RELENV_DATA: "${{ github.workspace }}/.relenv" permissions: @@ -167,6 +167,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ env.CACHE_SEED }} - name: Pretty Print The GH Actions Event run: @@ -318,28 +320,10 @@ jobs: steps: - uses: actions/checkout@v4 - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version - with: - python-binary: python3 - - name: Setup Python Tools Scripts - id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts - - - name: Cache Python Tools Docs Virtualenv - uses: actions/cache@v3 with: - path: .tools-venvs/docs - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ 
steps.python-tools-scripts.outputs.version }}|docs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/docs.txt') }} - - - name: Cache Python Tools Changelog Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/changelog - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|changelog|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/changelog.txt') }} - + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-changelog - name: Setup Salt Version id: setup-salt-version @@ -425,6 +409,7 @@ jobs: if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} env: SKIP: lint-salt,lint-tests + PRE_COMMIT_COLOR: always run: | # Run it twice so that pre-commit can fix anything that can be automatically fixed. git commit -am "Release v${{ needs.prepare-workflow.outputs.salt-version }}" || \ @@ -471,15 +456,10 @@ jobs: with: python-version: "3.10" - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version - with: - python-binary: python3 - - name: Setup Python Tools Scripts - id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-build - name: Setup Salt Version id: setup-salt-version @@ -487,12 +467,6 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - - name: Cache Python Tools Build Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/build - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|build|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/build.txt') }} - - name: Build Source Tarball uses: ./.github/actions/build-source-tarball with: @@ -509,7 +483,7 @@ jobs: salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.2" python-version: "3.10.13" build-deps-onedir-windows: @@ -523,7 +497,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.2" python-version: "3.10.13" build-deps-onedir-macos: @@ -537,7 +511,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.2" python-version: "3.10.13" build-salt-onedir-linux: @@ -553,7 +527,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.2" python-version: "3.10.13" build-salt-onedir-windows: @@ -569,7 +543,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.2" python-version: "3.10.13" build-salt-onedir-macos: @@ -585,7 +559,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" 
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.2" python-version: "3.10.13" build-rpm-pkgs-onedir: @@ -597,7 +571,8 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -610,7 +585,8 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -623,7 +599,8 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -636,7 +613,8 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -649,7 +627,8 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" environment: staging @@ -665,7 +644,8 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" environment: staging @@ -681,7 +661,8 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" environment: staging @@ -697,7 +678,8 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" environment: staging @@ -891,6 +873,40 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + amazonlinux-2023-ci-deps: + name: Amazon Linux 2023 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + amazonlinux-2023-arm64-ci-deps: + name: Amazon Linux 2023 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + 
with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + archlinux-lts-ci-deps: name: Arch Linux LTS Deps if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -1372,6 +1388,72 @@ jobs: skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + amazonlinux-2-arm64-pkg-tests: + name: Amazon Linux 2 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2023-pkg-tests: + name: Amazon Linux 2023 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2023-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" 
+ cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2023-arm64-pkg-tests: + name: Amazon Linux 2023 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2023-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + centos-7-pkg-tests: name: CentOS 7 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -1438,6 +1520,28 @@ jobs: skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + centosstream-9-arm64-pkg-tests: + name: CentOS Stream 9 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - centosstream-9-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: centosstream-9-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + 
skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + debian-10-pkg-tests: name: Debian 10 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -1613,6 +1717,7 @@ jobs: skip-code-coverage: true skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true photonos-4-arm64-pkg-tests: name: Photon OS 4 Arm64 Package Test @@ -1635,6 +1740,7 @@ jobs: skip-code-coverage: true skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true photonos-5-pkg-tests: name: Photon OS 5 Package Test @@ -1657,6 +1763,7 @@ jobs: skip-code-coverage: true skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true photonos-5-arm64-pkg-tests: name: Photon OS 5 Arm64 Package Test @@ -1679,6 +1786,7 @@ jobs: skip-code-coverage: true skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true ubuntu-2004-pkg-tests: name: Ubuntu 20.04 Package Test @@ -1957,7 +2065,7 @@ jobs: platform: windows arch: amd64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -1979,7 +2087,7 @@ jobs: platform: windows arch: amd64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2001,7 +2109,7 @@ jobs: platform: windows arch: amd64 nox-version: 2022.8.7 - python-version: "3.10" + 
gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2023,7 +2131,7 @@ jobs: platform: darwin arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2045,7 +2153,7 @@ jobs: platform: darwin arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2067,7 +2175,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2089,7 +2197,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2111,7 +2219,73 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + workflow-slug: staging + default-timeout: 
180 + + amazonlinux-2-arm64: + name: Amazon Linux 2 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 + + amazonlinux-2023: + name: Amazon Linux 2023 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2023-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 + + amazonlinux-2023-arm64: + name: Amazon Linux 2023 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2023-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: 
"3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2133,7 +2307,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2155,7 +2329,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2177,7 +2351,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2199,7 +2373,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2221,7 +2395,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2243,7 +2417,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: 
"3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2265,7 +2439,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2287,7 +2461,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2309,7 +2483,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2331,7 +2505,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2353,7 +2527,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2375,7 +2549,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: 
"3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2397,7 +2571,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2419,7 +2593,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2441,7 +2615,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2449,6 +2623,7 @@ jobs: skip-junit-reports: true workflow-slug: staging default-timeout: 180 + fips: true photonos-4-arm64: name: Photon OS 4 Arm64 Test @@ -2463,7 +2638,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2471,6 +2646,7 @@ jobs: skip-junit-reports: true workflow-slug: staging default-timeout: 180 + fips: true photonos-5: name: Photon OS 5 Test @@ -2485,7 +2661,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ 
needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2493,6 +2669,7 @@ jobs: skip-junit-reports: true workflow-slug: staging default-timeout: 180 + fips: true photonos-5-arm64: name: Photon OS 5 Arm64 Test @@ -2507,7 +2684,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2515,6 +2692,7 @@ jobs: skip-junit-reports: true workflow-slug: staging default-timeout: 180 + fips: true ubuntu-2004: name: Ubuntu 20.04 Test @@ -2529,7 +2707,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2551,7 +2729,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2573,7 +2751,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2595,7 +2773,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ 
needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2624,7 +2802,9 @@ jobs: - uses: actions/checkout@v4 - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -2767,6 +2947,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -2850,11 +3032,31 @@ jobs: - pkg-type: rpm distro: amazon version: "2" + arch: arm64 + - pkg-type: rpm + distro: amazon + version: "2" + arch: aarch64 + - pkg-type: rpm + distro: amazon + version: "2023" + arch: x86_64 + - pkg-type: rpm + distro: amazon + version: "2023" + arch: arm64 + - pkg-type: rpm + distro: amazon + version: "2023" arch: aarch64 - pkg-type: rpm distro: redhat version: "7" arch: x86_64 + - pkg-type: rpm + distro: redhat + version: "7" + arch: arm64 - pkg-type: rpm distro: redhat version: "7" @@ -2863,6 +3065,10 @@ jobs: distro: redhat version: "8" arch: x86_64 + - pkg-type: rpm + distro: redhat + version: "8" + arch: arm64 - pkg-type: rpm distro: redhat version: "8" @@ -2871,6 +3077,10 @@ jobs: distro: redhat version: "9" arch: x86_64 + - pkg-type: rpm + distro: redhat + version: "9" + arch: arm64 - pkg-type: rpm distro: redhat version: "9" @@ -2879,6 +3089,10 @@ jobs: distro: fedora version: "36" arch: x86_64 + - pkg-type: rpm + distro: fedora + version: "36" + arch: arm64 - pkg-type: rpm distro: fedora version: "36" @@ -2887,6 +3101,10 @@ jobs: distro: fedora version: "37" arch: x86_64 + - pkg-type: rpm + distro: fedora + version: "37" + arch: arm64 - pkg-type: rpm distro: fedora version: 
"37" @@ -2895,6 +3113,10 @@ jobs: distro: fedora version: "38" arch: x86_64 + - pkg-type: rpm + distro: fedora + version: "38" + arch: arm64 - pkg-type: rpm distro: fedora version: "38" @@ -2903,6 +3125,10 @@ jobs: distro: photon version: "3" arch: x86_64 + - pkg-type: rpm + distro: photon + version: "3" + arch: arm64 - pkg-type: rpm distro: photon version: "3" @@ -2911,6 +3137,10 @@ jobs: distro: photon version: "4" arch: x86_64 + - pkg-type: rpm + distro: photon + version: "4" + arch: arm64 - pkg-type: rpm distro: photon version: "4" @@ -2919,6 +3149,10 @@ jobs: distro: photon version: "5" arch: x86_64 + - pkg-type: rpm + distro: photon + version: "5" + arch: arm64 - pkg-type: rpm distro: photon version: "5" @@ -2934,6 +3168,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -2944,7 +3180,7 @@ jobs: - name: Download RPM Packages uses: actions/download-artifact@v3 with: - name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-${{ matrix.arch }}-rpm + name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}-rpm path: artifacts/pkgs/incoming - name: Setup GnuPG @@ -3022,6 +3258,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -3122,6 +3360,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -3206,6 +3446,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -3331,6 +3573,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Download Repository Artifact uses: actions/download-artifact@v3 @@ -3376,6 +3620,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Download Release Patch uses: actions/download-artifact@v3 @@ -3431,6 +3677,8 @@ jobs: - almalinux-9-ci-deps - amazonlinux-2-arm64-ci-deps - amazonlinux-2-ci-deps + - amazonlinux-2023-arm64-ci-deps + - amazonlinux-2023-ci-deps - centos-7-arm64-ci-deps - centos-7-ci-deps - centosstream-8-arm64-ci-deps @@ -3491,6 +3739,8 @@ jobs: - almalinux-9-arm64-ci-deps - amazonlinux-2-ci-deps - amazonlinux-2-arm64-ci-deps + - amazonlinux-2023-ci-deps + - amazonlinux-2023-arm64-ci-deps - archlinux-lts-ci-deps - centos-7-ci-deps - centos-7-arm64-ci-deps @@ -3526,6 +3776,9 @@ jobs: - almalinux-8 - almalinux-9 - amazonlinux-2 + - amazonlinux-2-arm64 + - amazonlinux-2023 + - amazonlinux-2023-arm64 - archlinux-lts - centos-7 - centosstream-8 @@ -3549,9 +3802,13 @@ jobs: - ubuntu-2204 - ubuntu-2204-arm64 - amazonlinux-2-pkg-tests + - amazonlinux-2-arm64-pkg-tests + - amazonlinux-2023-pkg-tests + - amazonlinux-2023-arm64-pkg-tests - centos-7-pkg-tests - centosstream-8-pkg-tests - centosstream-9-pkg-tests + - centosstream-9-arm64-pkg-tests - debian-10-pkg-tests - debian-11-pkg-tests - debian-11-arm64-pkg-tests @@ -3586,6 +3843,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Setup GnuPG run: | diff --git a/.github/workflows/templates/build-deb-repo.yml.jinja 
b/.github/workflows/templates/build-deb-repo.yml.jinja index 165c60c02e9..8d9c054405f 100644 --- a/.github/workflows/templates/build-deb-repo.yml.jinja +++ b/.github/workflows/templates/build-deb-repo.yml.jinja @@ -31,6 +31,8 @@ - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | diff --git a/.github/workflows/templates/build-macos-repo.yml.jinja b/.github/workflows/templates/build-macos-repo.yml.jinja index 5f9b14a9904..916686f5968 100644 --- a/.github/workflows/templates/build-macos-repo.yml.jinja +++ b/.github/workflows/templates/build-macos-repo.yml.jinja @@ -10,6 +10,8 @@ - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | diff --git a/.github/workflows/templates/build-onedir-repo.yml.jinja b/.github/workflows/templates/build-onedir-repo.yml.jinja index 70deec70b7d..9b1daf3ce7e 100644 --- a/.github/workflows/templates/build-onedir-repo.yml.jinja +++ b/.github/workflows/templates/build-onedir-repo.yml.jinja @@ -10,6 +10,8 @@ - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | diff --git a/.github/workflows/templates/build-packages.yml.jinja b/.github/workflows/templates/build-packages.yml.jinja index 37cae00b1e8..b5086a75e58 100644 --- a/.github/workflows/templates/build-packages.yml.jinja +++ b/.github/workflows/templates/build-packages.yml.jinja @@ -20,6 +20,7 @@ uses: ./.github/workflows/build-<{ pkg_type }>-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} 
relenv-version: "<{ relenv_version }>" python-version: "<{ python_version }>" source: "<{ backend }>" diff --git a/.github/workflows/templates/build-rpm-repo.yml.jinja b/.github/workflows/templates/build-rpm-repo.yml.jinja index 405461a99b3..7e99a968696 100644 --- a/.github/workflows/templates/build-rpm-repo.yml.jinja +++ b/.github/workflows/templates/build-rpm-repo.yml.jinja @@ -5,24 +5,37 @@ include: <%- for distro, version, arch in ( ("amazon", "2", "x86_64"), + ("amazon", "2", "arm64"), ("amazon", "2", "aarch64"), + ("amazon", "2023", "x86_64"), + ("amazon", "2023", "arm64"), + ("amazon", "2023", "aarch64"), ("redhat", "7", "x86_64"), + ("redhat", "7", "arm64"), ("redhat", "7", "aarch64"), ("redhat", "8", "x86_64"), + ("redhat", "8", "arm64"), ("redhat", "8", "aarch64"), ("redhat", "9", "x86_64"), + ("redhat", "9", "arm64"), ("redhat", "9", "aarch64"), ("fedora", "36", "x86_64"), + ("fedora", "36", "arm64"), ("fedora", "36", "aarch64"), ("fedora", "37", "x86_64"), + ("fedora", "37", "arm64"), ("fedora", "37", "aarch64"), ("fedora", "38", "x86_64"), + ("fedora", "38", "arm64"), ("fedora", "38", "aarch64"), ("photon", "3", "x86_64"), + ("photon", "3", "arm64"), ("photon", "3", "aarch64"), ("photon", "4", "x86_64"), + ("photon", "4", "arm64"), ("photon", "4", "aarch64"), ("photon", "5", "x86_64"), + ("photon", "5", "arm64"), ("photon", "5", "aarch64"), ) %> - pkg-type: rpm @@ -41,6 +54,8 @@ - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -51,7 +66,7 @@ - name: Download RPM Packages uses: actions/download-artifact@v3 with: - name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-${{ matrix.arch }}-rpm + name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}-rpm path: artifacts/pkgs/incoming - name: Setup GnuPG diff --git 
a/.github/workflows/templates/build-src-repo.yml.jinja b/.github/workflows/templates/build-src-repo.yml.jinja index f0c1a82b7c3..06f1745c8ca 100644 --- a/.github/workflows/templates/build-src-repo.yml.jinja +++ b/.github/workflows/templates/build-src-repo.yml.jinja @@ -9,7 +9,9 @@ - uses: actions/checkout@v4 - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | diff --git a/.github/workflows/templates/build-windows-repo.yml.jinja b/.github/workflows/templates/build-windows-repo.yml.jinja index dc96a8a2e31..a86daf7f58b 100644 --- a/.github/workflows/templates/build-windows-repo.yml.jinja +++ b/.github/workflows/templates/build-windows-repo.yml.jinja @@ -10,6 +10,8 @@ - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | diff --git a/.github/workflows/templates/ci.yml.jinja b/.github/workflows/templates/ci.yml.jinja index 2c64fa4ffeb..68deffc90bf 100644 --- a/.github/workflows/templates/ci.yml.jinja +++ b/.github/workflows/templates/ci.yml.jinja @@ -59,28 +59,19 @@ steps: - uses: actions/checkout@v4 - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version + <%- if not prepare_actual_release %> + + - name: Set up Python 3.10 + uses: actions/setup-python@v4 with: - python-binary: python3 + python-version: "3.10" + + <%- endif %> - name: Setup Python Tools Scripts - id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts - - - name: Cache Python Tools Docs Virtualenv - uses: actions/cache@v3 with: - path: .tools-venvs/docs - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job 
}}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|docs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/docs.txt') }} - - - name: Cache Python Tools Changelog Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/changelog - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|changelog|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/changelog.txt') }} - + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-changelog - name: Setup Salt Version id: setup-salt-version @@ -168,6 +159,7 @@ if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} env: SKIP: lint-salt,lint-tests + PRE_COMMIT_COLOR: always run: | # Run it twice so that pre-commit can fix anything that can be automatically fixed. git commit -am "Release v${{ needs.prepare-workflow.outputs.salt-version }}" || \ @@ -227,15 +219,10 @@ with: python-version: "3.10" - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version - with: - python-binary: python3 - - name: Setup Python Tools Scripts - id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-build - name: Setup Salt Version id: setup-salt-version @@ -243,12 +230,6 @@ with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - - name: Cache Python Tools Build Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/build - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|build|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/build.txt') }} - - name: Build Source Tarball uses: ./.github/actions/build-source-tarball with: @@ -351,6 +332,8 @@ - name: Setup Python 
Tools Scripts id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-coverage - name: Install Nox run: | diff --git a/.github/workflows/templates/layout.yml.jinja b/.github/workflows/templates/layout.yml.jinja index 23a31a1dea7..92d97e4e716 100644 --- a/.github/workflows/templates/layout.yml.jinja +++ b/.github/workflows/templates/layout.yml.jinja @@ -34,7 +34,7 @@ on: env: COLUMNS: 190 - CACHE_SEED: SEED-3 # Bump the number to invalidate all caches + CACHE_SEED: SEED-5 # Bump the number to invalidate all caches RELENV_DATA: "${{ github.workspace }}/.relenv" <%- endblock env %> @@ -185,6 +185,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ env.CACHE_SEED }} - name: Pretty Print The GH Actions Event run: diff --git a/.github/workflows/templates/nightly.yml.jinja b/.github/workflows/templates/nightly.yml.jinja index e4f6bb8439e..e4350f44a36 100644 --- a/.github/workflows/templates/nightly.yml.jinja +++ b/.github/workflows/templates/nightly.yml.jinja @@ -167,6 +167,8 @@ concurrency: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Download Repository Artifact uses: actions/download-artifact@v3 diff --git a/.github/workflows/templates/release.yml.jinja b/.github/workflows/templates/release.yml.jinja index 7c5c28af059..ae1216ccbf0 100644 --- a/.github/workflows/templates/release.yml.jinja +++ b/.github/workflows/templates/release.yml.jinja @@ -98,6 +98,8 @@ permissions: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ env.CACHE_SEED }} - name: Pretty Print The GH Actions Event run: @@ -176,6 +178,8 @@ permissions: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: 
${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -220,6 +224,8 @@ permissions: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Backup Previous Releases id: backup @@ -251,6 +257,8 @@ permissions: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Publish Release Repository env: @@ -287,6 +295,8 @@ permissions: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Setup GnuPG run: | @@ -403,6 +413,8 @@ permissions: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Restore Release Bucket run: | @@ -427,6 +439,8 @@ permissions: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Setup GnuPG run: | diff --git a/.github/workflows/templates/staging.yml.jinja b/.github/workflows/templates/staging.yml.jinja index 8e3a0c98d9f..c84ade07636 100644 --- a/.github/workflows/templates/staging.yml.jinja +++ b/.github/workflows/templates/staging.yml.jinja @@ -104,6 +104,8 @@ concurrency: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Download Release Patch uses: actions/download-artifact@v3 @@ -187,6 +189,8 @@ concurrency: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Setup GnuPG run: | diff --git 
a/.github/workflows/templates/test-package-downloads-action.yml.jinja b/.github/workflows/templates/test-package-downloads-action.yml.jinja index 348b0d17227..eec67378ba1 100644 --- a/.github/workflows/templates/test-package-downloads-action.yml.jinja +++ b/.github/workflows/templates/test-package-downloads-action.yml.jinja @@ -84,7 +84,7 @@ jobs: - name: Download Onedir Tarball as an Artifact uses: actions/download-artifact@v3 with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}.tar.xz path: artifacts/ - name: Decompress Onedir Tarball @@ -92,13 +92,13 @@ jobs: run: | python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" cd artifacts - tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz + tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}.tar.xz - name: Download cached nox.${{ matrix.distro-slug }}.tar.* for session ${{ inputs.nox-session }} uses: actions/cache@v3 with: path: nox.${{ matrix.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|testrun-deps|${{ matrix.arch }}|${{ matrix.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ + key: ${{ inputs.cache-prefix }}|testrun-deps|${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}|${{ matrix.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }} # If we get a cache miss here it means the dependencies step failed to save the cache @@ -106,6 +106,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }}-pkg-download-linux - name: Get Salt Project GitHub Actions Bot Environment 
run: | @@ -555,6 +557,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }}-pkg-download-windows - name: Get Salt Project GitHub Actions Bot Environment run: | diff --git a/.github/workflows/templates/test-salt-pkg.yml.jinja b/.github/workflows/templates/test-salt-pkg.yml.jinja index ad9d122f7ad..eb8b43d071f 100644 --- a/.github/workflows/templates/test-salt-pkg.yml.jinja +++ b/.github/workflows/templates/test-salt-pkg.yml.jinja @@ -1,5 +1,4 @@ - <%- for slug, display_name, arch, pkg_type in test_salt_pkg_listing["linux"] %> - + <%- for slug, display_name, arch, pkg_type, fips in test_salt_pkg_listing["linux"] %> <%- set job_name = "{}-pkg-tests".format(slug.replace(".", "")) %> <{ job_name }>: @@ -24,6 +23,9 @@ skip-code-coverage: <{ skip_test_coverage_check }> skip-junit-reports: <{ skip_junit_reports_check }> testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + <%- if fips == "fips" %> + fips: true + <%- endif %> <%- endfor %> diff --git a/.github/workflows/templates/test-salt.yml.jinja b/.github/workflows/templates/test-salt.yml.jinja index be3e7fd5fa4..e99773276aa 100644 --- a/.github/workflows/templates/test-salt.yml.jinja +++ b/.github/workflows/templates/test-salt.yml.jinja @@ -20,7 +20,7 @@ platform: windows arch: amd64 nox-version: <{ nox_version }> - python-version: "<{ gh_actions_workflows_python_version }>" + gh-actions-python-version: "<{ gh_actions_workflows_python_version }>" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }> @@ -48,7 +48,7 @@ platform: darwin arch: x86_64 nox-version: <{ nox_version }> - python-version: "<{ gh_actions_workflows_python_version }>" + gh-actions-python-version: "<{ gh_actions_workflows_python_version }>" testrun: ${{ 
needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }> @@ -59,8 +59,7 @@ <%- endfor %> - - <%- for slug, display_name, arch in test_salt_listing["linux"] %> + <%- for slug, display_name, arch, fips in test_salt_listing["linux"] %> <{ slug.replace(".", "") }>: <%- do test_salt_needs.append(slug.replace(".", "")) %> @@ -76,7 +75,7 @@ platform: linux arch: <{ arch }> nox-version: <{ nox_version }> - python-version: "<{ gh_actions_workflows_python_version }>" + gh-actions-python-version: "<{ gh_actions_workflows_python_version }>" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }> @@ -84,5 +83,8 @@ skip-junit-reports: <{ skip_junit_reports_check }> workflow-slug: <{ workflow_slug }> default-timeout: <{ timeout_value }> + <%- if fips == "fips" %> + fips: true + <%- endif %> <%- endfor %> diff --git a/.github/workflows/test-action-macos.yml b/.github/workflows/test-action-macos.yml index 6eb610302c0..b7cc93d5e8c 100644 --- a/.github/workflows/test-action-macos.yml +++ b/.github/workflows/test-action-macos.yml @@ -16,7 +16,7 @@ on: required: true type: string description: JSON string containing information about what and how to run the test suite - python-version: + gh-actions-python-version: required: false type: string description: The python version to run tests with @@ -91,6 +91,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} - name: Generate Test Matrix id: generate-matrix @@ -147,16 +149,16 @@ jobs: uses: actions/cache@v3 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ 
inputs.nox-session }}|${{ inputs.python-version }}|${{ + key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.gh-actions-python-version }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }} # If we get a cache miss here it means the dependencies step failed to save the cache fail-on-cache-miss: true - - name: Set up Python ${{ inputs.python-version }} + - name: Set up Python ${{ inputs.gh-actions-python-version }} uses: actions/setup-python@v4 with: - python-version: "${{ inputs.python-version }}" + python-version: "${{ inputs.gh-actions-python-version }}" - name: Install Nox run: | @@ -401,10 +403,10 @@ jobs: run: | tree -a artifacts - - name: Set up Python ${{ inputs.python-version }} + - name: Set up Python ${{ inputs.gh-actions-python-version }} uses: actions/setup-python@v4 with: - python-version: "${{ inputs.python-version }}" + python-version: "${{ inputs.gh-actions-python-version }}" - name: Install Nox run: | diff --git a/.github/workflows/test-action.yml b/.github/workflows/test-action.yml index 53e7bbfa894..ce5ac179a7d 100644 --- a/.github/workflows/test-action.yml +++ b/.github/workflows/test-action.yml @@ -36,11 +36,16 @@ on: required: true type: string description: The nox version to install - python-version: + gh-actions-python-version: required: false type: string description: The python version to run tests with default: "3.10" + fips: + required: false + type: boolean + default: false + description: Test run with FIPS enabled package-name: required: false type: string @@ -96,11 +101,13 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} - name: Generate Test Matrix id: generate-matrix run: | - tools ci matrix --workflow=${{ inputs.workflow-slug }} ${{ fromJSON(inputs.testrun)['type'] == 'full' && '--full ' || '' }}${{ inputs.distro-slug }} + tools ci 
matrix --workflow=${{ inputs.workflow-slug }} ${{ fromJSON(inputs.testrun)['type'] == 'full' && '--full ' || '' }}${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} test: name: Test @@ -152,7 +159,7 @@ jobs: uses: actions/cache@v3 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ + key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.gh-actions-python-version }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }} # If we get a cache miss here it means the dependencies step failed to save the cache @@ -164,6 +171,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} - name: Download testrun-changed-files.txt if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }} @@ -207,7 +216,7 @@ jobs: if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] == false }} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ - --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.distro-slug }} \ + --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ ${{ matrix.tests-chunk }} -- --suppress-no-test-exit-code \ --from-filenames=testrun-changed-files.txt @@ -216,7 +225,7 @@ jobs: if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] == false }} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ - --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.distro-slug }} \ 
+ --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ ${{ matrix.tests-chunk }} -- --no-fast-tests --slow-tests --suppress-no-test-exit-code \ --from-filenames=testrun-changed-files.txt @@ -225,7 +234,7 @@ jobs: if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] == false }} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ - --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.distro-slug }} \ + --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ ${{ matrix.tests-chunk }} -- --no-fast-tests --core-tests --suppress-no-test-exit-code \ --from-filenames=testrun-changed-files.txt @@ -235,14 +244,14 @@ jobs: run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ (inputs.skip-code-coverage && matrix.tests-chunk != 'unit') && '--skip-code-coverage' || '' }} \ - ${{ inputs.distro-slug }} ${{ matrix.tests-chunk }} + ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} ${{ matrix.tests-chunk }} - name: Run Slow Tests id: run-slow-tests if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] }} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ - --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.distro-slug }} \ + --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ ${{ matrix.tests-chunk }} -- --no-fast-tests --slow-tests - name: Run Core Tests @@ -250,7 +259,7 @@ jobs: if: 
${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] }} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ - --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.distro-slug }} \ + --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ ${{ matrix.tests-chunk }} -- --no-fast-tests --core-tests - name: Run Flaky Tests @@ -258,7 +267,7 @@ jobs: if: ${{ fromJSON(inputs.testrun)['selected_tests']['flaky'] }} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ - --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.distro-slug }} \ + --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ ${{ matrix.tests-chunk }} -- --no-fast-tests --flaky-jail - name: Run Full Tests @@ -267,7 +276,7 @@ jobs: run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ (inputs.skip-code-coverage && matrix.tests-chunk != 'unit') && '--skip-code-coverage' || '' }} \ - -E TEST_GROUP ${{ inputs.distro-slug }} ${{ matrix.tests-chunk }} -- --slow-tests --core-tests \ + -E TEST_GROUP ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} ${{ matrix.tests-chunk }} -- --slow-tests --core-tests \ --test-group-count=${{ matrix.test-group-count || 1 }} --test-group=${{ matrix.test-group || 1 }} - name: Combine Coverage Reports diff --git a/.github/workflows/test-package-downloads-action.yml b/.github/workflows/test-package-downloads-action.yml index 32879958651..e53352abe99 100644 --- a/.github/workflows/test-package-downloads-action.yml +++ 
b/.github/workflows/test-package-downloads-action.yml @@ -71,36 +71,63 @@ jobs: - distro-slug: almalinux-8-arm64 arch: aarch64 pkg-type: package + - distro-slug: almalinux-8-arm64 + arch: arm64 + pkg-type: package - distro-slug: almalinux-9 arch: x86_64 pkg-type: package - distro-slug: almalinux-9-arm64 arch: aarch64 pkg-type: package + - distro-slug: almalinux-9-arm64 + arch: arm64 + pkg-type: package - distro-slug: amazonlinux-2 arch: x86_64 pkg-type: package - distro-slug: amazonlinux-2-arm64 arch: aarch64 pkg-type: package + - distro-slug: amazonlinux-2-arm64 + arch: arm64 + pkg-type: package + - distro-slug: amazonlinux-2023 + arch: x86_64 + pkg-type: package + - distro-slug: amazonlinux-2023-arm64 + arch: aarch64 + pkg-type: package + - distro-slug: amazonlinux-2023-arm64 + arch: arm64 + pkg-type: package - distro-slug: centos-7 arch: x86_64 pkg-type: package - distro-slug: centos-7-arm64 arch: aarch64 pkg-type: package + - distro-slug: centos-7-arm64 + arch: arm64 + pkg-type: package - distro-slug: centosstream-8 arch: x86_64 pkg-type: package - distro-slug: centosstream-8-arm64 arch: aarch64 pkg-type: package + - distro-slug: centosstream-8-arm64 + arch: arm64 + pkg-type: package - distro-slug: centosstream-9 arch: x86_64 pkg-type: package - distro-slug: centosstream-9-arm64 arch: aarch64 pkg-type: package + - distro-slug: centosstream-9-arm64 + arch: arm64 + pkg-type: package - distro-slug: debian-10 arch: x86_64 pkg-type: package @@ -122,30 +149,45 @@ jobs: - distro-slug: fedora-37-arm64 arch: aarch64 pkg-type: package + - distro-slug: fedora-37-arm64 + arch: arm64 + pkg-type: package - distro-slug: fedora-38 arch: x86_64 pkg-type: package - distro-slug: fedora-38-arm64 arch: aarch64 pkg-type: package + - distro-slug: fedora-38-arm64 + arch: arm64 + pkg-type: package - distro-slug: photonos-3 arch: x86_64 pkg-type: package - distro-slug: photonos-3-arm64 arch: aarch64 pkg-type: package + - distro-slug: photonos-3-arm64 + arch: arm64 + pkg-type: package - 
distro-slug: photonos-4 arch: x86_64 pkg-type: package - distro-slug: photonos-4-arm64 arch: aarch64 pkg-type: package + - distro-slug: photonos-4-arm64 + arch: arm64 + pkg-type: package - distro-slug: photonos-5 arch: x86_64 pkg-type: package - distro-slug: photonos-5-arm64 arch: aarch64 pkg-type: package + - distro-slug: photonos-5-arm64 + arch: arm64 + pkg-type: package - distro-slug: ubuntu-20.04 arch: x86_64 pkg-type: package @@ -178,7 +220,7 @@ jobs: - name: Download Onedir Tarball as an Artifact uses: actions/download-artifact@v3 with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}.tar.xz path: artifacts/ - name: Decompress Onedir Tarball @@ -186,13 +228,13 @@ jobs: run: | python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" cd artifacts - tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz + tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}.tar.xz - name: Download cached nox.${{ matrix.distro-slug }}.tar.* for session ${{ inputs.nox-session }} uses: actions/cache@v3 with: path: nox.${{ matrix.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|testrun-deps|${{ matrix.arch }}|${{ matrix.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ + key: ${{ inputs.cache-prefix }}|testrun-deps|${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}|${{ matrix.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }} # If we get a cache miss here it means the dependencies step failed to save the cache @@ -200,6 +242,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + 
cache-prefix: ${{ inputs.cache-prefix }}-pkg-download-linux - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -657,6 +701,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }}-pkg-download-windows - name: Get Salt Project GitHub Actions Bot Environment run: | diff --git a/.github/workflows/test-packages-action-macos.yml b/.github/workflows/test-packages-action-macos.yml index 41048bd4e34..7c2dbbec79e 100644 --- a/.github/workflows/test-packages-action-macos.yml +++ b/.github/workflows/test-packages-action-macos.yml @@ -70,7 +70,10 @@ jobs: generate-matrix: name: Generate Matrix - runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} + runs-on: + - self-hosted + - linux + - x86_64 outputs: pkg-matrix-include: ${{ steps.generate-pkg-matrix.outputs.matrix }} steps: @@ -85,6 +88,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} - name: Generate Package Test Matrix id: generate-pkg-matrix @@ -172,7 +177,7 @@ jobs: GITHUB_ACTIONS_PIPELINE: "1" SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" run: | - sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.test-chunk }} \ + sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }} \ ${{ matrix.version && format('--prev-version {0}', matrix.version) || ''}} - name: Run Package Tests @@ -186,7 +191,7 @@ jobs: SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" COVERAGE_CONTEXT: ${{ inputs.distro-slug }} run: | - sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.test-chunk }} \ + sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }} \ ${{ matrix.version && format('--prev-version {0}', matrix.version) || ''}} - name: Fix file ownership @@ -206,7 +211,7 @@ jobs: if: always() 
uses: actions/upload-artifact@v3 with: - name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.test-chunk }} + name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.tests-chunk }} path: | artifacts !artifacts/salt/* @@ -232,7 +237,7 @@ jobs: id: download-test-run-artifacts uses: actions/download-artifact@v3 with: - name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.test-chunk }} + name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.tests-chunk }} path: artifacts - name: Show Test Run Artifacts @@ -254,6 +259,6 @@ jobs: # always run even if the previous steps fails if: always() && inputs.skip-junit-reports == false && steps.download-test-run-artifacts.outcome == 'success' with: - check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ matrix.test-chunk }}) + check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ matrix.tests-chunk }}) report_paths: 'artifacts/xml-unittests-output/*.xml' annotate_only: true diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index 1418bc93e30..b7d39a533f2 100644 --- a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -40,6 +40,11 @@ on: type: string description: The python version to run tests with default: "3.10" + fips: + required: false + type: boolean + default: false + description: Test run with FIPS enabled package-name: required: false type: string @@ -90,11 +95,14 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} - name: Generate Package Test Matrix id: generate-pkg-matrix run: | - tools ci pkg-matrix ${{ inputs.distro-slug }} ${{ inputs.pkg-type }} --testing-releases ${{ join(fromJSON(inputs.testing-releases), ' ') }} + tools ci pkg-matrix ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ + ${{ inputs.pkg-type }} --testing-releases ${{ 
join(fromJSON(inputs.testing-releases), ' ') }} test: @@ -156,6 +164,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -181,7 +191,7 @@ jobs: tools --timestamps vm decompress-dependencies ${{ inputs.distro-slug }} - name: Downgrade importlib-metadata - if: ${{ contains(fromJSON('["amazonlinux-2", "centos-7", "debian-10"]'), inputs.distro-slug) && contains(fromJSON('["upgrade-classic", "downgrade-classic"]'), matrix.test-chunk) }} + if: ${{ contains(fromJSON('["amazonlinux-2", "centos-7", "debian-10"]'), inputs.distro-slug) && contains(fromJSON('["upgrade-classic", "downgrade-classic"]'), matrix.tests-chunk) }} run: | # This step can go away once we stop testing classic packages upgrade/downgrades to/from 3005.x tools --timestamps vm ssh ${{ inputs.distro-slug }} -- "sudo python3 -m pip install -U 'importlib-metadata<=4.13.0' 'virtualenv<=20.21.1'" @@ -189,13 +199,13 @@ jobs: - name: Show System Info & Test Plan run: | tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \ - --nox-session=${{ inputs.nox-session }}-pkgs ${{ inputs.distro-slug }} -- ${{ matrix.test-chunk }} \ + --nox-session=${{ inputs.nox-session }}-pkgs ${{ inputs.distro-slug }} -- ${{ matrix.tests-chunk }} \ ${{ matrix.version && format('--prev-version {0}', matrix.version) || ''}} - name: Run Package Tests run: | - tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ - --nox-session=${{ inputs.nox-session }}-pkgs --rerun-failures ${{ inputs.distro-slug }} -- ${{ matrix.test-chunk }} \ + tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install ${{ matrix.fips && '--fips ' || '' }}\ + --nox-session=${{ inputs.nox-session }}-pkgs --rerun-failures ${{ inputs.distro-slug }} -- ${{ matrix.tests-chunk }} \ ${{ 
matrix.version && format('--prev-version {0}', matrix.version) || ''}} - name: Download Test Run Artifacts @@ -217,7 +227,7 @@ jobs: if: always() && steps.download-artifacts-from-vm.outcome == 'success' uses: actions/upload-artifact@v3 with: - name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.test-chunk }} + name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.tests-chunk }} path: | artifacts !artifacts/salt/* @@ -246,7 +256,7 @@ jobs: id: download-test-run-artifacts uses: actions/download-artifact@v3 with: - name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.test-chunk }} + name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.tests-chunk }} path: artifacts - name: Show Test Run Artifacts @@ -259,6 +269,6 @@ jobs: # always run even if the previous steps fails if: always() && inputs.skip-junit-reports == false && steps.download-test-run-artifacts.outcome == 'success' with: - check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ matrix.test-chunk }}) + check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ matrix.tests-chunk }}) report_paths: 'artifacts/xml-unittests-output/*.xml' annotate_only: true diff --git a/.gitignore b/.gitignore index 76c0849dbb1..2cfa8e93c6e 100644 --- a/.gitignore +++ b/.gitignore @@ -147,3 +147,5 @@ nox.*.tar.xz /pkg/debian/salt-ssh /pkg/debian/salt-syndic /pkg/debian/debhelper-build-stamp + +.tools-venvs diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 73181874564..9ddc865f76e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -46,20 +46,16 @@ repos: )$ - repo: https://github.com/s0undt3ch/python-tools-scripts - rev: "0.15.0" + rev: "0.18.5" hooks: - id: tools alias: check-changelog-entries name: Check Changelog Entries args: + - pre-commit - changelog - pre-commit-checks - additional_dependencies: - - boto3==1.21.46 - - pyyaml==6.0.1 - - jinja2==3.1.2 - - packaging==23.0 - - virustotal3==1.0.8 + - id: tools alias: 
generate-workflows name: Generate GitHub Workflow Templates @@ -67,13 +63,9 @@ repos: pass_filenames: false args: - pre-commit + - workflows - generate-workflows - additional_dependencies: - - boto3==1.21.46 - - pyyaml==6.0.1 - - jinja2==3.1.2 - - packaging==23.0 - - virustotal3==1.0.8 + - id: tools alias: actionlint name: Lint GitHub Actions Workflows @@ -82,19 +74,77 @@ repos: - yaml args: - pre-commit + - workflows - actionlint - additional_dependencies: - - boto3==1.21.46 - - pyyaml==6.0.1 - - jinja2==3.1.2 - - packaging==23.0 - - virustotal3==1.0.8 + + - id: tools + alias: check-docs + name: Check Docs + files: ^(salt/.*\.py|doc/ref/.*\.rst)$ + args: + - pre-commit + - docs + - check + + - id: tools + alias: check-docstrings + name: Check docstrings + files: salt/.*\.py$ + exclude: > + (?x)^( + templates/.*| + salt/ext/.*| + )$ + args: + - pre-commit + - docstrings + - check + + - id: tools + alias: check-known-missing-docstrings + name: Check Known Missing Docstrings + stages: [manual] + files: salt/.*\.py$ + exclude: > + (?x)^( + templates/.*| + salt/ext/.*| + )$ + args: + - pre-commit + - docstrings + - check + + - id: tools + alias: loader-check-virtual + name: Check loader modules __virtual__ + files: salt/.*\.py$ + exclude: > + (?x)^( + templates/.*| + salt/ext/.*| + )$ + args: + - pre-commit + - salt-loaders + - check-virtual + + - id: tools + alias: check-filemap + name: Check Filename Map Change Matching + files: ^tests/(filename_map\.yml|.*\.py)$ + pass_filenames: false + args: + - pre-commit + - filemap + - check + + # ----- Packaging Requirements ------------------------------------------------------------------------------------> - repo: https://github.com/saltstack/pip-tools-compile-impersonate rev: "4.8" hooks: - # ----- Packaging Requirements ------------------------------------------------------------------------------------> - id: pip-tools-compile alias: compile-pkg-linux-3.8-zmq-requirements name: Linux Packaging Py3.8 ZeroMQ Requirements 
@@ -155,6 +205,22 @@ repos: - --no-emit-index-url - requirements/static/pkg/linux.in + - id: pip-tools-compile + alias: compile-pkg-linux-3.12-zmq-requirements + name: Linux Packaging Py3.12 ZeroMQ Requirements + files: ^requirements/((base|zeromq|crypto)\.txt|static/pkg/(linux\.in|py3\.12/linux\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=linux + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --no-emit-index-url + - requirements/static/pkg/linux.in + + - id: pip-tools-compile alias: compile-pkg-freebsd-3.8-zmq-requirements name: FreeBSD Packaging Py3.8 ZeroMQ Requirements @@ -215,6 +281,21 @@ repos: - --no-emit-index-url - requirements/static/pkg/freebsd.in + - id: pip-tools-compile + alias: compile-pkg-freebsd-3.12-zmq-requirements + name: FreeBSD Packaging Py3.12 ZeroMQ Requirements + files: ^requirements/((base|zeromq|crypto)\.txt|static/pkg/(freebsd\.in|py3\.12/freebsd\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=freebsd + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --no-emit-index-url + - requirements/static/pkg/freebsd.in + - id: pip-tools-compile alias: compile-pkg-darwin-3.9-zmq-requirements name: Darwin Packaging Py3.9 ZeroMQ Requirements @@ -258,6 +339,19 @@ repos: - requirements/static/pkg/darwin.in - id: pip-tools-compile + alias: compile-pkg-darwin-3.12-zmq-requirements + name: Darwin Packaging Py3.12 ZeroMQ Requirements + files: ^(requirements/((base|zeromq|crypto|darwin)\.txt|static/pkg/(darwin\.in|py3\.12/darwin\.txt)))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=darwin + - --include=requirements/darwin.txt + - --no-emit-index-url + - requirements/static/pkg/darwin.in + alias: compile-pkg-windows-3.8-zmq-requirements name: Windows Packaging Py3.8 ZeroMQ Requirements files: 
^requirements/((base|zeromq|crypto|windows)\.txt|static/pkg/(windows\.in|py3\.8/windows\.txt))$ @@ -313,6 +407,20 @@ repos: - --no-emit-index-url - requirements/static/pkg/windows.in + - id: pip-tools-compile + alias: compile-pkg-windows-3.12-zmq-requirements + name: Windows Packaging Py3.12 ZeroMQ Requirements + files: ^requirements/((base|zeromq|crypto|windows)\.txt|static/pkg/(windows\.in|py3\.12/windows\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=windows + - --include=requirements/windows.txt + - --no-emit-index-url + - requirements/static/pkg/windows.in + # <---- Packaging Requirements ------------------------------------------------------------------------------------- # ----- CI Requirements -------------------------------------------------------------------------------------------> @@ -389,6 +497,23 @@ repos: - requirements/static/ci/linux.in - id: pip-tools-compile + alias: compile-ci-linux-3.12-zmq-requirements + name: Linux CI Py3.12 ZeroMQ Requirements + files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(linux\.in|common\.in)|py3\.12/linux\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=linux + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --include=requirements/pytest.txt + - --include=requirements/static/pkg/linux.in + - --include=requirements/static/ci/common.in + - --no-emit-index-url + - requirements/static/ci/linux.in + alias: compile-ci-linux-crypto-3.8-requirements name: Linux CI Py3.8 Crypto Requirements files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.8/linux-crypto\.txt))$ @@ -433,6 +558,7 @@ repos: - id: pip-tools-compile alias: compile-ci-linux-crypto-3.11-requirements name: Linux CI Py3.11 Crypto Requirements + files: ^requirements/(crypto\.txt|static/ci/crypto\.in)$ files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.11/linux-crypto\.txt))$ pass_filenames: 
false args: @@ -444,6 +570,22 @@ repos: - --no-emit-index-url - requirements/static/ci/crypto.in + - id: pip-tools-compile + alias: compile-ci-linux-crypto-3.12-requirements + name: Linux CI Py3.12 Crypto Requirements + files: ^requirements/(crypto\.txt|static/ci/crypto\.in)$ + files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.12/linux-crypto\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=linux + - --out-prefix=linux + - --no-emit-index-url + - requirements/static/ci/crypto.in + + - id: pip-tools-compile alias: compile-ci-freebsd-3.8-zmq-requirements name: FreeBSD CI Py3.8 ZeroMQ Requirements @@ -516,6 +658,24 @@ repos: - --no-emit-index-url - requirements/static/ci/freebsd.in + - id: pip-tools-compile + alias: compile-ci-freebsd-3.12-zmq-requirements + name: FreeBSD CI Py3.12 ZeroMQ Requirements + files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(freebsd|common)\.in|py3\.12/freebsd\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=freebsd + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --include=requirements/pytest.txt + - --include=requirements/static/pkg/freebsd.in + - --include=requirements/static/ci/common.in + - --no-emit-index-url + - requirements/static/ci/freebsd.in + - id: pip-tools-compile alias: compile-ci-freebsd-crypto-3.8-requirements name: FreeBSD CI Py3.8 Crypto Requirements @@ -575,6 +735,21 @@ repos: - --no-emit-index-url - requirements/static/ci/crypto.in + - id: pip-tools-compile + alias: compile-ci-freebsd-crypto-3.12-requirements + name: FreeBSD CI Py3.12 Crypto Requirements + files: ^requirements/(crypto\.txt|static/ci/crypto\.in)$ + files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.12/freebsd-crypto\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=freebsd + - --out-prefix=freebsd + - --no-emit-index-url + 
- requirements/static/ci/crypto.in + - id: pip-tools-compile alias: compile-ci-darwin-3.9-zmq-requirements name: Darwin CI Py3.9 ZeroMQ Requirements @@ -626,6 +801,23 @@ repos: - --no-emit-index-url - requirements/static/ci/darwin.in + - id: pip-tools-compile + alias: compile-ci-darwin-3.12-zmq-requirements + name: Darwin CI Py3.12 ZeroMQ Requirements + files: ^(requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(darwin|common)\.in|py3\.12/darwin\.txt)))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=darwin + - --include=requirements/darwin.txt + - --include=requirements/pytest.txt + - --include=requirements/static/pkg/darwin.in + - --include=requirements/static/ci/common.in + - --no-emit-index-url + - requirements/static/ci/darwin.in + - id: pip-tools-compile alias: compile-ci-darwin-crypto-3.9-requirements name: Darwin CI Py3.9 Crypto Requirements @@ -668,6 +860,20 @@ repos: - --no-emit-index-url - requirements/static/ci/crypto.in + - id: pip-tools-compile + alias: compile-ci-darwin-crypto-3.12-requirements + name: Darwin CI Py3.12 Crypto Requirements + files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.12/darwin-crypto\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=darwin + - --out-prefix=darwin + - --no-emit-index-url + - requirements/static/ci/crypto.in + - id: pip-tools-compile alias: compile-ci-windows-3.8-zmq-requirements name: Windows CI Py3.8 ZeroMQ Requirements @@ -737,6 +943,22 @@ repos: - requirements/static/ci/windows.in - id: pip-tools-compile + alias: compile-ci-windows-3.12-zmq-requirements + name: Windows CI Py3.12 ZeroMQ Requirements + files: requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(windows|common)\.in|py3\.12/windows\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=windows + - --include=requirements/windows.txt + - --include=requirements/pytest.txt 
+ - --include=requirements/static/pkg/windows.in + - --include=requirements/static/ci/common.in + - --no-emit-index-url + - requirements/static/ci/windows.in + alias: compile-ci-windows-crypto-3.8-requirements name: Windows CI Py3.8 Crypto Requirements files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.8/windows-crypto\.txt))$ @@ -792,8 +1014,21 @@ repos: - --no-emit-index-url - requirements/static/ci/crypto.in - # <---- CI Requirements -------------------------------------------------------------------------------------------- + - id: pip-tools-compile + alias: compile-ci-windows-crypto-3.12-requirements + name: Windows CI Py3.12 Crypto Requirements + files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.12/windows-crypto\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=windows + - --out-prefix=windows + - --no-emit-index-url + - requirements/static/ci/crypto.in + # <---- CI Requirements -------------------------------------------------------------------------------------------- # ----- Cloud CI Requirements -------------------------------------------------------------------------------------> - id: pip-tools-compile @@ -843,6 +1078,23 @@ repos: - --py-version=3.11 - --no-emit-index-url - requirements/static/ci/cloud.in + + - id: pip-tools-compile + alias: compile-ci-cloud-3.12-requirements + name: Cloud CI Py3.12 Requirements + files: ^requirements/((base|zeromq|pytest)\.txt|static/(pkg/linux\.in|ci/((cloud|common)\.in|py3\.12/cloud\.txt)))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --include=requirements/pytest.txt + - --include=requirements/static/pkg/linux.in + - --include=requirements/static/ci/common.in + - --no-emit-index-url + - requirements/static/ci/cloud.in # <---- Cloud CI Requirements 
-------------------------------------------------------------------------------------- # ----- Doc CI Requirements ---------------------------------------------------------------------------------------> @@ -897,6 +1149,22 @@ repos: - --no-emit-index-url - --platform=linux - requirements/static/ci/docs.in + + - id: pip-tools-compile + alias: compile-doc-requirements + name: Docs CI Py3.12 Requirements + files: ^requirements/((base|zeromq|pytest)\.txt|static/ci/(docs|common|linux)\.in|static/pkg/linux\.in|static/pkg/.*/linux\.txt)$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=linux + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --no-emit-index-url + - requirements/static/ci/docs.in + # <---- Doc CI Requirements ---------------------------------------------------------------------------------------- # ----- Lint CI Requirements --------------------------------------------------------------------------------------> @@ -952,6 +1220,24 @@ repos: - --no-emit-index-url - requirements/static/ci/lint.in + - id: pip-tools-compile + alias: compile-ci-lint-3.12-requirements + name: Lint CI Py3.12 Requirements + files: ^requirements/((base|zeromq)\.txt|static/(pkg/linux\.in|ci/(linux\.in|common\.in|lint\.in|py3\.12/linux\.txt)))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=linux + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --include=requirements/static/pkg/linux.in + - --include=requirements/static/ci/linux.in + - --include=requirements/static/ci/common.in + - --no-emit-index-url + - requirements/static/ci/lint.in + # <---- Lint CI Requirements --------------------------------------------------------------------------------------- # ----- Changelog -------------------------------------------------------------------------------------------------> @@ -1006,46 +1292,21 @@ repos: - --platform=linux - 
--no-emit-index-url - requirements/static/ci/changelog.in + + - id: pip-tools-compile + alias: compile-ci-changelog-3.12-requirements + name: Changelog CI Py3.12 Requirements + files: ^requirements/static/ci/(changelog\.in|py3\.12/(changelog|linux)\.txt)$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=linux + - --no-emit-index-url + - requirements/static/ci/changelog.in # <---- Changelog -------------------------------------------------------------------------------------------------- - # ----- Invoke ----------------------------------------------------------------------------------------------------> - - id: pip-tools-compile - alias: compile-ci-invoke-3.8-requirements - name: Linux CI Py3.8 Invoke Requirements - files: ^requirements/static/ci/(invoke\.in|py3.8/(invoke|linux)\.txt)$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.8 - - --no-emit-index-url - - requirements/static/ci/invoke.in - - - id: pip-tools-compile - alias: compile-ci-invoke-3.9-requirements - name: Linux CI Py3.9 Invoke Requirements - files: ^requirements/static/ci/(invoke\.in|py3.9/(invoke|linux)\.txt)$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.9 - - --no-emit-index-url - - requirements/static/ci/invoke.in - - - id: pip-tools-compile - alias: compile-ci-invoke-3.10-requirements - name: Linux CI Py3.10 Invoke Requirements - files: ^requirements/static/ci/(invoke\.in|py3.10/(invoke|linux)\.txt)$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.10 - - --no-emit-index-url - - requirements/static/ci/invoke.in - # <---- Invoke ----------------------------------------------------------------------------------------------------- - # ----- Tools ----------------------------------------------------------------------------------------------------> - id: pip-tools-compile alias: compile-ci-tools-3.9-requirements @@ -1082,6 +1343,58 @@ repos: - 
--py-version=3.11 - --no-emit-index-url - requirements/static/ci/tools.in + + - id: pip-tools-compile + alias: compile-ci-tools-3.12-requirements + name: Linux CI Py3.12 Tools Requirements + files: ^requirements/static/ci/(tools\.in|py3.12/(tools|linux)\.txt)$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --no-emit-index-url + - requirements/static/ci/tools.in + + - id: pip-tools-compile + alias: compile-ci-tools-virustotal-3.9-requirements + name: Linux CI Py3.9 Tools virustotal Requirements + files: ^requirements/static/ci/(tools(-virustotal)?\.in|py3.9/(tools(-virustotal)?|linux)\.txt)$ + pass_filenames: false + args: + - -v + - --py-version=3.9 + - requirements/static/ci/tools-virustotal.in + + - id: pip-tools-compile + alias: compile-ci-tools-virustotal-3.10-requirements + name: Linux CI Py3.10 Tools virustotal Requirements + files: ^requirements/static/ci/(tools(-virustotal)?\.in|py3.10/(tools(-virustotal)?|linux)\.txt)$ + pass_filenames: false + args: + - -v + - --py-version=3.10 + - requirements/static/ci/tools-virustotal.in + + - id: pip-tools-compile + alias: compile-ci-tools-virustotal-3.11-requirements + name: Linux CI Py3.11 Tools virustotal Requirements + files: ^requirements/static/ci/(tools(-virustotal)?\.in|py3.11/(tools(-virustotal)?|linux)\.txt)$ + pass_filenames: false + args: + - -v + - --py-version=3.11 + - requirements/static/ci/tools-virustotal.in + + - id: pip-tools-compile + alias: compile-ci-tools-virustotal-3.12-requirements + name: Linux CI Py3.12 Tools virustotal Requirements + files: ^requirements/static/ci/(tools(-virustotal)?\.in|py3.12/(tools(-virustotal)?|linux)\.txt)$ + pass_filenames: false + args: + - -v + - --py-version=3.12 + - requirements/static/ci/tools-virustotal.in # <---- Tools ----------------------------------------------------------------------------------------------------- # ----- Code Formatting 
-------------------------------------------------------------------------------------------> @@ -1191,132 +1504,25 @@ repos: # <---- Security --------------------------------------------------------------------------------------------------- # ----- Pre-Commit ------------------------------------------------------------------------------------------------> - - repo: https://github.com/saltstack/invoke-pre-commit - rev: v1.9.0 - hooks: - - id: invoke - alias: check-docs - name: Check Docs - files: ^(salt/.*\.py|doc/ref/.*\.rst)$ - args: - - docs.check - additional_dependencies: - - blessings==1.7 - - pyyaml==6.0.1 - - distro==1.7.0 - - jinja2==3.0.3 - - msgpack==1.0.3 - - packaging - - looseversion - - tornado - - - repo: https://github.com/saltstack/invoke-pre-commit - rev: v1.9.0 - hooks: - - id: invoke - alias: check-filemap - name: Check Filename Map Change Matching - files: ^tests/(filename_map\.yml|.*\.py)$ - pass_filenames: false - args: - - filemap.check - additional_dependencies: - - blessings==1.7 - - pyyaml==6.0.1 - - distro==1.7.0 - - jinja2==3.0.3 - - msgpack==1.0.3 - - packaging - - looseversion - - tornado - - - repo: https://github.com/saltstack/invoke-pre-commit - rev: v1.9.0 - hooks: - - id: invoke - alias: loader-check-virtual - name: Check loader modules __virtual__ - files: salt/.*\.py$ - exclude: > - (?x)^( - templates/.*| - salt/ext/.*| - )$ - args: - - loader.check-virtual - additional_dependencies: - - blessings==1.7 - - pyyaml==6.0.1 - - distro==1.7.0 - - jinja2==3.0.3 - - msgpack==1.0.3 - - packaging - - looseversion - - tornado - - - repo: https://github.com/saltstack/invoke-pre-commit - rev: v1.9.0 - hooks: - - id: invoke - alias: check-docstrings - name: Check docstrings - files: salt/.*\.py$ - exclude: > - (?x)^( - templates/.*| - salt/ext/.*| - )$ - args: - - docstrings.check - additional_dependencies: - - blessings==1.7 - - pyyaml==6.0.1 - - distro==1.7.0 - - jinja2==3.0.3 - - msgpack==1.0.3 - - packaging - - looseversion - - tornado 
- - - repo: https://github.com/saltstack/invoke-pre-commit - rev: v1.9.0 - hooks: - - id: invoke - alias: check-known-missing-docstrings - name: Check Known Missing Docstrings - stages: [manual] - files: salt/.*\.py$ - exclude: > - (?x)^( - templates/.*| - salt/ext/.*| - )$ - args: - - docstrings.check - - --error-on-known-failures - additional_dependencies: - - blessings==1.7 - - pyyaml==6.0.1 - - distro==1.7.0 - - jinja2==3.0.3 - - msgpack==1.0.3 - - packaging - - looseversion - - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.0.0 + rev: v1.3.0 hooks: - id: mypy alias: mypy-tools name: Run mypy against tools files: ^tools/.*\.py$ - #args: [--strict] + exclude: > + (?x)^( + templates/.*| + salt/.*| + )$ additional_dependencies: - attrs - rich - types-attrs - types-pyyaml - types-requests + - python-tools-scripts>=0.18.4 - repo: https://github.com/saltstack/mirrors-nox rev: v2021.6.12 diff --git a/.pylintrc b/.pylintrc index be586e1ed34..3991b5df08e 100644 --- a/.pylintrc +++ b/.pylintrc @@ -698,7 +698,8 @@ allowed-3rd-party-modules=msgpack, ptscripts, packaging, looseversion, - pytestskipmarkers + pytestskipmarkers, + cryptography [EXCEPTIONS] diff --git a/changelog/64374.fixed.md b/changelog/64374.fixed.md new file mode 100644 index 00000000000..31dfc9b1b1d --- /dev/null +++ b/changelog/64374.fixed.md @@ -0,0 +1,6 @@ +Migrated all [`invoke`](https://www.pyinvoke.org/) tasks to [`python-tools-scripts`](https://github.com/s0undt3ch/python-tools-scripts). 
+
+* `tasks/docs.py` -> `tools/precommit/docs.py`
+* `tasks/docstrings.py` -> `tools/precommit/docstrings.py`
+* `tasks/loader.py` -> `tools/precommit/loader.py`
+* `tasks/filemap.py` -> `tools/precommit/filemap.py`
diff --git a/changelog/64455.added.md b/changelog/64455.added.md
new file mode 100644
index 00000000000..8885a93e59f
--- /dev/null
+++ b/changelog/64455.added.md
@@ -0,0 +1 @@
+Added Salt support for Amazon Linux 2023
diff --git a/changelog/64497.fixed.md b/changelog/64497.fixed.md
new file mode 100644
index 00000000000..2d90737562d
--- /dev/null
+++ b/changelog/64497.fixed.md
@@ -0,0 +1,2 @@
+Fixed an issue in the ``file.directory`` state where the ``children_only`` keyword
+argument was not being respected.
diff --git a/changelog/65287.fixed.md b/changelog/65287.fixed.md
new file mode 100644
index 00000000000..e075d251820
--- /dev/null
+++ b/changelog/65287.fixed.md
@@ -0,0 +1 @@
+Use ``sha256`` as the default ``hash_type``. It has been the default since Salt v2016.9
diff --git a/changelog/65288.fixed.md b/changelog/65288.fixed.md
new file mode 100644
index 00000000000..88581243382
--- /dev/null
+++ b/changelog/65288.fixed.md
@@ -0,0 +1 @@
+Preserve ownership on log rotation
diff --git a/changelog/65316.fixed.md b/changelog/65316.fixed.md
new file mode 100644
index 00000000000..f5f9e197e30
--- /dev/null
+++ b/changelog/65316.fixed.md
@@ -0,0 +1,4 @@
+Upgrade relenv to 0.14.2
+ - Update openssl to address CVE-2023-5363.
+ - Fix bug in openssl setup when openssl binary can't be found.
+ - Add M1 mac support.
diff --git a/changelog/65340.fixed.md b/changelog/65340.fixed.md
new file mode 100644
index 00000000000..ed26da9f3cd
--- /dev/null
+++ b/changelog/65340.fixed.md
@@ -0,0 +1 @@
+Fix regex for filespec adding/deleting fcontext policy in selinux
diff --git a/changelog/65358.fixed.md b/changelog/65358.fixed.md
new file mode 100644
index 00000000000..9a9acc31b4d
--- /dev/null
+++ b/changelog/65358.fixed.md
@@ -0,0 +1 @@
+Ensure CLI options take priority over Saltfile options
diff --git a/changelog/65400.fixed.md b/changelog/65400.fixed.md
new file mode 100644
index 00000000000..ae21abac9fe
--- /dev/null
+++ b/changelog/65400.fixed.md
@@ -0,0 +1 @@
+Client only processes events whose tag conforms to an event return.
diff --git a/changelog/65464.fixed.md b/changelog/65464.fixed.md
new file mode 100644
index 00000000000..a931b6a6445
--- /dev/null
+++ b/changelog/65464.fixed.md
@@ -0,0 +1 @@
+Publish channel connect callback method properly closes its request channel.
diff --git a/changelog/65554.fixed.md b/changelog/65554.fixed.md
new file mode 100644
index 00000000000..6d1598217e3
--- /dev/null
+++ b/changelog/65554.fixed.md
@@ -0,0 +1 @@
+Warn when an un-closed transport client is being garbage collected.
diff --git a/changelog/65581.fixed.md b/changelog/65581.fixed.md
new file mode 100644
index 00000000000..3ac7427b698
--- /dev/null
+++ b/changelog/65581.fixed.md
@@ -0,0 +1 @@
+Only generate the HMACs for ``libssl.so.1.1`` and ``libcrypto.so.1.1`` if those files exist.
diff --git a/changelog/65584.fixed.md b/changelog/65584.fixed.md new file mode 100644 index 00000000000..1da48b32bb0 --- /dev/null +++ b/changelog/65584.fixed.md @@ -0,0 +1,2 @@ +Fixed an issue where Salt Cloud would fail if it could not delete lingering +PAexec binaries diff --git a/cicd/shared-gh-workflows-context.yml b/cicd/shared-gh-workflows-context.yml index ead7ea09a2b..f30b579e934 100644 --- a/cicd/shared-gh-workflows-context.yml +++ b/cicd/shared-gh-workflows-context.yml @@ -1,5 +1,5 @@ nox_version: "2022.8.7" python_version: "3.10.13" -relenv_version: "0.13.11" +relenv_version: "0.14.2" release-branches: - "3006.x" diff --git a/noxfile.py b/noxfile.py index a99619fd086..74ad822b7c3 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1552,7 +1552,7 @@ def lint_salt(session): paths = session.posargs else: # TBD replace paths entries when implement pyproject.toml - paths = ["setup.py", "noxfile.py", "salt/", "tasks/"] + paths = ["setup.py", "noxfile.py", "salt/"] _lint(session, ".pylintrc", flags, paths) @@ -1694,37 +1694,6 @@ def docs_man(session, compress, update, clean): os.chdir("..") -@nox.session(name="invoke", python="3") -def invoke(session): - """ - Run invoke tasks - """ - if _upgrade_pip_setuptools_and_wheel(session): - _install_requirements(session) - requirements_file = os.path.join( - "requirements", "static", "ci", _get_pydir(session), "invoke.txt" - ) - install_command = ["--progress-bar=off", "-r", requirements_file] - session.install(*install_command, silent=PIP_INSTALL_SILENT) - - cmd = ["inv"] - files = [] - - # Unfortunately, invoke doesn't support the nargs functionality like argpase does. 
- # Let's make it behave properly - for idx, posarg in enumerate(session.posargs): - if idx == 0: - cmd.append(posarg) - continue - if posarg.startswith("--"): - cmd.append(posarg) - continue - files.append(posarg) - if files: - cmd.append("--files={}".format(" ".join(files))) - session.run(*cmd) - - @nox.session(name="changelog", python="3") @nox.parametrize("draft", [False, True]) @nox.parametrize("force", [False, True]) @@ -1925,10 +1894,6 @@ def ci_test_onedir_pkgs(session): chunk = session.posargs.pop(0) cmd_args = chunks[chunk] - junit_report_filename = f"test-results-{chunk}" - runtests_log_filename = f"runtests-{chunk}" - - pydir = _get_pydir(session) if IS_LINUX: # Fetch the toolchain @@ -1950,12 +1915,39 @@ def ci_test_onedir_pkgs(session): + [ "-c", str(REPO_ROOT / "pkg-tests-pytest.ini"), - f"--junitxml=artifacts/xml-unittests-output/{junit_report_filename}.xml", - f"--log-file=artifacts/logs/{runtests_log_filename}.log", + f"--junitxml=artifacts/xml-unittests-output/test-results-{chunk}.xml", + f"--log-file=artifacts/logs/runtests-{chunk}.log", ] + session.posargs ) - _pytest(session, coverage=False, cmd_args=pytest_args, env=env) + try: + _pytest(session, coverage=False, cmd_args=pytest_args, env=env) + except CommandFailed: + + # Don't print the system information, not the test selection on reruns + global PRINT_TEST_SELECTION + global PRINT_SYSTEM_INFO + PRINT_TEST_SELECTION = False + PRINT_SYSTEM_INFO = False + + pytest_args = ( + cmd_args[:] + + [ + "-c", + str(REPO_ROOT / "pkg-tests-pytest.ini"), + f"--junitxml=artifacts/xml-unittests-output/test-results-{chunk}-rerun.xml", + f"--log-file=artifacts/logs/runtests-{chunk}-rerun.log", + "--lf", + ] + + session.posargs + ) + _pytest( + session, + coverage=False, + cmd_args=pytest_args, + env=env, + on_rerun=True, + ) if chunk not in ("install", "download-pkgs"): cmd_args = chunks["install"] @@ -1965,8 +1957,8 @@ def ci_test_onedir_pkgs(session): "-c", str(REPO_ROOT / "pkg-tests-pytest.ini"), 
"--no-install", - f"--junitxml=artifacts/xml-unittests-output/{junit_report_filename}.xml", - f"--log-file=artifacts/logs/{runtests_log_filename}.log", + f"--junitxml=artifacts/xml-unittests-output/test-results-install.xml", + f"--log-file=artifacts/logs/runtests-install.log", ] + session.posargs ) @@ -1974,5 +1966,31 @@ def ci_test_onedir_pkgs(session): pytest_args.append("--use-prev-version") if chunk in ("upgrade-classic", "downgrade-classic"): pytest_args.append("--classic") - _pytest(session, coverage=False, cmd_args=pytest_args, env=env) + try: + _pytest(session, coverage=False, cmd_args=pytest_args, env=env) + except CommandFailed: + cmd_args = chunks["install"] + pytest_args = ( + cmd_args[:] + + [ + "-c", + str(REPO_ROOT / "pkg-tests-pytest.ini"), + "--no-install", + f"--junitxml=artifacts/xml-unittests-output/test-results-install-rerun.xml", + f"--log-file=artifacts/logs/runtests-install-rerun.log", + "--lf", + ] + + session.posargs + ) + if "downgrade" in chunk: + pytest_args.append("--use-prev-version") + if chunk in ("upgrade-classic", "downgrade-classic"): + pytest_args.append("--classic") + _pytest( + session, + coverage=False, + cmd_args=pytest_args, + env=env, + on_rerun=True, + ) sys.exit(0) diff --git a/pkg/common/logrotate/salt-common b/pkg/common/logrotate/salt-common index 1bc063ebfdb..875c17e0cc6 100644 --- a/pkg/common/logrotate/salt-common +++ b/pkg/common/logrotate/salt-common @@ -4,7 +4,7 @@ rotate 7 compress notifempty - create 0640 salt salt + create 0640 } /var/log/salt/minion { @@ -13,6 +13,7 @@ rotate 7 compress notifempty + create 0640 } /var/log/salt/key { @@ -21,7 +22,7 @@ rotate 7 compress notifempty - create 0640 salt salt + create 0640 } /var/log/salt/api { @@ -30,7 +31,7 @@ rotate 7 compress notifempty - create 0640 salt salt + create 0640 } /var/log/salt/syndic { @@ -39,6 +40,7 @@ rotate 7 compress notifempty + create 0640 } /var/log/salt/proxy { @@ -47,4 +49,5 @@ rotate 7 compress notifempty + create 0640 } diff --git 
a/pkg/rpm/salt.spec b/pkg/rpm/salt.spec index 4659c9fd343..1e9c31f08e4 100644 --- a/pkg/rpm/salt.spec +++ b/pkg/rpm/salt.spec @@ -463,8 +463,12 @@ if [ $1 -lt 2 ]; then # ensure hmac are up to date, master or minion, rest install one or the other # key used is from openssl/crypto/fips/fips_standalone_hmac.c openssl 1.1.1k if [ $(cat /etc/os-release | grep VERSION_ID | cut -d '=' -f 2 | sed 's/\"//g' | cut -d '.' -f 1) = "8" ]; then - /bin/openssl sha256 -r -hmac orboDeJITITejsirpADONivirpUkvarP /opt/saltstack/salt/lib/libssl.so.1.1 | cut -d ' ' -f 1 > /opt/saltstack/salt/lib/.libssl.so.1.1.hmac || : - /bin/openssl sha256 -r -hmac orboDeJITITejsirpADONivirpUkvarP /opt/saltstack/salt/lib/libcrypto.so.1.1 | cut -d ' ' -f 1 > /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac || : + if [ -e /opt/saltstack/salt/lib/libssl.so.1.1 ]; then + /bin/openssl sha256 -r -hmac orboDeJITITejsirpADONivirpUkvarP /opt/saltstack/salt/lib/libssl.so.1.1 | cut -d ' ' -f 1 > /opt/saltstack/salt/lib/.libssl.so.1.1.hmac || : + fi + if [ -e /opt/saltstack/salt/lib/libcrypto.so.1.1 ]; then + /bin/openssl sha256 -r -hmac orboDeJITITejsirpADONivirpUkvarP /opt/saltstack/salt/lib/libcrypto.so.1.1 | cut -d ' ' -f 1 > /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac || : + fi fi fi @@ -482,8 +486,12 @@ if [ $1 -lt 2 ]; then # ensure hmac are up to date, master or minion, rest install one or the other # key used is from openssl/crypto/fips/fips_standalone_hmac.c openssl 1.1.1k if [ $(cat /etc/os-release | grep VERSION_ID | cut -d '=' -f 2 | sed 's/\"//g' | cut -d '.' 
-f 1) = "8" ]; then - /bin/openssl sha256 -r -hmac orboDeJITITejsirpADONivirpUkvarP /opt/saltstack/salt/lib/libssl.so.1.1 | cut -d ' ' -f 1 > /opt/saltstack/salt/lib/.libssl.so.1.1.hmac || : - /bin/openssl sha256 -r -hmac orboDeJITITejsirpADONivirpUkvarP /opt/saltstack/salt/lib/libcrypto.so.1.1 | cut -d ' ' -f 1 > /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac || : + if [ -e /opt/saltstack/salt/lib/libssl.so.1.1 ]; then + /bin/openssl sha256 -r -hmac orboDeJITITejsirpADONivirpUkvarP /opt/saltstack/salt/lib/libssl.so.1.1 | cut -d ' ' -f 1 > /opt/saltstack/salt/lib/.libssl.so.1.1.hmac || : + fi + if [ -e /opt/saltstack/salt/lib/libcrypto.so.1.1 ]; then + /bin/openssl sha256 -r -hmac orboDeJITITejsirpADONivirpUkvarP /opt/saltstack/salt/lib/libcrypto.so.1.1 | cut -d ' ' -f 1 > /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac || : + fi fi fi @@ -537,8 +545,12 @@ if [ $1 -eq 0 ]; then if [ $(cat /etc/os-release | grep VERSION_ID | cut -d '=' -f 2 | sed 's/\"//g' | cut -d '.' -f 1) = "8" ]; then if [ -z "$(rpm -qi salt-minion | grep Name | grep salt-minion)" ]; then # uninstall and no minion running - /bin/rm -f /opt/saltstack/salt/lib/.libssl.so.1.1.hmac || : - /bin/rm -f /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac || : + if [ -e /opt/saltstack/salt/lib/.libssl.so.1.1.hmac ]; then + /bin/rm -f /opt/saltstack/salt/lib/.libssl.so.1.1.hmac || : + fi + if [ -e /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac ]; then + /bin/rm -f /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac || : + fi fi fi fi @@ -552,8 +564,12 @@ if [ $1 -eq 0 ]; then if [ $(cat /etc/os-release | grep VERSION_ID | cut -d '=' -f 2 | sed 's/\"//g' | cut -d '.' 
-f 1) = "8" ]; then if [ -z "$(rpm -qi salt-master | grep Name | grep salt-master)" ]; then # uninstall and no master running - /bin/rm -f /opt/saltstack/salt/lib/.libssl.so.1.1.hmac || : - /bin/rm -f /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac || : + if [ -e /opt/saltstack/salt/lib/.libssl.so.1.1.hmac ]; then + /bin/rm -f /opt/saltstack/salt/lib/.libssl.so.1.1.hmac || : + fi + if [ -e /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac ]; then + /bin/rm -f /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac || : + fi fi fi fi diff --git a/pkg/tests/conftest.py b/pkg/tests/conftest.py index 36c60b0e57b..d550a118100 100644 --- a/pkg/tests/conftest.py +++ b/pkg/tests/conftest.py @@ -25,6 +25,9 @@ from tests.support.sminion import create_sminion log = logging.getLogger(__name__) +# Variable defining a FIPS test run or not +FIPS_TESTRUN = os.environ.get("FIPS_TESTRUN", "0") == "1" + @pytest.fixture(scope="session") def version(install_salt): @@ -336,6 +339,8 @@ def salt_master(salt_factories, install_salt, state_tree, pillar_tree): "rest_cherrypy": {"port": 8000, "disable_ssl": True}, "netapi_enable_clients": ["local"], "external_auth": {"auto": {"saltdev": [".*"]}}, + "fips_mode": FIPS_TESTRUN, + "open_mode": True, } test_user = False master_config = install_salt.config_path / "master" @@ -396,7 +401,6 @@ def salt_master(salt_factories, install_salt, state_tree, pillar_tree): scripts_dir = salt_factories.root_dir / "Scripts" scripts_dir.mkdir(exist_ok=True) salt_factories.scripts_dir = scripts_dir - config_overrides["open_mode"] = True python_executable = install_salt.bin_dir / "Scripts" / "python.exe" if install_salt.classic: python_executable = install_salt.bin_dir / "python.exe" @@ -469,6 +473,8 @@ def salt_minion(salt_factories, salt_master, install_salt): "id": minion_id, "file_roots": salt_master.config["file_roots"].copy(), "pillar_roots": salt_master.config["pillar_roots"].copy(), + "fips_mode": FIPS_TESTRUN, + "open_mode": True, } if platform.is_windows(): 
config_overrides[ diff --git a/pkg/tests/download/test_pkg_download.py b/pkg/tests/download/test_pkg_download.py index c3cd24a8e66..81542ec4583 100644 --- a/pkg/tests/download/test_pkg_download.py +++ b/pkg/tests/download/test_pkg_download.py @@ -255,8 +255,6 @@ def setup_redhat_family( repo_subpath, ): arch = os.environ.get("SALT_REPO_ARCH") or "x86_64" - if arch == "aarch64": - arch = "arm64" if repo_subpath == "minor": repo_url_base = ( diff --git a/pkg/tests/integration/test_pkg.py b/pkg/tests/integration/test_pkg.py index 5aedefa6ef1..bb84e5b9e27 100644 --- a/pkg/tests/integration/test_pkg.py +++ b/pkg/tests/integration/test_pkg.py @@ -1,4 +1,5 @@ import sys +import time import pytest @@ -8,12 +9,22 @@ def pkg_name(salt_call_cli, grains): if sys.platform.startswith("win"): ret = salt_call_cli.run("--local", "winrepo.update_git_repos") assert ret.returncode == 0 - ret = salt_call_cli.run("--local", "pkg.refresh_db") - assert ret.returncode == 0 + attempts = 3 + while attempts: + attempts -= 1 + ret = salt_call_cli.run("--local", "pkg.refresh_db") + if ret.returncode: + time.sleep(5) + continue + break + else: + pytest.fail("Failed to run 'pkg.refresh_db' 3 times.") return "putty" elif grains["os_family"] == "RedHat": if grains["os"] == "VMware Photon OS": return "snoopy" + elif grains["osfinger"] == "Amazon Linux-2023": + return "dnf-utils" return "units" elif grains["os_family"] == "Debian": return "ifenslave" diff --git a/pkg/tests/integration/test_salt_user.py b/pkg/tests/integration/test_salt_user.py index 9d2634962be..f785c6854d2 100644 --- a/pkg/tests/integration/test_salt_user.py +++ b/pkg/tests/integration/test_salt_user.py @@ -6,6 +6,7 @@ import sys import packaging.version import psutil import pytest +from saltfactories.utils.tempfiles import temp_directory pytestmark = [ pytest.mark.skip_on_windows, @@ -135,9 +136,9 @@ def test_pkg_paths( Test package paths ownership """ if packaging.version.parse(install_salt.version) <= packaging.version.parse( - 
"3006.2" + "3006.4" ): - pytest.skip("Package path ownership was changed in salt 3006.3") + pytest.skip("Package path ownership was changed in salt 3006.4") salt_user_subdirs = [] for _path in pkg_paths: pkg_path = pathlib.Path(_path) @@ -170,3 +171,189 @@ def test_pkg_paths( else: assert file_path.owner() == "root" assert file_path.group() == "root" + + +@pytest.mark.skip_if_binaries_missing("logrotate") +def test_paths_log_rotation( + salt_master, salt_minion, salt_call_cli, install_salt, test_account +): + """ + Test the correct ownership is assigned when log rotation occurs + Change the user in the Salt Master, chage ownership, force logrotation + Check ownership and premissions. + Assumes test_pkg_paths successful + """ + if packaging.version.parse(install_salt.version) <= packaging.version.parse( + "3006.4" + ): + pytest.skip("Package path ownership was changed in salt 3006.4") + + if install_salt.distro_id not in ("centos", "redhat", "amzn", "fedora"): + pytest.skip( + "Only tests RedHat family packages till logrotation paths are resolved on Ubuntu/Debian, see issue 65231" + ) + + # check that the salt_master is running + assert salt_master.is_running() + match = False + for proc in psutil.Process(salt_master.pid).children(): + assert proc.username() == "salt" + match = True + + assert match + + # Paths created by package installs with adjustment for current conf_dir /etc/salt + log_pkg_paths = [ + install_salt.conf_dir, # "bkup0" + "/var/cache/salt", # "bkup1" + "/var/log/salt", # "bkup2" + "/var/run/salt", # "bkup3" + "/opt/saltstack/salt", # "bkup4" + ] + + # backup those about to change + bkup_count = 0 + bkup_count_max = 5 + with temp_directory("bkup0") as temp_dir_path_0: + with temp_directory("bkup1") as temp_dir_path_1: + with temp_directory("bkup2") as temp_dir_path_2: + with temp_directory("bkup3") as temp_dir_path_3: + with temp_directory("bkup4") as temp_dir_path_4: + + assert temp_dir_path_0.is_dir() + assert temp_dir_path_1.is_dir() + assert 
temp_dir_path_2.is_dir() + assert temp_dir_path_3.is_dir() + assert temp_dir_path_4.is_dir() + + # stop the salt_master, so can change user + with salt_master.stopped(): + assert salt_master.is_running() is False + + for _path in log_pkg_paths: + if bkup_count == 0: + cmd_to_run = ( + f"cp -a {_path}/* {str(temp_dir_path_0)}/" + ) + elif bkup_count == 1: + cmd_to_run = ( + f"cp -a {_path}/* {str(temp_dir_path_1)}/" + ) + elif bkup_count == 2: + cmd_to_run = ( + f"cp -a {_path}/* {str(temp_dir_path_2)}/" + ) + elif bkup_count == 3: + cmd_to_run = ( + f"cp -a {_path}/* {str(temp_dir_path_3)}/" + ) + elif bkup_count == 4: + cmd_to_run = ( + f"cp -a {_path}/* {str(temp_dir_path_4)}/" + ) + elif bkup_count > 5: + assert bkupcount < bkup_count_max # force assertion + + ret = salt_call_cli.run( + "--local", "cmd.run", cmd_to_run + ) + bkup_count += 1 + assert ret.returncode == 0 + + # change the user in the master's config file. + ret = salt_call_cli.run( + "--local", + "file.replace", + f"{install_salt.conf_dir}/master", + "user: salt", + f"user: {test_account.username}", + "flags=['IGNORECASE']", + "append_if_not_found=True", + ) + assert ret.returncode == 0 + + # change ownership of appropriate paths to user + for _path in log_pkg_paths: + chg_ownership_cmd = ( + f"chown -R {test_account.username} {_path}" + ) + ret = salt_call_cli.run( + "--local", "cmd.run", chg_ownership_cmd + ) + assert ret.returncode == 0 + + # restart the salt_master + with salt_master.started(): + assert salt_master.is_running() is True + + # ensure some data in files + log_files_list = [ + "/var/log/salt/api", + "/var/log/salt/key", + "/var/log/salt/master", + ] + for _path in log_files_list: + log_path = pathlib.Path(_path) + assert log_path.exists() + with log_path.open("a") as f: + f.write("This is a log rotation test\n") + + # force log rotation + logr_conf_file = "/etc/logrotate.d/salt" + logr_conf_path = pathlib.Path(logr_conf_file) + if not logr_conf_path.exists(): + logr_conf_file = 
"/etc/logrotate.conf" + logr_conf_path = pathlib.Path(logr_conf_file) + assert logr_conf_path.exists() + + # force log rotation + log_rotate_cmd = f"logrotate -f {logr_conf_file}" + ret = salt_call_cli.run( + "--local", "cmd.run", log_rotate_cmd + ) + assert ret.returncode == 0 + + for _path in log_files_list: + log_path = pathlib.Path(_path) + assert log_path.exists() + assert log_path.owner() == test_account.username + assert log_path.stat().st_mode & 0o7777 == 0o640 + + # cleanup + assert salt_master.is_running() is False + + # change the user in the master's config file. + ret = salt_call_cli.run( + "--local", + "file.replace", + f"{install_salt.conf_dir}/master", + f"user: {test_account.username}", + "user: salt", + "flags=['IGNORECASE']", + "append_if_not_found=True", + ) + assert ret.returncode == 0 + + # restore from backed up + bkup_count = 0 + for _path in log_pkg_paths: + if bkup_count == 0: + cmd_to_run = f"cp -a --force {str(temp_dir_path_0)}/* {_path}/" + elif bkup_count == 1: + cmd_to_run = f"cp -a --force {str(temp_dir_path_1)}/* {_path}/" + elif bkup_count == 2: + cmd_to_run = f"cp -a --force {str(temp_dir_path_2)}/* {_path}/" + elif bkup_count == 3: + cmd_to_run = f"cp -a --force {str(temp_dir_path_3)}/* {_path}/" + elif bkup_count == 4: + # use --update since /opt/saltstack/salt and would get SIGSEGV since mucking with running code + cmd_to_run = f"cp -a --update --force {str(temp_dir_path_4)}/* {_path}/" + elif bkup_count > 5: + assert bkupcount < bkup_count_max # force assertion + + ret = salt_call_cli.run( + "--local", "cmd.run", cmd_to_run + ) + + bkup_count += 1 + assert ret.returncode == 0 diff --git a/pkg/tests/integration/test_systemd_config.py b/pkg/tests/integration/test_systemd_config.py index 05a4c852cb6..6c530b51db2 100644 --- a/pkg/tests/integration/test_systemd_config.py +++ b/pkg/tests/integration/test_systemd_config.py @@ -7,17 +7,13 @@ pytestmark = [ ] -def test_system_config(salt_cli, salt_minion): 
+@pytest.mark.usefixtures("salt_minion") +def test_system_config(grains): """ Test system config """ - get_family = salt_cli.run("grains.get", "os_family", minion_tgt=salt_minion.id) - assert get_family.returncode == 0 - get_finger = salt_cli.run("grains.get", "osfinger", minion_tgt=salt_minion.id) - assert get_finger.returncode == 0 - - if get_family.data == "RedHat": - if get_finger.data in ( + if grains["os_family"] == "RedHat": + if grains["osfinger"] in ( "CentOS Stream-8", "CentOS Linux-8", "CentOS Stream-9", @@ -25,25 +21,22 @@ def test_system_config(salt_cli, salt_minion): "VMware Photon OS-3", "VMware Photon OS-4", "VMware Photon OS-5", + "Amazon Linux-2023", ): - ret = subprocess.call( - "systemctl show -p ${config} salt-minion.service", shell=True - ) - assert ret == 0 + expected_retcode = 0 else: - ret = subprocess.call( - "systemctl show -p ${config} salt-minion.service", shell=True - ) - assert ret == 1 + expected_retcode = 1 + ret = subprocess.call( + "systemctl show -p ${config} salt-minion.service", shell=True + ) + assert ret == expected_retcode - elif "Debian" in get_family.stdout: - if "Debian-9" in get_finger.stdout: - ret = subprocess.call( - "systemctl show -p ${config} salt-minion.service", shell=True - ) - assert ret == 1 + elif grains["os_family"] == "Debian": + if grains["osfinger"] == "Debian-9": + expected_retcode = 1 else: - ret = subprocess.call( - "systemctl show -p ${config} salt-minion.service", shell=True - ) - assert ret == 0 + expected_retcode = 0 + ret = subprocess.call( + "systemctl show -p ${config} salt-minion.service", shell=True + ) + assert ret == expected_retcode diff --git a/pkg/tests/integration/test_version.py b/pkg/tests/integration/test_version.py index c0b3d8e2d28..eec69554783 100644 --- a/pkg/tests/integration/test_version.py +++ b/pkg/tests/integration/test_version.py @@ -1,5 +1,6 @@ import os.path import pathlib +import re import subprocess import pytest @@ -42,7 +43,22 @@ def 
test_salt_versions_report_minion(salt_cli, salt_minion): """ Test running test.versions_report on minion """ - ret = salt_cli.run("test.versions_report", minion_tgt=salt_minion.id) + # Make sure the minion is running + assert salt_minion.is_running() + # Make sure we can ping the minion ... + ret = salt_cli.run( + "--timeout=240", "test.ping", minion_tgt=salt_minion.id, _timeout=240 + ) + assert ret.returncode == 0 + assert ret.data is True + ret = salt_cli.run( + "--hard-crash", + "--failhard", + "--timeout=240", + "test.versions_report", + minion_tgt=salt_minion.id, + _timeout=240, + ) ret.stdout.matcher.fnmatch_lines(["*Salt Version:*"]) @@ -109,14 +125,14 @@ def test_compare_pkg_versions_redhat_rc(version, install_salt): package of the same version. For example, v3004~rc1 should be less than v3004. """ - if install_salt.distro_id not in ("centos", "redhat", "amzn", "fedora"): + if install_salt.distro_id not in ("centos", "redhat", "amzn", "fedora", "photon"): pytest.skip("Only tests rpm packages") pkg = [x for x in install_salt.pkgs if "rpm" in x] if not pkg: pytest.skip("Not testing rpm packages") pkg = pkg[0].split("/")[-1] - if "rc" not in pkg: + if "rc" not in ".".join(pkg.split(".")[:2]): pytest.skip("Not testing an RC package") assert "~" in pkg comp_pkg = pkg.split("~")[0] diff --git a/pkg/tests/support/helpers.py b/pkg/tests/support/helpers.py index d6e13da9e37..130a82987a5 100644 --- a/pkg/tests/support/helpers.py +++ b/pkg/tests/support/helpers.py @@ -78,20 +78,21 @@ class SaltPkgInstall: distro_name: str = attr.ib(init=False) distro_version: str = attr.ib(init=False) - # Package (and management) metadata - pkg_mngr: str = attr.ib(init=False) - rm_pkg: str = attr.ib(init=False) - salt_pkgs: List[str] = attr.ib(init=False) - pkgs: List[str] = attr.ib(factory=list) - file_ext: bool = attr.ib(default=None) - relenv: bool = attr.ib(default=True) - # Version information prev_version: str = attr.ib() use_prev_version: str = attr.ib() artifact_version: str = 
attr.ib(init=False) version: str = attr.ib(init=False) + # Package (and management) metadata + pkg_mngr: str = attr.ib(init=False) + rm_pkg: str = attr.ib(init=False) + dbg_pkg: str = attr.ib(init=False) + salt_pkgs: List[str] = attr.ib(init=False) + pkgs: List[str] = attr.ib(factory=list) + file_ext: bool = attr.ib(default=None) + relenv: bool = attr.ib(default=True) + @proc.default def _default_proc(self): return Subprocess() @@ -106,11 +107,16 @@ class SaltPkgInstall: @distro_name.default def _default_distro_name(self): - if distro.name(): - return distro.name().split()[0].lower() + name = distro.name() + if name: + if "vmware" in name.lower(): + return name.split()[1].lower() + return name.split()[0].lower() @distro_version.default def _default_distro_version(self): + if self.distro_name == "photon": + return distro.version().split(".")[0] return distro.version().lower() @pkg_mngr.default @@ -129,6 +135,15 @@ class SaltPkgInstall: elif self.distro_id in ("ubuntu", "debian"): return "purge" + @dbg_pkg.default + def _default_dbg_pkg(self): + dbg_pkg = None + if self.distro_id in ("centos", "redhat", "amzn", "fedora", "photon"): + dbg_pkg = "salt-debuginfo" + elif self.distro_id in ("ubuntu", "debian"): + dbg_pkg = "salt-dbg" + return dbg_pkg + @salt_pkgs.default def _default_salt_pkgs(self): salt_pkgs = [ @@ -143,6 +158,9 @@ class SaltPkgInstall: salt_pkgs.append("salt") elif self.distro_id in ("ubuntu", "debian"): salt_pkgs.append("salt-common") + if packaging.version.parse(self.version) >= packaging.version.parse("3006.3"): + if self.dbg_pkg: + salt_pkgs.append(self.dbg_pkg) return salt_pkgs @install_dir.default @@ -445,9 +463,14 @@ class SaltPkgInstall: ] log.info("Installing packages:\n%s", pprint.pformat(self.pkgs)) args = extra_args + self.pkgs + upgrade_cmd = "upgrade" + if self.distro_id == "photon": + # tdnf does not detect nightly build versions to be higher version + # than release versions + upgrade_cmd = "install" ret = self.proc.run( self.pkg_mngr, 
- "upgrade", + upgrade_cmd, "-y", *args, _timeout=120, @@ -511,7 +534,14 @@ class SaltPkgInstall: if self.classic: root_url = "py3/" - if self.distro_name in ["redhat", "centos", "amazon", "fedora", "vmware"]: + if self.distro_name in [ + "redhat", + "centos", + "amazon", + "fedora", + "vmware", + "photon", + ]: # Removing EPEL repo files for fp in pathlib.Path("/etc", "yum.repos.d").glob("epel*"): fp.unlink() @@ -522,7 +552,12 @@ class SaltPkgInstall: gpg_key = "SALT-PROJECT-GPG-PUBKEY-2023.pub" if platform.is_aarch64(): - arch = "aarch64" + arch = "arm64" + # Starting with 3006.5, we prioritize the aarch64 repo paths for rpm-based distros + if packaging.version.parse( + self.prev_version + ) >= packaging.version.parse("3006.5"): + arch = "aarch64" else: arch = "x86_64" ret = self.proc.run( @@ -535,7 +570,11 @@ class SaltPkgInstall: f"https://repo.saltproject.io/{root_url}{distro_name}/{self.distro_version}/{arch}/{major_ver}.repo", f"/etc/yum.repos.d/salt-{distro_name}.repo", ) - ret = self.proc.run(self.pkg_mngr, "clean", "expire-cache") + if self.distro_name == "photon": + # yum version on photon doesn't support expire-cache + ret = self.proc.run(self.pkg_mngr, "clean", "all") + else: + ret = self.proc.run(self.pkg_mngr, "clean", "expire-cache") self._check_retcode(ret) cmd_action = "downgrade" if downgrade else "install" pkgs_to_install = self.salt_pkgs.copy() @@ -549,6 +588,11 @@ class SaltPkgInstall: idx = list_ret.index("Available Packages") old_ver = list_ret[idx + 1].split()[1] pkgs_to_install = [f"{pkg}-{old_ver}" for pkg in pkgs_to_install] + if self.dbg_pkg: + # self.dbg_pkg does not exist on classic packages + dbg_exists = [x for x in pkgs_to_install if self.dbg_pkg in x] + if dbg_exists: + pkgs_to_install.remove(dbg_exists[0]) cmd_action = "install" ret = self.proc.run( self.pkg_mngr, diff --git a/requirements/pytest.txt b/requirements/pytest.txt index cafa2ec25a1..c497736194f 100644 --- a/requirements/pytest.txt +++ b/requirements/pytest.txt @@ -2,7 
+2,7 @@ mock >= 3.0.0 # PyTest docker pytest >= 7.2.0 -pytest-salt-factories >= 1.0.0rc27 +pytest-salt-factories >= 1.0.0rc28 pytest-helpers-namespace >= 2019.1.8 pytest-subtests pytest-timeout diff --git a/requirements/static/ci/invoke.in b/requirements/static/ci/invoke.in deleted file mode 100644 index 4b924892386..00000000000 --- a/requirements/static/ci/invoke.in +++ /dev/null @@ -1,5 +0,0 @@ ---constraint=./py{py_version}/{platform}.txt - -invoke -blessings -pyyaml diff --git a/requirements/static/ci/py3.10/changelog.txt b/requirements/static/ci/py3.10/changelog.txt index cdae8363e34..45d0f5a6902 100644 --- a/requirements/static/ci/py3.10/changelog.txt +++ b/requirements/static/ci/py3.10/changelog.txt @@ -17,11 +17,11 @@ jinja2==3.1.2 # via # -c requirements/static/ci/py3.10/linux.txt # towncrier -looseversion==1.2.0 +looseversion==1.3.0 # via # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/ci/changelog.in -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -c requirements/static/ci/py3.10/linux.txt # jinja2 diff --git a/requirements/static/ci/py3.10/cloud.txt b/requirements/static/ci/py3.10/cloud.txt index c872cedd0dd..07329e10ee2 100644 --- a/requirements/static/ci/py3.10/cloud.txt +++ b/requirements/static/ci/py3.10/cloud.txt @@ -12,7 +12,7 @@ certifi==2023.07.22 # via # -c requirements/static/ci/py3.10/linux.txt # requests -cffi==1.15.1 +cffi==1.14.6 # via # -c requirements/static/ci/py3.10/linux.txt # cryptography @@ -20,7 +20,7 @@ charset-normalizer==3.2.0 # via # -c requirements/static/ci/py3.10/linux.txt # requests -cryptography==41.0.4 +cryptography==41.0.5 # via # -c requirements/static/ci/py3.10/linux.txt # pyspnego diff --git a/requirements/static/ci/py3.10/darwin.txt b/requirements/static/ci/py3.10/darwin.txt index 956d3bf522b..c663fa55ae3 100644 --- a/requirements/static/ci/py3.10/darwin.txt +++ b/requirements/static/ci/py3.10/darwin.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url 
--output-file=requirements/static/ci/py3.10/darwin.txt requirements/darwin.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/darwin.in requirements/static/pkg/darwin.in # -aiohttp==3.8.5 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.3.1 # via aiohttp @@ -56,7 +56,7 @@ certifi==2023.07.22 # requests certvalidator==0.11.1 # via vcert -cffi==1.15.1 +cffi==1.14.6 # via # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/static/ci/common.in @@ -67,7 +67,6 @@ cffi==1.15.1 charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.10/darwin.txt - # aiohttp # requests cheetah3==3.2.6.post1 # via -r requirements/static/ci/common.in @@ -90,7 +89,7 @@ contextvars==2.4 # -r requirements/base.txt croniter==1.3.15 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==41.0.4 +cryptography==41.0.5 # via # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/base.txt @@ -99,7 +98,7 @@ cryptography==41.0.4 # paramiko # pyopenssl # vcert -distlib==0.3.6 +distlib==0.3.7 # via virtualenv distro==1.8.0 # via @@ -116,7 +115,7 @@ etcd3-py==0.1.6 # via -r requirements/static/ci/common.in exceptiongroup==1.1.1 # via pytest -filelock==3.12.4 +filelock==3.13.1 # via virtualenv flaky==3.7.0 # via -r requirements/pytest.txt @@ -208,7 +207,7 @@ keyring==5.7.1 # via -r requirements/static/ci/common.in kubernetes==3.0.0 # via -r requirements/static/ci/common.in -looseversion==1.2.0 +looseversion==1.3.0 # via # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/base.txt @@ -220,7 +219,7 @@ lxml==4.9.2 # xmldiff mako==1.2.4 # via -r requirements/static/ci/common.in -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/base.txt @@ -241,7 +240,7 @@ more-itertools==9.1.0 # jaraco.text moto==4.1.11 # via -r requirements/static/ci/common.in -msgpack==1.0.5 +msgpack==1.0.7 # via # -c 
requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/base.txt @@ -286,7 +285,7 @@ passlib==1.7.4 # via -r requirements/static/ci/common.in pathspec==0.11.1 # via yamllint -platformdirs==3.5.3 +platformdirs==4.0.0 # via virtualenv pluggy==1.0.0 # via pytest @@ -294,7 +293,7 @@ portend==3.1.0 # via # -c requirements/static/ci/../pkg/py3.10/darwin.txt # cherrypy -psutil==5.9.5 +psutil==5.9.6 # via # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/base.txt @@ -349,7 +348,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories @@ -386,11 +385,11 @@ python-dateutil==2.8.2 # vcert python-etcd==0.4.5 # via -r requirements/static/ci/common.in -python-gnupg==0.5.0 +python-gnupg==0.5.1 # via # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/base.txt -pytz==2023.3 +pytz==2023.3.post1 # via # -c requirements/static/ci/../pkg/py3.10/darwin.txt # tempora @@ -409,7 +408,7 @@ pyyaml==6.0.1 # responses # yamllint # yamlordereddictloader -pyzmq==25.1.0 +pyzmq==25.1.1 # via # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/zeromq.txt @@ -521,7 +520,7 @@ urllib3==1.26.18 # responses vcert==0.9.1 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -virtualenv==20.23.0 +virtualenv==20.24.7 # via # -r requirements/static/ci/common.in # pytest-salt-factories diff --git a/requirements/static/ci/py3.10/docs.txt b/requirements/static/ci/py3.10/docs.txt index 6b259aeacac..b89a38d7585 100644 --- a/requirements/static/ci/py3.10/docs.txt +++ b/requirements/static/ci/py3.10/docs.txt @@ -70,7 +70,7 @@ markdown-it-py==2.2.0 # via # mdit-py-plugins # myst-docutils -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -c requirements/static/ci/py3.10/linux.txt # jinja2 @@ -103,7 +103,7 @@ 
pyenchant==3.2.2 # via sphinxcontrib-spelling pygments==2.15.1 # via sphinx -pytz==2023.3 +pytz==2023.3.post1 # via # -c requirements/static/ci/py3.10/linux.txt # tempora diff --git a/requirements/static/ci/py3.10/freebsd.txt b/requirements/static/ci/py3.10/freebsd.txt index ffc467b40c0..d2df28c3830 100644 --- a/requirements/static/ci/py3.10/freebsd.txt +++ b/requirements/static/ci/py3.10/freebsd.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.10/freebsd.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/freebsd.in requirements/static/pkg/freebsd.in requirements/zeromq.txt # -aiohttp==3.8.5 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.3.1 # via aiohttp @@ -56,7 +56,7 @@ certifi==2023.07.22 # requests certvalidator==0.11.1 # via vcert -cffi==1.15.1 +cffi==1.14.6 # via # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r requirements/static/ci/common.in @@ -66,7 +66,6 @@ cffi==1.15.1 charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.10/freebsd.txt - # aiohttp # requests cheetah3==3.2.6.post1 # via -r requirements/static/ci/common.in @@ -89,7 +88,7 @@ contextvars==2.4 # -r requirements/base.txt croniter==1.3.15 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==41.0.4 +cryptography==41.0.5 # via # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r requirements/base.txt @@ -98,7 +97,7 @@ cryptography==41.0.4 # paramiko # pyopenssl # vcert -distlib==0.3.6 +distlib==0.3.7 # via virtualenv distro==1.8.0 # via @@ -115,7 +114,7 @@ etcd3-py==0.1.6 # via -r requirements/static/ci/common.in exceptiongroup==1.1.1 # via pytest -filelock==3.12.4 +filelock==3.13.1 # via virtualenv flaky==3.7.0 # via -r requirements/pytest.txt @@ -211,7 +210,7 @@ kubernetes==3.0.0 # via -r requirements/static/ci/common.in libnacl==1.8.0 ; sys_platform != "win32" and sys_platform != "darwin" # via -r requirements/static/ci/common.in 
-looseversion==1.2.0 +looseversion==1.3.0 # via # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r requirements/base.txt @@ -223,7 +222,7 @@ lxml==4.9.2 # xmldiff mako==1.2.4 # via -r requirements/static/ci/common.in -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r requirements/base.txt @@ -244,7 +243,7 @@ more-itertools==9.1.0 # jaraco.text moto==4.1.11 # via -r requirements/static/ci/common.in -msgpack==1.0.5 +msgpack==1.0.7 # via # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r requirements/base.txt @@ -290,7 +289,7 @@ passlib==1.7.4 # via -r requirements/static/ci/common.in pathspec==0.11.1 # via yamllint -platformdirs==3.5.3 +platformdirs==4.0.0 # via virtualenv pluggy==1.0.0 # via pytest @@ -298,7 +297,7 @@ portend==3.1.0 # via # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # cherrypy -psutil==5.9.5 +psutil==5.9.6 # via # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r requirements/base.txt @@ -353,7 +352,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories @@ -390,11 +389,11 @@ python-dateutil==2.8.2 # vcert python-etcd==0.4.5 # via -r requirements/static/ci/common.in -python-gnupg==0.5.0 +python-gnupg==0.5.1 # via # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r requirements/base.txt -pytz==2023.3 +pytz==2023.3.post1 # via # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # tempora @@ -413,7 +412,7 @@ pyyaml==6.0.1 # responses # yamllint # yamlordereddictloader -pyzmq==25.1.0 +pyzmq==25.1.1 # via # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r requirements/zeromq.txt @@ -526,7 +525,7 @@ urllib3==1.26.18 # responses vcert==0.9.1 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -virtualenv==20.23.0 
+virtualenv==20.24.7 # via # -r requirements/static/ci/common.in # pytest-salt-factories diff --git a/requirements/static/ci/py3.10/linux.txt b/requirements/static/ci/py3.10/linux.txt index 4d31a8e5e29..c2d000bef8a 100644 --- a/requirements/static/ci/py3.10/linux.txt +++ b/requirements/static/ci/py3.10/linux.txt @@ -6,16 +6,16 @@ # aiohttp-retry==2.8.3 # via twilio -aiohttp==3.8.5 +aiohttp==3.9.0 # via # aiohttp-retry # etcd3-py # twilio aiosignal==1.3.1 # via aiohttp -ansible-core==2.15.0 +ansible-core==2.16.0 # via ansible -ansible==8.0.0 ; python_version >= "3.9" +ansible==9.0.1 ; python_version >= "3.9" # via -r requirements/static/ci/linux.in anyio==3.7.0 # via httpcore @@ -69,7 +69,7 @@ certifi==2023.07.22 # requests certvalidator==0.11.1 # via vcert -cffi==1.15.1 +cffi==1.14.6 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt # -r requirements/static/ci/common.in @@ -80,7 +80,6 @@ cffi==1.15.1 charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt - # aiohttp # requests cheetah3==3.2.6.post1 # via -r requirements/static/ci/common.in @@ -103,7 +102,7 @@ contextvars==2.4 # -r requirements/base.txt croniter==1.3.15 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==41.0.4 +cryptography==41.0.5 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt # -r requirements/base.txt @@ -113,7 +112,7 @@ cryptography==41.0.4 # paramiko # pyopenssl # vcert -distlib==0.3.6 +distlib==0.3.7 # via virtualenv distro==1.8.0 # via @@ -132,7 +131,7 @@ exceptiongroup==1.1.1 # via # anyio # pytest -filelock==3.12.4 +filelock==3.13.1 # via virtualenv flaky==3.7.0 # via -r requirements/pytest.txt @@ -237,7 +236,7 @@ kubernetes==3.0.0 # via -r requirements/static/ci/common.in libnacl==1.8.0 ; sys_platform != "win32" and sys_platform != "darwin" # via -r requirements/static/ci/common.in -looseversion==1.2.0 +looseversion==1.3.0 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt # -r requirements/base.txt @@ 
-249,7 +248,7 @@ lxml==4.9.2 # xmldiff mako==1.2.4 # via -r requirements/static/ci/common.in -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt # -r requirements/base.txt @@ -270,7 +269,7 @@ more-itertools==9.1.0 # jaraco.text moto==4.1.11 # via -r requirements/static/ci/common.in -msgpack==1.0.5 +msgpack==1.0.7 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt # -r requirements/base.txt @@ -317,7 +316,7 @@ passlib==1.7.4 # via -r requirements/static/ci/common.in pathspec==0.11.1 # via yamllint -platformdirs==3.5.3 +platformdirs==4.0.0 # via virtualenv pluggy==1.0.0 # via pytest @@ -325,7 +324,7 @@ portend==3.1.0 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt # cherrypy -psutil==5.9.5 +psutil==5.9.6 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt # -r requirements/base.txt @@ -388,7 +387,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories @@ -427,13 +426,13 @@ python-dateutil==2.8.2 # vcert python-etcd==0.4.5 # via -r requirements/static/ci/common.in -python-gnupg==0.5.0 +python-gnupg==0.5.1 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt # -r requirements/base.txt python-telegram-bot==20.3 # via -r requirements/static/ci/linux.in -pytz==2023.3 +pytz==2023.3.post1 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt # tempora @@ -454,7 +453,7 @@ pyyaml==6.0.1 # responses # yamllint # yamlordereddictloader -pyzmq==25.1.0 +pyzmq==25.1.1 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt # -r requirements/zeromq.txt @@ -591,7 +590,7 @@ urllib3==1.26.18 # responses vcert==0.9.1 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -virtualenv==20.23.0 +virtualenv==20.24.7 # via # -r requirements/static/ci/common.in # 
pytest-salt-factories diff --git a/requirements/static/ci/py3.10/tools-virustotal.txt b/requirements/static/ci/py3.10/tools-virustotal.txt new file mode 100644 index 00000000000..e06b0a09c78 --- /dev/null +++ b/requirements/static/ci/py3.10/tools-virustotal.txt @@ -0,0 +1,28 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --output-file=requirements/static/ci/py3.10/tools-virustotal.txt requirements/static/ci/tools-virustotal.in +# +certifi==2023.7.22 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # requests +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # requests +idna==3.4 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # requests +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # virustotal3 +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # requests +virustotal3==1.0.8 + # via -r requirements/static/ci/tools-virustotal.in diff --git a/requirements/static/ci/py3.10/tools.txt b/requirements/static/ci/py3.10/tools.txt index 6eb18846050..c2981fe216a 100644 --- a/requirements/static/ci/py3.10/tools.txt +++ b/requirements/static/ci/py3.10/tools.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.10/tools.txt requirements/static/ci/tools.in # -attrs==23.1.0 +attrs==20.3.0 # via # -r requirements/static/ci/tools.in # python-tools-scripts @@ -14,49 +14,69 @@ botocore==1.29.152 # via # boto3 # s3transfer -certifi==2023.07.22 - # via requests +certifi==2023.7.22 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # requests charset-normalizer==3.2.0 - # via requests + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # requests idna==3.4 - # via requests + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # requests jinja2==3.1.2 - # via -r requirements/static/ci/tools.in + # via + # -c 
requirements/static/ci/../pkg/py3.10/linux.txt + # -r requirements/static/ci/tools.in jmespath==1.0.1 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt # boto3 # botocore markdown-it-py==3.0.0 # via rich markupsafe==2.1.3 - # via jinja2 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # jinja2 mdurl==0.1.2 # via markdown-it-py packaging==23.1 - # via -r requirements/static/ci/tools.in + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -r requirements/static/ci/tools.in pygments==2.15.1 # via rich python-dateutil==2.8.2 - # via botocore -python-tools-scripts==0.18.1 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # botocore +python-tools-scripts==0.18.5 # via -r requirements/static/ci/tools.in pyyaml==6.0.1 - # via -r requirements/static/ci/tools.in + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -r requirements/static/ci/tools.in requests==2.31.0 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt # python-tools-scripts - # virustotal3 rich==13.4.2 # via python-tools-scripts s3transfer==0.6.1 # via boto3 six==1.16.0 - # via python-dateutil + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # python-dateutil typing-extensions==4.8.0 - # via python-tools-scripts + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # python-tools-scripts urllib3==1.26.18 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt # botocore # requests -virustotal3==1.0.8 - # via -r requirements/static/ci/tools.in diff --git a/requirements/static/ci/py3.10/windows.txt b/requirements/static/ci/py3.10/windows.txt index 3651ed2dca3..9c945c73d62 100644 --- a/requirements/static/ci/py3.10/windows.txt +++ b/requirements/static/ci/py3.10/windows.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.10/windows.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/windows.in requirements/static/pkg/windows.in 
requirements/windows.txt # -aiohttp==3.8.5 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.3.1 # via aiohttp @@ -57,7 +57,6 @@ cffi==1.14.6 charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.10/windows.txt - # aiohttp # requests cheetah3==3.2.6.post1 # via -r requirements/static/ci/common.in @@ -86,7 +85,7 @@ contextvars==2.4 # via # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/base.txt -cryptography==41.0.4 +cryptography==41.0.5 # via # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/base.txt @@ -95,7 +94,7 @@ cryptography==41.0.4 # pyopenssl # pyspnego # requests-ntlm -distlib==0.3.6 +distlib==0.3.7 # via virtualenv distro==1.8.0 # via @@ -114,7 +113,7 @@ etcd3-py==0.1.6 # via -r requirements/static/ci/common.in exceptiongroup==1.1.1 # via pytest -filelock==3.12.4 +filelock==3.13.1 # via virtualenv flaky==3.7.0 # via -r requirements/pytest.txt @@ -192,7 +191,7 @@ keyring==5.7.1 # via -r requirements/static/ci/common.in kubernetes==3.0.0 # via -r requirements/static/ci/common.in -looseversion==1.2.0 +looseversion==1.3.0 # via # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/base.txt @@ -203,7 +202,7 @@ lxml==4.9.2 ; sys_platform == "win32" # xmldiff mako==1.2.4 # via -r requirements/static/ci/common.in -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/base.txt @@ -222,7 +221,7 @@ more-itertools==9.1.0 # jaraco.text moto==4.1.11 # via -r requirements/static/ci/common.in -msgpack==1.0.5 +msgpack==1.0.7 # via # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/base.txt @@ -243,7 +242,7 @@ patch==1.16 # via -r requirements/static/ci/windows.in pathspec==0.11.1 # via yamllint -platformdirs==3.5.3 +platformdirs==4.0.0 # via virtualenv pluggy==1.0.0 # via pytest @@ -251,7 +250,7 @@ portend==3.1.0 # via # -c requirements/static/ci/../pkg/py3.10/windows.txt # cherrypy -psutil==5.8.0 +psutil==5.9.6 # via 
# -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/base.txt @@ -306,7 +305,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories @@ -341,7 +340,7 @@ python-dateutil==2.8.2 # moto python-etcd==0.4.5 # via -r requirements/static/ci/common.in -python-gnupg==0.5.0 +python-gnupg==0.5.1 # via # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/base.txt @@ -349,7 +348,7 @@ pythonnet==3.0.1 ; sys_platform == "win32" # via # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/base.txt -pytz==2023.3 +pytz==2023.3.post1 # via # -c requirements/static/ci/../pkg/py3.10/windows.txt # tempora @@ -373,7 +372,7 @@ pyyaml==6.0.1 # pytest-salt-factories # responses # yamllint -pyzmq==25.1.0 +pyzmq==25.1.1 # via # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/zeromq.txt @@ -463,7 +462,7 @@ urllib3==1.26.18 # python-etcd # requests # responses -virtualenv==20.23.0 +virtualenv==20.24.7 # via # -r requirements/static/ci/common.in # pytest-salt-factories diff --git a/requirements/static/ci/py3.11/changelog.txt b/requirements/static/ci/py3.11/changelog.txt index 7d5bf722b26..9169a689069 100644 --- a/requirements/static/ci/py3.11/changelog.txt +++ b/requirements/static/ci/py3.11/changelog.txt @@ -11,17 +11,17 @@ click==8.1.3 # -c requirements/static/ci/py3.11/linux.txt # click-default-group # towncrier -incremental==22.10.0 +incremental==17.5.0 # via towncrier jinja2==3.1.2 # via # -c requirements/static/ci/py3.11/linux.txt # towncrier -looseversion==1.2.0 +looseversion==1.3.0 # via # -c requirements/static/ci/py3.11/linux.txt # -r requirements/static/ci/changelog.in -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -c requirements/static/ci/py3.11/linux.txt # jinja2 diff --git 
a/requirements/static/ci/py3.11/cloud.txt b/requirements/static/ci/py3.11/cloud.txt index 6f51f688fe7..b5fd661392e 100644 --- a/requirements/static/ci/py3.11/cloud.txt +++ b/requirements/static/ci/py3.11/cloud.txt @@ -12,7 +12,7 @@ certifi==2023.07.22 # via # -c requirements/static/ci/py3.11/linux.txt # requests -cffi==1.15.1 +cffi==1.14.6 # via # -c requirements/static/ci/py3.11/linux.txt # cryptography @@ -20,7 +20,7 @@ charset-normalizer==3.2.0 # via # -c requirements/static/ci/py3.11/linux.txt # requests -cryptography==41.0.4 +cryptography==41.0.5 # via # -c requirements/static/ci/py3.11/linux.txt # pyspnego @@ -34,22 +34,23 @@ netaddr==0.8.0 # via # -c requirements/static/ci/py3.11/linux.txt # -r requirements/static/ci/cloud.in +ntlm-auth==1.3.0 + # via requests-ntlm profitbricks==4.1.3 # via -r requirements/static/ci/cloud.in pycparser==2.21 # via # -c requirements/static/ci/py3.11/linux.txt # cffi -pypsexec==0.3.0 +pypsexec==0.1.0 # via -r requirements/static/ci/cloud.in -pyspnego==0.9.0 +pyspnego==0.8.0 # via # -r requirements/static/ci/cloud.in - # requests-ntlm # smbprotocol -pywinrm==0.4.3 +pywinrm==0.3.0 # via -r requirements/static/ci/cloud.in -requests-ntlm==1.2.0 +requests-ntlm==1.1.0 # via pywinrm requests==2.31.0 # via @@ -62,6 +63,7 @@ six==1.16.0 # via # -c requirements/static/ci/py3.11/linux.txt # profitbricks + # pypsexec # pywinrm smbprotocol==1.10.1 # via diff --git a/requirements/static/ci/py3.11/darwin-crypto.txt b/requirements/static/ci/py3.11/darwin-crypto.txt index 32377a2581b..c0aacf41077 100644 --- a/requirements/static/ci/py3.11/darwin-crypto.txt +++ b/requirements/static/ci/py3.11/darwin-crypto.txt @@ -6,5 +6,5 @@ # m2crypto==0.38.0 # via -r requirements/static/ci/crypto.in -pycryptodome==3.9.8 +pycryptodome==3.9.7 # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.11/darwin.txt b/requirements/static/ci/py3.11/darwin.txt index 29f2761e139..24832cc7688 100644 --- 
a/requirements/static/ci/py3.11/darwin.txt +++ b/requirements/static/ci/py3.11/darwin.txt @@ -4,26 +4,28 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/darwin.txt requirements/darwin.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/darwin.in requirements/static/pkg/darwin.in # -aiohttp==3.8.5 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.3.1 # via aiohttp +annotated-types==0.6.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # pydantic apache-libcloud==3.7.0 ; sys_platform != "win32" # via -r requirements/static/ci/common.in asn1crypto==1.5.1 # via # certvalidator # oscrypto -async-timeout==4.0.2 - # via aiohttp attrs==23.1.0 # via # aiohttp # jsonschema + # pytest # pytest-salt-factories # pytest-shell-utilities # pytest-skip-markers - # pytest-subtests # pytest-system-statistics autocommand==2.2.2 # via @@ -44,9 +46,9 @@ botocore==1.29.152 # boto3 # moto # s3transfer -cachetools==5.3.1 +cachetools==3.1.0 # via google-auth -cassandra-driver==3.28.0 +cassandra-driver==3.23.0 # via -r requirements/static/ci/common.in certifi==2023.07.22 # via @@ -56,7 +58,7 @@ certifi==2023.07.22 # requests certvalidator==0.11.1 # via vcert -cffi==1.15.1 +cffi==1.14.6 # via # -c requirements/static/ci/../pkg/py3.11/darwin.txt # -r requirements/static/ci/common.in @@ -67,9 +69,8 @@ cffi==1.15.1 charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.11/darwin.txt - # aiohttp # requests -cheetah3==3.2.6.post1 +cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in cheroot==10.0.0 # via @@ -90,7 +91,7 @@ contextvars==2.4 # -r requirements/base.txt croniter==1.3.15 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==41.0.4 +cryptography==41.0.5 # via # -c requirements/static/ci/../pkg/py3.11/darwin.txt # -r requirements/base.txt @@ -99,7 +100,7 @@ cryptography==41.0.4 # paramiko # pyopenssl # vcert -distlib==0.3.6 +distlib==0.3.7 # via virtualenv 
distro==1.8.0 # via @@ -114,11 +115,11 @@ docker==6.1.3 # via -r requirements/pytest.txt etcd3-py==0.1.6 # via -r requirements/static/ci/common.in -filelock==3.12.4 +filelock==3.13.1 # via virtualenv flaky==3.7.0 # via -r requirements/pytest.txt -frozenlist==1.3.3 +frozenlist==1.3.0 # via # aiohttp # aiosignal @@ -128,7 +129,7 @@ future==0.18.3 # textfsm genshi==0.7.7 # via -r requirements/static/ci/common.in -geomet==0.2.1.post1 +geomet==0.1.2 # via cassandra-driver gitdb==4.0.10 # via gitpython @@ -152,7 +153,7 @@ importlib-metadata==6.6.0 # via # -c requirements/static/ci/../pkg/py3.11/darwin.txt # -r requirements/base.txt -inflect==6.0.4 +inflect==7.0.0 # via # -c requirements/static/ci/../pkg/py3.11/darwin.txt # jaraco.text @@ -206,7 +207,7 @@ keyring==5.7.1 # via -r requirements/static/ci/common.in kubernetes==3.0.0 # via -r requirements/static/ci/common.in -looseversion==1.2.0 +looseversion==1.3.0 # via # -c requirements/static/ci/../pkg/py3.11/darwin.txt # -r requirements/base.txt @@ -218,18 +219,18 @@ lxml==4.9.2 # xmldiff mako==1.2.4 # via -r requirements/static/ci/common.in -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -c requirements/static/ci/../pkg/py3.11/darwin.txt # -r requirements/base.txt # jinja2 # mako # werkzeug -mercurial==6.4.4 +mercurial==6.0.1 # via -r requirements/static/ci/darwin.in mock==5.1.0 # via -r requirements/pytest.txt -more-itertools==9.1.0 +more-itertools==8.2.0 # via # -c requirements/static/ci/../pkg/py3.11/darwin.txt # -r requirements/pytest.txt @@ -239,12 +240,12 @@ more-itertools==9.1.0 # jaraco.text moto==4.1.11 # via -r requirements/static/ci/common.in -msgpack==1.0.5 +msgpack==1.0.7 # via # -c requirements/static/ci/../pkg/py3.11/darwin.txt # -r requirements/base.txt # pytest-salt-factories -multidict==6.0.4 +multidict==6.0.2 # via # aiohttp # yarl @@ -284,7 +285,7 @@ passlib==1.7.4 # via -r requirements/static/ci/common.in pathspec==0.11.1 # via yamllint -platformdirs==3.5.3 +platformdirs==4.0.0 # via virtualenv 
pluggy==1.0.0 # via pytest @@ -292,14 +293,14 @@ portend==3.1.0 # via # -c requirements/static/ci/../pkg/py3.11/darwin.txt # cherrypy -psutil==5.9.5 +psutil==5.9.6 # via # -c requirements/static/ci/../pkg/py3.11/darwin.txt # -r requirements/base.txt # pytest-salt-factories # pytest-shell-utilities # pytest-system-statistics -pyasn1-modules==0.3.0 +pyasn1-modules==0.2.4 # via google-auth pyasn1==0.4.8 # via @@ -313,7 +314,11 @@ pycryptodomex==3.9.8 # via # -c requirements/static/ci/../pkg/py3.11/darwin.txt # -r requirements/crypto.txt -pydantic==1.10.8 +pydantic-core==2.14.5 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # pydantic +pydantic==2.5.2 # via # -c requirements/static/ci/../pkg/py3.11/darwin.txt # inflect @@ -347,22 +352,22 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories -pytest-skip-markers==1.4.1 +pytest-skip-markers==1.5.0 # via # pytest-salt-factories # pytest-shell-utilities # pytest-system-statistics -pytest-subtests==0.11.0 +pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-timeout==2.1.0 +pytest-timeout==1.4.2 # via -r requirements/pytest.txt -pytest==7.3.2 +pytest==7.2.0 # via # -r requirements/pytest.txt # pytest-custom-exit-code @@ -384,11 +389,11 @@ python-dateutil==2.8.2 # vcert python-etcd==0.4.5 # via -r requirements/static/ci/common.in -python-gnupg==0.5.0 +python-gnupg==0.5.1 # via # -c requirements/static/ci/../pkg/py3.11/darwin.txt # -r requirements/base.txt -pytz==2023.3 +pytz==2023.3.post1 # via # -c requirements/static/ci/../pkg/py3.11/darwin.txt # tempora @@ -407,7 +412,7 @@ pyyaml==6.0.1 # responses # yamllint # yamlordereddictloader -pyzmq==25.1.0 +pyzmq==25.1.1 # via # -c 
requirements/static/ci/../pkg/py3.11/darwin.txt # -r requirements/zeromq.txt @@ -496,13 +501,15 @@ ttp==0.9.5 # via # napalm # ttp-templates -types-pyyaml==6.0.1 +types-pyyaml==6.0.12.12 # via responses typing-extensions==4.8.0 # via # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # inflect # napalm # pydantic + # pydantic-core # pytest-shell-utilities # pytest-system-statistics urllib3==1.26.18 @@ -515,9 +522,9 @@ urllib3==1.26.18 # python-etcd # requests # responses -vcert==0.9.1 ; sys_platform != "win32" +vcert==0.7.4 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -virtualenv==20.23.0 +virtualenv==20.24.7 # via # -r requirements/static/ci/common.in # pytest-salt-factories diff --git a/requirements/static/ci/py3.11/docs.txt b/requirements/static/ci/py3.11/docs.txt index 8f207e03fe9..4854c2e02e3 100644 --- a/requirements/static/ci/py3.11/docs.txt +++ b/requirements/static/ci/py3.11/docs.txt @@ -6,6 +6,10 @@ # alabaster==0.7.13 # via sphinx +annotated-types==0.6.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # pydantic autocommand==2.2.2 # via # -c requirements/static/ci/py3.11/linux.txt @@ -36,7 +40,7 @@ idna==3.4 # requests imagesize==1.4.1 # via sphinx -inflect==6.0.4 +inflect==7.0.0 # via # -c requirements/static/ci/py3.11/linux.txt # jaraco.text @@ -70,7 +74,7 @@ markdown-it-py==2.2.0 # via # mdit-py-plugins # myst-docutils -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -c requirements/static/ci/py3.11/linux.txt # jinja2 @@ -78,7 +82,7 @@ mdit-py-plugins==0.3.5 # via myst-docutils mdurl==0.1.2 # via markdown-it-py -more-itertools==9.1.0 +more-itertools==5.0.0 # via # -c requirements/static/ci/py3.11/linux.txt # cheroot @@ -95,7 +99,11 @@ portend==3.1.0 # via # -c requirements/static/ci/py3.11/linux.txt # cherrypy -pydantic==1.10.8 +pydantic-core==2.14.5 + # via + # -c requirements/static/ci/py3.11/linux.txt + # pydantic +pydantic==2.5.2 # via # -c requirements/static/ci/py3.11/linux.txt # inflect @@ -103,7 +111,7 @@ 
pyenchant==3.2.2 # via sphinxcontrib-spelling pygments==2.15.1 # via sphinx -pytz==2023.3 +pytz==2023.3.post1 # via # -c requirements/static/ci/py3.11/linux.txt # tempora @@ -118,6 +126,7 @@ requests==2.31.0 six==1.16.0 # via # -c requirements/static/ci/py3.11/linux.txt + # more-itertools # sphinxcontrib-httpdomain snowballstemmer==2.2.0 # via sphinx @@ -149,8 +158,10 @@ tempora==5.3.0 typing-extensions==4.8.0 # via # -c requirements/static/ci/py3.11/linux.txt + # inflect # pydantic -uc-micro-py==1.0.2 + # pydantic-core +uc-micro-py==1.0.1 # via linkify-it-py urllib3==1.26.18 # via diff --git a/requirements/static/ci/py3.11/freebsd-crypto.txt b/requirements/static/ci/py3.11/freebsd-crypto.txt index 465896f60e9..33399b9ff51 100644 --- a/requirements/static/ci/py3.11/freebsd-crypto.txt +++ b/requirements/static/ci/py3.11/freebsd-crypto.txt @@ -6,5 +6,5 @@ # m2crypto==0.38.0 # via -r requirements/static/ci/crypto.in -pycryptodome==3.9.8 +pycryptodome==3.9.7 # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.11/freebsd.txt b/requirements/static/ci/py3.11/freebsd.txt index 781e6afd9c7..3875f8af1e9 100644 --- a/requirements/static/ci/py3.11/freebsd.txt +++ b/requirements/static/ci/py3.11/freebsd.txt @@ -4,26 +4,28 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/freebsd.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/freebsd.in requirements/static/pkg/freebsd.in requirements/zeromq.txt # -aiohttp==3.8.5 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.3.1 # via aiohttp +annotated-types==0.6.0 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # pydantic apache-libcloud==3.7.0 ; sys_platform != "win32" # via -r requirements/static/ci/common.in asn1crypto==1.5.1 # via # certvalidator # oscrypto -async-timeout==4.0.2 - # via aiohttp attrs==23.1.0 # via # aiohttp # jsonschema + # pytest # pytest-salt-factories # pytest-shell-utilities # 
pytest-skip-markers - # pytest-subtests # pytest-system-statistics autocommand==2.2.2 # via @@ -44,9 +46,9 @@ botocore==1.29.152 # boto3 # moto # s3transfer -cachetools==5.3.1 +cachetools==3.1.0 # via google-auth -cassandra-driver==3.28.0 +cassandra-driver==3.24.0 # via -r requirements/static/ci/common.in certifi==2023.07.22 # via @@ -56,7 +58,7 @@ certifi==2023.07.22 # requests certvalidator==0.11.1 # via vcert -cffi==1.15.1 +cffi==1.14.6 # via # -c requirements/static/ci/../pkg/py3.11/freebsd.txt # -r requirements/static/ci/common.in @@ -66,9 +68,8 @@ cffi==1.15.1 charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.11/freebsd.txt - # aiohttp # requests -cheetah3==3.2.6.post1 +cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in cheroot==10.0.0 # via @@ -89,7 +90,7 @@ contextvars==2.4 # -r requirements/base.txt croniter==1.3.15 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==41.0.4 +cryptography==41.0.5 # via # -c requirements/static/ci/../pkg/py3.11/freebsd.txt # -r requirements/base.txt @@ -98,7 +99,7 @@ cryptography==41.0.4 # paramiko # pyopenssl # vcert -distlib==0.3.6 +distlib==0.3.7 # via virtualenv distro==1.8.0 # via @@ -113,11 +114,11 @@ docker==6.1.3 # via -r requirements/pytest.txt etcd3-py==0.1.6 # via -r requirements/static/ci/common.in -filelock==3.12.4 +filelock==3.13.1 # via virtualenv flaky==3.7.0 # via -r requirements/pytest.txt -frozenlist==1.3.3 +frozenlist==1.3.0 # via # aiohttp # aiosignal @@ -151,7 +152,7 @@ importlib-metadata==6.6.0 # via # -c requirements/static/ci/../pkg/py3.11/freebsd.txt # -r requirements/base.txt -inflect==6.0.4 +inflect==7.0.0 # via # -c requirements/static/ci/../pkg/py3.11/freebsd.txt # jaraco.text @@ -209,7 +210,7 @@ kubernetes==3.0.0 # via -r requirements/static/ci/common.in libnacl==1.8.0 ; sys_platform != "win32" and sys_platform != "darwin" # via -r requirements/static/ci/common.in -looseversion==1.2.0 +looseversion==1.3.0 # via # -c 
requirements/static/ci/../pkg/py3.11/freebsd.txt # -r requirements/base.txt @@ -221,18 +222,18 @@ lxml==4.9.2 # xmldiff mako==1.2.4 # via -r requirements/static/ci/common.in -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -c requirements/static/ci/../pkg/py3.11/freebsd.txt # -r requirements/base.txt # jinja2 # mako # werkzeug -mercurial==6.4.4 +mercurial==6.0.1 # via -r requirements/static/ci/freebsd.in mock==5.1.0 # via -r requirements/pytest.txt -more-itertools==9.1.0 +more-itertools==5.0.0 # via # -c requirements/static/ci/../pkg/py3.11/freebsd.txt # -r requirements/pytest.txt @@ -242,12 +243,12 @@ more-itertools==9.1.0 # jaraco.text moto==4.1.11 # via -r requirements/static/ci/common.in -msgpack==1.0.5 +msgpack==1.0.7 # via # -c requirements/static/ci/../pkg/py3.11/freebsd.txt # -r requirements/base.txt # pytest-salt-factories -multidict==6.0.4 +multidict==6.0.2 # via # aiohttp # yarl @@ -288,7 +289,7 @@ passlib==1.7.4 # via -r requirements/static/ci/common.in pathspec==0.11.1 # via yamllint -platformdirs==3.5.3 +platformdirs==4.0.0 # via virtualenv pluggy==1.0.0 # via pytest @@ -296,16 +297,16 @@ portend==3.1.0 # via # -c requirements/static/ci/../pkg/py3.11/freebsd.txt # cherrypy -psutil==5.9.5 +psutil==5.9.6 # via # -c requirements/static/ci/../pkg/py3.11/freebsd.txt # -r requirements/base.txt # pytest-salt-factories # pytest-shell-utilities # pytest-system-statistics -pyasn1-modules==0.3.0 +pyasn1-modules==0.2.4 # via google-auth -pyasn1==0.5.0 +pyasn1==0.4.8 # via # pyasn1-modules # rsa @@ -317,7 +318,11 @@ pycryptodomex==3.9.8 # via # -c requirements/static/ci/../pkg/py3.11/freebsd.txt # -r requirements/crypto.txt -pydantic==1.10.8 +pydantic-core==2.14.5 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # pydantic +pydantic==2.5.2 # via # -c requirements/static/ci/../pkg/py3.11/freebsd.txt # inflect @@ -351,22 +356,22 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt 
-pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories -pytest-skip-markers==1.4.1 +pytest-skip-markers==1.5.0 # via # pytest-salt-factories # pytest-shell-utilities # pytest-system-statistics -pytest-subtests==0.11.0 +pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-timeout==2.1.0 +pytest-timeout==1.4.2 # via -r requirements/pytest.txt -pytest==7.3.2 +pytest==7.2.0 # via # -r requirements/pytest.txt # pytest-custom-exit-code @@ -388,11 +393,11 @@ python-dateutil==2.8.2 # vcert python-etcd==0.4.5 # via -r requirements/static/ci/common.in -python-gnupg==0.5.0 +python-gnupg==0.5.1 # via # -c requirements/static/ci/../pkg/py3.11/freebsd.txt # -r requirements/base.txt -pytz==2023.3 +pytz==2023.3.post1 # via # -c requirements/static/ci/../pkg/py3.11/freebsd.txt # tempora @@ -411,7 +416,7 @@ pyyaml==6.0.1 # responses # yamllint # yamlordereddictloader -pyzmq==25.1.0 +pyzmq==25.1.1 # via # -c requirements/static/ci/../pkg/py3.11/freebsd.txt # -r requirements/zeromq.txt @@ -461,6 +466,7 @@ six==1.16.0 # junos-eznc # kazoo # kubernetes + # more-itertools # ncclient # python-dateutil # pyvmomi @@ -501,13 +507,15 @@ ttp==0.9.5 # via # napalm # ttp-templates -types-pyyaml==6.0.1 +types-pyyaml==6.0.12.12 # via responses typing-extensions==4.8.0 # via # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # inflect # napalm # pydantic + # pydantic-core # pytest-shell-utilities # pytest-system-statistics urllib3==1.26.18 @@ -520,9 +528,9 @@ urllib3==1.26.18 # python-etcd # requests # responses -vcert==0.9.1 ; sys_platform != "win32" +vcert==0.7.4 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -virtualenv==20.23.0 +virtualenv==20.24.7 # via # -r requirements/static/ci/common.in # pytest-salt-factories diff --git a/requirements/static/ci/py3.11/linux-crypto.txt 
b/requirements/static/ci/py3.11/linux-crypto.txt index 00752698956..89873b20c9e 100644 --- a/requirements/static/ci/py3.11/linux-crypto.txt +++ b/requirements/static/ci/py3.11/linux-crypto.txt @@ -6,5 +6,5 @@ # m2crypto==0.38.0 # via -r requirements/static/ci/crypto.in -pycryptodome==3.9.8 +pycryptodome==3.9.7 # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.11/linux.txt b/requirements/static/ci/py3.11/linux.txt index a7f4bdf04b6..5a2ddb8e6e8 100644 --- a/requirements/static/ci/py3.11/linux.txt +++ b/requirements/static/ci/py3.11/linux.txt @@ -6,18 +6,22 @@ # aiohttp-retry==2.8.3 # via twilio -aiohttp==3.8.5 +aiohttp==3.9.0 # via # aiohttp-retry # etcd3-py # twilio aiosignal==1.3.1 # via aiohttp -ansible-core==2.15.0 +annotated-types==0.6.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # pydantic +ansible-core==2.16.0 # via ansible -ansible==8.0.0 ; python_version >= "3.9" +ansible==9.0.1 ; python_version >= "3.9" # via -r requirements/static/ci/linux.in -anyio==3.7.0 +anyio==4.1.0 # via httpcore apache-libcloud==3.7.0 ; sys_platform != "win32" # via -r requirements/static/ci/common.in @@ -25,16 +29,14 @@ asn1crypto==1.5.1 # via # certvalidator # oscrypto -async-timeout==4.0.2 - # via aiohttp attrs==23.1.0 # via # aiohttp # jsonschema + # pytest # pytest-salt-factories # pytest-shell-utilities # pytest-skip-markers - # pytest-subtests # pytest-system-statistics autocommand==2.2.2 # via @@ -55,9 +57,9 @@ botocore==1.29.152 # boto3 # moto # s3transfer -cachetools==5.3.1 +cachetools==4.2.2 # via google-auth -cassandra-driver==3.28.0 +cassandra-driver==3.23.0 # via -r requirements/static/ci/common.in certifi==2023.07.22 # via @@ -69,7 +71,7 @@ certifi==2023.07.22 # requests certvalidator==0.11.1 # via vcert -cffi==1.15.1 +cffi==1.14.6 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt # -r requirements/static/ci/common.in @@ -80,9 +82,8 @@ cffi==1.15.1 charset-normalizer==3.2.0 # via # -c 
requirements/static/ci/../pkg/py3.11/linux.txt - # aiohttp # requests -cheetah3==3.2.6.post1 +cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in cheroot==10.0.0 # via @@ -103,7 +104,7 @@ contextvars==2.4 # -r requirements/base.txt croniter==1.3.15 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==41.0.4 +cryptography==41.0.5 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt # -r requirements/base.txt @@ -113,7 +114,7 @@ cryptography==41.0.4 # paramiko # pyopenssl # vcert -distlib==0.3.6 +distlib==0.3.7 # via virtualenv distro==1.8.0 # via @@ -128,11 +129,11 @@ docker==6.1.3 # via -r requirements/pytest.txt etcd3-py==0.1.6 # via -r requirements/static/ci/common.in -filelock==3.12.4 +filelock==3.13.1 # via virtualenv flaky==3.7.0 # via -r requirements/pytest.txt -frozenlist==1.3.3 +frozenlist==1.3.0 # via # aiohttp # aiosignal @@ -142,7 +143,7 @@ future==0.18.3 # textfsm genshi==0.7.7 # via -r requirements/static/ci/common.in -geomet==0.2.1.post1 +geomet==0.1.2 # via cassandra-driver gitdb==4.0.10 # via gitpython @@ -154,7 +155,7 @@ h11==0.14.0 # via httpcore hglib==2.6.2 # via -r requirements/static/ci/linux.in -httpcore==0.17.2 +httpcore==0.17.3 # via httpx httpx==0.24.1 # via python-telegram-bot @@ -174,7 +175,7 @@ importlib-metadata==6.6.0 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt # -r requirements/base.txt -inflect==6.0.4 +inflect==7.0.0 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt # jaraco.text @@ -233,7 +234,7 @@ kubernetes==3.0.0 # via -r requirements/static/ci/common.in libnacl==1.8.0 ; sys_platform != "win32" and sys_platform != "darwin" # via -r requirements/static/ci/common.in -looseversion==1.2.0 +looseversion==1.3.0 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt # -r requirements/base.txt @@ -245,18 +246,18 @@ lxml==4.9.2 # xmldiff mako==1.2.4 # via -r requirements/static/ci/common.in -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -c 
requirements/static/ci/../pkg/py3.11/linux.txt # -r requirements/base.txt # jinja2 # mako # werkzeug -mercurial==6.4.4 +mercurial==6.0.1 # via -r requirements/static/ci/linux.in mock==5.1.0 # via -r requirements/pytest.txt -more-itertools==9.1.0 +more-itertools==5.0.0 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt # -r requirements/pytest.txt @@ -266,12 +267,12 @@ more-itertools==9.1.0 # jaraco.text moto==4.1.11 # via -r requirements/static/ci/common.in -msgpack==1.0.5 +msgpack==1.0.7 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt # -r requirements/base.txt # pytest-salt-factories -multidict==6.0.4 +multidict==6.0.2 # via # aiohttp # yarl @@ -313,7 +314,7 @@ passlib==1.7.4 # via -r requirements/static/ci/common.in pathspec==0.11.1 # via yamllint -platformdirs==3.5.3 +platformdirs==4.0.0 # via virtualenv pluggy==1.0.0 # via pytest @@ -321,16 +322,16 @@ portend==3.1.0 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt # cherrypy -psutil==5.9.5 +psutil==5.9.6 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt # -r requirements/base.txt # pytest-salt-factories # pytest-shell-utilities # pytest-system-statistics -pyasn1-modules==0.3.0 +pyasn1-modules==0.2.4 # via google-auth -pyasn1==0.5.0 +pyasn1==0.4.8 # via # pyasn1-modules # rsa @@ -342,7 +343,11 @@ pycryptodomex==3.9.8 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt # -r requirements/crypto.txt -pydantic==1.10.8 +pydantic-core==2.14.5 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # pydantic +pydantic==2.5.2 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt # inflect @@ -354,7 +359,7 @@ pyiface==0.0.11 # via -r requirements/static/ci/linux.in pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" # via -r requirements/static/ci/common.in -pyjwt==2.7.0 +pyjwt==2.4.0 # via twilio pymysql==1.1.0 # via -r requirements/static/ci/linux.in @@ -384,22 +389,22 @@ pytest-helpers-namespace==2021.12.29 # 
pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories -pytest-skip-markers==1.4.1 +pytest-skip-markers==1.5.0 # via # pytest-salt-factories # pytest-shell-utilities # pytest-system-statistics -pytest-subtests==0.11.0 +pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-timeout==2.1.0 +pytest-timeout==1.4.2 # via -r requirements/pytest.txt -pytest==7.3.2 +pytest==7.2.0 # via # -r requirements/pytest.txt # pytest-custom-exit-code @@ -423,13 +428,13 @@ python-dateutil==2.8.2 # vcert python-etcd==0.4.5 # via -r requirements/static/ci/common.in -python-gnupg==0.5.0 +python-gnupg==0.5.1 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt # -r requirements/base.txt python-telegram-bot==20.3 # via -r requirements/static/ci/linux.in -pytz==2023.3 +pytz==2023.3.post1 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt # tempora @@ -450,7 +455,7 @@ pyyaml==6.0.1 # responses # yamllint # yamlordereddictloader -pyzmq==25.1.0 +pyzmq==25.1.1 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt # -r requirements/zeromq.txt @@ -512,6 +517,7 @@ six==1.16.0 # junos-eznc # kazoo # kubernetes + # more-itertools # ncclient # python-consul # python-dateutil @@ -564,13 +570,15 @@ ttp==0.9.5 # ttp-templates twilio==8.2.2 # via -r requirements/static/ci/linux.in -types-pyyaml==6.0.1 +types-pyyaml==6.0.12.12 # via responses typing-extensions==4.8.0 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt + # inflect # napalm # pydantic + # pydantic-core # pytest-shell-utilities # pytest-system-statistics urllib3==1.26.18 @@ -583,9 +591,9 @@ urllib3==1.26.18 # python-etcd # requests # responses -vcert==0.9.1 ; sys_platform != "win32" +vcert==0.7.4 ; sys_platform != "win32" # via -r requirements/static/ci/common.in 
-virtualenv==20.23.0 +virtualenv==20.24.7 # via # -r requirements/static/ci/common.in # pytest-salt-factories diff --git a/requirements/static/ci/py3.11/tools-virustotal.txt b/requirements/static/ci/py3.11/tools-virustotal.txt new file mode 100644 index 00000000000..96552535a12 --- /dev/null +++ b/requirements/static/ci/py3.11/tools-virustotal.txt @@ -0,0 +1,28 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --output-file=requirements/static/ci/py3.11/tools-virustotal.txt requirements/static/ci/tools-virustotal.in +# +certifi==2023.7.22 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # requests +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # requests +idna==3.4 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # requests +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # virustotal3 +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # requests +virustotal3==1.0.8 + # via -r requirements/static/ci/tools-virustotal.in diff --git a/requirements/static/ci/py3.11/tools.txt b/requirements/static/ci/py3.11/tools.txt index 50685cb72c3..1f89935a8f6 100644 --- a/requirements/static/ci/py3.11/tools.txt +++ b/requirements/static/ci/py3.11/tools.txt @@ -15,46 +15,64 @@ botocore==1.29.152 # boto3 # s3transfer certifi==2023.07.22 - # via requests + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # requests charset-normalizer==3.2.0 - # via requests + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # requests idna==3.4 - # via requests + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # requests jinja2==3.1.2 - # via -r requirements/static/ci/tools.in + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/static/ci/tools.in jmespath==1.0.1 # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt # boto3 # botocore 
markdown-it-py==3.0.0 # via rich markupsafe==2.1.3 - # via jinja2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # jinja2 mdurl==0.1.2 # via markdown-it-py packaging==23.1 - # via -r requirements/static/ci/tools.in + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/static/ci/tools.in pygments==2.15.1 # via rich python-dateutil==2.8.2 - # via botocore -python-tools-scripts==0.18.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # botocore +python-tools-scripts==0.18.5 # via -r requirements/static/ci/tools.in pyyaml==6.0.1 - # via -r requirements/static/ci/tools.in + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/static/ci/tools.in requests==2.31.0 # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt # python-tools-scripts - # virustotal3 rich==13.4.2 # via python-tools-scripts s3transfer==0.6.1 # via boto3 six==1.16.0 - # via python-dateutil + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # python-dateutil urllib3==1.26.18 # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt # botocore # requests -virustotal3==1.0.8 - # via -r requirements/static/ci/tools.in diff --git a/requirements/static/ci/py3.11/windows-crypto.txt b/requirements/static/ci/py3.11/windows-crypto.txt index 9d20fb1abd6..25f318a71ba 100644 --- a/requirements/static/ci/py3.11/windows-crypto.txt +++ b/requirements/static/ci/py3.11/windows-crypto.txt @@ -4,7 +4,9 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/windows-crypto.txt requirements/static/ci/crypto.in # -m2crypto==0.38.0 +m2crypto==0.37.1 # via -r requirements/static/ci/crypto.in -pycryptodome==3.9.8 +parameterized==0.8.1 + # via m2crypto +pycryptodome==3.10.1 # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.11/windows.txt b/requirements/static/ci/py3.11/windows.txt index 099017171de..96ad6ea95b5 100644 --- a/requirements/static/ci/py3.11/windows.txt 
+++ b/requirements/static/ci/py3.11/windows.txt @@ -4,20 +4,22 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/windows.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/windows.in requirements/static/pkg/windows.in requirements/windows.txt # -aiohttp==3.8.5 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.3.1 # via aiohttp -async-timeout==4.0.2 - # via aiohttp +annotated-types==0.6.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # pydantic attrs==23.1.0 # via # aiohttp # jsonschema + # pytest # pytest-salt-factories # pytest-shell-utilities # pytest-skip-markers - # pytest-subtests # pytest-system-statistics autocommand==2.2.2 # via @@ -36,9 +38,9 @@ botocore==1.29.152 # boto3 # moto # s3transfer -cachetools==5.3.1 +cachetools==3.1.0 # via google-auth -cassandra-driver==3.28.0 +cassandra-driver==3.23.0 # via -r requirements/static/ci/common.in certifi==2023.07.22 # via @@ -57,7 +59,6 @@ cffi==1.14.6 charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt - # aiohttp # requests cheetah3==3.2.6.post1 # via -r requirements/static/ci/common.in @@ -86,16 +87,15 @@ contextvars==2.4 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt # -r requirements/base.txt -cryptography==41.0.4 +cryptography==41.0.5 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt # -r requirements/base.txt # etcd3-py # moto # pyopenssl - # pyspnego # requests-ntlm -distlib==0.3.6 +distlib==0.3.7 # via virtualenv distro==1.8.0 # via @@ -112,7 +112,7 @@ docker==6.1.3 # via -r requirements/pytest.txt etcd3-py==0.1.6 # via -r requirements/static/ci/common.in -filelock==3.12.4 +filelock==3.13.1 # via virtualenv flaky==3.7.0 # via -r requirements/pytest.txt @@ -122,7 +122,7 @@ frozenlist==1.3.3 # aiosignal genshi==0.7.7 # via -r requirements/static/ci/common.in -geomet==0.2.1.post1 +geomet==0.1.2 # via cassandra-driver gitdb==4.0.10 # via gitpython @@ -144,7 +144,7 @@ 
importlib-metadata==6.6.0 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt # -r requirements/base.txt -inflect==6.0.4 +inflect==7.0.0 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt # jaraco.text @@ -190,18 +190,18 @@ keyring==5.7.1 # via -r requirements/static/ci/common.in kubernetes==3.0.0 # via -r requirements/static/ci/common.in -looseversion==1.2.0 +looseversion==1.3.0 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt # -r requirements/base.txt -lxml==4.9.2 ; sys_platform == "win32" +lxml==4.9.1 ; sys_platform == "win32" # via # -c requirements/static/ci/../pkg/py3.11/windows.txt # -r requirements/base.txt # xmldiff mako==1.2.4 # via -r requirements/static/ci/common.in -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt # -r requirements/base.txt @@ -210,7 +210,7 @@ markupsafe==2.1.2 # werkzeug mock==5.1.0 # via -r requirements/pytest.txt -more-itertools==9.1.0 +more-itertools==8.2.0 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt # -r requirements/pytest.txt @@ -220,15 +220,17 @@ more-itertools==9.1.0 # jaraco.text moto==4.1.11 # via -r requirements/static/ci/common.in -msgpack==1.0.5 +msgpack==1.0.7 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt # -r requirements/base.txt # pytest-salt-factories -multidict==6.0.4 +multidict==6.0.2 # via # aiohttp # yarl +ntlm-auth==1.5.0 + # via requests-ntlm packaging==23.1 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt @@ -241,7 +243,7 @@ patch==1.16 # via -r requirements/static/ci/windows.in pathspec==0.11.1 # via yamllint -platformdirs==3.5.3 +platformdirs==4.0.0 # via virtualenv pluggy==1.0.0 # via pytest @@ -249,14 +251,14 @@ portend==3.1.0 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt # cherrypy -psutil==5.8.0 +psutil==5.9.6 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt # -r requirements/base.txt # pytest-salt-factories # pytest-shell-utilities # pytest-system-statistics 
-pyasn1-modules==0.3.0 +pyasn1-modules==0.2.4 # via google-auth pyasn1==0.4.8 # via @@ -266,11 +268,15 @@ pycparser==2.21 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt # cffi -pycryptodomex==3.9.8 +pycryptodomex==3.10.1 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt # -r requirements/crypto.txt -pydantic==1.10.8 +pydantic-core==2.14.5 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # pydantic +pydantic==2.5.2 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt # inflect @@ -293,8 +299,6 @@ pyopenssl==23.2.0 # etcd3-py pyrsistent==0.19.3 # via jsonschema -pyspnego==0.9.0 - # via requests-ntlm pytest-custom-exit-code==0.3.0 # via -r requirements/pytest.txt pytest-helpers-namespace==2021.12.29 @@ -304,7 +308,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories @@ -313,13 +317,13 @@ pytest-skip-markers==1.5.0 # pytest-salt-factories # pytest-shell-utilities # pytest-system-statistics -pytest-subtests==0.11.0 +pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories pytest-timeout==2.1.0 # via -r requirements/pytest.txt -pytest==7.3.2 +pytest==7.2.0 # via # -r requirements/pytest.txt # pytest-custom-exit-code @@ -339,7 +343,7 @@ python-dateutil==2.8.2 # moto python-etcd==0.4.5 # via -r requirements/static/ci/common.in -python-gnupg==0.5.0 +python-gnupg==0.5.1 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt # -r requirements/base.txt @@ -347,20 +351,20 @@ pythonnet==3.0.1 ; sys_platform == "win32" # via # -c requirements/static/ci/../pkg/py3.11/windows.txt # -r requirements/base.txt -pytz==2023.3 +pytz==2023.3.post1 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt # tempora pyvmomi==8.0.1.0.1 # via -r 
requirements/static/ci/common.in -pywin32==306 ; sys_platform == "win32" +pywin32==305 ; sys_platform == "win32" # via # -c requirements/static/ci/../pkg/py3.11/windows.txt # -r requirements/base.txt # docker # pytest-skip-markers # wmi -pywinrm==0.4.3 +pywinrm==0.4.1 # via -r requirements/static/ci/windows.in pyyaml==6.0.1 # via @@ -371,12 +375,12 @@ pyyaml==6.0.1 # pytest-salt-factories # responses # yamllint -pyzmq==25.1.0 +pyzmq==25.1.1 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt # -r requirements/zeromq.txt # pytest-salt-factories -requests-ntlm==1.2.0 +requests-ntlm==1.1.0 # via pywinrm requests==2.31.0 # via @@ -441,12 +445,14 @@ tornado==6.3.3 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt # -r requirements/base.txt -types-pyyaml==6.0.1 +types-pyyaml==6.0.12.12 # via responses typing-extensions==4.8.0 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt + # inflect # pydantic + # pydantic-core # pytest-shell-utilities # pytest-system-statistics urllib3==1.26.18 @@ -459,7 +465,7 @@ urllib3==1.26.18 # python-etcd # requests # responses -virtualenv==20.23.0 +virtualenv==20.24.7 # via # -r requirements/static/ci/common.in # pytest-salt-factories diff --git a/requirements/static/ci/py3.12/changelog.txt b/requirements/static/ci/py3.12/changelog.txt new file mode 100644 index 00000000000..b9073087276 --- /dev/null +++ b/requirements/static/ci/py3.12/changelog.txt @@ -0,0 +1,36 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/changelog.txt requirements/static/ci/changelog.in +# +click-default-group==1.2.2 + # via towncrier +click==8.1.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # click-default-group + # towncrier +incremental==17.5.0 + # via towncrier +jinja2==3.1.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # towncrier +looseversion==1.3.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # 
-r requirements/static/ci/changelog.in +markupsafe==2.1.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # jinja2 +packaging==23.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/changelog.in +towncrier==22.12.0 + # via -r requirements/static/ci/changelog.in + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.12/cloud.txt b/requirements/static/ci/py3.12/cloud.txt new file mode 100644 index 00000000000..5e412042029 --- /dev/null +++ b/requirements/static/ci/py3.12/cloud.txt @@ -0,0 +1,814 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/cloud.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/cloud.in requirements/static/ci/common.in requirements/static/pkg/linux.in requirements/zeromq.txt +# +aiohttp==3.9.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # etcd3-py +aiosignal==1.3.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp +annotated-types==0.6.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # pydantic +apache-libcloud==3.7.0 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/cloud.in + # -r requirements/static/ci/common.in +asn1crypto==1.5.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # certvalidator + # oscrypto +attrs==23.1.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp + # jsonschema + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +autocommand==2.2.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # jaraco.text +bcrypt==4.0.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r 
requirements/static/ci/common.in + # paramiko +boto3==1.26.152 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # moto +boto==2.49.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +botocore==1.29.152 + # via + # -c requirements/static/ci/py3.12/linux.txt + # boto3 + # moto + # s3transfer +cachetools==4.2.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # google-auth +cassandra-driver==3.28.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # kubernetes + # requests +certvalidator==0.11.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # vcert +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # cryptography + # napalm + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # requests +cheetah3==3.2.6.post2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +cheroot==10.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +cherrypy==18.8.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in +click==8.1.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # geomet +clustershell==1.9.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c 
requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +croniter==1.3.15 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +cryptography==41.0.5 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # etcd3-py + # moto + # paramiko + # pyopenssl + # pyspnego + # requests-ntlm + # smbprotocol + # vcert +distlib==0.3.7 + # via + # -c requirements/static/ci/py3.12/linux.txt + # virtualenv +distro==1.8.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # pytest-skip-markers +dnspython==2.3.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # python-etcd +docker==6.1.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt +etcd3-py==0.1.6 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +filelock==3.13.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # virtualenv +flaky==3.7.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt +frozenlist==1.3.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp + # aiosignal +future==0.18.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # napalm + # textfsm +genshi==0.7.7 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +geomet==0.2.1.post1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # cassandra-driver +gitdb==4.0.10 + # via + # -c requirements/static/ci/py3.12/linux.txt + # gitpython +gitpython==3.1.40 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +google-auth==2.19.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # kubernetes +idna==3.4 + # via + # -c 
requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # etcd3-py + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # contextvars +importlib-metadata==6.6.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +inflect==7.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # jaraco.text +iniconfig==2.0.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # pytest +ipaddress==1.0.23 + # via + # -c requirements/static/ci/py3.12/linux.txt + # kubernetes +jaraco.collections==4.1.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +jaraco.context==4.3.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # jaraco.text +jaraco.functools==3.7.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.11.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # junos-eznc + # moto + # napalm +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore +jsonschema==3.2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +junit-xml==1.9 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r 
requirements/static/ci/common.in +junos-eznc==2.6.7 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # napalm +jxmlease==1.0.3 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +kazoo==2.9.0 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +keyring==5.7.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +libnacl==1.8.0 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +looseversion==1.3.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +lxml==4.9.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # junos-eznc + # napalm + # ncclient + # xmldiff +mako==1.2.4 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +markupsafe==2.1.3 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # jinja2 + # mako + # werkzeug +mock==5.1.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt +more-itertools==5.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt + # cheroot + # cherrypy + # jaraco.functools + # jaraco.text +moto==4.1.11 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +msgpack==1.0.7 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c 
requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # pytest-salt-factories +multidict==6.0.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp + # yarl +napalm==4.1.0 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +ncclient==0.6.13 + # via + # -c requirements/static/ci/py3.12/linux.txt + # junos-eznc + # napalm +netaddr==0.8.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/cloud.in + # junos-eznc + # napalm + # pyeapi +netmiko==4.2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # napalm +netutils==1.6.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # napalm +ntc-templates==4.0.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # netmiko +ntlm-auth==1.3.0 + # via requests-ntlm +oscrypto==1.3.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # certvalidator +packaging==23.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # docker + # pytest +paramiko==3.3.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # junos-eznc + # napalm + # ncclient + # netmiko + # scp +passlib==1.7.4 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +platformdirs==4.0.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # virtualenv +pluggy==1.0.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # pytest +portend==3.1.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +profitbricks==4.1.3 + # via -r requirements/static/ci/cloud.in +psutil==5.9.6 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # 
pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pyasn1-modules==0.2.4 + # via + # -c requirements/static/ci/py3.12/linux.txt + # google-auth +pyasn1==0.4.8 + # via + # -c requirements/static/ci/py3.12/linux.txt + # pyasn1-modules + # rsa +pycparser==2.21 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cffi +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/crypto.txt +pydantic-core==2.14.5 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # pydantic +pydantic==2.5.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # inflect +pyeapi==1.0.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # napalm +pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +pynacl==1.5.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # etcd3-py +pyparsing==3.0.9 + # via + # -c requirements/static/ci/py3.12/linux.txt + # junos-eznc +pypsexec==0.1.0 + # via -r requirements/static/ci/cloud.in +pyrsistent==0.19.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # jsonschema +pyserial==3.5 + # via + # -c requirements/static/ci/py3.12/linux.txt + # junos-eznc + # netmiko +pyspnego==0.8.0 + # via + # -r requirements/static/ci/cloud.in + # smbprotocol +pytest-custom-exit-code==0.3.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt +pytest-helpers-namespace==2021.12.29 + # 
via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt + # pytest-salt-factories + # pytest-shell-utilities +pytest-httpserver==1.0.8 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt +pytest-salt-factories==1.0.0rc28 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt +pytest-shell-utilities==1.8.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # pytest-salt-factories +pytest-skip-markers==1.5.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pytest-subtests==0.4.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # pytest-salt-factories +pytest-timeout==1.4.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt +pytest==7.2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt + # pytest-custom-exit-code + # pytest-helpers-namespace + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-subtests + # pytest-system-statistics + # pytest-timeout +python-dateutil==2.8.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # botocore + # croniter + # kubernetes + # moto + # vcert +python-etcd==0.4.5 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +python-gnupg==0.5.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +pytz==2023.3.post1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # tempora +pyvmomi==8.0.1.0.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r 
requirements/static/ci/common.in +pywinrm==0.3.0 + # via -r requirements/static/ci/cloud.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # clustershell + # junos-eznc + # kubernetes + # napalm + # netmiko + # pytest-salt-factories + # responses + # yamlordereddictloader +pyzmq==25.1.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/zeromq.txt + # pytest-salt-factories +requests-ntlm==1.1.0 + # via pywinrm +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # apache-libcloud + # docker + # etcd3-py + # kubernetes + # moto + # napalm + # profitbricks + # pywinrm + # requests-ntlm + # responses + # vcert +responses==0.23.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # moto +rfc3987==1.3.8 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +rpm-vercmp==0.1.2 ; sys_platform == "linux" + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +rsa==4.9 + # via + # -c requirements/static/ci/py3.12/linux.txt + # google-auth +s3transfer==0.6.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # boto3 +scp==0.14.5 + # via + # -c requirements/static/ci/py3.12/linux.txt + # junos-eznc + # napalm + # netmiko +semantic-version==2.10.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # etcd3-py +setproctitle==1.3.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +six==1.16.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cassandra-driver + # 
etcd3-py + # genshi + # geomet + # google-auth + # jsonschema + # junit-xml + # junos-eznc + # kazoo + # kubernetes + # more-itertools + # ncclient + # profitbricks + # pypsexec + # python-dateutil + # pyvmomi + # pywinrm + # textfsm + # transitions + # vcert + # websocket-client +smbprotocol==1.10.1 + # via + # -r requirements/static/ci/cloud.in + # pypsexec +smmap==5.0.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # gitdb +sqlparse==0.4.4 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +tempora==5.3.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # portend +textfsm==1.1.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # napalm + # netmiko + # ntc-templates +timelib==0.3.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +toml==0.10.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +tornado==6.3.3 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +transitions==0.9.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # junos-eznc +ttp-templates==0.3.5 + # via + # -c requirements/static/ci/py3.12/linux.txt + # napalm +ttp==0.9.5 + # via + # -c requirements/static/ci/py3.12/linux.txt + # napalm + # ttp-templates +types-pyyaml==6.0.12.12 + # via responses +typing-extensions==4.8.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # inflect + # napalm + # pydantic + # pydantic-core + # pytest-shell-utilities + # pytest-system-statistics +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c 
requirements/static/ci/py3.12/linux.txt + # botocore + # docker + # google-auth + # kubernetes + # python-etcd + # requests + # responses +vcert==0.7.4 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +virtualenv==20.24.7 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # pytest-salt-factories +watchdog==3.0.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # docker + # kubernetes +wempy==0.2.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # moto + # pytest-httpserver +xmldiff==2.6.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +xmltodict==0.13.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # moto + # pywinrm +yamlordereddictloader==0.4.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # junos-eznc +yarl==1.9.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp +zc.lockfile==3.0.post1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +zipp==3.16.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.12/darwin-crypto.txt b/requirements/static/ci/py3.12/darwin-crypto.txt new file mode 100644 index 00000000000..e67841ff8fa --- /dev/null +++ b/requirements/static/ci/py3.12/darwin-crypto.txt @@ -0,0 +1,10 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url 
--output-file=requirements/static/ci/py3.12/darwin-crypto.txt requirements/static/ci/crypto.in +# +m2crypto==0.38.0 + # via -r requirements/static/ci/crypto.in +pycryptodome==3.9.7 + # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.12/darwin.txt b/requirements/static/ci/py3.12/darwin.txt new file mode 100644 index 00000000000..00477de66c5 --- /dev/null +++ b/requirements/static/ci/py3.12/darwin.txt @@ -0,0 +1,563 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/darwin.txt requirements/darwin.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/darwin.in requirements/static/pkg/darwin.in +# +aiohttp==3.9.0 + # via etcd3-py +aiosignal==1.3.1 + # via aiohttp +annotated-types==0.6.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # pydantic +apache-libcloud==3.7.0 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +asn1crypto==1.5.1 + # via + # certvalidator + # oscrypto +attrs==23.1.0 + # via + # aiohttp + # jsonschema + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +autocommand==2.2.2 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # jaraco.text +bcrypt==4.0.1 + # via + # -r requirements/static/ci/common.in + # paramiko +boto3==1.26.152 + # via + # -r requirements/static/ci/common.in + # moto +boto==2.49.0 + # via -r requirements/static/ci/common.in +botocore==1.29.152 + # via + # boto3 + # moto + # s3transfer +cachetools==3.1.0 + # via google-auth +cassandra-driver==3.23.0 + # via -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/static/ci/common.in + # kubernetes + # requests +certvalidator==0.11.1 + # via vcert +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # 
-r requirements/static/ci/common.in + # cryptography + # napalm + # pygit2 + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # requests +cheetah3==3.2.6.post2 + # via -r requirements/static/ci/common.in +cheroot==10.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # cherrypy +cherrypy==18.8.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in +click==8.1.3 + # via geomet +clustershell==1.9.1 + # via -r requirements/static/ci/common.in +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt +croniter==1.3.15 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +cryptography==41.0.5 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # etcd3-py + # moto + # paramiko + # pyopenssl + # vcert +distlib==0.3.7 + # via virtualenv +distro==1.8.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # pytest-skip-markers +dnspython==2.3.0 + # via + # -r requirements/static/ci/common.in + # python-etcd +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 + # via -r requirements/static/ci/common.in +filelock==3.13.1 + # via virtualenv +flaky==3.7.0 + # via -r requirements/pytest.txt +frozenlist==1.3.0 + # via + # aiohttp + # aiosignal +future==0.18.3 + # via + # napalm + # textfsm +genshi==0.7.7 + # via -r requirements/static/ci/common.in +geomet==0.1.2 + # via cassandra-driver +gitdb==4.0.10 + # via gitpython +gitpython==3.1.40 + # via -r requirements/static/ci/common.in +google-auth==2.19.1 + # via kubernetes +hglib==2.6.2 + # via -r requirements/static/ci/darwin.in +idna==3.4 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # etcd3-py + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # 
contextvars +importlib-metadata==6.6.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt +inflect==7.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # jaraco.text +iniconfig==2.0.0 + # via pytest +ipaddress==1.0.23 + # via kubernetes +jaraco.collections==4.1.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # cherrypy +jaraco.context==4.3.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # jaraco.text +jaraco.functools==3.7.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.11.1 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # junos-eznc + # moto + # napalm +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore +jsonschema==3.2.0 + # via -r requirements/static/ci/common.in +junit-xml==1.9 + # via -r requirements/static/ci/common.in +junos-eznc==2.6.7 ; sys_platform != "win32" + # via + # -r requirements/static/ci/common.in + # napalm +jxmlease==1.0.3 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +keyring==5.7.1 + # via -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via -r requirements/static/ci/common.in +looseversion==1.3.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt +lxml==4.9.2 + # via + # junos-eznc + # napalm + # ncclient + # xmldiff +mako==1.2.4 + # via -r requirements/static/ci/common.in +markupsafe==2.1.3 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # jinja2 + # mako + # werkzeug +mercurial==6.0.1 + # via -r requirements/static/ci/darwin.in +mock==5.1.0 + # via -r requirements/pytest.txt 
+more-itertools==8.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/pytest.txt + # cheroot + # cherrypy + # jaraco.functools + # jaraco.text +moto==4.1.11 + # via -r requirements/static/ci/common.in +msgpack==1.0.7 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # pytest-salt-factories +multidict==6.0.2 + # via + # aiohttp + # yarl +napalm==4.1.0 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +ncclient==0.6.13 + # via + # junos-eznc + # napalm +netaddr==0.8.0 + # via + # junos-eznc + # napalm + # pyeapi +netmiko==4.2.0 + # via napalm +netutils==1.6.0 + # via napalm +ntc-templates==4.0.1 + # via netmiko +oscrypto==1.3.0 + # via certvalidator +packaging==23.1 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # docker + # pytest +paramiko==3.3.1 + # via + # junos-eznc + # napalm + # ncclient + # netmiko + # scp +passlib==1.7.4 + # via -r requirements/static/ci/common.in +pathspec==0.11.1 + # via yamllint +platformdirs==4.0.0 + # via virtualenv +pluggy==1.0.0 + # via pytest +portend==3.1.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # cherrypy +psutil==5.9.6 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pyasn1-modules==0.2.4 + # via google-auth +pyasn1==0.4.8 + # via + # pyasn1-modules + # rsa +pycparser==2.21 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # cffi +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/crypto.txt +pydantic-core==2.14.5 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # pydantic +pydantic==2.5.2 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # inflect +pyeapi==1.0.0 + # via napalm +pygit2==1.13.1 + # via -r requirements/static/ci/darwin.in 
+pynacl==1.5.0 + # via + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # etcd3-py +pyparsing==3.0.9 + # via junos-eznc +pyrsistent==0.19.3 + # via jsonschema +pyserial==3.5 + # via + # junos-eznc + # netmiko +pytest-custom-exit-code==0.3.0 + # via -r requirements/pytest.txt +pytest-helpers-namespace==2021.12.29 + # via + # -r requirements/pytest.txt + # pytest-salt-factories + # pytest-shell-utilities +pytest-httpserver==1.0.8 + # via -r requirements/pytest.txt +pytest-salt-factories==1.0.0rc28 + # via -r requirements/pytest.txt +pytest-shell-utilities==1.8.0 + # via pytest-salt-factories +pytest-skip-markers==1.5.0 + # via + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pytest-subtests==0.4.0 + # via -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via pytest-salt-factories +pytest-timeout==1.4.2 + # via -r requirements/pytest.txt +pytest==7.2.0 + # via + # -r requirements/pytest.txt + # pytest-custom-exit-code + # pytest-helpers-namespace + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-subtests + # pytest-system-statistics + # pytest-timeout +python-dateutil==2.8.2 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # botocore + # croniter + # kubernetes + # moto + # vcert +python-etcd==0.4.5 + # via -r requirements/static/ci/common.in +python-gnupg==0.5.1 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt +pytz==2023.3.post1 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # tempora +pyvmomi==8.0.1.0.1 + # via -r requirements/static/ci/common.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # clustershell + # junos-eznc + # kubernetes + # napalm + # netmiko + # pytest-salt-factories + # responses + 
# yamllint + # yamlordereddictloader +pyzmq==25.1.1 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/zeromq.txt + # pytest-salt-factories +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # apache-libcloud + # docker + # etcd3-py + # kubernetes + # moto + # napalm + # responses + # vcert +responses==0.23.1 + # via moto +rfc3987==1.3.8 + # via -r requirements/static/ci/common.in +rsa==4.9 + # via google-auth +s3transfer==0.6.1 + # via boto3 +scp==0.14.5 + # via + # junos-eznc + # napalm + # netmiko +semantic-version==2.10.0 + # via etcd3-py +setproctitle==1.3.2 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt +six==1.16.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # cassandra-driver + # etcd3-py + # genshi + # geomet + # google-auth + # jsonschema + # junit-xml + # junos-eznc + # kubernetes + # ncclient + # python-dateutil + # pyvmomi + # textfsm + # transitions + # vcert + # websocket-client +smmap==5.0.0 + # via gitdb +sqlparse==0.4.4 + # via -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via -r requirements/static/ci/common.in +tempora==5.3.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # portend +textfsm==1.1.3 + # via + # napalm + # netmiko + # ntc-templates +timelib==0.3.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt +toml==0.10.2 + # via -r requirements/static/ci/common.in +tornado==6.3.3 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt +transitions==0.9.0 + # via junos-eznc +ttp-templates==0.3.5 + # via napalm +ttp==0.9.5 + # via + # napalm + # ttp-templates +types-pyyaml==6.0.12.12 + # via responses +typing-extensions==4.8.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # inflect + # napalm + # pydantic + # 
pydantic-core + # pytest-shell-utilities + # pytest-system-statistics +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # botocore + # docker + # google-auth + # kubernetes + # python-etcd + # requests + # responses +vcert==0.7.4 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +virtualenv==20.24.7 + # via + # -r requirements/static/ci/common.in + # pytest-salt-factories +watchdog==3.0.0 + # via -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # docker + # kubernetes +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # moto + # pytest-httpserver +xmldiff==2.6.3 + # via -r requirements/static/ci/common.in +xmltodict==0.13.0 + # via moto +yamllint==1.32.0 + # via -r requirements/static/ci/darwin.in +yamlordereddictloader==0.4.0 + # via junos-eznc +yarl==1.9.2 + # via aiohttp +zc.lockfile==3.0.post1 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # cherrypy +zipp==3.16.2 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.12/docs.txt b/requirements/static/ci/py3.12/docs.txt new file mode 100644 index 00000000000..853673e67fb --- /dev/null +++ b/requirements/static/ci/py3.12/docs.txt @@ -0,0 +1,268 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/docs.txt requirements/base.txt requirements/static/ci/docs.in requirements/zeromq.txt +# +alabaster==0.7.13 + # via sphinx +annotated-types==0.6.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # pydantic +autocommand==2.2.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # jaraco.text +babel==2.12.1 + # via sphinx +certifi==2023.07.22 + # via + # -c requirements/static/ci/py3.12/linux.txt + # requests +cffi==1.14.6 
+ # via + # -c requirements/static/ci/py3.12/linux.txt + # cryptography +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # requests +cheroot==10.0.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +cherrypy==18.8.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/docs.in +contextvars==2.4 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +cryptography==41.0.5 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # pyopenssl +distro==1.8.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +docutils==0.20.1 + # via sphinx +idna==3.4 + # via + # -c requirements/static/ci/py3.12/linux.txt + # requests +imagesize==1.4.1 + # via sphinx +immutables==0.15 + # via + # -c requirements/static/ci/py3.12/linux.txt + # contextvars +importlib-metadata==6.6.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +inflect==7.0.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # jaraco.text +jaraco.collections==4.1.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +jaraco.context==4.3.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # jaraco.text +jaraco.functools==3.7.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.11.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/docs.in + # myst-docutils + # sphinx +jmespath==1.0.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +linkify-it-py==1.0.3 + # via myst-docutils +looseversion==1.3.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +markdown-it-py==2.2.0 + # via 
+ # mdit-py-plugins + # myst-docutils +markupsafe==2.1.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # jinja2 +mdit-py-plugins==0.3.5 + # via myst-docutils +mdurl==0.1.2 + # via markdown-it-py +more-itertools==5.0.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # cheroot + # cherrypy + # jaraco.functools + # jaraco.text +msgpack==1.0.7 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +myst-docutils[linkify]==1.0.0 + # via -r requirements/static/ci/docs.in +packaging==23.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # sphinx +portend==3.1.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +psutil==5.9.6 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +pycparser==2.21 + # via + # -c requirements/static/ci/py3.12/linux.txt + # cffi +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/crypto.txt +pydantic-core==2.14.5 + # via + # -c requirements/static/ci/py3.12/linux.txt + # pydantic +pydantic==2.5.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # inflect +pyenchant==3.2.2 + # via sphinxcontrib-spelling +pygments==2.15.1 + # via sphinx +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +python-dateutil==2.8.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +python-gnupg==0.5.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +pytz==2023.3.post1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # tempora +pyyaml==6.0.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # myst-docutils +pyzmq==25.1.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/zeromq.txt +requests==2.31.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r 
requirements/base.txt + # sphinx +rpm-vercmp==0.1.2 ; sys_platform == "linux" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +setproctitle==1.3.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +six==1.16.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # more-itertools + # python-dateutil + # sphinxcontrib-httpdomain +snowballstemmer==2.2.0 + # via sphinx +sphinx==7.0.1 ; python_version >= "3.9" + # via + # -r requirements/static/ci/docs.in + # sphinxcontrib-httpdomain + # sphinxcontrib-spelling +sphinxcontrib-applehelp==1.0.4 + # via sphinx +sphinxcontrib-devhelp==1.0.2 + # via sphinx +sphinxcontrib-htmlhelp==2.0.1 + # via sphinx +sphinxcontrib-httpdomain==1.8.1 + # via -r requirements/static/ci/docs.in +sphinxcontrib-jsmath==1.0.1 + # via sphinx +sphinxcontrib-qthelp==1.0.3 + # via sphinx +sphinxcontrib-serializinghtml==1.1.5 + # via sphinx +sphinxcontrib-spelling==8.0.0 + # via -r requirements/static/ci/docs.in +tempora==5.3.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # portend +timelib==0.3.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +tornado==6.3.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +typing-extensions==4.8.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # inflect + # pydantic + # pydantic-core +uc-micro-py==1.0.1 + # via linkify-it-py +urllib3==1.26.18 + # via + # -c requirements/static/ci/py3.12/linux.txt + # requests +zc.lockfile==3.0.post1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +zipp==3.16.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.12/freebsd-crypto.txt b/requirements/static/ci/py3.12/freebsd-crypto.txt new file mode 100644 index 00000000000..7bdbdbc6cad --- 
/dev/null +++ b/requirements/static/ci/py3.12/freebsd-crypto.txt @@ -0,0 +1,10 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/freebsd-crypto.txt requirements/static/ci/crypto.in +# +m2crypto==0.38.0 + # via -r requirements/static/ci/crypto.in +pycryptodome==3.9.7 + # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.12/freebsd.txt b/requirements/static/ci/py3.12/freebsd.txt new file mode 100644 index 00000000000..10218e73d17 --- /dev/null +++ b/requirements/static/ci/py3.12/freebsd.txt @@ -0,0 +1,569 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/freebsd.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/freebsd.in requirements/static/pkg/freebsd.in requirements/zeromq.txt +# +aiohttp==3.9.0 + # via etcd3-py +aiosignal==1.3.1 + # via aiohttp +annotated-types==0.6.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # pydantic +apache-libcloud==3.7.0 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +asn1crypto==1.5.1 + # via + # certvalidator + # oscrypto +attrs==23.1.0 + # via + # aiohttp + # jsonschema + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +autocommand==2.2.2 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # jaraco.text +bcrypt==4.0.1 + # via + # -r requirements/static/ci/common.in + # paramiko +boto3==1.26.152 + # via + # -r requirements/static/ci/common.in + # moto +boto==2.49.0 + # via -r requirements/static/ci/common.in +botocore==1.29.152 + # via + # boto3 + # moto + # s3transfer +cachetools==3.1.0 + # via google-auth +cassandra-driver==3.24.0 + # via -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c 
requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/static/ci/common.in + # kubernetes + # requests +certvalidator==0.11.1 + # via vcert +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/static/ci/common.in + # cryptography + # napalm + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # requests +cheetah3==3.2.6.post2 + # via -r requirements/static/ci/common.in +cheroot==10.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # cherrypy +cherrypy==18.8.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in +click==8.1.3 + # via geomet +clustershell==1.9.1 + # via -r requirements/static/ci/common.in +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt +croniter==1.3.15 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +cryptography==41.0.5 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # etcd3-py + # moto + # paramiko + # pyopenssl + # vcert +distlib==0.3.7 + # via virtualenv +distro==1.8.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # pytest-skip-markers +dnspython==2.3.0 + # via + # -r requirements/static/ci/common.in + # python-etcd +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 + # via -r requirements/static/ci/common.in +filelock==3.13.1 + # via virtualenv +flaky==3.7.0 + # via -r requirements/pytest.txt +frozenlist==1.3.0 + # via + # aiohttp + # aiosignal +future==0.18.3 + # via + # napalm + # textfsm +genshi==0.7.7 + # via -r requirements/static/ci/common.in +geomet==0.2.1.post1 + # via cassandra-driver +gitdb==4.0.10 + # via gitpython +gitpython==3.1.40 + # via -r requirements/static/ci/common.in +google-auth==2.19.1 + # via kubernetes +hglib==2.6.2 + # 
via -r requirements/static/ci/freebsd.in +idna==3.4 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # etcd3-py + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # contextvars +importlib-metadata==6.6.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt +inflect==7.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # jaraco.text +iniconfig==2.0.0 + # via pytest +ipaddress==1.0.23 + # via kubernetes +jaraco.collections==4.1.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # cherrypy +jaraco.context==4.3.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # jaraco.text +jaraco.functools==3.7.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.11.1 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # junos-eznc + # moto + # napalm +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore +jsonschema==3.2.0 + # via -r requirements/static/ci/common.in +junit-xml==1.9 + # via -r requirements/static/ci/common.in +junos-eznc==2.6.7 ; sys_platform != "win32" + # via + # -r requirements/static/ci/common.in + # napalm +jxmlease==1.0.3 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +kazoo==2.9.0 ; sys_platform != "win32" and sys_platform != "darwin" + # via -r requirements/static/ci/common.in +keyring==5.7.1 + # via -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via -r requirements/static/ci/common.in +libnacl==1.8.0 ; sys_platform != "win32" and sys_platform != "darwin" + # via -r requirements/static/ci/common.in +looseversion==1.3.0 + # via + # -c 
requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt +lxml==4.9.2 + # via + # junos-eznc + # napalm + # ncclient + # xmldiff +mako==1.2.4 + # via -r requirements/static/ci/common.in +markupsafe==2.1.3 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # jinja2 + # mako + # werkzeug +mercurial==6.0.1 + # via -r requirements/static/ci/freebsd.in +mock==5.1.0 + # via -r requirements/pytest.txt +more-itertools==5.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/pytest.txt + # cheroot + # cherrypy + # jaraco.functools + # jaraco.text +moto==4.1.11 + # via -r requirements/static/ci/common.in +msgpack==1.0.7 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # pytest-salt-factories +multidict==6.0.2 + # via + # aiohttp + # yarl +napalm==4.1.0 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +ncclient==0.6.13 + # via + # junos-eznc + # napalm +netaddr==0.8.0 + # via + # junos-eznc + # napalm + # pyeapi +netmiko==4.2.0 + # via napalm +netutils==1.6.0 + # via napalm +ntc-templates==4.0.1 + # via netmiko +oscrypto==1.3.0 + # via certvalidator +packaging==23.1 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # docker + # pytest +paramiko==3.3.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -r requirements/static/ci/common.in + # junos-eznc + # napalm + # ncclient + # netmiko + # scp +passlib==1.7.4 + # via -r requirements/static/ci/common.in +pathspec==0.11.1 + # via yamllint +platformdirs==4.0.0 + # via virtualenv +pluggy==1.0.0 + # via pytest +portend==3.1.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # cherrypy +psutil==5.9.6 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics 
+pyasn1-modules==0.2.4 + # via google-auth +pyasn1==0.4.8 + # via + # pyasn1-modules + # rsa +pycparser==2.21 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # cffi +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/crypto.txt +pydantic-core==2.14.5 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # pydantic +pydantic==2.5.2 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # inflect +pyeapi==1.0.0 + # via napalm +pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" + # via -r requirements/static/ci/common.in +pynacl==1.5.0 + # via + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # etcd3-py +pyparsing==3.0.9 + # via junos-eznc +pyrsistent==0.19.3 + # via jsonschema +pyserial==3.5 + # via + # junos-eznc + # netmiko +pytest-custom-exit-code==0.3.0 + # via -r requirements/pytest.txt +pytest-helpers-namespace==2021.12.29 + # via + # -r requirements/pytest.txt + # pytest-salt-factories + # pytest-shell-utilities +pytest-httpserver==1.0.8 + # via -r requirements/pytest.txt +pytest-salt-factories==1.0.0rc28 + # via -r requirements/pytest.txt +pytest-shell-utilities==1.8.0 + # via pytest-salt-factories +pytest-skip-markers==1.5.0 + # via + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pytest-subtests==0.4.0 + # via -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via pytest-salt-factories +pytest-timeout==1.4.2 + # via -r requirements/pytest.txt +pytest==7.2.0 + # via + # -r requirements/pytest.txt + # pytest-custom-exit-code + # pytest-helpers-namespace + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-subtests + # pytest-system-statistics + # pytest-timeout +python-dateutil==2.8.2 + # via + # -c 
requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # botocore + # croniter + # kubernetes + # moto + # vcert +python-etcd==0.4.5 + # via -r requirements/static/ci/common.in +python-gnupg==0.5.1 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt +pytz==2023.3.post1 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # tempora +pyvmomi==8.0.1.0.1 + # via -r requirements/static/ci/common.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # clustershell + # junos-eznc + # kubernetes + # napalm + # netmiko + # pytest-salt-factories + # responses + # yamllint + # yamlordereddictloader +pyzmq==25.1.1 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/zeromq.txt + # pytest-salt-factories +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # apache-libcloud + # docker + # etcd3-py + # kubernetes + # moto + # napalm + # responses + # vcert +responses==0.23.1 + # via moto +rfc3987==1.3.8 + # via -r requirements/static/ci/common.in +rsa==4.9 + # via google-auth +s3transfer==0.6.1 + # via boto3 +scp==0.14.5 + # via + # junos-eznc + # napalm + # netmiko +semantic-version==2.10.0 + # via etcd3-py +setproctitle==1.3.2 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt +six==1.16.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # cassandra-driver + # etcd3-py + # genshi + # geomet + # google-auth + # jsonschema + # junit-xml + # junos-eznc + # kazoo + # kubernetes + # more-itertools + # ncclient + # python-dateutil + # pyvmomi + # textfsm + # transitions + # vcert + # websocket-client +smmap==5.0.0 + # via gitdb +sqlparse==0.4.4 + # via -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via -r requirements/static/ci/common.in 
+tempora==5.3.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # portend +textfsm==1.1.3 + # via + # napalm + # netmiko + # ntc-templates +timelib==0.3.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt +toml==0.10.2 + # via -r requirements/static/ci/common.in +tornado==6.3.3 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt +transitions==0.9.0 + # via junos-eznc +ttp-templates==0.3.5 + # via napalm +ttp==0.9.5 + # via + # napalm + # ttp-templates +types-pyyaml==6.0.12.12 + # via responses +typing-extensions==4.8.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # inflect + # napalm + # pydantic + # pydantic-core + # pytest-shell-utilities + # pytest-system-statistics +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # botocore + # docker + # google-auth + # kubernetes + # python-etcd + # requests + # responses +vcert==0.7.4 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +virtualenv==20.24.7 + # via + # -r requirements/static/ci/common.in + # pytest-salt-factories +watchdog==3.0.0 + # via -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # docker + # kubernetes +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # moto + # pytest-httpserver +xmldiff==2.6.3 + # via -r requirements/static/ci/common.in +xmltodict==0.13.0 + # via moto +yamllint==1.32.0 + # via -r requirements/static/ci/freebsd.in +yamlordereddictloader==0.4.0 + # via junos-eznc +yarl==1.9.2 + # via aiohttp +zc.lockfile==3.0.post1 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # cherrypy +zipp==3.16.2 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.12/lint.txt 
b/requirements/static/ci/py3.12/lint.txt new file mode 100644 index 00000000000..b15145f85a4 --- /dev/null +++ b/requirements/static/ci/py3.12/lint.txt @@ -0,0 +1,815 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/lint.txt requirements/base.txt requirements/static/ci/common.in requirements/static/ci/lint.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt +# +aiohttp-retry==2.8.3 + # via twilio +aiohttp==3.9.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp-retry + # etcd3-py + # twilio +aiosignal==1.3.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp +annotated-types==0.6.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # pydantic +ansible-core==2.16.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # ansible +ansible==9.0.1 ; python_version >= "3.9" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +anyio==4.1.0 + # via httpcore +apache-libcloud==3.7.0 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +asn1crypto==1.5.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # certvalidator + # oscrypto +astroid==2.3.3 + # via pylint +attrs==23.1.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp + # jsonschema +autocommand==2.2.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # jaraco.text +bcrypt==4.0.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # paramiko +boto3==1.26.152 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # moto +boto==2.49.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r 
requirements/static/ci/common.in +botocore==1.29.152 + # via + # -c requirements/static/ci/py3.12/linux.txt + # boto3 + # moto + # s3transfer +cachetools==4.2.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # google-auth +cassandra-driver==3.28.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +certifi==2023.7.22 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # httpcore + # httpx + # kubernetes + # requests +certvalidator==0.11.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # vcert +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # cryptography + # napalm + # pygit2 + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # requests +cheetah3==3.2.6.post2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +cheroot==10.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +cherrypy==18.8.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in +click==8.1.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # geomet +clustershell==1.9.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +croniter==1.3.15 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +cryptography==41.0.5 + # via + 
# -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # ansible-core + # etcd3-py + # moto + # paramiko + # pyopenssl + # vcert +distlib==0.3.7 + # via + # -c requirements/static/ci/py3.12/linux.txt + # virtualenv +distro==1.8.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +dnspython==2.3.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # python-etcd +etcd3-py==0.1.6 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +filelock==3.13.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # virtualenv +frozenlist==1.3.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp + # aiosignal +future==0.18.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # napalm + # textfsm +genshi==0.7.7 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +geomet==0.2.1.post1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # cassandra-driver +gitdb==4.0.10 + # via + # -c requirements/static/ci/py3.12/linux.txt + # gitpython +gitpython==3.1.40 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +google-auth==2.19.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # kubernetes +h11==0.14.0 + # via httpcore +hglib==2.6.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +httpcore==0.17.3 + # via httpx +httpx==0.24.1 + # via python-telegram-bot +idna==3.4 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # anyio + # etcd3-py + # httpx + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # contextvars 
+importlib-metadata==6.6.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +inflect==7.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # jaraco.text +ipaddress==1.0.23 + # via + # -c requirements/static/ci/py3.12/linux.txt + # kubernetes +isort==4.3.21 + # via pylint +jaraco.collections==4.1.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +jaraco.context==4.3.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # jaraco.text +jaraco.functools==3.7.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.11.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # ansible-core + # junos-eznc + # moto + # napalm +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore +jsonschema==3.2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +junit-xml==1.9 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +junos-eznc==2.6.7 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # napalm +jxmlease==1.0.3 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +kazoo==2.9.0 
; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +keyring==5.7.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +lazy-object-proxy==1.4.3 + # via astroid +libnacl==1.8.0 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +looseversion==1.3.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +lxml==4.9.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # junos-eznc + # napalm + # ncclient + # xmldiff +mako==1.2.4 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +markupsafe==2.1.3 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # jinja2 + # mako + # werkzeug +mccabe==0.6.1 + # via pylint +mercurial==6.0.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +modernize==0.5 + # via saltpylint +more-itertools==5.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cheroot + # cherrypy + # jaraco.functools + # jaraco.text +moto==4.1.11 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +msgpack==1.0.7 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +multidict==6.0.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp + # yarl +napalm==4.1.0 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r 
requirements/static/ci/common.in +ncclient==0.6.13 + # via + # -c requirements/static/ci/py3.12/linux.txt + # junos-eznc + # napalm +netaddr==0.8.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # junos-eznc + # napalm + # pyeapi +netmiko==4.2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # napalm +netutils==1.6.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # napalm +ntc-templates==4.0.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # netmiko +oscrypto==1.3.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # certvalidator +packaging==23.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # ansible-core +paramiko==3.3.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # junos-eznc + # napalm + # ncclient + # netmiko + # scp +passlib==1.7.4 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +pathspec==0.11.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # yamllint +platformdirs==4.0.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # virtualenv +portend==3.1.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +psutil==5.9.6 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +pyasn1-modules==0.2.4 + # via + # -c requirements/static/ci/py3.12/linux.txt + # google-auth +pyasn1==0.4.8 + # via + # -c requirements/static/ci/py3.12/linux.txt + # pyasn1-modules + # rsa +pycodestyle==2.10.0 + # via saltpylint +pycparser==2.21 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cffi +pycryptodomex==3.9.8 + # via + # -c 
requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/crypto.txt +pydantic-core==2.14.5 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # pydantic +pydantic==2.5.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # inflect +pyeapi==1.0.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # napalm +pygit2==1.13.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +pyiface==0.0.11 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +pyjwt==2.4.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # twilio +pylint==2.4.4 + # via + # -r requirements/static/ci/lint.in + # saltpylint +pymysql==1.1.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +pynacl==1.5.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # etcd3-py +pyparsing==3.0.9 + # via + # -c requirements/static/ci/py3.12/linux.txt + # junos-eznc +pyrsistent==0.19.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # jsonschema +pyserial==3.5 + # via + # -c requirements/static/ci/py3.12/linux.txt + # junos-eznc + # netmiko +python-consul==1.1.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +python-dateutil==2.8.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # 
-r requirements/base.txt + # botocore + # croniter + # kubernetes + # moto + # vcert +python-etcd==0.4.5 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +python-gnupg==0.5.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +python-telegram-bot==20.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +pytz==2023.3.post1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # tempora + # twilio +pyvmomi==8.0.1.0.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # ansible-core + # clustershell + # junos-eznc + # kubernetes + # napalm + # netmiko + # responses + # yamllint + # yamlordereddictloader +pyzmq==25.1.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/zeromq.txt +redis-py-cluster==2.1.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +redis==3.5.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # redis-py-cluster +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # apache-libcloud + # etcd3-py + # kubernetes + # moto + # napalm + # python-consul + # responses + # twilio + # vcert +resolvelib==1.0.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # ansible-core +responses==0.23.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # moto +rfc3987==1.3.8 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r 
requirements/static/ci/common.in +rpm-vercmp==0.1.2 ; sys_platform == "linux" + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +rsa==4.9 + # via + # -c requirements/static/ci/py3.12/linux.txt + # google-auth +s3transfer==0.6.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # boto3 +saltpylint==2023.8.3 + # via -r requirements/static/ci/lint.in +scp==0.14.5 + # via + # -c requirements/static/ci/py3.12/linux.txt + # junos-eznc + # napalm + # netmiko +semantic-version==2.10.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # etcd3-py +setproctitle==1.3.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +six==1.16.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # astroid + # cassandra-driver + # etcd3-py + # genshi + # geomet + # google-auth + # jsonschema + # junit-xml + # junos-eznc + # kazoo + # kubernetes + # more-itertools + # ncclient + # python-consul + # python-dateutil + # pyvmomi + # textfsm + # transitions + # vcert + # websocket-client +slack-bolt==1.18.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +slack-sdk==3.21.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # slack-bolt +smmap==5.0.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # gitdb +sniffio==1.3.0 + # via + # anyio + # httpcore + # httpx +sqlparse==0.4.4 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +tempora==5.3.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # portend +textfsm==1.1.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # 
napalm + # netmiko + # ntc-templates +timelib==0.3.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +toml==0.10.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # -r requirements/static/ci/lint.in +tornado==6.3.3 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +transitions==0.9.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # junos-eznc +ttp-templates==0.3.5 + # via + # -c requirements/static/ci/py3.12/linux.txt + # napalm +ttp==0.9.5 + # via + # -c requirements/static/ci/py3.12/linux.txt + # napalm + # ttp-templates +twilio==8.2.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +types-pyyaml==6.0.12.12 + # via responses +typing-extensions==4.8.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # inflect + # napalm + # pydantic + # pydantic-core +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # botocore + # google-auth + # kubernetes + # python-etcd + # requests + # responses +vcert==0.7.4 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +virtualenv==20.24.7 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +watchdog==3.0.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # kubernetes +wempy==0.2.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # moto +wrapt==1.11.2 + # via 
astroid +xmldiff==2.6.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +xmltodict==0.13.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # moto +yamllint==1.32.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +yamlordereddictloader==0.4.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # junos-eznc +yarl==1.9.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp +zc.lockfile==3.0.post1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +zipp==3.16.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.12/linux-crypto.txt b/requirements/static/ci/py3.12/linux-crypto.txt new file mode 100644 index 00000000000..be01a017e8b --- /dev/null +++ b/requirements/static/ci/py3.12/linux-crypto.txt @@ -0,0 +1,10 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/linux-crypto.txt requirements/static/ci/crypto.in +# +m2crypto==0.38.0 + # via -r requirements/static/ci/crypto.in +pycryptodome==3.9.7 + # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.12/linux.txt b/requirements/static/ci/py3.12/linux.txt new file mode 100644 index 00000000000..164381b6da1 --- /dev/null +++ b/requirements/static/ci/py3.12/linux.txt @@ -0,0 +1,627 @@ +# +# This file is autogenerated by pip-compile
# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/linux.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/linux.in requirements/static/pkg/linux.in
requirements/zeromq.txt +# +aiohttp==3.9.0 + # via etcd3-py +aiosignal==1.3.1 + # via aiohttp +annotated-types==0.6.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # pydantic +ansible-core==2.16.0 + # via ansible +ansible==9.0.1 + # via -r requirements/static/ci/linux.in +apache-libcloud==3.7.0 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +apscheduler==3.6.3 + # via python-telegram-bot +asn1crypto==1.5.1 + # via + # certvalidator + # oscrypto +attrs==23.1.0 + # via + # aiohttp + # jsonschema + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +autocommand==2.2.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # jaraco.text +backports.entry-points-selectable==1.1.0 + # via virtualenv +bcrypt==4.0.1 + # via + # -r requirements/static/ci/common.in + # paramiko +boto3==1.26.152 + # via + # -r requirements/static/ci/common.in + # moto +boto==2.49.0 + # via -r requirements/static/ci/common.in +botocore==1.29.152 + # via + # boto3 + # moto + # s3transfer +cachetools==4.2.2 + # via + # google-auth + # python-telegram-bot +cassandra-driver==3.28.0 + # via -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/static/ci/common.in + # kubernetes + # python-telegram-bot + # requests +certvalidator==0.11.1 + # via vcert +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/static/ci/common.in + # bcrypt + # cryptography + # napalm + # pygit2 + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # requests +cheetah3==3.2.6.post2 + # via -r requirements/static/ci/common.in +cheroot==10.0.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # cherrypy +cherrypy==18.8.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # -r 
requirements/static/ci/common.in +click==8.1.3 + # via geomet +clustershell==1.9.1 + # via -r requirements/static/ci/common.in +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt +croniter==1.3.15 + # via -r requirements/static/ci/common.in +cryptography==41.0.5 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # ansible-core + # etcd3-py + # moto + # paramiko + # pyopenssl + # vcert +distlib==0.3.7 + # via virtualenv +distro==1.8.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # pytest-skip-markers +dnspython==2.3.0 + # via + # -r requirements/static/ci/common.in + # python-etcd +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 + # via -r requirements/static/ci/common.in +filelock==3.13.1 + # via virtualenv +flaky==3.7.0 + # via -r requirements/pytest.txt +frozenlist==1.3.0 + # via + # aiohttp + # aiosignal +future==0.18.3 + # via + # napalm + # textfsm +genshi==0.7.7 + # via -r requirements/static/ci/common.in +geomet==0.2.1.post1 + # via cassandra-driver +gitdb==4.0.10 + # via gitpython +gitpython==3.1.40 + # via -r requirements/static/ci/common.in +google-auth==2.19.1 + # via kubernetes +hglib==2.6.2 + # via -r requirements/static/ci/linux.in +idna==3.4 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # etcd3-py + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # contextvars +importlib-metadata==6.6.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt +inflect==7.0.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # jaraco.text +iniconfig==2.0.0 + # via pytest +ipaddress==1.0.23 + # via kubernetes +jaraco.collections==4.1.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # cherrypy +jaraco.context==4.3.0 + # via + # -c 
requirements/static/ci/../pkg/py3.11/linux.txt + # jaraco.text +jaraco.functools==3.7.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.11.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # ansible-core + # junos-eznc + # moto + # napalm +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore +jsonschema==3.2.0 + # via -r requirements/static/ci/common.in +junit-xml==1.9 + # via -r requirements/static/ci/common.in +junos-eznc==2.6.7 ; sys_platform != "win32" + # via + # -r requirements/static/ci/common.in + # napalm +jxmlease==1.0.3 + # via -r requirements/static/ci/common.in +kazoo==2.9.0 + # via -r requirements/static/ci/common.in +keyring==5.7.1 + # via -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via -r requirements/static/ci/common.in +libnacl==1.8.0 + # via -r requirements/static/ci/common.in +looseversion==1.3.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt +lxml==4.9.2 + # via + # junos-eznc + # napalm + # ncclient + # xmldiff +mako==1.2.4 + # via -r requirements/static/ci/common.in +markupsafe==2.1.3 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # jinja2 + # mako + # moto + # werkzeug +mercurial==6.0.1 + # via -r requirements/static/ci/linux.in +mock==5.1.0 + # via -r requirements/pytest.txt +more-itertools==5.0.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/pytest.txt + # cheroot + # cherrypy + # jaraco.functools + # jaraco.text +moto==4.1.11 + # via -r requirements/static/ci/common.in +msgpack==1.0.7 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt 
+ # pytest-salt-factories +multidict==6.0.2 + # via + # aiohttp + # yarl +napalm==4.1.0 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +ncclient==0.6.13 + # via + # junos-eznc + # napalm +netaddr==0.8.0 + # via + # junos-eznc + # napalm + # pyeapi +netmiko==4.2.0 + # via napalm +netutils==1.6.0 + # via napalm +ntc-templates==4.0.1 + # via netmiko +oscrypto==1.3.0 + # via certvalidator +packaging==23.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # ansible-core + # docker + # pytest +paramiko==3.3.1 + # via + # -r requirements/static/ci/common.in + # junos-eznc + # napalm + # ncclient + # netmiko + # scp +passlib==1.7.4 + # via -r requirements/static/ci/common.in +pathspec==0.11.1 + # via yamllint +pathtools==0.1.2 + # via watchdog +platformdirs==4.0.0 + # via virtualenv +pluggy==1.0.0 + # via pytest +portend==3.1.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # cherrypy +psutil==5.9.6 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pyasn1-modules==0.2.4 + # via google-auth +pyasn1==0.4.8 + # via + # pyasn1-modules + # rsa +pycparser==2.21 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # cffi +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/crypto.txt +pydantic-core==2.14.5 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # pydantic +pydantic==2.5.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # inflect +pyeapi==1.0.0 + # via napalm +pygit2==1.13.1 + # via -r requirements/static/ci/linux.in +pyiface==0.0.11 + # via -r requirements/static/ci/linux.in +pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" + # via -r requirements/static/ci/common.in +pyjwt==2.4.0 + # via twilio +pymysql==1.1.0 + # via 
-r requirements/static/ci/linux.in +pynacl==1.5.0 + # via + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # etcd3-py +pyparsing==3.0.9 + # via junos-eznc +pyrsistent==0.19.3 + # via jsonschema +pyserial==3.5 + # via + # junos-eznc + # netmiko +pytest-custom-exit-code==0.3.0 + # via -r requirements/pytest.txt +pytest-helpers-namespace==2021.12.29 + # via + # -r requirements/pytest.txt + # pytest-salt-factories + # pytest-shell-utilities +pytest-httpserver==1.0.8 + # via -r requirements/pytest.txt +pytest-salt-factories==1.0.0rc28 + # via -r requirements/pytest.txt +pytest-shell-utilities==1.8.0 + # via pytest-salt-factories +pytest-skip-markers==1.5.0 + # via + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pytest-subtests==0.4.0 + # via -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via pytest-salt-factories +pytest-timeout==1.4.2 + # via -r requirements/pytest.txt +pytest==7.2.0 + # via + # -r requirements/pytest.txt + # pytest-custom-exit-code + # pytest-helpers-namespace + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-subtests + # pytest-system-statistics + # pytest-timeout +python-consul==1.1.0 + # via -r requirements/static/ci/linux.in +python-dateutil==2.8.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # botocore + # croniter + # kubernetes + # moto + # vcert +python-etcd==0.4.5 + # via -r requirements/static/ci/common.in +python-gnupg==0.5.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt +python-telegram-bot==20.3 + # via -r requirements/static/ci/linux.in +pytz==2023.3.post1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # apscheduler + # moto + # python-telegram-bot + # tempora + # twilio +pyvmomi==8.0.1.0.1 + # via -r 
requirements/static/ci/common.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # ansible-core + # clustershell + # junos-eznc + # kubernetes + # napalm + # netmiko + # pytest-salt-factories + # yamllint + # yamlordereddictloader +pyzmq==25.1.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/zeromq.txt + # pytest-salt-factories +redis-py-cluster==2.1.3 + # via -r requirements/static/ci/linux.in +redis==3.5.3 + # via redis-py-cluster +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # apache-libcloud + # docker + # etcd3-py + # kubernetes + # moto + # napalm + # python-consul + # pyvmomi + # responses + # twilio + # vcert +resolvelib==1.0.1 + # via ansible-core +responses==0.23.1 + # via moto +rfc3987==1.3.8 + # via -r requirements/static/ci/common.in +rpm-vercmp==0.1.2 ; sys_platform == "linux" + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt +rsa==4.9 + # via google-auth +s3transfer==0.6.1 + # via boto3 +scp==0.14.5 + # via + # junos-eznc + # napalm + # netmiko +semantic-version==2.10.0 + # via etcd3-py +setproctitle==1.3.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt +six==1.16.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # apscheduler + # bcrypt + # cassandra-driver + # etcd3-py + # genshi + # geomet + # jsonschema + # junit-xml + # junos-eznc + # kazoo + # kubernetes + # more-itertools + # ncclient + # paramiko + # python-consul + # python-dateutil + # pyvmomi + # responses + # textfsm + # transitions + # vcert + # virtualenv + # websocket-client +slack-bolt==1.18.0 + # via -r requirements/static/ci/linux.in +slack-sdk==3.21.3 + # via slack-bolt +smmap==5.0.0 + # via gitdb +sqlparse==0.4.4 + # via -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via 
-r requirements/static/ci/common.in +tempora==5.3.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # portend +textfsm==1.1.3 + # via + # napalm + # netmiko + # ntc-templates +timelib==0.3.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt +toml==0.10.2 + # via -r requirements/static/ci/common.in +tornado==6.3.3 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # python-telegram-bot +transitions==0.9.0 + # via junos-eznc +ttp-templates==0.3.5 + # via napalm +ttp==0.9.5 + # via + # napalm + # ttp-templates +twilio==8.2.2 + # via -r requirements/static/ci/linux.in +typing-extensions==4.8.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # inflect + # napalm + # pydantic + # pydantic-core + # pytest-shell-utilities + # pytest-system-statistics +tzlocal==3.0 + # via apscheduler +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # botocore + # docker + # kubernetes + # python-etcd + # requests +vcert==0.7.4 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +virtualenv==20.24.7 + # via + # -r requirements/static/ci/common.in + # pytest-salt-factories +watchdog==3.0.0 + # via -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # docker + # kubernetes +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # moto + # pytest-httpserver +xmldiff==2.6.3 + # via -r requirements/static/ci/common.in +xmltodict==0.13.0 + # via moto +yamllint==1.32.0 + # via -r requirements/static/ci/linux.in +yamlordereddictloader==0.4.0 + # via junos-eznc +yarl==1.9.2 + # via aiohttp +zc.lockfile==3.0.post1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # cherrypy +zipp==3.16.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git 
a/requirements/static/ci/py3.12/tools-virustotal.txt b/requirements/static/ci/py3.12/tools-virustotal.txt new file mode 100644 index 00000000000..03404d94f4d --- /dev/null +++ b/requirements/static/ci/py3.12/tools-virustotal.txt @@ -0,0 +1,28 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --output-file=requirements/static/ci/py3.12/tools-virustotal.txt requirements/static/ci/tools-virustotal.in +# +certifi==2023.7.22 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # requests +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # requests +idna==3.4 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # requests +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # virustotal3 +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # requests +virustotal3==1.0.8 + # via -r requirements/static/ci/tools-virustotal.in diff --git a/requirements/static/ci/py3.12/tools.txt b/requirements/static/ci/py3.12/tools.txt new file mode 100644 index 00000000000..d5de223da89 --- /dev/null +++ b/requirements/static/ci/py3.12/tools.txt @@ -0,0 +1,78 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/tools.txt requirements/static/ci/tools.in +# +attrs==23.1.0 + # via + # -r requirements/static/ci/tools.in + # python-tools-scripts +boto3==1.26.152 + # via -r requirements/static/ci/tools.in +botocore==1.29.152 + # via + # boto3 + # s3transfer +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # requests +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # requests +idna==3.4 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # requests +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r 
requirements/static/ci/tools.in +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # boto3 + # botocore +markdown-it-py==3.0.0 + # via rich +markupsafe==2.1.3 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # jinja2 +mdurl==0.1.2 + # via markdown-it-py +packaging==23.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/static/ci/tools.in +pygments==2.15.1 + # via rich +python-dateutil==2.8.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # botocore +python-tools-scripts==0.18.5 + # via -r requirements/static/ci/tools.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/static/ci/tools.in +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # python-tools-scripts +rich==13.4.2 + # via python-tools-scripts +s3transfer==0.6.1 + # via boto3 +six==1.16.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # python-dateutil +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # botocore + # requests diff --git a/requirements/static/ci/py3.12/windows-crypto.txt b/requirements/static/ci/py3.12/windows-crypto.txt new file mode 100644 index 00000000000..ec84d96324e --- /dev/null +++ b/requirements/static/ci/py3.12/windows-crypto.txt @@ -0,0 +1,12 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/windows-crypto.txt requirements/static/ci/crypto.in +# +m2crypto==0.37.1 + # via -r requirements/static/ci/crypto.in +parameterized==0.8.1 + # via m2crypto +pycryptodome==3.10.1 + # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.12/windows.txt b/requirements/static/ci/py3.12/windows.txt new file mode 100644 index 00000000000..48da4d692e2 --- /dev/null +++ b/requirements/static/ci/py3.12/windows.txt @@ -0,0 +1,506 @@ +# +# This file is 
autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/windows.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/windows.in requirements/static/pkg/windows.in requirements/windows.txt +# +aiohttp==3.9.0 + # via etcd3-py +aiosignal==1.3.1 + # via aiohttp +annotated-types==0.6.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # pydantic +attrs==23.1.0 + # via + # aiohttp + # jsonschema + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +autocommand==2.2.2 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # jaraco.text +bcrypt==4.0.1 + # via -r requirements/static/ci/common.in +boto3==1.26.152 + # via + # -r requirements/static/ci/common.in + # moto +boto==2.49.0 + # via -r requirements/static/ci/common.in +botocore==1.29.152 + # via + # boto3 + # moto + # s3transfer +cachetools==3.1.0 + # via google-auth +cassandra-driver==3.23.0 + # via -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/static/ci/common.in + # kubernetes + # requests +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/static/ci/common.in + # clr-loader + # cryptography + # pygit2 + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # requests +cheetah3==3.2.6.post1 + # via -r requirements/static/ci/common.in +cheroot==10.0.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # cherrypy +cherrypy==18.8.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in +click==8.1.3 + # via geomet +clr-loader==0.2.4 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # pythonnet +clustershell==1.9.1 + # via -r 
requirements/static/ci/common.in +colorama==0.4.6 + # via pytest +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt +cryptography==41.0.5 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # etcd3-py + # moto + # pyopenssl + # requests-ntlm +distlib==0.3.7 + # via virtualenv +distro==1.8.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # pytest-skip-markers +dmidecode==0.9.0 + # via -r requirements/static/ci/windows.in +dnspython==2.3.0 + # via + # -r requirements/static/ci/common.in + # python-etcd +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 + # via -r requirements/static/ci/common.in +filelock==3.13.1 + # via virtualenv +flaky==3.7.0 + # via -r requirements/pytest.txt +frozenlist==1.3.3 + # via + # aiohttp + # aiosignal +genshi==0.7.7 + # via -r requirements/static/ci/common.in +geomet==0.2.1.post1 + # via cassandra-driver +gitdb==4.0.10 + # via gitpython +gitpython==3.1.40 + # via -r requirements/static/ci/common.in +google-auth==2.19.1 + # via kubernetes +idna==3.4 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # etcd3-py + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # contextvars +importlib-metadata==6.6.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt +inflect==7.0.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # jaraco.text +iniconfig==2.0.0 + # via pytest +ipaddress==1.0.23 + # via kubernetes +jaraco.collections==4.1.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # cherrypy +jaraco.context==4.3.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # jaraco.text +jaraco.functools==3.7.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.11.1 
+ # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # moto +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore +jsonschema==3.2.0 + # via -r requirements/static/ci/common.in +junit-xml==1.9 + # via -r requirements/static/ci/common.in +keyring==5.7.1 + # via -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via -r requirements/static/ci/common.in +looseversion==1.3.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt +lxml==4.9.1 ; sys_platform == "win32" + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # xmldiff +mako==1.2.4 + # via -r requirements/static/ci/common.in +markupsafe==2.1.3 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # jinja2 + # mako + # moto + # werkzeug +mock==5.1.0 + # via -r requirements/pytest.txt +more-itertools==8.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/pytest.txt + # cheroot + # cherrypy + # jaraco.functools + # jaraco.text +moto==4.1.11 + # via -r requirements/static/ci/common.in +msgpack==1.0.7 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # pytest-salt-factories +multidict==6.0.2 + # via + # aiohttp + # yarl +ntlm-auth==1.5.0 + # via requests-ntlm +packaging==23.1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # docker + # pytest +passlib==1.7.4 + # via -r requirements/static/ci/common.in +patch==1.16 + # via -r requirements/static/ci/windows.in +pathspec==0.11.1 + # via yamllint +pathtools==0.1.2 + # via watchdog +platformdirs==4.0.0 + # via virtualenv +pluggy==1.0.0 + # via pytest 
+portend==3.1.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # cherrypy +psutil==5.9.6 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pyasn1-modules==0.2.4 + # via google-auth +pyasn1==0.4.8 + # via + # pyasn1-modules + # rsa +pycparser==2.21 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # cffi +pycryptodomex==3.10.1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/crypto.txt +pydantic-core==2.14.5 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # pydantic +pydantic==2.5.2 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # inflect +pygit2==1.13.1 + # via -r requirements/static/ci/windows.in +pymssql==2.2.7 ; sys_platform == "win32" + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt +pymysql==1.1.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt +pynacl==1.5.0 + # via -r requirements/static/ci/common.in +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # etcd3-py +pyrsistent==0.19.3 + # via jsonschema +pytest-custom-exit-code==0.3.0 + # via -r requirements/pytest.txt +pytest-helpers-namespace==2021.12.29 + # via + # -r requirements/pytest.txt + # pytest-salt-factories + # pytest-shell-utilities +pytest-httpserver==1.0.8 + # via -r requirements/pytest.txt +pytest-salt-factories==1.0.0rc28 + # via -r requirements/pytest.txt +pytest-shell-utilities==1.8.0 + # via pytest-salt-factories +pytest-skip-markers==1.5.0 + # via + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pytest-subtests==0.4.0 + # via -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via pytest-salt-factories +pytest-timeout==2.1.0 + # via -r requirements/pytest.txt 
+pytest==7.2.0 + # via + # -r requirements/pytest.txt + # pytest-custom-exit-code + # pytest-helpers-namespace + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-subtests + # pytest-system-statistics + # pytest-timeout +python-dateutil==2.8.2 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # botocore + # kubernetes + # moto +python-etcd==0.4.5 + # via -r requirements/static/ci/common.in +python-gnupg==0.5.1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt +pythonnet==3.0.3 ; sys_platform == "win32" + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt +pytz==2023.3.post1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # moto + # tempora +pyvmomi==8.0.1.0.1 + # via -r requirements/static/ci/common.in +pywin32==306 ; sys_platform == "win32" + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # docker + # pytest-skip-markers + # wmi +pywinrm==0.4.1 + # via -r requirements/static/ci/windows.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # clustershell + # kubernetes + # pytest-salt-factories + # yamllint +pyzmq==25.1.1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/zeromq.txt + # pytest-salt-factories +requests-ntlm==1.1.0 + # via pywinrm +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # docker + # etcd3-py + # kubernetes + # moto + # pyvmomi + # pywinrm + # requests-ntlm + # responses +responses==0.23.1 + # via moto +rfc3987==1.3.8 + # via -r requirements/static/ci/common.in +rsa==4.9 + # via google-auth +s3transfer==0.6.1 + # via boto3 +sed==0.3.1 + # via -r requirements/static/ci/windows.in +semantic-version==2.10.0 + # via etcd3-py 
+setproctitle==1.3.2 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt +six==1.15.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # cassandra-driver + # etcd3-py + # genshi + # geomet + # jsonschema + # junit-xml + # kubernetes + # python-dateutil + # pyvmomi + # pywinrm + # responses + # websocket-client +smmap==5.0.0 + # via gitdb +sqlparse==0.4.4 + # via -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via -r requirements/static/ci/common.in +tempora==5.3.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # portend +timelib==0.3.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt +toml==0.10.2 + # via -r requirements/static/ci/common.in +tornado==6.3.3 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt +typing-extensions==4.8.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # inflect + # pydantic + # pydantic-core + # pytest-shell-utilities + # pytest-system-statistics +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # botocore + # docker + # kubernetes + # python-etcd + # requests +virtualenv==20.24.7 + # via + # -r requirements/static/ci/common.in + # pytest-salt-factories +watchdog==3.0.0 + # via -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # docker + # kubernetes +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # moto + # pytest-httpserver +wmi==1.5.1 ; sys_platform == "win32" + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt +xmldiff==2.6.3 + # via -r requirements/static/ci/common.in +xmltodict==0.13.0 + # via + # moto + # pywinrm +yamllint==1.32.0 + # via -r requirements/static/ci/windows.in +yarl==1.9.2 + # via aiohttp +zc.lockfile==3.0.post1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # cherrypy 
+zipp==3.16.2 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.8/changelog.txt b/requirements/static/ci/py3.8/changelog.txt index b3eda655636..9ebf07b1d35 100644 --- a/requirements/static/ci/py3.8/changelog.txt +++ b/requirements/static/ci/py3.8/changelog.txt @@ -17,11 +17,11 @@ jinja2==3.1.2 # via # -c requirements/static/ci/py3.8/linux.txt # towncrier -looseversion==1.2.0 +looseversion==1.3.0 # via # -c requirements/static/ci/py3.8/linux.txt # -r requirements/static/ci/changelog.in -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -c requirements/static/ci/py3.8/linux.txt # jinja2 diff --git a/requirements/static/ci/py3.8/cloud.txt b/requirements/static/ci/py3.8/cloud.txt index d4b0c155225..9aef5b46b46 100644 --- a/requirements/static/ci/py3.8/cloud.txt +++ b/requirements/static/ci/py3.8/cloud.txt @@ -12,7 +12,7 @@ certifi==2023.07.22 # via # -c requirements/static/ci/py3.8/linux.txt # requests -cffi==1.15.1 +cffi==1.14.6 # via # -c requirements/static/ci/py3.8/linux.txt # cryptography @@ -20,7 +20,7 @@ charset-normalizer==3.2.0 # via # -c requirements/static/ci/py3.8/linux.txt # requests -cryptography==41.0.4 +cryptography==41.0.5 # via # -c requirements/static/ci/py3.8/linux.txt # pyspnego diff --git a/requirements/static/ci/py3.8/docs.txt b/requirements/static/ci/py3.8/docs.txt index c708e6f6c76..02f6611b90d 100644 --- a/requirements/static/ci/py3.8/docs.txt +++ b/requirements/static/ci/py3.8/docs.txt @@ -78,7 +78,7 @@ markdown-it-py==2.2.0 # via # mdit-py-plugins # myst-docutils -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -c requirements/static/ci/py3.8/linux.txt # jinja2 @@ -111,7 +111,7 @@ pyenchant==3.2.2 # via sphinxcontrib-spelling pygments==2.15.1 # via sphinx -pytz==2023.3 +pytz==2023.3.post1 # via # -c requirements/static/ci/py3.8/linux.txt # babel diff --git 
a/requirements/static/ci/py3.8/freebsd.txt b/requirements/static/ci/py3.8/freebsd.txt index 63e17ed9e6a..be5005493cc 100644 --- a/requirements/static/ci/py3.8/freebsd.txt +++ b/requirements/static/ci/py3.8/freebsd.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.8/freebsd.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/freebsd.in requirements/static/pkg/freebsd.in requirements/zeromq.txt # -aiohttp==3.8.5 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.3.1 # via aiohttp @@ -56,7 +56,7 @@ certifi==2023.07.22 # requests certvalidator==0.11.1 # via vcert -cffi==1.15.1 +cffi==1.14.6 # via # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # -r requirements/static/ci/common.in @@ -66,7 +66,6 @@ cffi==1.15.1 charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.8/freebsd.txt - # aiohttp # requests cheetah3==3.2.6.post1 # via -r requirements/static/ci/common.in @@ -89,7 +88,7 @@ contextvars==2.4 # -r requirements/base.txt croniter==1.3.15 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==41.0.4 +cryptography==41.0.5 # via # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # -r requirements/base.txt @@ -98,7 +97,7 @@ cryptography==41.0.4 # paramiko # pyopenssl # vcert -distlib==0.3.6 +distlib==0.3.7 # via virtualenv distro==1.8.0 # via @@ -115,7 +114,7 @@ etcd3-py==0.1.6 # via -r requirements/static/ci/common.in exceptiongroup==1.1.1 # via pytest -filelock==3.12.4 +filelock==3.13.1 # via virtualenv flaky==3.7.0 # via -r requirements/pytest.txt @@ -215,7 +214,7 @@ kubernetes==3.0.0 # via -r requirements/static/ci/common.in libnacl==1.8.0 ; sys_platform != "win32" and sys_platform != "darwin" # via -r requirements/static/ci/common.in -looseversion==1.2.0 +looseversion==1.3.0 # via # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # -r requirements/base.txt @@ -227,7 +226,7 @@ lxml==4.9.2 # xmldiff mako==1.2.4 # via -r 
requirements/static/ci/common.in -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # -r requirements/base.txt @@ -248,7 +247,7 @@ more-itertools==9.1.0 # jaraco.text moto==4.1.11 # via -r requirements/static/ci/common.in -msgpack==1.0.5 +msgpack==1.0.7 # via # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # -r requirements/base.txt @@ -294,7 +293,7 @@ passlib==1.7.4 # via -r requirements/static/ci/common.in pathspec==0.11.1 # via yamllint -platformdirs==3.5.3 +platformdirs==4.0.0 # via virtualenv pluggy==1.0.0 # via pytest @@ -302,7 +301,7 @@ portend==3.1.0 # via # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # cherrypy -psutil==5.9.5 +psutil==5.9.6 # via # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # -r requirements/base.txt @@ -357,7 +356,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories @@ -394,11 +393,11 @@ python-dateutil==2.8.2 # vcert python-etcd==0.4.5 # via -r requirements/static/ci/common.in -python-gnupg==0.5.0 +python-gnupg==0.5.1 # via # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # -r requirements/base.txt -pytz==2023.3 +pytz==2023.3.post1 # via # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # tempora @@ -417,7 +416,7 @@ pyyaml==6.0.1 # responses # yamllint # yamlordereddictloader -pyzmq==25.1.0 +pyzmq==25.1.1 # via # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # -r requirements/zeromq.txt @@ -530,7 +529,7 @@ urllib3==1.26.18 # responses vcert==0.9.1 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -virtualenv==20.23.0 +virtualenv==20.24.7 # via # -r requirements/static/ci/common.in # pytest-salt-factories diff --git a/requirements/static/ci/py3.8/linux.txt b/requirements/static/ci/py3.8/linux.txt index 
36ee8e0b975..7f452393e82 100644 --- a/requirements/static/ci/py3.8/linux.txt +++ b/requirements/static/ci/py3.8/linux.txt @@ -6,7 +6,7 @@ # aiohttp-retry==2.8.3 # via twilio -aiohttp==3.8.5 +aiohttp==3.9.0 # via # aiohttp-retry # etcd3-py @@ -69,7 +69,7 @@ certifi==2023.07.22 # requests certvalidator==0.11.1 # via vcert -cffi==1.15.1 +cffi==1.14.6 # via # -c requirements/static/ci/../pkg/py3.8/linux.txt # -r requirements/static/ci/common.in @@ -80,7 +80,6 @@ cffi==1.15.1 charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.8/linux.txt - # aiohttp # requests cheetah3==3.2.6.post1 # via -r requirements/static/ci/common.in @@ -103,7 +102,7 @@ contextvars==2.4 # -r requirements/base.txt croniter==1.3.15 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==41.0.4 +cryptography==41.0.5 # via # -c requirements/static/ci/../pkg/py3.8/linux.txt # -r requirements/base.txt @@ -113,7 +112,7 @@ cryptography==41.0.4 # paramiko # pyopenssl # vcert -distlib==0.3.6 +distlib==0.3.7 # via virtualenv distro==1.8.0 # via @@ -132,7 +131,7 @@ exceptiongroup==1.1.1 # via # anyio # pytest -filelock==3.12.4 +filelock==3.13.1 # via virtualenv flaky==3.7.0 # via -r requirements/pytest.txt @@ -241,7 +240,7 @@ kubernetes==3.0.0 # via -r requirements/static/ci/common.in libnacl==1.8.0 ; sys_platform != "win32" and sys_platform != "darwin" # via -r requirements/static/ci/common.in -looseversion==1.2.0 +looseversion==1.3.0 # via # -c requirements/static/ci/../pkg/py3.8/linux.txt # -r requirements/base.txt @@ -253,7 +252,7 @@ lxml==4.9.2 # xmldiff mako==1.2.4 # via -r requirements/static/ci/common.in -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -c requirements/static/ci/../pkg/py3.8/linux.txt # -r requirements/base.txt @@ -274,7 +273,7 @@ more-itertools==9.1.0 # jaraco.text moto==4.1.11 # via -r requirements/static/ci/common.in -msgpack==1.0.5 +msgpack==1.0.7 # via # -c requirements/static/ci/../pkg/py3.8/linux.txt # -r requirements/base.txt @@ 
-321,7 +320,7 @@ passlib==1.7.4 # via -r requirements/static/ci/common.in pathspec==0.11.1 # via yamllint -platformdirs==3.5.3 +platformdirs==4.0.0 # via virtualenv pluggy==1.0.0 # via pytest @@ -329,7 +328,7 @@ portend==3.1.0 # via # -c requirements/static/ci/../pkg/py3.8/linux.txt # cherrypy -psutil==5.9.5 +psutil==5.9.6 # via # -c requirements/static/ci/../pkg/py3.8/linux.txt # -r requirements/base.txt @@ -392,7 +391,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories @@ -431,13 +430,13 @@ python-dateutil==2.8.2 # vcert python-etcd==0.4.5 # via -r requirements/static/ci/common.in -python-gnupg==0.5.0 +python-gnupg==0.5.1 # via # -c requirements/static/ci/../pkg/py3.8/linux.txt # -r requirements/base.txt python-telegram-bot==20.3 # via -r requirements/static/ci/linux.in -pytz==2023.3 +pytz==2023.3.post1 # via # -c requirements/static/ci/../pkg/py3.8/linux.txt # tempora @@ -458,7 +457,7 @@ pyyaml==6.0.1 # responses # yamllint # yamlordereddictloader -pyzmq==25.1.0 +pyzmq==25.1.1 # via # -c requirements/static/ci/../pkg/py3.8/linux.txt # -r requirements/zeromq.txt @@ -595,7 +594,7 @@ urllib3==1.26.18 # responses vcert==0.9.1 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -virtualenv==20.23.0 +virtualenv==20.24.7 # via # -r requirements/static/ci/common.in # pytest-salt-factories diff --git a/requirements/static/ci/py3.8/windows.txt b/requirements/static/ci/py3.8/windows.txt index d24ec5d3135..a800d2b9161 100644 --- a/requirements/static/ci/py3.8/windows.txt +++ b/requirements/static/ci/py3.8/windows.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.8/windows.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/windows.in 
requirements/static/pkg/windows.in requirements/windows.txt # -aiohttp==3.8.5 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.3.1 # via aiohttp @@ -57,7 +57,6 @@ cffi==1.14.6 charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.8/windows.txt - # aiohttp # requests cheetah3==3.2.6.post1 # via -r requirements/static/ci/common.in @@ -86,7 +85,7 @@ contextvars==2.4 # via # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/base.txt -cryptography==41.0.4 +cryptography==41.0.5 # via # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/base.txt @@ -95,7 +94,7 @@ cryptography==41.0.4 # pyopenssl # pyspnego # requests-ntlm -distlib==0.3.6 +distlib==0.3.7 # via virtualenv distro==1.8.0 # via @@ -114,7 +113,7 @@ etcd3-py==0.1.6 # via -r requirements/static/ci/common.in exceptiongroup==1.1.1 # via pytest -filelock==3.12.4 +filelock==3.13.1 # via virtualenv flaky==3.7.0 # via -r requirements/pytest.txt @@ -196,7 +195,7 @@ keyring==5.7.1 # via -r requirements/static/ci/common.in kubernetes==3.0.0 # via -r requirements/static/ci/common.in -looseversion==1.2.0 +looseversion==1.3.0 # via # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/base.txt @@ -207,7 +206,7 @@ lxml==4.9.2 ; sys_platform == "win32" # xmldiff mako==1.2.4 # via -r requirements/static/ci/common.in -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/base.txt @@ -226,7 +225,7 @@ more-itertools==9.1.0 # jaraco.text moto==4.1.11 # via -r requirements/static/ci/common.in -msgpack==1.0.5 +msgpack==1.0.7 # via # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/base.txt @@ -247,7 +246,7 @@ patch==1.16 # via -r requirements/static/ci/windows.in pathspec==0.11.1 # via yamllint -platformdirs==3.5.3 +platformdirs==4.0.0 # via virtualenv pluggy==1.0.0 # via pytest @@ -255,7 +254,7 @@ portend==3.1.0 # via # -c requirements/static/ci/../pkg/py3.8/windows.txt # cherrypy 
-psutil==5.8.0 +psutil==5.9.6 # via # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/base.txt @@ -310,7 +309,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories @@ -345,7 +344,7 @@ python-dateutil==2.8.2 # moto python-etcd==0.4.5 # via -r requirements/static/ci/common.in -python-gnupg==0.5.0 +python-gnupg==0.5.1 # via # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/base.txt @@ -353,7 +352,7 @@ pythonnet==3.0.1 ; sys_platform == "win32" # via # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/base.txt -pytz==2023.3 +pytz==2023.3.post1 # via # -c requirements/static/ci/../pkg/py3.8/windows.txt # tempora @@ -378,7 +377,7 @@ pyyaml==6.0.1 # pytest-salt-factories # responses # yamllint -pyzmq==25.1.0 +pyzmq==25.1.1 # via # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/zeromq.txt @@ -468,7 +467,7 @@ urllib3==1.26.18 # python-etcd # requests # responses -virtualenv==20.23.0 +virtualenv==20.24.7 # via # -r requirements/static/ci/common.in # pytest-salt-factories diff --git a/requirements/static/ci/py3.9/changelog.txt b/requirements/static/ci/py3.9/changelog.txt index 7ddf4d07919..62dc37ebc7e 100644 --- a/requirements/static/ci/py3.9/changelog.txt +++ b/requirements/static/ci/py3.9/changelog.txt @@ -17,11 +17,11 @@ jinja2==3.1.2 # via # -c requirements/static/ci/py3.9/linux.txt # towncrier -looseversion==1.2.0 +looseversion==1.3.0 # via # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/ci/changelog.in -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -c requirements/static/ci/py3.9/linux.txt # jinja2 diff --git a/requirements/static/ci/py3.9/cloud.txt b/requirements/static/ci/py3.9/cloud.txt index edaa86a9015..3967297215a 100644 --- 
a/requirements/static/ci/py3.9/cloud.txt +++ b/requirements/static/ci/py3.9/cloud.txt @@ -12,7 +12,7 @@ certifi==2023.07.22 # via # -c requirements/static/ci/py3.9/linux.txt # requests -cffi==1.15.1 +cffi==1.14.6 # via # -c requirements/static/ci/py3.9/linux.txt # cryptography @@ -20,7 +20,7 @@ charset-normalizer==3.2.0 # via # -c requirements/static/ci/py3.9/linux.txt # requests -cryptography==41.0.4 +cryptography==41.0.5 # via # -c requirements/static/ci/py3.9/linux.txt # pyspnego diff --git a/requirements/static/ci/py3.9/darwin.txt b/requirements/static/ci/py3.9/darwin.txt index d419d6dfb0d..8e1e017243b 100644 --- a/requirements/static/ci/py3.9/darwin.txt +++ b/requirements/static/ci/py3.9/darwin.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.9/darwin.txt requirements/darwin.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/darwin.in requirements/static/pkg/darwin.in # -aiohttp==3.8.5 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.3.1 # via aiohttp @@ -56,7 +56,7 @@ certifi==2023.07.22 # requests certvalidator==0.11.1 # via vcert -cffi==1.15.1 +cffi==1.14.6 # via # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/static/ci/common.in @@ -67,7 +67,6 @@ cffi==1.15.1 charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.9/darwin.txt - # aiohttp # requests cheetah3==3.2.6.post1 # via -r requirements/static/ci/common.in @@ -90,7 +89,7 @@ contextvars==2.4 # -r requirements/base.txt croniter==1.3.15 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==41.0.4 +cryptography==41.0.5 # via # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/base.txt @@ -99,7 +98,7 @@ cryptography==41.0.4 # paramiko # pyopenssl # vcert -distlib==0.3.6 +distlib==0.3.7 # via virtualenv distro==1.8.0 # via @@ -116,7 +115,7 @@ etcd3-py==0.1.6 # via -r requirements/static/ci/common.in exceptiongroup==1.1.1 # via pytest 
-filelock==3.12.4 +filelock==3.13.1 # via virtualenv flaky==3.7.0 # via -r requirements/pytest.txt @@ -208,7 +207,7 @@ keyring==5.7.1 # via -r requirements/static/ci/common.in kubernetes==3.0.0 # via -r requirements/static/ci/common.in -looseversion==1.2.0 +looseversion==1.3.0 # via # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/base.txt @@ -220,7 +219,7 @@ lxml==4.9.2 # xmldiff mako==1.2.4 # via -r requirements/static/ci/common.in -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/base.txt @@ -241,7 +240,7 @@ more-itertools==9.1.0 # jaraco.text moto==4.1.11 # via -r requirements/static/ci/common.in -msgpack==1.0.5 +msgpack==1.0.7 # via # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/base.txt @@ -286,7 +285,7 @@ passlib==1.7.4 # via -r requirements/static/ci/common.in pathspec==0.11.1 # via yamllint -platformdirs==3.5.3 +platformdirs==4.0.0 # via virtualenv pluggy==1.0.0 # via pytest @@ -294,7 +293,7 @@ portend==3.1.0 # via # -c requirements/static/ci/../pkg/py3.9/darwin.txt # cherrypy -psutil==5.9.5 +psutil==5.9.6 # via # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/base.txt @@ -349,7 +348,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories @@ -386,11 +385,11 @@ python-dateutil==2.8.2 # vcert python-etcd==0.4.5 # via -r requirements/static/ci/common.in -python-gnupg==0.5.0 +python-gnupg==0.5.1 # via # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/base.txt -pytz==2023.3 +pytz==2023.3.post1 # via # -c requirements/static/ci/../pkg/py3.9/darwin.txt # tempora @@ -409,7 +408,7 @@ pyyaml==6.0.1 # responses # yamllint # yamlordereddictloader -pyzmq==25.1.0 +pyzmq==25.1.1 # via # -c 
requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/zeromq.txt @@ -521,7 +520,7 @@ urllib3==1.26.18 # responses vcert==0.9.1 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -virtualenv==20.23.0 +virtualenv==20.24.7 # via # -r requirements/static/ci/common.in # pytest-salt-factories diff --git a/requirements/static/ci/py3.9/docs.txt b/requirements/static/ci/py3.9/docs.txt index 767a22dcf08..a488996774d 100644 --- a/requirements/static/ci/py3.9/docs.txt +++ b/requirements/static/ci/py3.9/docs.txt @@ -74,7 +74,7 @@ markdown-it-py==2.2.0 # via # mdit-py-plugins # myst-docutils -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -c requirements/static/ci/py3.9/linux.txt # jinja2 @@ -107,7 +107,7 @@ pyenchant==3.2.2 # via sphinxcontrib-spelling pygments==2.15.1 # via sphinx -pytz==2023.3 +pytz==2023.3.post1 # via # -c requirements/static/ci/py3.9/linux.txt # tempora diff --git a/requirements/static/ci/py3.9/freebsd.txt b/requirements/static/ci/py3.9/freebsd.txt index 1c2bcec7afa..9be1c6be968 100644 --- a/requirements/static/ci/py3.9/freebsd.txt +++ b/requirements/static/ci/py3.9/freebsd.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.9/freebsd.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/freebsd.in requirements/static/pkg/freebsd.in requirements/zeromq.txt # -aiohttp==3.8.5 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.3.1 # via aiohttp @@ -56,7 +56,7 @@ certifi==2023.07.22 # requests certvalidator==0.11.1 # via vcert -cffi==1.15.1 +cffi==1.14.6 # via # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/static/ci/common.in @@ -66,7 +66,6 @@ cffi==1.15.1 charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.9/freebsd.txt - # aiohttp # requests cheetah3==3.2.6.post1 # via -r requirements/static/ci/common.in @@ -89,7 +88,7 @@ contextvars==2.4 # -r requirements/base.txt croniter==1.3.15 ; sys_platform != 
"win32" # via -r requirements/static/ci/common.in -cryptography==41.0.4 +cryptography==41.0.5 # via # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/base.txt @@ -98,7 +97,7 @@ cryptography==41.0.4 # paramiko # pyopenssl # vcert -distlib==0.3.6 +distlib==0.3.7 # via virtualenv distro==1.8.0 # via @@ -115,7 +114,7 @@ etcd3-py==0.1.6 # via -r requirements/static/ci/common.in exceptiongroup==1.1.1 # via pytest -filelock==3.12.4 +filelock==3.13.1 # via virtualenv flaky==3.7.0 # via -r requirements/pytest.txt @@ -211,7 +210,7 @@ kubernetes==3.0.0 # via -r requirements/static/ci/common.in libnacl==1.8.0 ; sys_platform != "win32" and sys_platform != "darwin" # via -r requirements/static/ci/common.in -looseversion==1.2.0 +looseversion==1.3.0 # via # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/base.txt @@ -223,7 +222,7 @@ lxml==4.9.2 # xmldiff mako==1.2.4 # via -r requirements/static/ci/common.in -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/base.txt @@ -244,7 +243,7 @@ more-itertools==9.1.0 # jaraco.text moto==4.1.11 # via -r requirements/static/ci/common.in -msgpack==1.0.5 +msgpack==1.0.7 # via # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/base.txt @@ -290,7 +289,7 @@ passlib==1.7.4 # via -r requirements/static/ci/common.in pathspec==0.11.1 # via yamllint -platformdirs==3.5.3 +platformdirs==4.0.0 # via virtualenv pluggy==1.0.0 # via pytest @@ -298,7 +297,7 @@ portend==3.1.0 # via # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # cherrypy -psutil==5.9.5 +psutil==5.9.6 # via # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/base.txt @@ -353,7 +352,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via 
pytest-salt-factories @@ -390,11 +389,11 @@ python-dateutil==2.8.2 # vcert python-etcd==0.4.5 # via -r requirements/static/ci/common.in -python-gnupg==0.5.0 +python-gnupg==0.5.1 # via # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/base.txt -pytz==2023.3 +pytz==2023.3.post1 # via # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # tempora @@ -413,7 +412,7 @@ pyyaml==6.0.1 # responses # yamllint # yamlordereddictloader -pyzmq==25.1.0 +pyzmq==25.1.1 # via # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/zeromq.txt @@ -526,7 +525,7 @@ urllib3==1.26.18 # responses vcert==0.9.1 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -virtualenv==20.23.0 +virtualenv==20.24.7 # via # -r requirements/static/ci/common.in # pytest-salt-factories diff --git a/requirements/static/ci/py3.9/linux.txt b/requirements/static/ci/py3.9/linux.txt index 48617bf1fcd..d4ea46622cb 100644 --- a/requirements/static/ci/py3.9/linux.txt +++ b/requirements/static/ci/py3.9/linux.txt @@ -6,16 +6,16 @@ # aiohttp-retry==2.8.3 # via twilio -aiohttp==3.8.5 +aiohttp==3.9.0 # via # aiohttp-retry # etcd3-py # twilio aiosignal==1.3.1 # via aiohttp -ansible-core==2.15.0 +ansible-core==2.15.6 # via ansible -ansible==8.0.0 ; python_version >= "3.9" +ansible==8.6.1 ; python_version >= "3.9" # via -r requirements/static/ci/linux.in anyio==3.7.0 # via httpcore @@ -69,7 +69,7 @@ certifi==2023.07.22 # requests certvalidator==0.11.1 # via vcert -cffi==1.15.1 +cffi==1.14.6 # via # -c requirements/static/ci/../pkg/py3.9/linux.txt # -r requirements/static/ci/common.in @@ -80,7 +80,6 @@ cffi==1.15.1 charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.9/linux.txt - # aiohttp # requests cheetah3==3.2.6.post1 # via -r requirements/static/ci/common.in @@ -103,7 +102,7 @@ contextvars==2.4 # -r requirements/base.txt croniter==1.3.15 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==41.0.4 +cryptography==41.0.5 # 
via # -c requirements/static/ci/../pkg/py3.9/linux.txt # -r requirements/base.txt @@ -113,7 +112,7 @@ cryptography==41.0.4 # paramiko # pyopenssl # vcert -distlib==0.3.6 +distlib==0.3.7 # via virtualenv distro==1.8.0 # via @@ -132,7 +131,7 @@ exceptiongroup==1.1.1 # via # anyio # pytest -filelock==3.12.4 +filelock==3.13.1 # via virtualenv flaky==3.7.0 # via -r requirements/pytest.txt @@ -239,7 +238,7 @@ kubernetes==3.0.0 # via -r requirements/static/ci/common.in libnacl==1.8.0 ; sys_platform != "win32" and sys_platform != "darwin" # via -r requirements/static/ci/common.in -looseversion==1.2.0 +looseversion==1.3.0 # via # -c requirements/static/ci/../pkg/py3.9/linux.txt # -r requirements/base.txt @@ -251,7 +250,7 @@ lxml==4.9.2 # xmldiff mako==1.2.4 # via -r requirements/static/ci/common.in -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -c requirements/static/ci/../pkg/py3.9/linux.txt # -r requirements/base.txt @@ -272,7 +271,7 @@ more-itertools==9.1.0 # jaraco.text moto==4.1.11 # via -r requirements/static/ci/common.in -msgpack==1.0.5 +msgpack==1.0.7 # via # -c requirements/static/ci/../pkg/py3.9/linux.txt # -r requirements/base.txt @@ -319,7 +318,7 @@ passlib==1.7.4 # via -r requirements/static/ci/common.in pathspec==0.11.1 # via yamllint -platformdirs==3.5.3 +platformdirs==4.0.0 # via virtualenv pluggy==1.0.0 # via pytest @@ -327,7 +326,7 @@ portend==3.1.0 # via # -c requirements/static/ci/../pkg/py3.9/linux.txt # cherrypy -psutil==5.9.5 +psutil==5.9.6 # via # -c requirements/static/ci/../pkg/py3.9/linux.txt # -r requirements/base.txt @@ -390,7 +389,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories @@ -429,13 +428,13 @@ python-dateutil==2.8.2 # vcert python-etcd==0.4.5 # via -r requirements/static/ci/common.in -python-gnupg==0.5.0 
+python-gnupg==0.5.1 # via # -c requirements/static/ci/../pkg/py3.9/linux.txt # -r requirements/base.txt python-telegram-bot==20.3 # via -r requirements/static/ci/linux.in -pytz==2023.3 +pytz==2023.3.post1 # via # -c requirements/static/ci/../pkg/py3.9/linux.txt # tempora @@ -456,7 +455,7 @@ pyyaml==6.0.1 # responses # yamllint # yamlordereddictloader -pyzmq==25.1.0 +pyzmq==25.1.1 # via # -c requirements/static/ci/../pkg/py3.9/linux.txt # -r requirements/zeromq.txt @@ -593,7 +592,7 @@ urllib3==1.26.18 # responses vcert==0.9.1 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -virtualenv==20.23.0 +virtualenv==20.24.7 # via # -r requirements/static/ci/common.in # pytest-salt-factories diff --git a/requirements/static/ci/py3.9/tools-virustotal.txt b/requirements/static/ci/py3.9/tools-virustotal.txt new file mode 100644 index 00000000000..1b04a95c53a --- /dev/null +++ b/requirements/static/ci/py3.9/tools-virustotal.txt @@ -0,0 +1,28 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --output-file=requirements/static/ci/py3.9/tools-virustotal.txt requirements/static/ci/tools-virustotal.in +# +certifi==2023.7.22 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # requests +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # requests +idna==3.4 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # requests +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # virustotal3 +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # requests +virustotal3==1.0.8 + # via -r requirements/static/ci/tools-virustotal.in diff --git a/requirements/static/ci/py3.9/tools.txt b/requirements/static/ci/py3.9/tools.txt index f5ce6c7954d..26fa0f128d2 100644 --- a/requirements/static/ci/py3.9/tools.txt +++ b/requirements/static/ci/py3.9/tools.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url 
--output-file=requirements/static/ci/py3.9/tools.txt requirements/static/ci/tools.in # -attrs==23.1.0 +attrs==20.3.0 # via # -r requirements/static/ci/tools.in # python-tools-scripts @@ -14,49 +14,69 @@ botocore==1.29.152 # via # boto3 # s3transfer -certifi==2023.07.22 - # via requests +certifi==2023.7.22 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # requests charset-normalizer==3.2.0 - # via requests + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # requests idna==3.4 - # via requests + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # requests jinja2==3.1.2 - # via -r requirements/static/ci/tools.in + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -r requirements/static/ci/tools.in jmespath==1.0.1 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt # boto3 # botocore markdown-it-py==3.0.0 # via rich markupsafe==2.1.3 - # via jinja2 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # jinja2 mdurl==0.1.2 # via markdown-it-py packaging==23.1 - # via -r requirements/static/ci/tools.in + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -r requirements/static/ci/tools.in pygments==2.15.1 # via rich python-dateutil==2.8.2 - # via botocore -python-tools-scripts==0.18.1 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # botocore +python-tools-scripts==0.18.5 # via -r requirements/static/ci/tools.in pyyaml==6.0.1 - # via -r requirements/static/ci/tools.in + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -r requirements/static/ci/tools.in requests==2.31.0 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt # python-tools-scripts - # virustotal3 rich==13.4.2 # via python-tools-scripts s3transfer==0.6.1 # via boto3 six==1.16.0 - # via python-dateutil + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # python-dateutil typing-extensions==4.8.0 - # via python-tools-scripts + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + 
# python-tools-scripts urllib3==1.26.18 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt # botocore # requests -virustotal3==1.0.8 - # via -r requirements/static/ci/tools.in diff --git a/requirements/static/ci/py3.9/windows.txt b/requirements/static/ci/py3.9/windows.txt index 393205d4ef4..a250cb2135c 100644 --- a/requirements/static/ci/py3.9/windows.txt +++ b/requirements/static/ci/py3.9/windows.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.9/windows.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/windows.in requirements/static/pkg/windows.in requirements/windows.txt # -aiohttp==3.8.5 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.3.1 # via aiohttp @@ -57,7 +57,6 @@ cffi==1.14.6 charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.9/windows.txt - # aiohttp # requests cheetah3==3.2.6.post1 # via -r requirements/static/ci/common.in @@ -86,7 +85,7 @@ contextvars==2.4 # via # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/base.txt -cryptography==41.0.4 +cryptography==41.0.5 # via # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/base.txt @@ -95,7 +94,7 @@ cryptography==41.0.4 # pyopenssl # pyspnego # requests-ntlm -distlib==0.3.6 +distlib==0.3.7 # via virtualenv distro==1.8.0 # via @@ -114,7 +113,7 @@ etcd3-py==0.1.6 # via -r requirements/static/ci/common.in exceptiongroup==1.1.1 # via pytest -filelock==3.12.4 +filelock==3.13.1 # via virtualenv flaky==3.7.0 # via -r requirements/pytest.txt @@ -192,7 +191,7 @@ keyring==5.7.1 # via -r requirements/static/ci/common.in kubernetes==3.0.0 # via -r requirements/static/ci/common.in -looseversion==1.2.0 +looseversion==1.3.0 # via # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/base.txt @@ -203,7 +202,7 @@ lxml==4.9.2 ; sys_platform == "win32" # xmldiff mako==1.2.4 # via -r requirements/static/ci/common.in -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -c 
requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/base.txt @@ -222,7 +221,7 @@ more-itertools==9.1.0 # jaraco.text moto==4.1.11 # via -r requirements/static/ci/common.in -msgpack==1.0.5 +msgpack==1.0.7 # via # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/base.txt @@ -243,7 +242,7 @@ patch==1.16 # via -r requirements/static/ci/windows.in pathspec==0.11.1 # via yamllint -platformdirs==3.5.3 +platformdirs==4.0.0 # via virtualenv pluggy==1.0.0 # via pytest @@ -251,7 +250,7 @@ portend==3.1.0 # via # -c requirements/static/ci/../pkg/py3.9/windows.txt # cherrypy -psutil==5.8.0 +psutil==5.9.6 # via # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/base.txt @@ -306,7 +305,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories @@ -341,7 +340,7 @@ python-dateutil==2.8.2 # moto python-etcd==0.4.5 # via -r requirements/static/ci/common.in -python-gnupg==0.5.0 +python-gnupg==0.5.1 # via # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/base.txt @@ -349,7 +348,7 @@ pythonnet==3.0.1 ; sys_platform == "win32" # via # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/base.txt -pytz==2023.3 +pytz==2023.3.post1 # via # -c requirements/static/ci/../pkg/py3.9/windows.txt # tempora @@ -374,7 +373,7 @@ pyyaml==6.0.1 # pytest-salt-factories # responses # yamllint -pyzmq==25.1.0 +pyzmq==25.1.1 # via # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/zeromq.txt @@ -464,7 +463,7 @@ urllib3==1.26.18 # python-etcd # requests # responses -virtualenv==20.23.0 +virtualenv==20.24.7 # via # -r requirements/static/ci/common.in # pytest-salt-factories diff --git a/requirements/static/ci/tools-virustotal.in b/requirements/static/ci/tools-virustotal.in new 
file mode 100644 index 00000000000..b7d1a356f4e --- /dev/null +++ b/requirements/static/ci/tools-virustotal.in @@ -0,0 +1,3 @@ +--constraint=../pkg/py{py_version}/{platform}.txt + +virustotal3 diff --git a/requirements/static/ci/tools.in b/requirements/static/ci/tools.in index 9066c498fcc..367eb857b4a 100644 --- a/requirements/static/ci/tools.in +++ b/requirements/static/ci/tools.in @@ -1,7 +1,8 @@ -python-tools-scripts >= 0.18.1 +--constraint=../pkg/py{py_version}/{platform}.txt + attrs +python-tools-scripts >= 0.18.5 boto3 pyyaml jinja2 packaging -virustotal3 diff --git a/requirements/static/pkg/py3.10/darwin.txt b/requirements/static/pkg/py3.10/darwin.txt index 2702a588144..07545fb8cbe 100644 --- a/requirements/static/pkg/py3.10/darwin.txt +++ b/requirements/static/pkg/py3.10/darwin.txt @@ -8,7 +8,7 @@ autocommand==2.2.2 # via jaraco.text certifi==2023.07.22 # via requests -cffi==1.15.1 +cffi==1.14.6 # via cryptography charset-normalizer==3.2.0 # via requests @@ -18,7 +18,7 @@ cherrypy==18.8.0 # via -r requirements/base.txt contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.4 +cryptography==41.0.5 # via # -r requirements/base.txt # pyopenssl @@ -47,9 +47,9 @@ jinja2==3.1.2 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt -looseversion==1.2.0 +looseversion==1.3.0 # via -r requirements/base.txt -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -r requirements/base.txt # jinja2 @@ -59,13 +59,13 @@ more-itertools==9.1.0 # cherrypy # jaraco.functools # jaraco.text -msgpack==1.0.5 +msgpack==1.0.7 # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 # via cherrypy -psutil==5.9.5 +psutil==5.9.6 # via -r requirements/base.txt pycparser==2.21 # via cffi @@ -77,13 +77,13 @@ pyopenssl==23.2.0 # via -r requirements/base.txt python-dateutil==2.8.2 # via -r requirements/base.txt -python-gnupg==0.5.0 +python-gnupg==0.5.1 # via -r requirements/base.txt -pytz==2023.3 +pytz==2023.3.post1 # via 
tempora pyyaml==6.0.1 # via -r requirements/base.txt -pyzmq==25.1.0 +pyzmq==25.1.1 # via -r requirements/zeromq.txt requests==2.31.0 # via -r requirements/base.txt diff --git a/requirements/static/pkg/py3.10/freebsd.txt b/requirements/static/pkg/py3.10/freebsd.txt index fdf1dae044b..12789d2baa3 100644 --- a/requirements/static/pkg/py3.10/freebsd.txt +++ b/requirements/static/pkg/py3.10/freebsd.txt @@ -8,7 +8,7 @@ autocommand==2.2.2 # via jaraco.text certifi==2023.07.22 # via requests -cffi==1.15.1 +cffi==1.14.6 # via cryptography charset-normalizer==3.2.0 # via requests @@ -18,7 +18,7 @@ cherrypy==18.8.0 # via -r requirements/base.txt contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.4 +cryptography==41.0.5 # via # -r requirements/base.txt # pyopenssl @@ -47,9 +47,9 @@ jinja2==3.1.2 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt -looseversion==1.2.0 +looseversion==1.3.0 # via -r requirements/base.txt -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -r requirements/base.txt # jinja2 @@ -59,13 +59,13 @@ more-itertools==9.1.0 # cherrypy # jaraco.functools # jaraco.text -msgpack==1.0.5 +msgpack==1.0.7 # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 # via cherrypy -psutil==5.9.5 +psutil==5.9.6 # via -r requirements/base.txt pycparser==2.21 # via cffi @@ -77,13 +77,13 @@ pyopenssl==23.2.0 # via -r requirements/base.txt python-dateutil==2.8.2 # via -r requirements/base.txt -python-gnupg==0.5.0 +python-gnupg==0.5.1 # via -r requirements/base.txt -pytz==2023.3 +pytz==2023.3.post1 # via tempora pyyaml==6.0.1 # via -r requirements/base.txt -pyzmq==25.1.0 +pyzmq==25.1.1 # via -r requirements/zeromq.txt requests==2.31.0 # via -r requirements/base.txt diff --git a/requirements/static/pkg/py3.10/linux.txt b/requirements/static/pkg/py3.10/linux.txt index 3dfac41d889..3135c124193 100644 --- a/requirements/static/pkg/py3.10/linux.txt +++ b/requirements/static/pkg/py3.10/linux.txt @@ -8,7 +8,7 
@@ autocommand==2.2.2 # via jaraco.text certifi==2023.07.22 # via requests -cffi==1.15.1 +cffi==1.14.6 # via cryptography charset-normalizer==3.2.0 # via requests @@ -18,7 +18,7 @@ cherrypy==18.8.0 # via -r requirements/base.txt contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.4 +cryptography==41.0.5 # via # -r requirements/base.txt # pyopenssl @@ -47,9 +47,9 @@ jinja2==3.1.2 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt -looseversion==1.2.0 +looseversion==1.3.0 # via -r requirements/base.txt -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -r requirements/base.txt # jinja2 @@ -59,13 +59,13 @@ more-itertools==9.1.0 # cherrypy # jaraco.functools # jaraco.text -msgpack==1.0.5 +msgpack==1.0.7 # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 # via cherrypy -psutil==5.9.5 +psutil==5.9.6 # via -r requirements/base.txt pycparser==2.21 # via cffi @@ -77,13 +77,13 @@ pyopenssl==23.2.0 # via -r requirements/base.txt python-dateutil==2.8.2 # via -r requirements/base.txt -python-gnupg==0.5.0 +python-gnupg==0.5.1 # via -r requirements/base.txt -pytz==2023.3 +pytz==2023.3.post1 # via tempora pyyaml==6.0.1 # via -r requirements/base.txt -pyzmq==25.1.0 +pyzmq==25.1.1 # via -r requirements/zeromq.txt requests==2.31.0 # via -r requirements/base.txt diff --git a/requirements/static/pkg/py3.10/windows.txt b/requirements/static/pkg/py3.10/windows.txt index a408b56dfa4..30570a770f5 100644 --- a/requirements/static/pkg/py3.10/windows.txt +++ b/requirements/static/pkg/py3.10/windows.txt @@ -22,7 +22,7 @@ clr-loader==0.2.4 # via pythonnet contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.4 +cryptography==41.0.5 # via # -r requirements/base.txt # pyopenssl @@ -51,11 +51,11 @@ jinja2==3.1.2 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt -looseversion==1.2.0 +looseversion==1.3.0 # via -r requirements/base.txt lxml==4.9.2 ; sys_platform == "win32" # via 
-r requirements/base.txt -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -r requirements/base.txt # jinja2 @@ -65,13 +65,13 @@ more-itertools==9.1.0 # cherrypy # jaraco.functools # jaraco.text -msgpack==1.0.5 +msgpack==1.0.7 # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 # via cherrypy -psutil==5.8.0 +psutil==5.9.6 # via -r requirements/base.txt pycparser==2.21 # via cffi @@ -87,11 +87,11 @@ pyopenssl==23.2.0 # via -r requirements/base.txt python-dateutil==2.8.2 # via -r requirements/base.txt -python-gnupg==0.5.0 +python-gnupg==0.5.1 # via -r requirements/base.txt pythonnet==3.0.1 ; sys_platform == "win32" # via -r requirements/base.txt -pytz==2023.3 +pytz==2023.3.post1 # via tempora pywin32==306 ; sys_platform == "win32" # via @@ -99,7 +99,7 @@ pywin32==306 ; sys_platform == "win32" # wmi pyyaml==6.0.1 # via -r requirements/base.txt -pyzmq==25.1.0 +pyzmq==25.1.1 # via -r requirements/zeromq.txt requests==2.31.0 # via -r requirements/base.txt diff --git a/requirements/static/pkg/py3.11/darwin.txt b/requirements/static/pkg/py3.11/darwin.txt index c8da143624a..57bff3fd051 100644 --- a/requirements/static/pkg/py3.11/darwin.txt +++ b/requirements/static/pkg/py3.11/darwin.txt @@ -4,11 +4,13 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.11/darwin.txt requirements/darwin.txt requirements/static/pkg/darwin.in # +annotated-types==0.6.0 + # via pydantic autocommand==2.2.2 # via jaraco.text certifi==2023.07.22 # via requests -cffi==1.15.1 +cffi==1.14.6 # via cryptography charset-normalizer==3.2.0 # via requests @@ -18,7 +20,7 @@ cherrypy==18.8.0 # via -r requirements/base.txt contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.4 +cryptography==41.0.5 # via # -r requirements/base.txt # pyopenssl @@ -30,7 +32,7 @@ immutables==0.15 # via contextvars importlib-metadata==6.6.0 # via -r requirements/base.txt -inflect==6.0.4 +inflect==7.0.0 # via jaraco.text jaraco.collections==4.1.0 # via 
cherrypy @@ -47,43 +49,45 @@ jinja2==3.1.2 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt -looseversion==1.2.0 +looseversion==1.3.0 # via -r requirements/base.txt -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -r requirements/base.txt # jinja2 -more-itertools==9.1.0 +more-itertools==8.2.0 # via # cheroot # cherrypy # jaraco.functools # jaraco.text -msgpack==1.0.5 +msgpack==1.0.7 # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 # via cherrypy -psutil==5.9.5 +psutil==5.9.6 # via -r requirements/base.txt pycparser==2.21 # via cffi pycryptodomex==3.9.8 # via -r requirements/crypto.txt -pydantic==1.10.8 +pydantic-core==2.14.5 + # via pydantic +pydantic==2.5.2 # via inflect pyopenssl==23.2.0 # via -r requirements/base.txt python-dateutil==2.8.2 # via -r requirements/base.txt -python-gnupg==0.5.0 +python-gnupg==0.5.1 # via -r requirements/base.txt -pytz==2023.3 +pytz==2023.3.post1 # via tempora pyyaml==6.0.1 # via -r requirements/base.txt -pyzmq==25.1.0 +pyzmq==25.1.1 # via -r requirements/zeromq.txt requests==2.31.0 # via -r requirements/base.txt @@ -98,7 +102,10 @@ timelib==0.3.0 tornado==6.3.3 # via -r requirements/base.txt typing-extensions==4.8.0 - # via pydantic + # via + # inflect + # pydantic + # pydantic-core urllib3==1.26.18 # via requests zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.11/freebsd.txt b/requirements/static/pkg/py3.11/freebsd.txt index daffdd6023e..89479e2190c 100644 --- a/requirements/static/pkg/py3.11/freebsd.txt +++ b/requirements/static/pkg/py3.11/freebsd.txt @@ -4,11 +4,13 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.11/freebsd.txt requirements/base.txt requirements/static/pkg/freebsd.in requirements/zeromq.txt # +annotated-types==0.6.0 + # via pydantic autocommand==2.2.2 # via jaraco.text certifi==2023.07.22 # via requests -cffi==1.15.1 +cffi==1.14.6 # via cryptography charset-normalizer==3.2.0 # via requests @@ 
-18,7 +20,7 @@ cherrypy==18.8.0 # via -r requirements/base.txt contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.4 +cryptography==41.0.5 # via # -r requirements/base.txt # pyopenssl @@ -30,7 +32,7 @@ immutables==0.15 # via contextvars importlib-metadata==6.6.0 # via -r requirements/base.txt -inflect==6.0.4 +inflect==7.0.0 # via jaraco.text jaraco.collections==4.1.0 # via cherrypy @@ -47,50 +49,54 @@ jinja2==3.1.2 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt -looseversion==1.2.0 +looseversion==1.3.0 # via -r requirements/base.txt -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -r requirements/base.txt # jinja2 -more-itertools==9.1.0 +more-itertools==5.0.0 # via # cheroot # cherrypy # jaraco.functools # jaraco.text -msgpack==1.0.5 +msgpack==1.0.7 # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 # via cherrypy -psutil==5.9.5 +psutil==5.9.6 # via -r requirements/base.txt pycparser==2.21 # via cffi pycryptodomex==3.9.8 # via -r requirements/crypto.txt -pydantic==1.10.8 +pydantic-core==2.14.5 + # via pydantic +pydantic==2.5.2 # via inflect pyopenssl==23.2.0 # via -r requirements/base.txt python-dateutil==2.8.2 # via -r requirements/base.txt -python-gnupg==0.5.0 +python-gnupg==0.5.1 # via -r requirements/base.txt -pytz==2023.3 +pytz==2023.3.post1 # via tempora pyyaml==6.0.1 # via -r requirements/base.txt -pyzmq==25.1.0 +pyzmq==25.1.1 # via -r requirements/zeromq.txt requests==2.31.0 # via -r requirements/base.txt setproctitle==1.3.2 # via -r requirements/base.txt six==1.16.0 - # via python-dateutil + # via + # more-itertools + # python-dateutil tempora==5.3.0 # via portend timelib==0.3.0 @@ -98,7 +104,10 @@ timelib==0.3.0 tornado==6.3.3 # via -r requirements/base.txt typing-extensions==4.8.0 - # via pydantic + # via + # inflect + # pydantic + # pydantic-core urllib3==1.26.18 # via requests zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.11/linux.txt 
b/requirements/static/pkg/py3.11/linux.txt index f042dadaf17..a4045c0c31b 100644 --- a/requirements/static/pkg/py3.11/linux.txt +++ b/requirements/static/pkg/py3.11/linux.txt @@ -4,11 +4,13 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.11/linux.txt requirements/base.txt requirements/static/pkg/linux.in requirements/zeromq.txt # +annotated-types==0.6.0 + # via pydantic autocommand==2.2.2 # via jaraco.text certifi==2023.07.22 # via requests -cffi==1.15.1 +cffi==1.14.6 # via cryptography charset-normalizer==3.2.0 # via requests @@ -18,7 +20,7 @@ cherrypy==18.8.0 # via -r requirements/base.txt contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.4 +cryptography==41.0.5 # via # -r requirements/base.txt # pyopenssl @@ -30,7 +32,7 @@ immutables==0.15 # via contextvars importlib-metadata==6.6.0 # via -r requirements/base.txt -inflect==6.0.4 +inflect==7.0.0 # via jaraco.text jaraco.collections==4.1.0 # via cherrypy @@ -47,43 +49,45 @@ jinja2==3.1.2 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt -looseversion==1.2.0 +looseversion==1.3.0 # via -r requirements/base.txt -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -r requirements/base.txt # jinja2 -more-itertools==9.1.0 +more-itertools==5.0.0 # via # cheroot # cherrypy # jaraco.functools # jaraco.text -msgpack==1.0.5 +msgpack==1.0.7 # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 # via cherrypy -psutil==5.9.5 +psutil==5.9.6 # via -r requirements/base.txt pycparser==2.21 # via cffi pycryptodomex==3.9.8 # via -r requirements/crypto.txt -pydantic==1.10.8 +pydantic-core==2.14.5 + # via pydantic +pydantic==2.5.2 # via inflect pyopenssl==23.2.0 # via -r requirements/base.txt python-dateutil==2.8.2 # via -r requirements/base.txt -python-gnupg==0.5.0 +python-gnupg==0.5.1 # via -r requirements/base.txt -pytz==2023.3 +pytz==2023.3.post1 # via tempora pyyaml==6.0.1 # via -r requirements/base.txt -pyzmq==25.1.0 
+pyzmq==25.1.1 # via -r requirements/zeromq.txt requests==2.31.0 # via -r requirements/base.txt @@ -92,7 +96,9 @@ rpm-vercmp==0.1.2 ; sys_platform == "linux" setproctitle==1.3.2 # via -r requirements/base.txt six==1.16.0 - # via python-dateutil + # via + # more-itertools + # python-dateutil tempora==5.3.0 # via portend timelib==0.3.0 @@ -100,7 +106,10 @@ timelib==0.3.0 tornado==6.3.3 # via -r requirements/base.txt typing-extensions==4.8.0 - # via pydantic + # via + # inflect + # pydantic + # pydantic-core urllib3==1.26.18 # via requests zc.lockfile==3.0.post1 diff --git a/requirements/static/pkg/py3.11/windows.txt b/requirements/static/pkg/py3.11/windows.txt index 2940d3f32cb..93024b01446 100644 --- a/requirements/static/pkg/py3.11/windows.txt +++ b/requirements/static/pkg/py3.11/windows.txt @@ -4,6 +4,8 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.11/windows.txt requirements/static/pkg/windows.in requirements/windows.txt # +annotated-types==0.6.0 + # via pydantic autocommand==2.2.2 # via jaraco.text certifi==2023.07.22 @@ -22,7 +24,7 @@ clr-loader==0.2.4 # via pythonnet contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.4 +cryptography==41.0.5 # via # -r requirements/base.txt # pyopenssl @@ -34,7 +36,7 @@ immutables==0.15 # via contextvars importlib-metadata==6.6.0 # via -r requirements/base.txt -inflect==6.0.4 +inflect==7.0.0 # via jaraco.text jaraco.collections==4.1.0 # via cherrypy @@ -51,33 +53,35 @@ jinja2==3.1.2 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt -looseversion==1.2.0 +looseversion==1.3.0 # via -r requirements/base.txt -lxml==4.9.2 ; sys_platform == "win32" +lxml==4.9.1 ; sys_platform == "win32" # via -r requirements/base.txt -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -r requirements/base.txt # jinja2 -more-itertools==9.1.0 +more-itertools==8.2.0 # via # cheroot # cherrypy # jaraco.functools # jaraco.text -msgpack==1.0.5 +msgpack==1.0.7 # via -r 
requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 # via cherrypy -psutil==5.8.0 +psutil==5.9.6 # via -r requirements/base.txt pycparser==2.21 # via cffi -pycryptodomex==3.9.8 +pycryptodomex==3.10.1 # via -r requirements/crypto.txt -pydantic==1.10.8 +pydantic-core==2.14.5 + # via pydantic +pydantic==2.5.2 # via inflect pymssql==2.2.7 ; sys_platform == "win32" # via -r requirements/base.txt @@ -87,19 +91,19 @@ pyopenssl==23.2.0 # via -r requirements/base.txt python-dateutil==2.8.2 # via -r requirements/base.txt -python-gnupg==0.5.0 +python-gnupg==0.5.1 # via -r requirements/base.txt pythonnet==3.0.1 ; sys_platform == "win32" # via -r requirements/base.txt -pytz==2023.3 +pytz==2023.3.post1 # via tempora -pywin32==306 ; sys_platform == "win32" +pywin32==305 ; sys_platform == "win32" # via # -r requirements/base.txt # wmi pyyaml==6.0.1 # via -r requirements/base.txt -pyzmq==25.1.0 +pyzmq==25.1.1 # via -r requirements/zeromq.txt requests==2.31.0 # via -r requirements/base.txt @@ -114,7 +118,10 @@ timelib==0.3.0 tornado==6.3.3 # via -r requirements/base.txt typing-extensions==4.8.0 - # via pydantic + # via + # inflect + # pydantic + # pydantic-core urllib3==1.26.18 # via requests wmi==1.5.1 ; sys_platform == "win32" diff --git a/requirements/static/pkg/py3.12/darwin.txt b/requirements/static/pkg/py3.12/darwin.txt new file mode 100644 index 00000000000..57bff3fd051 --- /dev/null +++ b/requirements/static/pkg/py3.12/darwin.txt @@ -0,0 +1,117 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.12/darwin.txt requirements/darwin.txt requirements/static/pkg/darwin.in +# +annotated-types==0.6.0 + # via pydantic +autocommand==2.2.2 + # via jaraco.text +certifi==2023.07.22 + # via requests +cffi==1.14.6 + # via cryptography +charset-normalizer==3.2.0 + # via requests +cheroot==10.0.0 + # via cherrypy +cherrypy==18.8.0 + # via -r requirements/base.txt
+contextvars==2.4 + # via -r requirements/base.txt +cryptography==41.0.5 + # via + # -r requirements/base.txt + # pyopenssl +distro==1.8.0 + # via -r requirements/base.txt +idna==3.4 + # via requests +immutables==0.15 + # via contextvars +importlib-metadata==6.6.0 + # via -r requirements/base.txt +inflect==7.0.0 + # via jaraco.text +jaraco.collections==4.1.0 + # via cherrypy +jaraco.context==4.3.0 + # via jaraco.text +jaraco.functools==3.7.0 + # via + # cheroot + # jaraco.text + # tempora +jaraco.text==3.11.1 + # via jaraco.collections +jinja2==3.1.2 + # via -r requirements/base.txt +jmespath==1.0.1 + # via -r requirements/base.txt +looseversion==1.3.0 + # via -r requirements/base.txt +markupsafe==2.1.3 + # via + # -r requirements/base.txt + # jinja2 +more-itertools==8.2.0 + # via + # cheroot + # cherrypy + # jaraco.functools + # jaraco.text +msgpack==1.0.7 + # via -r requirements/base.txt +packaging==23.1 + # via -r requirements/base.txt +portend==3.1.0 + # via cherrypy +psutil==5.9.6 + # via -r requirements/base.txt +pycparser==2.21 + # via cffi +pycryptodomex==3.9.8 + # via -r requirements/crypto.txt +pydantic-core==2.14.5 + # via pydantic +pydantic==2.5.2 + # via inflect +pyopenssl==23.2.0 + # via -r requirements/base.txt +python-dateutil==2.8.2 + # via -r requirements/base.txt +python-gnupg==0.5.1 + # via -r requirements/base.txt +pytz==2023.3.post1 + # via tempora +pyyaml==6.0.1 + # via -r requirements/base.txt +pyzmq==25.1.1 + # via -r requirements/zeromq.txt +requests==2.31.0 + # via -r requirements/base.txt +setproctitle==1.3.2 + # via -r requirements/base.txt +six==1.16.0 + # via python-dateutil +tempora==5.3.0 + # via portend +timelib==0.3.0 + # via -r requirements/base.txt +tornado==6.3.3 + # via -r requirements/base.txt +typing-extensions==4.8.0 + # via + # inflect + # pydantic + # pydantic-core +urllib3==1.26.18 + # via requests +zc.lockfile==3.0.post1 + # via cherrypy +zipp==3.16.2 + # via importlib-metadata + +# The following packages are considered 
to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/pkg/py3.12/freebsd.txt b/requirements/static/pkg/py3.12/freebsd.txt new file mode 100644 index 00000000000..e10f8d6b782 --- /dev/null +++ b/requirements/static/pkg/py3.12/freebsd.txt @@ -0,0 +1,119 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.12/freebsd.txt requirements/base.txt requirements/static/pkg/freebsd.in requirements/zeromq.txt +# +annotated-types==0.6.0 + # via pydantic +autocommand==2.2.2 + # via jaraco.text +certifi==2023.07.22 + # via requests +cffi==1.14.6 + # via cryptography +charset-normalizer==3.2.0 + # via requests +cheroot==10.0.0 + # via cherrypy +cherrypy==18.8.0 + # via -r requirements/base.txt +contextvars==2.4 + # via -r requirements/base.txt +cryptography==41.0.5 + # via + # -r requirements/base.txt + # pyopenssl +distro==1.8.0 + # via -r requirements/base.txt +idna==3.4 + # via requests +immutables==0.15 + # via contextvars +importlib-metadata==6.6.0 + # via -r requirements/base.txt +inflect==7.0.0 + # via jaraco.text +jaraco.collections==4.1.0 + # via cherrypy +jaraco.context==4.3.0 + # via jaraco.text +jaraco.functools==3.7.0 + # via + # cheroot + # jaraco.text + # tempora +jaraco.text==3.11.1 + # via jaraco.collections +jinja2==3.1.2 + # via -r requirements/base.txt +jmespath==1.0.1 + # via -r requirements/base.txt +looseversion==1.3.0 + # via -r requirements/base.txt +markupsafe==2.1.3 + # via + # -r requirements/base.txt + # jinja2 +more-itertools==5.0.0 + # via + # cheroot + # cherrypy + # jaraco.functools + # jaraco.text +msgpack==1.0.7 + # via -r requirements/base.txt +packaging==23.1 + # via -r requirements/base.txt +portend==3.1.0 + # via cherrypy +psutil==5.9.6 + # via -r requirements/base.txt +pycparser==2.21 + # via cffi +pycryptodomex==3.9.8 + # via -r requirements/crypto.txt +pydantic-core==2.14.5 + # via pydantic +pydantic==2.5.2 + # via 
inflect +pyopenssl==23.2.0 + # via -r requirements/base.txt +python-dateutil==2.8.2 + # via -r requirements/base.txt +python-gnupg==0.5.1 + # via -r requirements/base.txt +pytz==2023.3.post1 + # via tempora +pyyaml==6.0.1 + # via -r requirements/base.txt +pyzmq==25.1.1 + # via -r requirements/zeromq.txt +requests==2.31.0 + # via -r requirements/base.txt +setproctitle==1.3.2 + # via -r requirements/base.txt +six==1.16.0 + # via + # more-itertools + # python-dateutil +tempora==5.3.0 + # via portend +timelib==0.3.0 + # via -r requirements/base.txt +tornado==6.3.3 + # via -r requirements/base.txt +typing-extensions==4.8.0 + # via + # inflect + # pydantic + # pydantic-core +urllib3==1.26.18 + # via requests +zc.lockfile==3.0.post1 + # via cherrypy +zipp==3.16.2 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/pkg/py3.12/linux.txt b/requirements/static/pkg/py3.12/linux.txt new file mode 100644 index 00000000000..2927689cce5 --- /dev/null +++ b/requirements/static/pkg/py3.12/linux.txt @@ -0,0 +1,121 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.12/linux.txt requirements/base.txt requirements/static/pkg/linux.in requirements/zeromq.txt +# +annotated-types==0.6.0 + # via pydantic +autocommand==2.2.2 + # via jaraco.text +certifi==2023.07.22 + # via requests +cffi==1.14.6 + # via cryptography +charset-normalizer==3.2.0 + # via requests +cheroot==10.0.0 + # via cherrypy +cherrypy==18.8.0 + # via -r requirements/base.txt +contextvars==2.4 + # via -r requirements/base.txt +cryptography==41.0.5 + # via + # -r requirements/base.txt + # pyopenssl +distro==1.8.0 + # via -r requirements/base.txt +idna==3.4 + # via requests +immutables==0.15 + # via contextvars +importlib-metadata==6.6.0 + # via -r requirements/base.txt +inflect==7.0.0 + # via jaraco.text 
+jaraco.collections==4.1.0 + # via cherrypy +jaraco.context==4.3.0 + # via jaraco.text +jaraco.functools==3.7.0 + # via + # cheroot + # jaraco.text + # tempora +jaraco.text==3.11.1 + # via jaraco.collections +jinja2==3.1.2 + # via -r requirements/base.txt +jmespath==1.0.1 + # via -r requirements/base.txt +looseversion==1.3.0 + # via -r requirements/base.txt +markupsafe==2.1.3 + # via + # -r requirements/base.txt + # jinja2 +more-itertools==5.0.0 + # via + # cheroot + # cherrypy + # jaraco.functools + # jaraco.text +msgpack==1.0.7 + # via -r requirements/base.txt +packaging==23.1 + # via -r requirements/base.txt +portend==3.1.0 + # via cherrypy +psutil==5.9.6 + # via -r requirements/base.txt +pycparser==2.21 + # via cffi +pycryptodomex==3.9.8 + # via -r requirements/crypto.txt +pydantic-core==2.14.5 + # via pydantic +pydantic==2.5.2 + # via inflect +pyopenssl==23.2.0 + # via -r requirements/base.txt +python-dateutil==2.8.2 + # via -r requirements/base.txt +python-gnupg==0.5.1 + # via -r requirements/base.txt +pytz==2023.3.post1 + # via tempora +pyyaml==6.0.1 + # via -r requirements/base.txt +pyzmq==25.1.1 + # via -r requirements/zeromq.txt +requests==2.31.0 + # via -r requirements/base.txt +rpm-vercmp==0.1.2 ; sys_platform == "linux" + # via -r requirements/base.txt +setproctitle==1.3.2 + # via -r requirements/base.txt +six==1.16.0 + # via + # more-itertools + # python-dateutil +tempora==5.3.0 + # via portend +timelib==0.3.0 + # via -r requirements/base.txt +tornado==6.3.3 + # via -r requirements/base.txt +typing-extensions==4.8.0 + # via + # inflect + # pydantic + # pydantic-core +urllib3==1.26.18 + # via requests +zc.lockfile==3.0.post1 + # via cherrypy +zipp==3.16.2 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/pkg/py3.12/windows.txt b/requirements/static/pkg/py3.12/windows.txt new file mode 100644 index 00000000000..411f6f2e639 --- /dev/null +++ 
b/requirements/static/pkg/py3.12/windows.txt @@ -0,0 +1,135 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.12/windows.txt requirements/static/pkg/windows.in requirements/windows.txt +# +annotated-types==0.6.0 + # via pydantic +autocommand==2.2.2 + # via jaraco.text +certifi==2023.07.22 + # via requests +cffi==1.14.6 + # via + # clr-loader + # cryptography +charset-normalizer==3.2.0 + # via requests +cheroot==10.0.0 + # via cherrypy +cherrypy==18.8.0 + # via -r requirements/base.txt +clr-loader==0.2.6 + # via pythonnet +contextvars==2.4 + # via -r requirements/base.txt +cryptography==41.0.5 + # via + # -r requirements/base.txt + # pyopenssl +distro==1.8.0 + # via -r requirements/base.txt +idna==3.4 + # via requests +immutables==0.15 + # via contextvars +importlib-metadata==6.6.0 + # via -r requirements/base.txt +inflect==7.0.0 + # via jaraco.text +jaraco.collections==4.1.0 + # via cherrypy +jaraco.context==4.3.0 + # via jaraco.text +jaraco.functools==3.7.0 + # via + # cheroot + # jaraco.text + # tempora +jaraco.text==3.11.1 + # via jaraco.collections +jinja2==3.1.2 + # via -r requirements/base.txt +jmespath==1.0.1 + # via -r requirements/base.txt +looseversion==1.3.0 + # via -r requirements/base.txt +lxml==4.9.1 ; sys_platform == "win32" + # via -r requirements/base.txt +markupsafe==2.1.3 + # via + # -r requirements/base.txt + # jinja2 +more-itertools==8.2.0 + # via + # cheroot + # cherrypy + # jaraco.functools + # jaraco.text +msgpack==1.0.7 + # via -r requirements/base.txt +packaging==23.1 + # via -r requirements/base.txt +portend==3.1.0 + # via cherrypy +psutil==5.9.6 + # via -r requirements/base.txt +pycparser==2.21 + # via cffi +pycryptodomex==3.10.1 + # via -r requirements/crypto.txt +pydantic-core==2.14.5 + # via pydantic +pydantic==2.5.2 + # via inflect +pymssql==2.2.7 ; sys_platform == "win32" + # via -r requirements/base.txt +pymysql==1.1.0 ; sys_platform 
== "win32" + # via -r requirements/base.txt +pyopenssl==23.2.0 + # via -r requirements/base.txt +python-dateutil==2.8.2 + # via -r requirements/base.txt +python-gnupg==0.5.1 + # via -r requirements/base.txt +pythonnet==3.0.3 ; sys_platform == "win32" + # via -r requirements/base.txt +pytz==2023.3.post1 + # via tempora +pywin32==306 ; sys_platform == "win32" + # via + # -r requirements/base.txt + # wmi +pyyaml==6.0.1 + # via -r requirements/base.txt +pyzmq==25.1.1 + # via -r requirements/zeromq.txt +requests==2.31.0 + # via -r requirements/base.txt +setproctitle==1.3.2 + # via -r requirements/base.txt +six==1.15.0 + # via python-dateutil +tempora==5.3.0 + # via portend +timelib==0.3.0 + # via -r requirements/base.txt +tornado==6.3.3 + # via -r requirements/base.txt +typing-extensions==4.8.0 + # via + # inflect + # pydantic + # pydantic-core +urllib3==1.26.18 + # via requests +wmi==1.5.1 ; sys_platform == "win32" + # via -r requirements/base.txt +zc.lockfile==3.0.post1 + # via cherrypy +zipp==3.16.2 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/pkg/py3.8/freebsd.txt b/requirements/static/pkg/py3.8/freebsd.txt index e7550ee7f08..431cf3e0a32 100644 --- a/requirements/static/pkg/py3.8/freebsd.txt +++ b/requirements/static/pkg/py3.8/freebsd.txt @@ -8,7 +8,7 @@ autocommand==2.2.2 # via jaraco.text certifi==2023.07.22 # via requests -cffi==1.15.1 +cffi==1.14.6 # via cryptography charset-normalizer==3.2.0 # via requests @@ -18,7 +18,7 @@ cherrypy==18.8.0 # via -r requirements/base.txt contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.4 +cryptography==41.0.5 # via # -r requirements/base.txt # pyopenssl @@ -49,9 +49,9 @@ jinja2==3.1.2 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt -looseversion==1.2.0 +looseversion==1.3.0 # via -r requirements/base.txt -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -r requirements/base.txt 
# jinja2 @@ -61,13 +61,13 @@ more-itertools==9.1.0 # cherrypy # jaraco.functools # jaraco.text -msgpack==1.0.5 +msgpack==1.0.7 # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 # via cherrypy -psutil==5.9.5 +psutil==5.9.6 # via -r requirements/base.txt pycparser==2.21 # via cffi @@ -79,13 +79,13 @@ pyopenssl==23.2.0 # via -r requirements/base.txt python-dateutil==2.8.2 # via -r requirements/base.txt -python-gnupg==0.5.0 +python-gnupg==0.5.1 # via -r requirements/base.txt -pytz==2023.3 +pytz==2023.3.post1 # via tempora pyyaml==6.0.1 # via -r requirements/base.txt -pyzmq==25.1.0 +pyzmq==25.1.1 # via -r requirements/zeromq.txt requests==2.31.0 # via -r requirements/base.txt diff --git a/requirements/static/pkg/py3.8/linux.txt b/requirements/static/pkg/py3.8/linux.txt index a3de491311f..57e20b2deeb 100644 --- a/requirements/static/pkg/py3.8/linux.txt +++ b/requirements/static/pkg/py3.8/linux.txt @@ -8,7 +8,7 @@ autocommand==2.2.2 # via jaraco.text certifi==2023.07.22 # via requests -cffi==1.15.1 +cffi==1.14.6 # via cryptography charset-normalizer==3.2.0 # via requests @@ -18,7 +18,7 @@ cherrypy==18.8.0 # via -r requirements/base.txt contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.4 +cryptography==41.0.5 # via # -r requirements/base.txt # pyopenssl @@ -49,9 +49,9 @@ jinja2==3.1.2 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt -looseversion==1.2.0 +looseversion==1.3.0 # via -r requirements/base.txt -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -r requirements/base.txt # jinja2 @@ -61,13 +61,13 @@ more-itertools==9.1.0 # cherrypy # jaraco.functools # jaraco.text -msgpack==1.0.5 +msgpack==1.0.7 # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 # via cherrypy -psutil==5.9.5 +psutil==5.9.6 # via -r requirements/base.txt pycparser==2.21 # via cffi @@ -79,13 +79,13 @@ pyopenssl==23.2.0 # via -r requirements/base.txt python-dateutil==2.8.2 # 
via -r requirements/base.txt -python-gnupg==0.5.0 +python-gnupg==0.5.1 # via -r requirements/base.txt -pytz==2023.3 +pytz==2023.3.post1 # via tempora pyyaml==6.0.1 # via -r requirements/base.txt -pyzmq==25.1.0 +pyzmq==25.1.1 # via -r requirements/zeromq.txt requests==2.31.0 # via -r requirements/base.txt diff --git a/requirements/static/pkg/py3.8/windows.txt b/requirements/static/pkg/py3.8/windows.txt index b0cafc06d43..f1883bb6b48 100644 --- a/requirements/static/pkg/py3.8/windows.txt +++ b/requirements/static/pkg/py3.8/windows.txt @@ -22,7 +22,7 @@ clr-loader==0.2.4 # via pythonnet contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.4 +cryptography==41.0.5 # via # -r requirements/base.txt # pyopenssl @@ -53,11 +53,11 @@ jinja2==3.1.2 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt -looseversion==1.2.0 +looseversion==1.3.0 # via -r requirements/base.txt lxml==4.9.2 ; sys_platform == "win32" # via -r requirements/base.txt -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -r requirements/base.txt # jinja2 @@ -67,13 +67,13 @@ more-itertools==9.1.0 # cherrypy # jaraco.functools # jaraco.text -msgpack==1.0.5 +msgpack==1.0.7 # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 # via cherrypy -psutil==5.8.0 +psutil==5.9.6 # via -r requirements/base.txt pycparser==2.21 # via cffi @@ -89,11 +89,11 @@ pyopenssl==23.2.0 # via -r requirements/base.txt python-dateutil==2.8.2 # via -r requirements/base.txt -python-gnupg==0.5.0 +python-gnupg==0.5.1 # via -r requirements/base.txt pythonnet==3.0.1 ; sys_platform == "win32" # via -r requirements/base.txt -pytz==2023.3 +pytz==2023.3.post1 # via tempora pywin32==306 ; sys_platform == "win32" # via @@ -102,7 +102,7 @@ pywin32==306 ; sys_platform == "win32" # wmi pyyaml==6.0.1 # via -r requirements/base.txt -pyzmq==25.1.0 +pyzmq==25.1.1 # via -r requirements/zeromq.txt requests==2.31.0 # via -r requirements/base.txt diff --git 
a/requirements/static/pkg/py3.9/darwin.txt b/requirements/static/pkg/py3.9/darwin.txt index 612ba9616d0..2161a26e6c5 100644 --- a/requirements/static/pkg/py3.9/darwin.txt +++ b/requirements/static/pkg/py3.9/darwin.txt @@ -8,7 +8,7 @@ autocommand==2.2.2 # via jaraco.text certifi==2023.07.22 # via requests -cffi==1.15.1 +cffi==1.14.6 # via cryptography charset-normalizer==3.2.0 # via requests @@ -18,7 +18,7 @@ cherrypy==18.8.0 # via -r requirements/base.txt contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.4 +cryptography==41.0.5 # via # -r requirements/base.txt # pyopenssl @@ -47,9 +47,9 @@ jinja2==3.1.2 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt -looseversion==1.2.0 +looseversion==1.3.0 # via -r requirements/base.txt -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -r requirements/base.txt # jinja2 @@ -59,13 +59,13 @@ more-itertools==9.1.0 # cherrypy # jaraco.functools # jaraco.text -msgpack==1.0.5 +msgpack==1.0.7 # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 # via cherrypy -psutil==5.9.5 +psutil==5.9.6 # via -r requirements/base.txt pycparser==2.21 # via cffi @@ -77,13 +77,13 @@ pyopenssl==23.2.0 # via -r requirements/base.txt python-dateutil==2.8.2 # via -r requirements/base.txt -python-gnupg==0.5.0 +python-gnupg==0.5.1 # via -r requirements/base.txt -pytz==2023.3 +pytz==2023.3.post1 # via tempora pyyaml==6.0.1 # via -r requirements/base.txt -pyzmq==25.1.0 +pyzmq==25.1.1 # via -r requirements/zeromq.txt requests==2.31.0 # via -r requirements/base.txt diff --git a/requirements/static/pkg/py3.9/freebsd.txt b/requirements/static/pkg/py3.9/freebsd.txt index c3b84bb78c0..4ede7113180 100644 --- a/requirements/static/pkg/py3.9/freebsd.txt +++ b/requirements/static/pkg/py3.9/freebsd.txt @@ -8,7 +8,7 @@ autocommand==2.2.2 # via jaraco.text certifi==2023.07.22 # via requests -cffi==1.15.1 +cffi==1.14.6 # via cryptography charset-normalizer==3.2.0 # via requests @@ -18,7 +18,7 
@@ cherrypy==18.8.0 # via -r requirements/base.txt contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.4 +cryptography==41.0.5 # via # -r requirements/base.txt # pyopenssl @@ -47,9 +47,9 @@ jinja2==3.1.2 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt -looseversion==1.2.0 +looseversion==1.3.0 # via -r requirements/base.txt -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -r requirements/base.txt # jinja2 @@ -59,13 +59,13 @@ more-itertools==9.1.0 # cherrypy # jaraco.functools # jaraco.text -msgpack==1.0.5 +msgpack==1.0.7 # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 # via cherrypy -psutil==5.9.5 +psutil==5.9.6 # via -r requirements/base.txt pycparser==2.21 # via cffi @@ -77,13 +77,13 @@ pyopenssl==23.2.0 # via -r requirements/base.txt python-dateutil==2.8.2 # via -r requirements/base.txt -python-gnupg==0.5.0 +python-gnupg==0.5.1 # via -r requirements/base.txt -pytz==2023.3 +pytz==2023.3.post1 # via tempora pyyaml==6.0.1 # via -r requirements/base.txt -pyzmq==25.1.0 +pyzmq==25.1.1 # via -r requirements/zeromq.txt requests==2.31.0 # via -r requirements/base.txt diff --git a/requirements/static/pkg/py3.9/linux.txt b/requirements/static/pkg/py3.9/linux.txt index 8e16d07dbd9..d6f4798216f 100644 --- a/requirements/static/pkg/py3.9/linux.txt +++ b/requirements/static/pkg/py3.9/linux.txt @@ -8,7 +8,7 @@ autocommand==2.2.2 # via jaraco.text certifi==2023.07.22 # via requests -cffi==1.15.1 +cffi==1.14.6 # via cryptography charset-normalizer==3.2.0 # via requests @@ -18,7 +18,7 @@ cherrypy==18.8.0 # via -r requirements/base.txt contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.4 +cryptography==41.0.5 # via # -r requirements/base.txt # pyopenssl @@ -47,9 +47,9 @@ jinja2==3.1.2 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt -looseversion==1.2.0 +looseversion==1.3.0 # via -r requirements/base.txt -markupsafe==2.1.2 +markupsafe==2.1.3 # via # 
-r requirements/base.txt # jinja2 @@ -59,13 +59,13 @@ more-itertools==9.1.0 # cherrypy # jaraco.functools # jaraco.text -msgpack==1.0.5 +msgpack==1.0.7 # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 # via cherrypy -psutil==5.9.5 +psutil==5.9.6 # via -r requirements/base.txt pycparser==2.21 # via cffi @@ -77,13 +77,13 @@ pyopenssl==23.2.0 # via -r requirements/base.txt python-dateutil==2.8.2 # via -r requirements/base.txt -python-gnupg==0.5.0 +python-gnupg==0.5.1 # via -r requirements/base.txt -pytz==2023.3 +pytz==2023.3.post1 # via tempora pyyaml==6.0.1 # via -r requirements/base.txt -pyzmq==25.1.0 +pyzmq==25.1.1 # via -r requirements/zeromq.txt requests==2.31.0 # via -r requirements/base.txt diff --git a/requirements/static/pkg/py3.9/windows.txt b/requirements/static/pkg/py3.9/windows.txt index d00f41bfe9e..d3139f837d4 100644 --- a/requirements/static/pkg/py3.9/windows.txt +++ b/requirements/static/pkg/py3.9/windows.txt @@ -22,7 +22,7 @@ clr-loader==0.2.4 # via pythonnet contextvars==2.4 # via -r requirements/base.txt -cryptography==41.0.4 +cryptography==41.0.5 # via # -r requirements/base.txt # pyopenssl @@ -51,11 +51,11 @@ jinja2==3.1.2 # via -r requirements/base.txt jmespath==1.0.1 # via -r requirements/base.txt -looseversion==1.2.0 +looseversion==1.3.0 # via -r requirements/base.txt lxml==4.9.2 ; sys_platform == "win32" # via -r requirements/base.txt -markupsafe==2.1.2 +markupsafe==2.1.3 # via # -r requirements/base.txt # jinja2 @@ -65,13 +65,13 @@ more-itertools==9.1.0 # cherrypy # jaraco.functools # jaraco.text -msgpack==1.0.5 +msgpack==1.0.7 # via -r requirements/base.txt packaging==23.1 # via -r requirements/base.txt portend==3.1.0 # via cherrypy -psutil==5.8.0 +psutil==5.9.6 # via -r requirements/base.txt pycparser==2.21 # via cffi @@ -87,11 +87,11 @@ pyopenssl==23.2.0 # via -r requirements/base.txt python-dateutil==2.8.2 # via -r requirements/base.txt -python-gnupg==0.5.0 +python-gnupg==0.5.1 # via -r 
requirements/base.txt pythonnet==3.0.1 ; sys_platform == "win32" # via -r requirements/base.txt -pytz==2023.3 +pytz==2023.3.post1 # via tempora pywin32==306 ; sys_platform == "win32" # via @@ -100,7 +100,7 @@ pywin32==306 ; sys_platform == "win32" # wmi pyyaml==6.0.1 # via -r requirements/base.txt -pyzmq==25.1.0 +pyzmq==25.1.1 # via -r requirements/zeromq.txt requests==2.31.0 # via -r requirements/base.txt diff --git a/salt/channel/client.py b/salt/channel/client.py index 35a5c5d05cd..49562ce6fae 100644 --- a/salt/channel/client.py +++ b/salt/channel/client.py @@ -564,19 +564,16 @@ class AsyncPubChannel: "data": data, "tag": tag, } - req_channel = AsyncReqChannel.factory(self.opts) - try: - yield req_channel.send(load, timeout=60) - except salt.exceptions.SaltReqTimeoutError: - log.info( - "fire_master failed: master could not be contacted. Request timed" - " out." - ) - except Exception: # pylint: disable=broad-except - log.info("fire_master failed", exc_info=True) - finally: - # SyncWrapper will call either close() or destroy(), whichever is available - del req_channel + with AsyncReqChannel.factory(self.opts) as channel: + try: + yield channel.send(load, timeout=60) + except salt.exceptions.SaltReqTimeoutError: + log.info( + "fire_master failed: master could not be contacted. Request timed" + " out." + ) + except Exception: # pylint: disable=broad-except + log.info("fire_master failed", exc_info=True) else: self._reconnected = True except Exception as exc: # pylint: disable=broad-except diff --git a/salt/client/__init__.py b/salt/client/__init__.py index 39a8b33268d..013c860ff69 100644 --- a/salt/client/__init__.py +++ b/salt/client/__init__.py @@ -247,7 +247,7 @@ class LocalClient: # The username may contain '\' if it is in Windows # 'DOMAIN\username' format. Fix this for the keyfile path. 
key_user = key_user.replace("\\", "_") - keyfile = os.path.join(self.opts["cachedir"], ".{}_key".format(key_user)) + keyfile = os.path.join(self.opts["cachedir"], f".{key_user}_key") try: # Make sure all key parent directories are accessible salt.utils.verify.check_path_traversal( @@ -267,7 +267,7 @@ class LocalClient: try: return range_.expand(tgt) except seco.range.RangeException as err: - print("Range server exception: {}".format(err)) + print(f"Range server exception: {err}") return [] def _get_timeout(self, timeout): @@ -300,7 +300,7 @@ class LocalClient: tgt_type=tgt_type, timeout=timeout, listen=listen, - **kwargs + **kwargs, ) if "jid" in pub_data: @@ -366,7 +366,7 @@ class LocalClient: jid="", kwarg=None, listen=False, - **kwargs + **kwargs, ): """ Asynchronously send a command to connected minions @@ -394,7 +394,7 @@ class LocalClient: jid=jid, timeout=self._get_timeout(timeout), listen=listen, - **kwargs + **kwargs, ) except SaltClientError: # Re-raise error with specific message @@ -430,7 +430,7 @@ class LocalClient: kwarg=None, listen=True, io_loop=None, - **kwargs + **kwargs, ): """ Asynchronously send a command to connected minions @@ -459,7 +459,7 @@ class LocalClient: timeout=self._get_timeout(timeout), io_loop=io_loop, listen=listen, - **kwargs + **kwargs, ) except SaltClientError: # Re-raise error with specific message @@ -512,7 +512,7 @@ class LocalClient: cli=False, progress=False, full_return=False, - **kwargs + **kwargs, ): """ Execute a command on a random subset of the targeted systems @@ -554,7 +554,7 @@ class LocalClient: kwarg=kwarg, progress=progress, full_return=full_return, - **kwargs + **kwargs, ) def cmd_batch( @@ -566,7 +566,7 @@ class LocalClient: ret="", kwarg=None, batch="10%", - **kwargs + **kwargs, ): """ Iteratively execute a command on subsets of minions at a time @@ -642,7 +642,7 @@ class LocalClient: jid="", full_return=False, kwarg=None, - **kwargs + **kwargs, ): """ Synchronously execute a command on targeted minions @@ 
-760,7 +760,7 @@ class LocalClient: jid, kwarg=kwarg, listen=True, - **kwargs + **kwargs, ) if not pub_data: @@ -773,7 +773,7 @@ class LocalClient: self._get_timeout(timeout), tgt, tgt_type, - **kwargs + **kwargs, ): if fn_ret: @@ -798,7 +798,7 @@ class LocalClient: verbose=False, kwarg=None, progress=False, - **kwargs + **kwargs, ): """ Used by the :command:`salt` CLI. This method returns minion returns as @@ -822,7 +822,7 @@ class LocalClient: timeout, kwarg=kwarg, listen=True, - **kwargs + **kwargs, ) if not self.pub_data: yield self.pub_data @@ -836,7 +836,7 @@ class LocalClient: tgt_type, verbose, progress, - **kwargs + **kwargs, ): if not fn_ret: @@ -867,7 +867,7 @@ class LocalClient: tgt_type="glob", ret="", kwarg=None, - **kwargs + **kwargs, ): """ Yields the individual minion returns as they come in @@ -902,7 +902,7 @@ class LocalClient: timeout, kwarg=kwarg, listen=True, - **kwargs + **kwargs, ) if not pub_data: @@ -916,7 +916,7 @@ class LocalClient: timeout=self._get_timeout(timeout), tgt=tgt, tgt_type=tgt_type, - **kwargs + **kwargs, ): if not fn_ret: continue @@ -937,7 +937,7 @@ class LocalClient: kwarg=None, show_jid=False, verbose=False, - **kwargs + **kwargs, ): """ Yields the individual minion returns as they come in, or None @@ -973,7 +973,7 @@ class LocalClient: timeout, kwarg=kwarg, listen=True, - **kwargs + **kwargs, ) if not pub_data: @@ -986,7 +986,7 @@ class LocalClient: tgt=tgt, tgt_type=tgt_type, block=False, - **kwargs + **kwargs, ): if fn_ret and any([show_jid, verbose]): for minion in fn_ret: @@ -1008,7 +1008,7 @@ class LocalClient: ret="", verbose=False, kwarg=None, - **kwargs + **kwargs, ): """ Execute a salt command and return @@ -1025,7 +1025,7 @@ class LocalClient: timeout, kwarg=kwarg, listen=True, - **kwargs + **kwargs, ) if not pub_data: @@ -1047,7 +1047,7 @@ class LocalClient: tgt_type="glob", verbose=False, show_jid=False, - **kwargs + **kwargs, ): """ Starts a watcher looking at the return data for a specified JID @@ -1055,11 
+1055,11 @@ class LocalClient: :returns: all of the information for the JID """ if verbose: - msg = "Executing job with jid {}".format(jid) + msg = f"Executing job with jid {jid}" print(msg) print("-" * len(msg) + "\n") elif show_jid: - print("jid: {}".format(jid)) + print(f"jid: {jid}") if timeout is None: timeout = self.opts["timeout"] fret = {} @@ -1124,7 +1124,7 @@ class LocalClient: tgt_type="glob", expect_minions=False, block=True, - **kwargs + **kwargs, ): """ Watch the event system and return job data as it comes in @@ -1165,11 +1165,9 @@ class LocalClient: # iterator for this job's return if self.opts["order_masters"]: # If we are a MoM, we need to gather expected minions from downstreams masters. - ret_iter = self.get_returns_no_block( - "(salt/job|syndic/.*)/{}".format(jid), "regex" - ) + ret_iter = self.get_returns_no_block(f"(salt/job|syndic/.*)/{jid}", "regex") else: - ret_iter = self.get_returns_no_block("salt/job/{}".format(jid)) + ret_iter = self.get_returns_no_block(f"salt/job/{jid}") # iterator for the info of this job jinfo_iter = [] # open event jids that need to be un-subscribed from later @@ -1203,7 +1201,13 @@ class LocalClient: if "missing" in raw.get("data", {}): missing.update(raw["data"]["missing"]) continue + + # Anything below this point is expected to be a job return event. 
+ if not raw["tag"].startswith(f"salt/job/{jid}/ret"): + log.debug("Skipping non return event: %s", raw["tag"]) + continue if "return" not in raw["data"]: + log.warning("Malformed event return: %s", raw["tag"]) continue if kwargs.get("raw", False): found.add(raw["data"]["id"]) @@ -1543,11 +1547,11 @@ class LocalClient: log.trace("entered - function get_cli_static_event_returns()") minions = set(minions) if verbose: - msg = "Executing job with jid {}".format(jid) + msg = f"Executing job with jid {jid}" print(msg) print("-" * len(msg) + "\n") elif show_jid: - print("jid: {}".format(jid)) + print(f"jid: {jid}") if timeout is None: timeout = self.opts["timeout"] @@ -1577,7 +1581,7 @@ class LocalClient: time_left = timeout_at - int(time.time()) # Wait 0 == forever, use a minimum of 1s wait = max(1, time_left) - jid_tag = "salt/job/{}".format(jid) + jid_tag = f"salt/job/{jid}" raw = self.event.get_event( wait, jid_tag, auto_reconnect=self.auto_reconnect ) @@ -1629,7 +1633,7 @@ class LocalClient: progress=False, show_timeout=False, show_jid=False, - **kwargs + **kwargs, ): """ Get the returns for the command line interface via the event system @@ -1637,11 +1641,11 @@ class LocalClient: log.trace("func get_cli_event_returns()") if verbose: - msg = "Executing job with jid {}".format(jid) + msg = f"Executing job with jid {jid}" print(msg) print("-" * len(msg) + "\n") elif show_jid: - print("jid: {}".format(jid)) + print(f"jid: {jid}") # lazy load the connected minions connected_minions = None @@ -1659,7 +1663,7 @@ class LocalClient: expect_minions=( kwargs.pop("expect_minions", False) or verbose or show_timeout ), - **kwargs + **kwargs, ): log.debug("return event: %s", ret) return_count = return_count + 1 @@ -1680,7 +1684,7 @@ class LocalClient: if ( self.opts["minion_data_cache"] and salt.cache.factory(self.opts).contains( - "minions/{}".format(id_), "data" + f"minions/{id_}", "data" ) and connected_minions and id_ not in connected_minions @@ -1771,9 +1775,7 @@ class 
LocalClient: """ if ng not in self.opts["nodegroups"]: conf_file = self.opts.get("conf_file", "the master config file") - raise SaltInvocationError( - "Node group {} unavailable in {}".format(ng, conf_file) - ) + raise SaltInvocationError(f"Node group {ng} unavailable in {conf_file}") return salt.utils.minions.nodegroup_comp(ng, self.opts["nodegroups"]) def _prep_pub(self, tgt, fun, arg, tgt_type, ret, jid, timeout, **kwargs): @@ -1852,7 +1854,7 @@ class LocalClient: jid="", timeout=5, listen=False, - **kwargs + **kwargs, ): """ Take the required arguments and publish the given command. @@ -1954,7 +1956,7 @@ class LocalClient: timeout=5, io_loop=None, listen=True, - **kwargs + **kwargs, ): """ Take the required arguments and publish the given command. @@ -2058,8 +2060,8 @@ class LocalClient: def _clean_up_subscriptions(self, job_id): if self.opts.get("order_masters"): - self.event.unsubscribe("syndic/.*/{}".format(job_id), "regex") - self.event.unsubscribe("salt/job/{}".format(job_id)) + self.event.unsubscribe(f"syndic/.*/{job_id}", "regex") + self.event.unsubscribe(f"salt/job/{job_id}") def destroy(self): if self.event is not None: @@ -2118,7 +2120,7 @@ class FunctionWrapper(dict): """ args = list(args) for _key, _val in kwargs.items(): - args.append("{}={}".format(_key, _val)) + args.append(f"{_key}={_val}") return self.local.cmd(self.minion, key, args) return func @@ -2268,9 +2270,9 @@ class ProxyCaller: if isinstance(executors, str): executors = [executors] for name in executors: - fname = "{}.execute".format(name) + fname = f"{name}.execute" if fname not in self.sminion.executors: - raise SaltInvocationError("Executor '{}' is not available".format(name)) + raise SaltInvocationError(f"Executor '{name}' is not available") return_data = self.sminion.executors[fname]( self.opts, data, func, args, kwargs ) diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py index 067d4575f9b..8601d8d1745 100644 --- a/salt/client/ssh/__init__.py +++ 
b/salt/client/ssh/__init__.py @@ -1659,7 +1659,7 @@ ARGS = {arguments}\n'''.format( return -def lowstate_file_refs(chunks): +def lowstate_file_refs(chunks): # pragma: no cover """ Create a list of file ref objects to reconcile """ diff --git a/salt/config/__init__.py b/salt/config/__init__.py index f1692236a47..48599f123e6 100644 --- a/salt/config/__init__.py +++ b/salt/config/__init__.py @@ -49,6 +49,8 @@ log = logging.getLogger(__name__) _DFLT_REFSPECS = ["+refs/heads/*:refs/remotes/origin/*", "+refs/tags/*:refs/tags/*"] DEFAULT_INTERVAL = 60 +DEFAULT_HASH_TYPE = "sha256" + if salt.utils.platform.is_windows(): # Since an 'ipc_mode' of 'ipc' will never work on Windows due to lack of @@ -1147,7 +1149,7 @@ DEFAULT_MINION_OPTS = immutabletypes.freeze( "gitfs_refspecs": _DFLT_REFSPECS, "gitfs_disable_saltenv_mapping": False, "unique_jid": False, - "hash_type": "sha256", + "hash_type": DEFAULT_HASH_TYPE, "optimization_order": [0, 1, 2], "disable_modules": [], "disable_returners": [], @@ -1471,7 +1473,7 @@ DEFAULT_MASTER_OPTS = immutabletypes.freeze( "fileserver_ignoresymlinks": False, "fileserver_verify_config": True, "max_open_files": 100000, - "hash_type": "sha256", + "hash_type": DEFAULT_HASH_TYPE, "optimization_order": [0, 1, 2], "conf_file": os.path.join(salt.syspaths.CONFIG_DIR, "master"), "open_mode": False, diff --git a/salt/fileclient.py b/salt/fileclient.py index 4ceab6aae23..39c54b93340 100644 --- a/salt/fileclient.py +++ b/salt/fileclient.py @@ -34,6 +34,7 @@ import salt.utils.templates import salt.utils.url import salt.utils.verify import salt.utils.versions +from salt.config import DEFAULT_HASH_TYPE from salt.exceptions import CommandExecutionError, MinionError, SaltClientError from salt.utils.openstack.swift import SaltSwift @@ -1046,7 +1047,7 @@ class PillarClient(Client): # Local file path fnd_path = fnd - hash_type = self.opts.get("hash_type", "md5") + hash_type = self.opts.get("hash_type", DEFAULT_HASH_TYPE) ret["hsum"] = 
salt.utils.hashutils.get_hash(fnd_path, form=hash_type) ret["hash_type"] = hash_type return ret @@ -1077,7 +1078,7 @@ class PillarClient(Client): except Exception: # pylint: disable=broad-except fnd_stat = None - hash_type = self.opts.get("hash_type", "md5") + hash_type = self.opts.get("hash_type", DEFAULT_HASH_TYPE) ret["hsum"] = salt.utils.hashutils.get_hash(fnd_path, form=hash_type) ret["hash_type"] = hash_type return ret, fnd_stat @@ -1296,7 +1297,7 @@ class RemoteClient(Client): hsum = salt.utils.hashutils.get_hash( dest, salt.utils.stringutils.to_str( - data.get("hash_type", b"md5") + data.get("hash_type", DEFAULT_HASH_TYPE) ), ) if hsum != data["hsum"]: @@ -1410,7 +1411,7 @@ class RemoteClient(Client): return {}, None else: ret = {} - hash_type = self.opts.get("hash_type", "md5") + hash_type = self.opts.get("hash_type", DEFAULT_HASH_TYPE) ret["hsum"] = salt.utils.hashutils.get_hash(path, form=hash_type) ret["hash_type"] = hash_type return ret diff --git a/salt/fileserver/hgfs.py b/salt/fileserver/hgfs.py index baafa46bd8c..a7f548ac6a9 100644 --- a/salt/fileserver/hgfs.py +++ b/salt/fileserver/hgfs.py @@ -35,7 +35,6 @@ will set the desired branch method. 
Possible values are: ``branches``, - python bindings for mercurial (``python-hglib``) """ - import copy import errno import fnmatch @@ -54,6 +53,7 @@ import salt.utils.hashutils import salt.utils.stringutils import salt.utils.url import salt.utils.versions +from salt.config import DEFAULT_HASH_TYPE from salt.exceptions import FileserverConfigError from salt.utils.event import tagify @@ -308,7 +308,7 @@ def init(): # mountpoint not specified pass - hash_type = getattr(hashlib, __opts__.get("hash_type", "md5")) + hash_type = getattr(hashlib, __opts__.get("hash_type", DEFAULT_HASH_TYPE)) repo_hash = hash_type(repo_url.encode("utf-8")).hexdigest() rp_ = os.path.join(bp_, repo_hash) if not os.path.isdir(rp_): diff --git a/salt/fileserver/svnfs.py b/salt/fileserver/svnfs.py index c45365fafb6..48843f22e67 100644 --- a/salt/fileserver/svnfs.py +++ b/salt/fileserver/svnfs.py @@ -49,6 +49,7 @@ import salt.utils.path import salt.utils.stringutils import salt.utils.url import salt.utils.versions +from salt.config import DEFAULT_HASH_TYPE from salt.exceptions import FileserverConfigError from salt.utils.event import tagify @@ -192,7 +193,7 @@ def init(): # mountpoint not specified pass - hash_type = getattr(hashlib, __opts__.get("hash_type", "md5")) + hash_type = getattr(hashlib, __opts__.get("hash_type", DEFAULT_HASH_TYPE)) repo_hash = hash_type(repo_url).hexdigest() rp_ = os.path.join(bp_, repo_hash) if not os.path.isdir(rp_): diff --git a/salt/minion.py b/salt/minion.py index da4e9fabe3d..3619940446b 100644 --- a/salt/minion.py +++ b/salt/minion.py @@ -954,7 +954,19 @@ class SMinion(MinionBase): "use_master_when_local", False ): io_loop = tornado.ioloop.IOLoop.current() - io_loop.run_sync(lambda: self.eval_master(self.opts, failed=True)) + + @tornado.gen.coroutine + def eval_master(): + """ + Wrap eval master in order to close the returned publish channel. 
+ """ + master, pub_channel = yield self.eval_master(self.opts, failed=True) + pub_channel.close() + + io_loop.run_sync( + lambda: eval_master() # pylint: disable=unnecessary-lambda + ) + self.gen_modules(initial_load=True, context=context) # If configured, cache pillar data on the minion diff --git a/salt/modules/container_resource.py b/salt/modules/container_resource.py index ceec72a7b20..0a44ce3e518 100644 --- a/salt/modules/container_resource.py +++ b/salt/modules/container_resource.py @@ -69,15 +69,13 @@ def _nsenter(pid): return f"nsenter --target {pid} --mount --uts --ipc --net --pid" -def _get_md5(name, path, run_func): +def _get_sha256(name, path, run_func): """ - Get the MD5 checksum of a file from a container + Get the sha256 checksum of a file from a container """ - output = run_func(name, f"md5sum {shlex.quote(path)}", ignore_retcode=True)[ - "stdout" - ] + ret = run_func(name, f"sha256sum {shlex.quote(path)}", ignore_retcode=True) try: - return output.split()[0] + return ret["stdout"].split()[0] except IndexError: # Destination file does not exist or could not be accessed return None @@ -368,8 +366,8 @@ def copy_to( ) # Before we try to replace the file, compare checksums. 
- source_md5 = __salt__["file.get_sum"](local_file, "md5") - if source_md5 == _get_md5(name, dest, run_all): + source_sha256 = __salt__["file.get_sum"](local_file, "sha256") + if source_sha256 == _get_sha256(name, dest, run_all): log.debug("%s and %s:%s are the same file, skipping copy", source, name, dest) return True @@ -399,4 +397,4 @@ def copy_to( local_file, name, PATH, dest ) __salt__["cmd.run"](copy_cmd, python_shell=True, output_loglevel="quiet") - return source_md5 == _get_md5(name, dest, run_all) + return source_sha256 == _get_sha256(name, dest, run_all) diff --git a/salt/modules/dockermod.py b/salt/modules/dockermod.py index 66ecef87b56..85e2cb58b5e 100644 --- a/salt/modules/dockermod.py +++ b/salt/modules/dockermod.py @@ -531,11 +531,11 @@ def _clear_context(): pass -def _get_md5(name, path): +def _get_sha256(name, path): """ - Get the MD5 checksum of a file from a container + Get the sha256 checksum of a file from a container """ - output = run_stdout(name, f"md5sum {shlex.quote(path)}", ignore_retcode=True) + output = run_stdout(name, f"sha256sum {shlex.quote(path)}", ignore_retcode=True) try: return output.split()[0] except IndexError: @@ -3634,8 +3634,8 @@ def copy_from(name, source, dest, overwrite=False, makedirs=False): raise SaltInvocationError(f"Source file {source} does not exist") # Before we try to replace the file, compare checksums. 
- source_md5 = _get_md5(name, source) - if source_md5 == __salt__["file.get_sum"](dest, "md5"): + source_sha256 = _get_sha256(name, source) + if source_sha256 == __salt__["file.get_sum"](dest, "sha256"): log.debug("%s:%s and %s are the same file, skipping copy", name, source, dest) return True @@ -3647,7 +3647,7 @@ def copy_from(name, source, dest, overwrite=False, makedirs=False): src_path = f"{name}:{source}" cmd = ["docker", "cp", src_path, dest_dir] __salt__["cmd.run"](cmd, python_shell=False) - return source_md5 == __salt__["file.get_sum"](dest, "md5") + return source_sha256 == __salt__["file.get_sum"](dest, "sha256") # Docker cp gets a file from the container, alias this to copy_from diff --git a/salt/modules/guestfs.py b/salt/modules/guestfs.py index 1d03ab693f2..2395bd2a1c3 100644 --- a/salt/modules/guestfs.py +++ b/salt/modules/guestfs.py @@ -11,6 +11,7 @@ import tempfile import time import salt.utils.path +from salt.config import DEFAULT_HASH_TYPE log = logging.getLogger(__name__) @@ -51,7 +52,7 @@ def mount(location, access="rw", root=None): while True: if os.listdir(root): # Stuff is in there, don't use it - hash_type = getattr(hashlib, __opts__.get("hash_type", "md5")) + hash_type = getattr(hashlib, __opts__.get("hash_type", DEFAULT_HASH_TYPE)) rand = hash_type(os.urandom(32)).hexdigest() root = os.path.join( tempfile.gettempdir(), diff --git a/salt/modules/junos.py b/salt/modules/junos.py index 33f25080e1d..2f1f0c6ab4f 100644 --- a/salt/modules/junos.py +++ b/salt/modules/junos.py @@ -2051,7 +2051,7 @@ def _make_source_list(dir): @_timeout_decorator -def file_compare(file1, file2, **kwargs): +def file_compare(file1, file2, **kwargs): # pragma: no cover """ Compare two files and return a dictionary indicating if they are different. 
@@ -2113,7 +2113,7 @@ def file_compare(file1, file2, **kwargs): @_timeout_decorator -def fsentry_exists(dir, **kwargs): +def fsentry_exists(dir, **kwargs): # pragma: no cover """ Returns a dictionary indicating if `dir` refers to a file or a non-file (generally a directory) in the file system, @@ -2258,7 +2258,7 @@ def routing_engine(**kwargs): @_timeout_decorator -def dir_copy(source, dest, force=False, **kwargs): +def dir_copy(source, dest, force=False, **kwargs): # pragma: no cover """ Copy a directory and recursively its contents from source to dest. diff --git a/salt/modules/selinux.py b/salt/modules/selinux.py index 9b584c5c722..55b18ed9d47 100644 --- a/salt/modules/selinux.py +++ b/salt/modules/selinux.py @@ -617,7 +617,7 @@ def _fcontext_add_or_delete_policy( if "add" == action: # need to use --modify if context for name file exists, otherwise ValueError filespec = re.escape(name) - cmd = f"semanage fcontext -l | egrep {filespec}" + cmd = f"semanage fcontext -l | egrep '{filespec}'" current_entry_text = __salt__["cmd.shell"](cmd, ignore_retcode=True) if current_entry_text != "": action = "modify" diff --git a/salt/modules/test.py b/salt/modules/test.py index 83e8fdd0cd4..3cd9d7d5ea4 100644 --- a/salt/modules/test.py +++ b/salt/modules/test.py @@ -18,6 +18,7 @@ import salt.utils.hashutils import salt.utils.platform import salt.utils.versions import salt.version +from salt.config import DEFAULT_HASH_TYPE from salt.utils.decorators import depends __proxyenabled__ = ["*"] @@ -528,7 +529,7 @@ def random_hash(size=9999999999, hash_type=None): salt '*' test.random_hash hash_type=sha512 """ if not hash_type: - hash_type = __opts__.get("hash_type", "md5") + hash_type = __opts__.get("hash_type", DEFAULT_HASH_TYPE) return salt.utils.hashutils.random_hash(size=size, hash_type=hash_type) diff --git a/salt/modules/timezone.py b/salt/modules/timezone.py index 8c05d42cbb4..4904c8dcc6e 100644 --- a/salt/modules/timezone.py +++ b/salt/modules/timezone.py @@ -16,6 +16,7 @@ 
import salt.utils.itertools import salt.utils.path import salt.utils.platform import salt.utils.stringutils +from salt.config import DEFAULT_HASH_TYPE from salt.exceptions import CommandExecutionError, SaltInvocationError log = logging.getLogger(__name__) @@ -121,7 +122,7 @@ def _get_zone_etc_localtime(): tzfile, ) # Regular file. Try to match the hash. - hash_type = __opts__.get("hash_type", "md5") + hash_type = __opts__.get("hash_type", DEFAULT_HASH_TYPE) tzfile_hash = salt.utils.hashutils.get_hash(tzfile, hash_type) # Not a link, just a copy of the tzdata file for root, dirs, files in salt.utils.path.os_walk(tzdir): diff --git a/salt/modules/win_lgpo.py b/salt/modules/win_lgpo.py index 4151a383f4c..441b549fa50 100644 --- a/salt/modules/win_lgpo.py +++ b/salt/modules/win_lgpo.py @@ -4799,8 +4799,6 @@ class _policy_info: """ converts a list of pysid objects to string representations """ - if isinstance(val, str): - val = val.split(",") usernames = [] for _sid in val: try: @@ -4918,11 +4916,11 @@ class _policy_info: return None if value_lookup: if not isinstance(item, list): - return "Invalid Value" + return "Invalid Value: Not a list" ret_val = 0 else: if not isinstance(item, int): - return "Invalid Value" + return "Invalid Value: Not an int" ret_val = [] if "lookup" in kwargs: for k, v in kwargs["lookup"].items(): @@ -4937,7 +4935,7 @@ class _policy_info: if do_test and isinstance(k, int) and item & k == k: ret_val.append(v) else: - return "Invalid Value" + return "Invalid Value: No lookup passed" return ret_val @classmethod diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py index e3e4a689b04..9f3d9d27e6d 100644 --- a/salt/modules/yumpkg.py +++ b/salt/modules/yumpkg.py @@ -14,11 +14,10 @@ Support for YUM/DNF .. versionadded:: 3003 Support for ``tdnf`` on Photon OS. + .. 
versionadded:: 3007.0 Support for ``dnf5``` on Fedora 39 """ - - import configparser import contextlib import datetime @@ -31,7 +30,6 @@ import string import salt.utils.args import salt.utils.data -import salt.utils.decorators.path import salt.utils.environment import salt.utils.files import salt.utils.functools @@ -45,13 +43,6 @@ import salt.utils.versions from salt.exceptions import CommandExecutionError, MinionError, SaltInvocationError from salt.utils.versions import LooseVersion -try: - import yum - - HAS_YUM = True -except ImportError: - HAS_YUM = False - log = logging.getLogger(__name__) __HOLD_PATTERN = r"[\w+]+(?:[.-][^-]+)*" @@ -354,67 +345,46 @@ def _get_yum_config(strict_parser=True): This is currently only used to get the reposdir settings, but could be used for other things if needed. - If the yum python library is available, use that, which will give us all of - the options, including all of the defaults not specified in the yum config. - Additionally, they will all be of the correct object type. - - If the yum library is not available, we try to read the yum.conf - directly ourselves with a minimal set of "defaults". + We try to read the yum.conf directly ourselves with a minimal set of + "defaults". 
""" # in case of any non-fatal failures, these defaults will be used conf = { "reposdir": ["/etc/yum/repos.d", "/etc/yum.repos.d"], } - if HAS_YUM: - try: - yb = yum.YumBase() - yb.preconf.init_plugins = False - for name, value in yb.conf.items(): - conf[name] = value - except (AttributeError, yum.Errors.ConfigError) as exc: - raise CommandExecutionError(f"Could not query yum config: {exc}") - except yum.Errors.YumBaseError as yum_base_error: - raise CommandExecutionError( - f"Error accessing yum or rpmdb: {yum_base_error}" - ) + # fall back to parsing the config ourselves + # Look for the config the same order yum does + fn = None + paths = ( + "/etc/yum/yum.conf", + "/etc/yum.conf", + "/etc/dnf/dnf.conf", + "/etc/tdnf/tdnf.conf", + ) + for path in paths: + if os.path.exists(path): + fn = path + break + + if not fn: + raise CommandExecutionError(f"No suitable yum config file found in: {paths}") + + cp = configparser.ConfigParser(strict=strict_parser) + try: + cp.read(fn) + except OSError as exc: + raise CommandExecutionError(f"Unable to read from {fn}: {exc}") + + if cp.has_section("main"): + for opt in cp.options("main"): + if opt in ("reposdir", "commands", "excludes"): + # these options are expected to be lists + conf[opt] = [x.strip() for x in cp.get("main", opt).split(",")] + else: + conf[opt] = cp.get("main", opt) else: - # fall back to parsing the config ourselves - # Look for the config the same order yum does - fn = None - paths = ( - "/etc/yum/yum.conf", - "/etc/yum.conf", - "/etc/dnf/dnf.conf", - "/etc/tdnf/tdnf.conf", - ) - for path in paths: - if os.path.exists(path): - fn = path - break - - if not fn: - raise CommandExecutionError( - f"No suitable yum config file found in: {paths}" - ) - - cp = configparser.ConfigParser(strict=strict_parser) - try: - cp.read(fn) - except OSError as exc: - raise CommandExecutionError(f"Unable to read from {fn}: {exc}") - - if cp.has_section("main"): - for opt in cp.options("main"): - if opt in ("reposdir", "commands", 
"excludes"): - # these options are expected to be lists - conf[opt] = [x.strip() for x in cp.get("main", opt).split(",")] - else: - conf[opt] = cp.get("main", opt) - else: - log.warning( - "Could not find [main] section in %s, using internal defaults", fn - ) + log.warning("Could not find [main] section in %s, using internal defaults", fn) return conf @@ -2848,7 +2818,7 @@ def group_install(name, skip=(), include=(), **kwargs): if not pkgs: return {} - return install(pkgs=pkgs, **kwargs) + return install(pkgs=list(set(pkgs)), **kwargs) groupinstall = salt.utils.functools.alias_function(group_install, "groupinstall") @@ -3345,7 +3315,6 @@ def modified(*packages, **flags): return __salt__["lowpkg.modified"](*packages, **flags) -@salt.utils.decorators.path.which("yumdownloader") def download(*packages, **kwargs): """ .. versionadded:: 2015.5.0 @@ -3365,6 +3334,9 @@ def download(*packages, **kwargs): salt '*' pkg.download httpd salt '*' pkg.download httpd postfix """ + if not salt.utils.path.which("yumdownloader"): + raise CommandExecutionError("'yumdownloader' command not available") + if not packages: raise SaltInvocationError("No packages were specified") diff --git a/salt/netapi/rest_tornado/__init__.py b/salt/netapi/rest_tornado/__init__.py index 08c8c2027a2..d6d94fa7dc4 100644 --- a/salt/netapi/rest_tornado/__init__.py +++ b/salt/netapi/rest_tornado/__init__.py @@ -3,6 +3,7 @@ import logging import os import salt.auth +from salt.config import DEFAULT_HASH_TYPE from salt.utils.versions import Version __virtualname__ = os.path.abspath(__file__).rsplit(os.sep)[-2] or "rest_tornado" @@ -59,10 +60,12 @@ def get_application(opts): from . 
import saltnado_websockets token_pattern = r"([0-9A-Fa-f]{{{0}}})".format( - len(getattr(hashlib, opts.get("hash_type", "md5"))().hexdigest()) + len( + getattr(hashlib, opts.get("hash_type", DEFAULT_HASH_TYPE))().hexdigest() + ) ) - all_events_pattern = r"/all_events/{}".format(token_pattern) - formatted_events_pattern = r"/formatted_events/{}".format(token_pattern) + all_events_pattern = rf"/all_events/{token_pattern}" + formatted_events_pattern = rf"/formatted_events/{token_pattern}" log.debug("All events URL pattern is %s", all_events_pattern) paths += [ # Matches /all_events/[0-9A-Fa-f]{n} diff --git a/salt/pillar/hg_pillar.py b/salt/pillar/hg_pillar.py index 3a183a04568..b4ce24ac8a6 100644 --- a/salt/pillar/hg_pillar.py +++ b/salt/pillar/hg_pillar.py @@ -23,6 +23,7 @@ import os import salt.pillar import salt.utils.stringutils +from salt.config import DEFAULT_HASH_TYPE try: import hglib @@ -90,7 +91,7 @@ class Repo: """Initialize a hg repo (or open it if it already exists)""" self.repo_uri = repo_uri cachedir = os.path.join(__opts__["cachedir"], "hg_pillar") - hash_type = getattr(hashlib, __opts__.get("hash_type", "md5")) + hash_type = getattr(hashlib, __opts__.get("hash_type", DEFAULT_HASH_TYPE)) repo_hash = hash_type(salt.utils.stringutils.to_bytes(repo_uri)).hexdigest() self.working_dir = os.path.join(cachedir, repo_hash) if not os.path.isdir(self.working_dir): diff --git a/salt/pillar/sql_base.py b/salt/pillar/sql_base.py index 372dced91cc..3edd3ad0a87 100644 --- a/salt/pillar/sql_base.py +++ b/salt/pillar/sql_base.py @@ -198,22 +198,20 @@ More complete example for MySQL (to also show configuration) with_lists: [1,3] """ -import abc # Added in python2.6 so always available +import abc import logging from salt.utils.dictupdate import update from salt.utils.odict import OrderedDict +log = logging.getLogger(__name__) + # Please don't strip redundant parentheses from this file. # I have added some for clarity. 
# tests/unit/pillar/mysql_test.py may help understand this code. -# Set up logging -log = logging.getLogger(__name__) - - # This ext_pillar is abstract and cannot be used directory def __virtual__(): return False diff --git a/salt/states/file.py b/salt/states/file.py index 4f53b1e485d..d41895e1515 100644 --- a/salt/states/file.py +++ b/salt/states/file.py @@ -721,6 +721,7 @@ def _check_directory( exclude_pat=None, max_depth=None, follow_symlinks=False, + children_only=False, ): """ Check what changes need to be made on a directory @@ -792,10 +793,12 @@ def _check_directory( ) if fchange: changes[path] = fchange - # Recurse skips root (we always do dirs, not root), so always check root: - fchange = _check_dir_meta(name, user, group, dir_mode, follow_symlinks) - if fchange: - changes[name] = fchange + # Recurse skips root (we always do dirs, not root), so check root unless + # children_only is specified: + if not children_only: + fchange = _check_dir_meta(name, user, group, dir_mode, follow_symlinks) + if fchange: + changes[name] = fchange if clean: keep = _gen_keep_files(name, require, walk_d) @@ -3955,6 +3958,7 @@ def directory( exclude_pat, max_depth, follow_symlinks, + children_only, ) if tchanges: diff --git a/salt/tokens/localfs.py b/salt/tokens/localfs.py index 99a239d62f1..61c2d945ad3 100644 --- a/salt/tokens/localfs.py +++ b/salt/tokens/localfs.py @@ -11,6 +11,7 @@ import salt.payload import salt.utils.files import salt.utils.path import salt.utils.verify +from salt.config import DEFAULT_HASH_TYPE log = logging.getLogger(__name__) @@ -27,7 +28,7 @@ def mk_token(opts, tdata): :param tdata: Token data to be stored with 'token' attribute of this dict set to the token. :returns: tdata with token if successful. Empty dict if failed. 
""" - hash_type = getattr(hashlib, opts.get("hash_type", "md5")) + hash_type = getattr(hashlib, opts.get("hash_type", DEFAULT_HASH_TYPE)) tok = str(hash_type(os.urandom(512)).hexdigest()) t_path = os.path.join(opts["token_dir"], tok) temp_t_path = "{}.tmp".format(t_path) diff --git a/salt/tokens/rediscluster.py b/salt/tokens/rediscluster.py index 241fe64b869..dc9bb44d3ea 100644 --- a/salt/tokens/rediscluster.py +++ b/salt/tokens/rediscluster.py @@ -13,12 +13,12 @@ Default values for these configs are as follow: :depends: - redis-py-cluster Python package """ - import hashlib import logging import os import salt.payload +from salt.config import DEFAULT_HASH_TYPE try: import rediscluster @@ -74,7 +74,7 @@ def mk_token(opts, tdata): redis_client = _redis_client(opts) if not redis_client: return {} - hash_type = getattr(hashlib, opts.get("hash_type", "md5")) + hash_type = getattr(hashlib, opts.get("hash_type", DEFAULT_HASH_TYPE)) tok = str(hash_type(os.urandom(512)).hexdigest()) try: while redis_client.get(tok) is not None: diff --git a/salt/transport/base.py b/salt/transport/base.py index cd999bd1496..08103cb28c4 100644 --- a/salt/transport/base.py +++ b/salt/transport/base.py @@ -1,5 +1,7 @@ import hashlib import os +import traceback +import warnings import salt.utils.stringutils @@ -198,14 +200,52 @@ def ipc_publish_server(node, opts): return publish_server(opts, **kwargs) -class RequestClient: +class TransportWarning(Warning): + """ + Transport warning. + """ + + +class Transport: + def __init__(self, *args, **kwargs): + self._trace = "\n".join(traceback.format_stack()[:-1]) + if not hasattr(self, "_closing"): + self._closing = False + if not hasattr(self, "_connect_called"): + self._connect_called = False + + def connect(self, *args, **kwargs): + self._connect_called = True + + # pylint: disable=W1701 + def __del__(self): + """ + Warn the user if the transport's close method was never called. + + If the _closing attribute is missing we won't raise a warning. 
This + prevents issues when class's dunder init method is called with improper + arguments, and is later getting garbage collected. Users of this class + should take care to call super() and validate the functionality with a + test. + """ + if getattr(self, "_connect_called") and not getattr(self, "_closing", True): + warnings.warn( + f"Unclosed transport! {self!r} \n{self._trace}", + TransportWarning, + source=self, + ) + + # pylint: enable=W1701 + + +class RequestClient(Transport): """ The RequestClient transport is used to make requests and get corresponding replies from the RequestServer. """ def __init__(self, opts, io_loop, **kwargs): - pass + super().__init__() async def send(self, load, timeout=60): """ @@ -219,7 +259,7 @@ class RequestClient: """ raise NotImplementedError - def connect(self): + def connect(self): # pylint: disable=W0221 """ Connect to the server / broker. """ @@ -300,13 +340,13 @@ class DaemonizedPublishServer(PublishServer): raise NotImplementedError -class PublishClient: +class PublishClient(Transport): """ The PublishClient receives messages from the PublishServer and runs a callback. 
""" def __init__(self, opts, io_loop, **kwargs): - pass + super().__init__() def on_recv(self, callback): """ @@ -314,7 +354,7 @@ class PublishClient: """ raise NotImplementedError - async def connect( + async def connect( # pylint: disable=arguments-differ self, port=None, connect_callback=None, disconnect_callback=None, timeout=None ): """ diff --git a/salt/transport/tcp.py b/salt/transport/tcp.py index 650b7e879eb..b63675e1682 100644 --- a/salt/transport/tcp.py +++ b/salt/transport/tcp.py @@ -218,6 +218,7 @@ class TCPPubClient(salt.transport.base.PublishClient): ] def __init__(self, opts, io_loop, **kwargs): # pylint: disable=W0231 + super().__init__(opts, io_loop, **kwargs) self.opts = opts self.io_loop = io_loop self.unpacker = salt.utils.msgpack.Unpacker() @@ -260,15 +261,6 @@ class TCPPubClient(salt.transport.base.PublishClient): self._stream = None self._closed = True - # pylint: disable=W1701 - def __del__(self): - if not self._closing: - warnings.warn( - "unclosed publish client {self!r}", ResourceWarning, source=self - ) - - # pylint: enable=W1701 - async def getstream(self, **kwargs): if self.source_ip or self.source_port: kwargs.update(source_ip=self.source_ip, source_port=self.source_port) @@ -327,6 +319,7 @@ class TCPPubClient(salt.transport.base.PublishClient): async def _connect(self, timeout=None): if self._stream is None: + self._connect_called = True self._closing = False self._closed = False self._stream = await self.getstream(timeout=timeout) @@ -1621,6 +1614,7 @@ class TCPReqClient(salt.transport.base.RequestClient): ttype = "tcp" def __init__(self, opts, io_loop, **kwargs): # pylint: disable=W0231 + super().__init__(opts, io_loop, **kwargs) self.opts = opts self.io_loop = io_loop @@ -1676,6 +1670,7 @@ class TCPReqClient(salt.transport.base.RequestClient): async def connect(self): if self._stream is None: + self._connect_called = True self._stream = await self.getstream() if self._stream: if not self._stream_return_running: @@ -1807,4 +1802,8 
@@ class TCPReqClient(salt.transport.base.RequestClient): return recv def close(self): - self._stream.close() + if self._closing: + return + if self._stream is not None: + self._stream.close() + self._stream = None diff --git a/salt/transport/zeromq.py b/salt/transport/zeromq.py index e4fcb5dd9f7..f9cafa1d469 100644 --- a/salt/transport/zeromq.py +++ b/salt/transport/zeromq.py @@ -254,7 +254,7 @@ class PublishClient(salt.transport.base.PublishClient): async def connect( self, port=None, connect_callback=None, disconnect_callback=None, timeout=None ): - self.connect_called = True + self._connect_called = True if port is not None: self.port = port if self.path: @@ -277,7 +277,7 @@ class PublishClient(salt.transport.base.PublishClient): await connect_callback(True) async def connect_uri(self, uri, connect_callback=None, disconnect_callback=None): - self.connect_called = True + self._connect_called = True log.debug("Connecting the publisher client to: %s", uri) # log.debug("%r connecting to %s", self, self.master_pub) self.uri = uri @@ -647,14 +647,8 @@ class AsyncReqMessageClient: # wire up sockets self._init_socket() - # TODO: timeout all in-flight sessions, or error def close(self): - try: - if self._closing: - return - except AttributeError: - # We must have been called from __del__ - # The python interpreter has nuked most attributes already + if self._closing: return else: self._closing = True @@ -794,7 +788,10 @@ class ZeroMQSocketMonitor: def stop(self): if self._socket is None: return - self._socket.disable_monitor() + try: + self._socket.disable_monitor() + except zmq.Error: + pass self._socket = None self._running.clear() self._monitor_socket = None @@ -1037,6 +1034,7 @@ class RequestClient(salt.transport.base.RequestClient): ttype = "zeromq" def __init__(self, opts, io_loop, linger=0): # pylint: disable=W0231 + super().__init__(opts, io_loop) self.opts = opts # XXX Support host, port, path, instead of using get_master_uri self.master_uri = 
self.get_master_uri(opts) @@ -1055,6 +1053,7 @@ class RequestClient(salt.transport.base.RequestClient): async def connect(self): if self.socket is None: + self._connect_called = True self._closing = False # wire up sockets self._init_socket() diff --git a/salt/utils/cloud.py b/salt/utils/cloud.py index a0843130593..3e026a0bb57 100644 --- a/salt/utils/cloud.py +++ b/salt/utils/cloud.py @@ -63,7 +63,7 @@ try: from pypsexec.client import Client as PsExecClient from pypsexec.exceptions import SCMRException from pypsexec.scmr import Service as ScmrService - from smbprotocol.exceptions import SMBResponseException + from smbprotocol.exceptions import CannotDelete, SMBResponseException from smbprotocol.tree import TreeConnect logging.getLogger("smbprotocol").setLevel(logging.WARNING) @@ -910,7 +910,12 @@ class Client: return self._client.connect() def disconnect(self): - self._client.cleanup() # This removes the lingering PAExec binary + try: + # This removes any lingering PAExec binaries + self._client.cleanup() + except CannotDelete as exc: + # We shouldn't hard crash here, so just log the error + log.debug("Exception cleaning up PAexec: %r", exc) return self._client.disconnect() def create_service(self): diff --git a/salt/utils/extmods.py b/salt/utils/extmods.py index cda0fbbd595..8601cfeedbc 100644 --- a/salt/utils/extmods.py +++ b/salt/utils/extmods.py @@ -11,6 +11,7 @@ import salt.utils.files import salt.utils.hashutils import salt.utils.path import salt.utils.url +from salt.config import DEFAULT_HASH_TYPE log = logging.getLogger(__name__) @@ -123,7 +124,7 @@ def sync( log.info("Copying '%s' to '%s'", fn_, dest) if os.path.isfile(dest): # The file is present, if the sum differs replace it - hash_type = opts.get("hash_type", "md5") + hash_type = opts.get("hash_type", DEFAULT_HASH_TYPE) src_digest = salt.utils.hashutils.get_hash(fn_, hash_type) dst_digest = salt.utils.hashutils.get_hash(dest, hash_type) if src_digest != dst_digest: diff --git a/salt/utils/gitfs.py 
b/salt/utils/gitfs.py index 4cc3a2543d2..a7ab90d62ab 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -2,7 +2,6 @@ Classes which provide the shared base for GitFS, git_pillar, and winrepo """ - import base64 import contextlib import copy @@ -38,6 +37,7 @@ import salt.utils.stringutils import salt.utils.url import salt.utils.user import salt.utils.versions +from salt.config import DEFAULT_HASH_TYPE from salt.config import DEFAULT_MASTER_OPTS as _DEFAULT_MASTER_OPTS from salt.exceptions import FileserverConfigError, GitLockError, get_error_message from salt.utils.event import tagify @@ -459,7 +459,7 @@ class GitProvider: if hasattr(self, "name"): self._cache_basehash = self.name else: - hash_type = getattr(hashlib, self.opts.get("hash_type", "md5")) + hash_type = getattr(hashlib, self.opts.get("hash_type", DEFAULT_HASH_TYPE)) # We loaded this data from yaml configuration files, so, its safe # to use UTF-8 self._cache_basehash = str( diff --git a/salt/utils/jinja.py b/salt/utils/jinja.py index d90957a0087..898c8d3fc0d 100644 --- a/salt/utils/jinja.py +++ b/salt/utils/jinja.py @@ -127,7 +127,7 @@ class SaltCacheLoader(BaseLoader): the importing file. """ - # FIXME: somewhere do seprataor replacement: '\\' => '/' + # FIXME: somewhere do separator replacement: '\\' => '/' _template = template if template.split("/", 1)[0] in ("..", "."): is_relative = True @@ -136,7 +136,6 @@ class SaltCacheLoader(BaseLoader): # checks for relative '..' 
paths that step-out of file_roots if is_relative: # Starts with a relative path indicator - if not environment or "tpldir" not in environment.globals: log.warning( 'Relative path "%s" cannot be resolved without an environment', diff --git a/salt/utils/network.py b/salt/utils/network.py index 3410788156c..fae91e01ecb 100644 --- a/salt/utils/network.py +++ b/salt/utils/network.py @@ -674,6 +674,7 @@ def cidr_to_ipv4_netmask(cidr_bits): else: netmask += f"{256 - (2 ** (8 - cidr_bits)):d}" cidr_bits = 0 + return netmask @@ -682,8 +683,14 @@ def _number_of_set_bits_to_ipv4_netmask(set_bits): Returns an IPv4 netmask from the integer representation of that mask. Ex. 0xffffff00 -> '255.255.255.0' + 0xffff6400 -> '255.255.100.0' """ - return cidr_to_ipv4_netmask(_number_of_set_bits(set_bits)) + # Note: previously used cidr but that is counting number of bits in set_bits + # and can lead to wrong netmaks values, for example: + # 0xFFFF6400 is 255.255.100.0, 0x64 is 100 decimal + # but if convert to cidr first, it gives 19 bits, get 255.255.224.0 - WRONG + # leveraging Python ip_address library for different method of conversion + return str(ipaddress.ip_address(set_bits)) def _number_of_set_bits(x): @@ -1000,7 +1007,7 @@ def _netbsd_interfaces_ifconfig(out): return ret -def _junos_interfaces_ifconfig(out): +def _junos_interfaces_ifconfig(out): # pragma: no cover """ Uses ifconfig to return a dictionary of interfaces with various information about each (up/down state, ip address, netmask, and hwaddr) @@ -1070,7 +1077,7 @@ def _junos_interfaces_ifconfig(out): return ret -def junos_interfaces(): +def junos_interfaces(): # pragma: no cover """ Obtain interface information for Junos; ifconfig output diverged from other BSD variants (Netmask is now part of the @@ -1235,7 +1242,7 @@ def _get_iface_info(iface): return None, error_msg -def _hw_addr_aix(iface): +def _hw_addr_aix(iface): # pragma: no cover """ Return the hardware address (a.k.a. 
MAC address) for a given interface on AIX MAC address not available in through interfaces @@ -1273,7 +1280,7 @@ def hw_addr(iface): """ if salt.utils.platform.is_aix(): - return _hw_addr_aix + return _hw_addr_aix(iface) iface_info, error = _get_iface_info(iface) @@ -1742,7 +1749,7 @@ def _netlink_tool_remote_on(port, which_end): return remotes -def _sunos_remotes_on(port, which_end): +def _sunos_remotes_on(port, which_end): # pragma: no cover """ SunOS specific helper function. Returns set of ipv4 host addresses of remote established connections @@ -1782,7 +1789,7 @@ def _sunos_remotes_on(port, which_end): return remotes -def _freebsd_remotes_on(port, which_end): +def _freebsd_remotes_on(port, which_end): # pragma: no cover """ Returns set of ipv4 host addresses of remote established connections on local tcp port port. @@ -1844,7 +1851,7 @@ def _freebsd_remotes_on(port, which_end): return remotes -def _netbsd_remotes_on(port, which_end): +def _netbsd_remotes_on(port, which_end): # pragma: no cover """ Returns set of ipv4 host addresses of remote established connections on local tcp port port. @@ -1905,7 +1912,7 @@ def _netbsd_remotes_on(port, which_end): return remotes -def _openbsd_remotes_on(port, which_end): +def _openbsd_remotes_on(port, which_end): # pragma: no cover """ OpenBSD specific helper function. Returns set of ipv4 host addresses of remote established connections @@ -2049,7 +2056,7 @@ def _linux_remotes_on(port, which_end): return remotes -def _aix_remotes_on(port, which_end): +def _aix_remotes_on(port, which_end): # pragma: no cover """ AIX specific helper function. 
Returns set of ipv4 host addresses of remote established connections diff --git a/salt/utils/parsers.py b/salt/utils/parsers.py index 788d76282e3..181e2b55fc8 100644 --- a/salt/utils/parsers.py +++ b/salt/utils/parsers.py @@ -454,6 +454,7 @@ class SaltfileMixIn(metaclass=MixInMeta): if value != default: # The user passed an argument, we won't override it with the # one from Saltfile, if any + cli_config.pop(option.dest) continue # We reached this far! Set the Saltfile value on the option @@ -477,6 +478,7 @@ class SaltfileMixIn(metaclass=MixInMeta): if value != default: # The user passed an argument, we won't override it with # the one from Saltfile, if any + cli_config.pop(option.dest) continue setattr(self.options, option.dest, cli_config[option.dest]) diff --git a/salt/utils/pyinstaller/__init__.py b/salt/utils/pyinstaller/__init__.py deleted file mode 100644 index eb8a6a85fb4..00000000000 --- a/salt/utils/pyinstaller/__init__.py +++ /dev/null @@ -1,21 +0,0 @@ -""" -This module exists to help PyInstaller bundle Salt -""" -import pathlib - -PYINSTALLER_UTILS_DIR_PATH = pathlib.Path(__file__).resolve().parent - - -def get_hook_dirs(): - """ - Return a list of paths that PyInstaller can search for hooks. - """ - hook_dirs = {PYINSTALLER_UTILS_DIR_PATH} - for path in PYINSTALLER_UTILS_DIR_PATH.iterdir(): - if not path.is_dir(): - continue - if "__pycache__" in path.parts: - continue - hook_dirs.add(path) - - return sorted(str(p) for p in hook_dirs) diff --git a/salt/utils/pyinstaller/hook-salt.py b/salt/utils/pyinstaller/hook-salt.py deleted file mode 100644 index cad74ffd98c..00000000000 --- a/salt/utils/pyinstaller/hook-salt.py +++ /dev/null @@ -1,146 +0,0 @@ -# pylint: disable=3rd-party-module-not-gated - -import logging -import pathlib -import sys - -from PyInstaller.utils import hooks - -log = logging.getLogger(__name__) - - -def _filter_stdlib_tests(name): - """ - Filter out non useful modules from the stdlib - """ - if ".test." 
in name: - return False - if ".tests." in name: - return False - if ".idle_test" in name: - return False - return True - - -def _python_stdlib_path(): - """ - Return the path to the standard library folder - """ - base_exec_prefix = pathlib.Path(sys.base_exec_prefix) - log.info("Grabbing 'base_exec_prefix' for platform: %s", sys.platform) - if not sys.platform.lower().startswith("win"): - return base_exec_prefix / "lib" / "python{}.{}".format(*sys.version_info) - return base_exec_prefix / "Lib" - - -def _collect_python_stdlib_hidden_imports(): - """ - Collect all of the standard library(most of it) as hidden imports. - """ - _hidden_imports = set() - - stdlib = _python_stdlib_path() - if not stdlib.exists(): - log.error("The path '%s' does not exist", stdlib) - return list(_hidden_imports) - - log.info( - "Collecting hidden imports from the python standard library at: %s", - stdlib, - ) - for path in stdlib.glob("*"): - if path.is_dir(): - if path.name in ( - "__pycache__", - "site-packages", - "test", - "turtledemo", - "ensurepip", - ): - continue - if path.joinpath("__init__.py").is_file(): - log.info("Collecting: %s", path.name) - try: - _module_hidden_imports = hooks.collect_submodules( - path.name, filter=_filter_stdlib_tests - ) - log.debug("Collected(%s): %s", path.name, _module_hidden_imports) - _hidden_imports.update(set(_module_hidden_imports)) - except Exception as exc: # pylint: disable=broad-except - log.error("Failed to collect %r: %s", path.name, exc) - continue - if path.suffix not in (".py", ".pyc", ".pyo"): - continue - _hidden_imports.add(path.stem) - log.info("Collected stdlib hidden imports: %s", sorted(_hidden_imports)) - return sorted(_hidden_imports) - - -def _collect_python_stdlib_dynamic_libraries(): - """ - Collect all of the standard library(most of it) dynamic libraries. 
- """ - _dynamic_libs = set() - - stdlib = _python_stdlib_path() - if not stdlib.exists(): - log.error("The path '%s' does not exist", stdlib) - return list(_dynamic_libs) - - log.info( - "Collecting dynamic libraries from the python standard library at: %s", - stdlib, - ) - for path in stdlib.glob("*"): - if not path.is_dir(): - continue - if path.name in ( - "__pycache__", - "site-packages", - "test", - "turtledemo", - "ensurepip", - ): - continue - if path.joinpath("__init__.py").is_file(): - log.info("Collecting: %s", path.name) - try: - _module_dynamic_libs = hooks.collect_dynamic_libs(path.name, path.name) - log.debug("Collected(%s): %s", path.name, _module_dynamic_libs) - _dynamic_libs.update(set(_module_dynamic_libs)) - except Exception as exc: # pylint: disable=broad-except - log.error("Failed to collect %r: %s", path.name, exc) - log.info("Collected stdlib dynamic libs: %s", sorted(_dynamic_libs)) - return sorted(_dynamic_libs) - - -def _filter_submodules(name): - # this should never happen, but serves as a place-holder for when/if we have to filter - if not name.startswith("salt"): - return False - return True - - -# Collect Salt datas, binaries(should be None) and hidden imports -SALT_DATAS, SALT_BINARIES, SALT_HIDDENIMPORTS = hooks.collect_all( - "salt", - include_py_files=True, - filter_submodules=_filter_submodules, -) - -# In case there's salt-extensions installed, collect their datas and hidden imports -SALT_EXTENSIONS_DATAS, SALT_EXTENSIONS_HIDDENIMPORTS = hooks.collect_entry_point( - "salt.loader" -) - - -# PyInstaller attributes -datas = sorted(set(SALT_DATAS + SALT_EXTENSIONS_DATAS)) -binaries = sorted(set(SALT_BINARIES)) -hiddenimports = sorted( - set( - SALT_HIDDENIMPORTS - + SALT_EXTENSIONS_HIDDENIMPORTS - + _collect_python_stdlib_hidden_imports() - ) -) diff --git a/salt/utils/pyinstaller/rthooks.dat b/salt/utils/pyinstaller/rthooks.dat deleted file mode 100644 index b54f09a1df4..00000000000 --- a/salt/utils/pyinstaller/rthooks.dat +++ 
/dev/null @@ -1,4 +0,0 @@ -{ - "subprocess": ["pyi_rth_subprocess.py"], - "salt.utils.vt": ["pyi_rth_salt.utils.vt.py"], -} diff --git a/salt/utils/pyinstaller/rthooks/__init__.py b/salt/utils/pyinstaller/rthooks/__init__.py deleted file mode 100644 index 00c319dfa30..00000000000 --- a/salt/utils/pyinstaller/rthooks/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -""" -This package contains support code to package Salt with PyInstaller. -""" diff --git a/salt/utils/pyinstaller/rthooks/_overrides.py b/salt/utils/pyinstaller/rthooks/_overrides.py deleted file mode 100644 index ad422aeb7ed..00000000000 --- a/salt/utils/pyinstaller/rthooks/_overrides.py +++ /dev/null @@ -1,84 +0,0 @@ -""" -This package contains the runtime hooks support code for when Salt is pacakged with PyInstaller. -""" -import io -import logging -import os -import subprocess -import sys - -import salt.utils.vt - -log = logging.getLogger(__name__) - - -def clean_pyinstaller_vars(environ): - """ - Restore or cleanup PyInstaller specific environent variable behavior. - """ - if environ is None: - environ = dict(os.environ) - # When Salt is bundled with tiamat, it MUST NOT contain LD_LIBRARY_PATH - # when shelling out, or, at least the value of LD_LIBRARY_PATH set by - # pyinstaller. - # See: - # https://pyinstaller.readthedocs.io/en/stable/runtime-information.html#ld-library-path-libpath-considerations - for varname in ("LD_LIBRARY_PATH", "LIBPATH"): - original_varname = "{}_ORIG".format(varname) - if varname in environ and environ[varname] == sys._MEIPASS: - # If we find the varname on the user provided environment we need to at least - # check if it's not the value set by PyInstaller, if it is, remove it. - log.debug( - "User provided environment variable %r with value %r which is " - "the value that PyInstaller set's. 
Removing it", - varname, - environ[varname], - ) - environ.pop(varname) - - if original_varname in environ and varname not in environ: - # We found the original variable set by PyInstaller, and we didn't find - # any user provided variable, let's rename it. - log.debug( - "The %r variable was found in the passed environment, renaming it to %r", - original_varname, - varname, - ) - environ[varname] = environ.pop(original_varname) - - if varname not in environ: - if original_varname in os.environ: - log.debug( - "Renaming environment variable %r to %r", original_varname, varname - ) - environ[varname] = os.environ[original_varname] - elif varname in os.environ: - # Override the system environ variable with an empty one - log.debug("Setting environment variable %r to an empty string", varname) - environ[varname] = "" - return environ - - -class PyinstallerPopen(subprocess.Popen): - def __init__(self, *args, **kwargs): - kwargs["env"] = clean_pyinstaller_vars(kwargs.pop("env", None)) - super().__init__(*args, **kwargs) - - # From https://github.com/pyinstaller/pyinstaller/blob/v5.1/PyInstaller/hooks/rthooks/pyi_rth_subprocess.py - # - # In windowed mode, force any unused pipes (stdin, stdout and stderr) to be DEVNULL instead of inheriting the - # invalid corresponding handles from this parent process. 
- if sys.platform == "win32" and not isinstance(sys.stdout, io.IOBase): - - def _get_handles(self, stdin, stdout, stderr): - stdin, stdout, stderr = ( - subprocess.DEVNULL if pipe is None else pipe - for pipe in (stdin, stdout, stderr) - ) - return super()._get_handles(stdin, stdout, stderr) - - -class PyinstallerTerminal(salt.utils.vt.Terminal): # pylint: disable=abstract-method - def __init__(self, *args, **kwargs): - kwargs["env"] = clean_pyinstaller_vars(kwargs.pop("env", None)) - super().__init__(*args, **kwargs) diff --git a/salt/utils/pyinstaller/rthooks/pyi_rth_salt.utils.vt.py b/salt/utils/pyinstaller/rthooks/pyi_rth_salt.utils.vt.py deleted file mode 100644 index f16a9d954e0..00000000000 --- a/salt/utils/pyinstaller/rthooks/pyi_rth_salt.utils.vt.py +++ /dev/null @@ -1,13 +0,0 @@ -""" -PyInstaller runtime hook to patch salt.utils.vt.Terminal -""" -import logging - -import salt.utils.vt -from salt.utils.pyinstaller.rthooks._overrides import PyinstallerTerminal - -log = logging.getLogger(__name__) -# Patch salt.utils.vt.Terminal when running within a pyinstalled bundled package -salt.utils.vt.Terminal = PyinstallerTerminal - -log.debug("Replaced 'salt.utils.vt.Terminal' with 'PyinstallerTerminal'") diff --git a/salt/utils/pyinstaller/rthooks/pyi_rth_subprocess.py b/salt/utils/pyinstaller/rthooks/pyi_rth_subprocess.py deleted file mode 100644 index a00ad7fc33b..00000000000 --- a/salt/utils/pyinstaller/rthooks/pyi_rth_subprocess.py +++ /dev/null @@ -1,13 +0,0 @@ -""" -PyInstaller runtime hook to patch subprocess.Popen -""" -import logging -import subprocess - -from salt.utils.pyinstaller.rthooks._overrides import PyinstallerPopen - -log = logging.getLogger(__name__) -# Patch subprocess.Popen when running within a pyinstalled bundled package -subprocess.Popen = PyinstallerPopen - -log.debug("Replaced 'subprocess.Popen' with 'PyinstallerTerminal'") diff --git a/setup.cfg b/setup.cfg index f99baf45528..2f452d87695 100644 --- a/setup.cfg +++ b/setup.cfg @@ -3,10 
+3,22 @@ owner = root group = root [mypy] +packages = tools +exclude = (?x)( + salt + | tests + ).*\.py implicit_optional = True show_error_codes = True warn_return_any = True warn_unused_configs = True -[mypy.tools] +[mypy-tools.*] +ignore_missing_imports = True + +[mypy-tools.precommit.docstrings] +follow_imports = silent + +[mypy-salt.*] +follow_imports = silent ignore_missing_imports = True diff --git a/tasks/README.md b/tasks/README.md deleted file mode 100644 index 6ff3fb10a7d..00000000000 --- a/tasks/README.md +++ /dev/null @@ -1,28 +0,0 @@ -# What is this directory? - -This directory contains python scripts which should be called by [invoke](https://pypi.org/project/invoke). - -Instead of having several multi-purpose python scripts scatered through multiple paths in the salt code base, -we will now concentrate them under an invoke task. - -## Calling Invoke - -Invoke can be called in the following ways. - -### Installed system-wide - -If invoke is installed system-wide, be sure you also have `blessings` installed if you want coloured output, although -it's not a hard requirement. - -``` -inv docs.check -``` - -### Using Nox - -Since salt already uses nox, and nox manages virtual environments and respective requirements, calling invoke is as -simple as: - -``` -nox -e invoke -- docs.check -``` diff --git a/tasks/__init__.py b/tasks/__init__.py deleted file mode 100644 index 5f5aac88cb8..00000000000 --- a/tasks/__init__.py +++ /dev/null @@ -1,11 +0,0 @@ -from invoke import Collection # pylint: disable=3rd-party-module-not-gated - -from . 
import docs, docstrings, filemap, loader - -ns = Collection() -ns.add_collection(Collection.from_module(docs, name="docs"), name="docs") -ns.add_collection( - Collection.from_module(docstrings, name="docstrings"), name="docstrings" -) -ns.add_collection(Collection.from_module(loader, name="loader"), name="loader") -ns.add_collection(Collection.from_module(filemap, name="filemap"), name="filemap") diff --git a/tasks/filemap.py b/tasks/filemap.py deleted file mode 100644 index a1eb62c6b82..00000000000 --- a/tasks/filemap.py +++ /dev/null @@ -1,95 +0,0 @@ -""" - tasks.filemap - ~~~~~~~~~~~~~ - - tests/filename_map.yml validity checks -""" -import pathlib -import re - -import yaml -from invoke import task # pylint: disable=3rd-party-module-not-gated - -from tasks import utils - -CODE_DIR = pathlib.Path(__file__).resolve().parent.parent -FILENAME_MAP_PATH = CODE_DIR / "tests" / "filename_map.yml" - - -def _match_to_test_file(match): - tests_path = CODE_DIR / "tests" - parts = match.split(".") - parts[-1] += ".py" - return tests_path.joinpath(*parts).relative_to(CODE_DIR) - - -def _check_matches(rule, matches): - errors = 0 - for match in matches: - filematch = _match_to_test_file(match) - if not filematch.exists(): - utils.error( - "The match '{}' for rule '{}' points to a non existing test module" - " path: {}", - match, - rule, - filematch, - ) - errors += 1 - return errors - - -@task -def check(ctx): - exitcode = 0 - excludes = ("tasks/", "templates/", ".nox/") - full_filelist = [path.relative_to(CODE_DIR) for path in CODE_DIR.rglob("*.py")] - filelist = [ - str(path) for path in full_filelist if not str(path).startswith(excludes) - ] - filename_map = yaml.safe_load(FILENAME_MAP_PATH.read_text()) - checked = set() - for rule, matches in filename_map.items(): - if rule == "*": - exitcode += _check_matches(rule, matches) - elif "|" in rule: - # This is regex - for filepath in filelist: - if re.match(rule, filepath): - # Found at least one match, stop looking - break - 
else: - utils.error( - "Could not find a matching file in the salt repo for the rule '{}'", - rule, - ) - exitcode += 1 - continue - exitcode += _check_matches(rule, matches) - elif "*" in rule or "\\" in rule: - # Glob matching - process_matches = True - for filerule in CODE_DIR.glob(rule): - if not filerule.exists(): - utils.error( - "The rule '{}' points to a non existing path: {}", - rule, - filerule, - ) - exitcode += 1 - process_matches = False - if process_matches: - exitcode += _check_matches(rule, matches) - else: - # Direct file paths as rules - filerule = pathlib.Path(rule) - if not filerule.exists(): - utils.error( - "The rule '{}' points to a non existing path: {}", rule, filerule - ) - exitcode += 1 - continue - exitcode += _check_matches(rule, matches) - if exitcode: - utils.error("Found {} errors", exitcode) - utils.exit_invoke(exitcode) diff --git a/tasks/utils.py b/tasks/utils.py deleted file mode 100644 index e082508a5a3..00000000000 --- a/tasks/utils.py +++ /dev/null @@ -1,64 +0,0 @@ -""" - tasks.utils - ~~~~~~~~~~~ - - Invoke utilities -""" - -import sys - -try: - from blessings import Terminal - - try: - terminal = Terminal() - HAS_BLESSINGS = True - except Exception: # pylint: disable=broad-except - terminal = None - HAS_BLESSINGS = False -except ImportError: - terminal = None - HAS_BLESSINGS = False - - -def exit_invoke(exitcode, message=None, *args, **kwargs): - if message is not None: - if exitcode > 0: - warn(message, *args, **kwargs) - else: - info(message, *args, **kwargs) - sys.exit(exitcode) - - -def info(message, *args, **kwargs): - if not isinstance(message, str): - message = str(message) - message = message.format(*args, **kwargs) - if terminal: - message = terminal.bold(terminal.green(message)) - write_message(message) - - -def warn(message, *args, **kwargs): - if not isinstance(message, str): - message = str(message) - message = message.format(*args, **kwargs) - if terminal: - message = terminal.bold(terminal.yellow(message)) - 
write_message(message) - - -def error(message, *args, **kwargs): - if not isinstance(message, str): - message = str(message) - message = message.format(*args, **kwargs) - if terminal: - message = terminal.bold(terminal.red(message)) - write_message(message) - - -def write_message(message): - sys.stderr.write(message) - if not message.endswith("\n"): - sys.stderr.write("\n") - sys.stderr.flush() diff --git a/tests/conftest.py b/tests/conftest.py index 69e150698e7..7c275c4eba6 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -67,6 +67,9 @@ else: # Flag coverage to track suprocesses by pointing it to the right .coveragerc file os.environ["COVERAGE_PROCESS_START"] = str(COVERAGERC_FILE) +# Variable defining a FIPS test run or not +FIPS_TESTRUN = os.environ.get("FIPS_TESTRUN", "0") == "1" + # Define the pytest plugins we rely on pytest_plugins = ["helpers_namespace"] @@ -1049,7 +1052,10 @@ def salt_syndic_master_factory( config_defaults["syndic_master"] = "localhost" config_defaults["transport"] = request.config.getoption("--transport") - config_overrides = {"log_level_logfile": "quiet"} + config_overrides = { + "log_level_logfile": "quiet", + "fips_mode": FIPS_TESTRUN, + } ext_pillar = [] if salt.utils.platform.is_windows(): ext_pillar.append( @@ -1162,7 +1168,10 @@ def salt_master_factory( config_defaults["syndic_master"] = "localhost" config_defaults["transport"] = salt_syndic_master_factory.config["transport"] - config_overrides = {"log_level_logfile": "quiet"} + config_overrides = { + "log_level_logfile": "quiet", + "fips_mode": FIPS_TESTRUN, + } ext_pillar = [] if salt.utils.platform.is_windows(): ext_pillar.append( @@ -1270,6 +1279,7 @@ def salt_minion_factory(salt_master_factory): "log_level_logfile": "quiet", "file_roots": salt_master_factory.config["file_roots"].copy(), "pillar_roots": salt_master_factory.config["pillar_roots"].copy(), + "fips_mode": FIPS_TESTRUN, } virtualenv_binary = get_virtualenv_binary_path() @@ -1301,6 +1311,7 @@ def 
salt_sub_minion_factory(salt_master_factory): "log_level_logfile": "quiet", "file_roots": salt_master_factory.config["file_roots"].copy(), "pillar_roots": salt_master_factory.config["pillar_roots"].copy(), + "fips_mode": FIPS_TESTRUN, } virtualenv_binary = get_virtualenv_binary_path() diff --git a/tests/integration/cloud/clouds/test_digitalocean.py b/tests/integration/cloud/clouds/test_digitalocean.py index e92f57d8aa2..64ad0f17426 100644 --- a/tests/integration/cloud/clouds/test_digitalocean.py +++ b/tests/integration/cloud/clouds/test_digitalocean.py @@ -1,10 +1,11 @@ """ Integration tests for DigitalOcean APIv2 """ - import base64 import hashlib +import pytest + import salt.crypt import salt.utils.stringutils from tests.integration.cloud.helpers.cloud_test_base import TIMEOUT, CloudTest @@ -43,6 +44,7 @@ class DigitalOceanTest(CloudTest): _list_sizes = self.run_cloud("--list-sizes {}".format(self.PROVIDER)) self.assertIn("16gb", [i.strip() for i in _list_sizes]) + @pytest.mark.skip_on_fips_enabled_platform def test_key_management(self): """ Test key management diff --git a/tests/integration/externalapi/test_venafiapi.py b/tests/integration/externalapi/test_venafiapi.py index ad08605430f..c9d44dce50c 100644 --- a/tests/integration/externalapi/test_venafiapi.py +++ b/tests/integration/externalapi/test_venafiapi.py @@ -43,13 +43,10 @@ class VenafiTest(ShellCase): @with_random_name @pytest.mark.slow_test + @pytest.mark.skip_on_fips_enabled_platform def test_request(self, name): cn = "{}.example.com".format(name) - # Provide python27 compatibility - if not isinstance(cn, str): - cn = cn.decode() - ret = self.run_run_plus( fun="venafi.request", minion_id=cn, @@ -126,10 +123,6 @@ xlAKgaU6i03jOm5+sww5L2YVMi1eeBN+kx7o94ogpRemC/EUidvl1PUJ6+e7an9V csr_path = f.name cn = "test-csr-32313131.venafi.example.com" - # Provide python27 compatibility - if not isinstance(cn, str): - cn = cn.decode() - ret = self.run_run_plus( fun="venafi.request", minion_id=cn, csr_path=csr_path, 
zone="fake" ) diff --git a/tests/integration/modules/test_cp.py b/tests/integration/modules/test_cp.py index cd3e4c2f5ad..af873bb6784 100644 --- a/tests/integration/modules/test_cp.py +++ b/tests/integration/modules/test_cp.py @@ -89,12 +89,12 @@ class CPModuleTest(ModuleCase): """ src = os.path.join(RUNTIME_VARS.FILES, "file", "base", "file.big") with salt.utils.files.fopen(src, "rb") as fp_: - hash_str = hashlib.md5(fp_.read()).hexdigest() + hash_str = hashlib.sha256(fp_.read()).hexdigest() self.run_function("cp.get_file", ["salt://file.big", tgt], gzip=5) with salt.utils.files.fopen(tgt, "rb") as scene: data = scene.read() - self.assertEqual(hash_str, hashlib.md5(data).hexdigest()) + self.assertEqual(hash_str, hashlib.sha256(data).hexdigest()) data = salt.utils.stringutils.to_unicode(data) self.assertIn("KNIGHT: They're nervous, sire.", data) self.assertNotIn("bacon", data) diff --git a/tests/integration/modules/test_jinja.py b/tests/integration/modules/test_jinja.py deleted file mode 100644 index 70b45bf0f23..00000000000 --- a/tests/integration/modules/test_jinja.py +++ /dev/null @@ -1,76 +0,0 @@ -""" -Test the jinja module -""" - -import os - -import salt.utils.files -import salt.utils.json -import salt.utils.yaml -from tests.support.case import ModuleCase -from tests.support.helpers import requires_system_grains -from tests.support.runtests import RUNTIME_VARS - - -class TestModulesJinja(ModuleCase): - """ - Test the jinja map module - """ - - def _path(self, name, absolute=False): - path = os.path.join("modules", "jinja", name) - if absolute: - return os.path.join(RUNTIME_VARS.BASE_FILES, path) - else: - return path - - def test_import_json(self): - json_file = "osarchmap.json" - ret = self.run_function("jinja.import_json", [self._path(json_file)]) - with salt.utils.files.fopen(self._path(json_file, absolute=True)) as fh_: - self.assertDictEqual(salt.utils.json.load(fh_), ret) - - def test_import_yaml(self): - yaml_file = "defaults.yaml" - ret = 
self.run_function("jinja.import_yaml", [self._path(yaml_file)]) - with salt.utils.files.fopen(self._path(yaml_file, absolute=True)) as fh_: - self.assertDictEqual(salt.utils.yaml.safe_load(fh_), ret) - - @requires_system_grains - def test_load_map(self, grains): - ret = self.run_function("jinja.load_map", [self._path("map.jinja"), "template"]) - - assert isinstance( - ret, dict - ), "failed to return dictionary from jinja.load_map: {}".format(ret) - - with salt.utils.files.fopen(self._path("defaults.yaml", absolute=True)) as fh_: - defaults = salt.utils.yaml.safe_load(fh_) - with salt.utils.files.fopen(self._path("osarchmap.json", absolute=True)) as fh_: - osarchmap = salt.utils.json.load(fh_) - with salt.utils.files.fopen( - self._path("osfamilymap.yaml", absolute=True) - ) as fh_: - osfamilymap = salt.utils.yaml.safe_load(fh_) - with salt.utils.files.fopen(self._path("osmap.yaml", absolute=True)) as fh_: - osmap = salt.utils.yaml.safe_load(fh_) - with salt.utils.files.fopen( - self._path("osfingermap.yaml", absolute=True) - ) as fh_: - osfingermap = salt.utils.yaml.safe_load(fh_) - - self.assertEqual( - ret.get("arch"), osarchmap.get(grains["osarch"], {}).get("arch") - ) - self.assertEqual( - ret.get("config"), - osfingermap.get(grains["osfinger"], {}).get( - "config", - osmap.get(grains["os"], {}).get( - "config", - osfamilymap.get(grains["os_family"], {}).get( - "config", defaults.get("template").get("config") - ), - ), - ), - ) diff --git a/tests/integration/renderers/test_jinja.py b/tests/integration/renderers/test_jinja.py deleted file mode 100644 index f0fcd28ff9d..00000000000 --- a/tests/integration/renderers/test_jinja.py +++ /dev/null @@ -1,36 +0,0 @@ -import os - -import pytest - -import salt.utils.files -from tests.support.case import ModuleCase, ShellCase -from tests.support.helpers import with_tempdir - - -class JinjaRendererTest(ModuleCase): - @with_tempdir() - @pytest.mark.slow_test - def test_issue_54765(self, tmpdir): - file_path = 
os.path.join(tmpdir, "issue-54765") - ret = self.run_function( - "state.sls", mods="issue-54765", pillar={"file_path": file_path} - ) - key = "file_|-issue-54765_|-{}_|-managed".format(file_path) - assert key in ret - assert ret[key]["result"] is True - with salt.utils.files.fopen(file_path, "r") as fp: - assert fp.read().strip() == "bar" - - -class JinjaRenderCallTest(ShellCase): - @with_tempdir() - @pytest.mark.slow_test - def test_issue_54765(self, tmpdir): - file_path = os.path.join(tmpdir, "issue-54765") - pillar_str = '\'{{"file_path": "{}"}}\''.format(file_path) - ret = self.run_call( - "state.apply issue-54765 pillar={}".format(pillar_str), local=True - ) - assert " Result: True" in ret - with salt.utils.files.fopen(file_path, "r") as fp: - assert fp.read().strip() == "bar" diff --git a/tests/integration/states/test_archive.py b/tests/integration/states/test_archive.py index 7d2dba52210..d940db5ecd2 100644 --- a/tests/integration/states/test_archive.py +++ b/tests/integration/states/test_archive.py @@ -106,6 +106,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin): self._check_extracted(self.untar_file) + @pytest.mark.skip_on_fips_enabled_platform def test_archive_extracted_with_source_hash(self): """ test archive.extracted without skip_verify @@ -127,6 +128,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin): self._check_extracted(self.untar_file) @pytest.mark.skip_if_not_root + @pytest.mark.skip_on_fips_enabled_platform def test_archive_extracted_with_root_user_and_group(self): """ test archive.extracted with user and group set to "root" @@ -151,6 +153,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin): self._check_extracted(self.untar_file) @pytest.mark.slow_test + @pytest.mark.skip_on_fips_enabled_platform def test_archive_extracted_with_strip_in_options(self): """ test archive.extracted with --strip in options @@ -170,6 +173,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin): self._check_extracted(os.path.join(ARCHIVE_DIR, 
"README")) + @pytest.mark.skip_on_fips_enabled_platform def test_archive_extracted_with_strip_components_in_options(self): """ test archive.extracted with --strip-components in options @@ -190,6 +194,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin): self._check_extracted(os.path.join(ARCHIVE_DIR, "README")) @pytest.mark.slow_test + @pytest.mark.skip_on_fips_enabled_platform def test_archive_extracted_without_archive_format(self): """ test archive.extracted with no archive_format option @@ -206,6 +211,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin): self._check_extracted(self.untar_file) + @pytest.mark.skip_on_fips_enabled_platform def test_archive_extracted_with_cmd_unzip_false(self): """ test archive.extracted using use_cmd_unzip argument as false @@ -240,6 +246,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin): self._check_extracted(self.untar_file) + @pytest.mark.skip_on_fips_enabled_platform def test_local_archive_extracted_skip_verify(self): """ test archive.extracted with local file, bad hash and skip_verify @@ -258,6 +265,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin): self._check_extracted(self.untar_file) @pytest.mark.slow_test + @pytest.mark.skip_on_fips_enabled_platform def test_local_archive_extracted_with_source_hash(self): """ test archive.extracted with local file and valid hash @@ -275,6 +283,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin): self._check_extracted(self.untar_file) @pytest.mark.slow_test + @pytest.mark.skip_on_fips_enabled_platform def test_local_archive_extracted_with_bad_source_hash(self): """ test archive.extracted with local file and bad hash @@ -289,6 +298,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin): self.assertSaltFalseReturn(ret) + @pytest.mark.skip_on_fips_enabled_platform def test_local_archive_extracted_with_uppercase_source_hash(self): """ test archive.extracted with local file and bad hash diff --git a/tests/pytests/conftest.py b/tests/pytests/conftest.py 
index 105ae4cda5c..ba3defb6cc8 100644 --- a/tests/pytests/conftest.py +++ b/tests/pytests/conftest.py @@ -24,6 +24,7 @@ from saltfactories.utils import random_string import salt.utils.files import salt.utils.platform from salt.serializers import yaml +from tests.conftest import FIPS_TESTRUN from tests.support.helpers import Webserver, get_virtualenv_binary_path from tests.support.pytest.helpers import TestAccount from tests.support.runtests import RUNTIME_VARS @@ -187,7 +188,10 @@ def salt_master_factory( os.path.join(RUNTIME_VARS.FILES, "returners") ) config_defaults["event_return"] = "runtests_noop" - config_overrides = {"pytest-master": {"log": {"level": "DEBUG"}}} + config_overrides = { + "pytest-master": {"log": {"level": "DEBUG"}}, + "fips_mode": FIPS_TESTRUN, + } ext_pillar = [] if salt.utils.platform.is_windows(): ext_pillar.append( @@ -316,6 +320,7 @@ def salt_minion_factory(salt_master_factory, salt_minion_id, sdb_etcd_port, vaul config_overrides = { "file_roots": salt_master_factory.config["file_roots"].copy(), "pillar_roots": salt_master_factory.config["pillar_roots"].copy(), + "fips_mode": FIPS_TESTRUN, } virtualenv_binary = get_virtualenv_binary_path() @@ -346,6 +351,7 @@ def salt_sub_minion_factory(salt_master_factory, salt_sub_minion_id): config_overrides = { "file_roots": salt_master_factory.config["file_roots"].copy(), "pillar_roots": salt_master_factory.config["pillar_roots"].copy(), + "fips_mode": FIPS_TESTRUN, } virtualenv_binary = get_virtualenv_binary_path() diff --git a/tests/pytests/functional/cache/test_consul.py b/tests/pytests/functional/cache/test_consul.py index 3a38e495a93..0a42913b6c2 100644 --- a/tests/pytests/functional/cache/test_consul.py +++ b/tests/pytests/functional/cache/test_consul.py @@ -14,6 +14,7 @@ docker = pytest.importorskip("docker") log = logging.getLogger(__name__) pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, pytest.mark.slow_test, pytest.mark.skip_if_binaries_missing("dockerd"), ] diff --git 
a/tests/pytests/functional/channel/test_client.py b/tests/pytests/functional/channel/test_client.py new file mode 100644 index 00000000000..145ad95b771 --- /dev/null +++ b/tests/pytests/functional/channel/test_client.py @@ -0,0 +1,25 @@ +import salt.channel.client +from tests.support.mock import MagicMock, patch + + +async def test_async_pub_channel_connect_cb(minion_opts): + """ + Validate connect_callback closes the request channel it creates. + """ + minion_opts["master_uri"] = "tcp://127.0.0.1:4506" + minion_opts["master_ip"] = "127.0.0.1" + with salt.channel.client.AsyncPubChannel.factory(minion_opts) as channel: + + async def send_id(*args): + return + + channel.send_id = send_id + channel._reconnected = True + + mock = MagicMock(salt.channel.client.AsyncReqChannel) + mock.__enter__ = lambda self: mock + + with patch("salt.channel.client.AsyncReqChannel.factory", return_value=mock): + await channel.connect_callback(None) + mock.send.assert_called_once() + mock.__exit__.assert_called_once() diff --git a/tests/pytests/functional/modules/state/test_jinja_filters.py b/tests/pytests/functional/modules/state/test_jinja_filters.py index 220310aaaf0..38135ac967b 100644 --- a/tests/pytests/functional/modules/state/test_jinja_filters.py +++ b/tests/pytests/functional/modules/state/test_jinja_filters.py @@ -6,6 +6,7 @@ import os import attr import pytest +from pytestskipmarkers.utils import platform import salt.utils.files import salt.utils.path @@ -498,6 +499,17 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="avg_not_list", + expected={"ret": 2.0}, + sls=""" + {% set result = 2 | avg() %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), Filter( name="difference", expected={"ret": [1, 3]}, @@ -509,6 +521,17 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="difference_hashable", + expected={"ret": [1, 3]}, + sls=""" + {% set result = (1, 2, 3, 4) | difference((2, 4, 6)) | list %} + test: + 
module.run: + - name: test.echo + - text: {{ result }} + """, + ), Filter( name="intersect", expected={"ret": [2, 4]}, @@ -520,6 +543,17 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="intersect_hashable", + expected={"ret": [2, 4]}, + sls=""" + {% set result = (1, 2, 3, 4) | intersect((2, 4, 6)) | list %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), Filter( name="max", expected={"ret": 4}, @@ -568,6 +602,28 @@ def _filter_id(value): name="regex_match", expected={"ret": "('a', 'd')"}, sls=""" + {% set result = 'abcd' | regex_match('^(.*)bc(.*)$') %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + Filter( + name="regex_match_no_match", + expected={"ret": "None"}, + sls=""" + {% set result = 'abcd' | regex_match('^(.*)BC(.*)$') %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + Filter( + name="regex_match_ignorecase", + expected={"ret": "('a', 'd')"}, + sls=""" {% set result = 'abcd' | regex_match('^(.*)BC(.*)$', ignorecase=True) %} test: module.run: @@ -575,6 +631,17 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="regex_match_multiline", + expected={"ret": "('foo1',)"}, + sls=""" + {% set result = 'foo1\nfoo2\n' | regex_match('(foo.$)', multiline=True) %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), Filter( name="regex_replace", expected={"ret": "lets__replace__spaces"}, @@ -586,10 +653,65 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="regex_replace_no_match", + expected={"ret": "lets replace spaces"}, + sls=r""" + {% set result = 'lets replace spaces' | regex_replace('\s+$', '__') %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + Filter( + name="regex_replace_ignorecase", + expected={"ret": "barbar"}, + sls=r""" + {% set result = 'FOO1foo2' | regex_replace('foo.', 'bar', ignorecase=True) %} + test: + module.run: + - name: test.echo + - 
text: {{ result }} + """, + ), + Filter( + name="regex_replace_multiline", + expected={"ret": "bar bar "}, + sls=r""" + {% set result = 'FOO1\nfoo2\n' | regex_replace('^foo.$', 'bar', ignorecase=True, multiline=True) %} + test: + module.run: + - name: test.echo + - text: '{{ result }}' + """, + ), Filter( name="regex_search", expected={"ret": "('a', 'd')"}, sls=""" + {% set result = 'abcd' | regex_search('^(.*)bc(.*)$') %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + Filter( + name="regex_search_no_match", + expected={"ret": "None"}, + sls=""" + {% set result = 'abcd' | regex_search('^(.*)BC(.*)$') %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + Filter( + name="regex_search_ignorecase", + expected={"ret": "('a', 'd')"}, + sls=""" {% set result = 'abcd' | regex_search('^(.*)BC(.*)$', ignorecase=True) %} test: module.run: @@ -597,6 +719,17 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="regex_search_multiline", + expected={"ret": "('foo1',)"}, + sls=""" + {% set result = 'foo1\nfoo2\n' | regex_search('(foo.$)', multiline=True) %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), Filter( name="sequence", expected={"ret": ["Salt Rocks!"]}, @@ -630,6 +763,17 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="symmetric_difference_hashable", + expected={"ret": [1, 3, 6]}, + sls=""" + {% set result = (1, 2, 3, 4) | symmetric_difference((2, 4, 6)) | list %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), Filter( name="to_bool", expected={"ret": True}, @@ -641,6 +785,39 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="to_bool_none", + expected={"ret": "False"}, + sls=""" + {% set result = 'None' | to_bool() %} + test: + module.run: + - name: test.echo + - text: '{{ result }}' + """, + ), + Filter( + name="to_bool_given_bool", + expected={"ret": "True"}, + sls=""" + {% set result = 
true | to_bool() %} + test: + module.run: + - name: test.echo + - text: '{{ result }}' + """, + ), + Filter( + name="to_bool_not_hashable", + expected={"ret": "True"}, + sls=""" + {% set result = ['hello', 'world'] | to_bool() %} + test: + module.run: + - name: test.echo + - text: '{{ result }}' + """, + ), Filter( name="union", expected={"ret": [1, 2, 3, 4, 6]}, @@ -652,6 +829,17 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="union_hashable", + expected={"ret": [1, 2, 3, 4, 6]}, + sls=""" + {% set result = (1, 2, 3, 4) | union((2, 4, 6)) | list %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), Filter( name="unique", expected={"ret": ["a", "b", "c"]}, @@ -928,11 +1116,117 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="raise", + expected={"ret": {"Question": "Quieres Café?"}}, + sls=""" + {{ raise('Custom Error') }} + """, + ), + Filter( + name="match", + expected={"ret": "match"}, + sls=""" + {% if 'a' is match('[a-b]') %} + {% set result = 'match' %} + {% else %} + {% set result = 'no_match' %} + {% endif %} + + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + Filter( + name="no_match", + expected={"ret": "no match"}, + sls=""" + {% if 'c' is match('[a-b]') %} + {% set result = 'match' %} + {% else %} + {% set result = 'no match' %} + {% endif %} + + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + Filter( + name="match_ignorecase", + expected={"ret": "match"}, + sls=""" + {% if 'A' is match('[a-b]', True) %} + {% set result = 'match' %} + {% else %} + {% set result = 'no_match' %} + {% endif %} + + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + Filter( + name="match_multiline", + expected={"ret": "match"}, + sls=""" + {% set ml_string = 'this is a multiline\nstring' %} + {% if ml_string is match('.*\n^string', False, True) %} + {% set result = 'match' %} + {% else %} + {% set result = 'no_match' %} 
+ {% endif %} + + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + Filter( + name="equalto", + expected={"ret": "equal"}, + sls=""" + {% if 1 is equalto(1) %} + {% set result = 'equal' %} + {% else %} + {% set result = 'not equal' %} + {% endif %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + Filter( + name="un_equalto", + expected={"ret": "not equal"}, + sls=""" + {% if 1 is equalto(2) %} + {% set result = 'equal' %} + {% else %} + {% set result = 'not equal' %} + {% endif %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), ], ids=_filter_id, ) def filter(request): - return request.param + _filter = request.param + if platform.is_fips_enabled(): + if _filter.name in ("md5", "random_hash"): + pytest.skip("Test cannot run on a FIPS enabled platform") + return _filter def test_filter(state, state_tree, filter, grains): @@ -940,7 +1234,11 @@ def test_filter(state, state_tree, filter, grains): with filter(state_tree): ret = state.sls("filter") log.debug("state.sls returned: %s", ret) - assert not ret.failed - for state_result in ret: - assert state_result.result is True - filter.assert_result(state_result.changes) + if filter.name == "raise": + assert ret.failed + assert "TemplateError" in ret.errors[0] + else: + assert not ret.failed + for state_result in ret: + assert state_result.result is True + filter.assert_result(state_result.changes) diff --git a/tests/pytests/functional/modules/test_mysql.py b/tests/pytests/functional/modules/test_mysql.py index c37a508588b..d920bbdbc03 100644 --- a/tests/pytests/functional/modules/test_mysql.py +++ b/tests/pytests/functional/modules/test_mysql.py @@ -19,6 +19,7 @@ pytestmark = [ pytest.mark.skipif( mysqlmod.MySQLdb is None, reason="No python mysql client installed." 
), + pytest.mark.skip_on_fips_enabled_platform, ] diff --git a/tests/pytests/functional/modules/test_pkg.py b/tests/pytests/functional/modules/test_pkg.py index fe05788a89f..aaaba7e0a46 100644 --- a/tests/pytests/functional/modules/test_pkg.py +++ b/tests/pytests/functional/modules/test_pkg.py @@ -64,6 +64,8 @@ def test_pkg(grains): elif grains["os_family"] == "RedHat": if grains["os"] == "VMware Photon OS": _pkg = "snoopy" + elif grains["osfinger"] == "Amazon Linux-2023": + return "dnf-utils" else: _pkg = "units" elif grains["os_family"] == "Debian": diff --git a/tests/pytests/functional/modules/test_x509_v2.py b/tests/pytests/functional/modules/test_x509_v2.py index 42b55d66a6c..dfb973af108 100644 --- a/tests/pytests/functional/modules/test_x509_v2.py +++ b/tests/pytests/functional/modules/test_x509_v2.py @@ -23,7 +23,8 @@ except ImportError: CRYPTOGRAPHY_VERSION = tuple(int(x) for x in cryptography.__version__.split(".")) pytestmark = [ - pytest.mark.skipif(HAS_LIBS is False, reason="Needs cryptography library") + pytest.mark.skip_on_fips_enabled_platform, + pytest.mark.skipif(HAS_LIBS is False, reason="Needs cryptography library"), ] diff --git a/tests/pytests/functional/netapi/rest_tornado/test_websockets_handler.py b/tests/pytests/functional/netapi/rest_tornado/test_websockets_handler.py index b7cf3a4a37c..77d4b512162 100644 --- a/tests/pytests/functional/netapi/rest_tornado/test_websockets_handler.py +++ b/tests/pytests/functional/netapi/rest_tornado/test_websockets_handler.py @@ -8,6 +8,7 @@ from tornado.websocket import websocket_connect import salt.netapi.rest_tornado as rest_tornado import salt.utils.json import salt.utils.yaml +from salt.config import DEFAULT_HASH_TYPE pytestmark = [ pytest.mark.destructive_test, @@ -41,7 +42,7 @@ async def test_websocket_handler_upgrade_to_websocket( ) token = salt.utils.json.loads(response.body)["return"][0]["token"] - url = "ws://127.0.0.1:{}/all_events/{}".format(http_server_port, token) + url = 
f"ws://127.0.0.1:{http_server_port}/all_events/{token}" request = HTTPRequest( url, headers={"Origin": "http://example.com", "Host": "example.com"} ) @@ -55,10 +56,12 @@ async def test_websocket_handler_bad_token(client_config, http_server, io_loop): A bad token should returns a 401 during a websocket connect """ token = "A" * len( - getattr(hashlib, client_config.get("hash_type", "md5"))().hexdigest() + getattr( + hashlib, client_config.get("hash_type", DEFAULT_HASH_TYPE) + )().hexdigest() ) - url = "ws://127.0.0.1:{}/all_events/{}".format(http_server.port, token) + url = f"ws://127.0.0.1:{http_server.port}/all_events/{token}" request = HTTPRequest( url, headers={"Origin": "http://example.com", "Host": "example.com"} ) @@ -79,7 +82,7 @@ async def test_websocket_handler_cors_origin_wildcard( ) token = salt.utils.json.loads(response.body)["return"][0]["token"] - url = "ws://127.0.0.1:{}/all_events/{}".format(http_server_port, token) + url = f"ws://127.0.0.1:{http_server_port}/all_events/{token}" request = HTTPRequest( url, headers={"Origin": "http://foo.bar", "Host": "example.com"} ) @@ -100,7 +103,7 @@ async def test_cors_origin_single( ) token = salt.utils.json.loads(response.body)["return"][0]["token"] - url = "ws://127.0.0.1:{}/all_events/{}".format(http_server_port, token) + url = f"ws://127.0.0.1:{http_server_port}/all_events/{token}" # Example.com should works request = HTTPRequest( @@ -132,7 +135,7 @@ async def test_cors_origin_multiple( ) token = salt.utils.json.loads(response.body)["return"][0]["token"] - url = "ws://127.0.0.1:{}/all_events/{}".format(http_server_port, token) + url = f"ws://127.0.0.1:{http_server_port}/all_events/{token}" # Example.com should works request = HTTPRequest( diff --git a/tests/pytests/functional/states/file/test_directory.py b/tests/pytests/functional/states/file/test_directory.py index bb56f5416f2..82a3f7f154c 100644 --- a/tests/pytests/functional/states/file/test_directory.py +++ 
b/tests/pytests/functional/states/file/test_directory.py @@ -113,6 +113,48 @@ def test_directory_max_depth(file, tmp_path): assert _mode == _get_oct_mode(untouched_dir) +@pytest.mark.skip_on_windows +def test_directory_children_only(file, tmp_path): + """ + file.directory with children_only=True + """ + + name = tmp_path / "directory_children_only_dir" + name.mkdir(0o0700) + + strayfile = name / "strayfile" + strayfile.touch() + os.chmod(strayfile, 0o700) + + straydir = name / "straydir" + straydir.mkdir(0o0700) + + # none of the children nor parent are currently set to the correct mode + ret = file.directory( + name=str(name), + file_mode="0644", + dir_mode="0755", + recurse=["mode"], + children_only=True, + ) + assert ret.result is True + + # Assert parent directory's mode remains unchanged + assert ( + oct(name.stat().st_mode)[-3:] == "700" + ), f"Expected mode 700 for {name}, got {oct(name.stat().st_mode)[-3:]}" + + # Assert child file's mode is changed + assert ( + oct(strayfile.stat().st_mode)[-3:] == "644" + ), f"Expected mode 644 for {strayfile}, got {oct(strayfile.stat().st_mode)[-3:]}" + + # Assert child directory's mode is changed + assert ( + oct(straydir.stat().st_mode)[-3:] == "755" + ), f"Expected mode 755 for {straydir}, got {oct(straydir.stat().st_mode)[-3:]}" + + def test_directory_clean(file, tmp_path): """ file.directory with clean=True diff --git a/tests/pytests/functional/states/pkgrepo/test_centos.py b/tests/pytests/functional/states/pkgrepo/test_centos.py index 6a84f96ac98..c02da519d2f 100644 --- a/tests/pytests/functional/states/pkgrepo/test_centos.py +++ b/tests/pytests/functional/states/pkgrepo/test_centos.py @@ -242,7 +242,11 @@ def copr_pkgrepo_with_comments_name(pkgrepo, grains): or grains["os"] == "VMware Photon OS" ): pytest.skip("copr plugin not installed on {} CI".format(grains["osfinger"])) - if grains["os"] in ("CentOS Stream", "AlmaLinux") and grains["osmajorrelease"] == 9: + if ( + grains["os"] in ("CentOS Stream", "AlmaLinux") 
+ and grains["osmajorrelease"] == 9 + or grains["osfinger"] == "Amazon Linux-2023" + ): pytest.skip("No repo for {} in test COPR yet".format(grains["osfinger"])) pkgrepo_name = "hello-copr" try: diff --git a/tests/pytests/functional/states/test_archive.py b/tests/pytests/functional/states/test_archive.py index 5b253f7b5bd..3e1a63442e0 100644 --- a/tests/pytests/functional/states/test_archive.py +++ b/tests/pytests/functional/states/test_archive.py @@ -41,7 +41,7 @@ class TestRequestHandler(http.server.SimpleHTTPRequestHandler): ) as reqfp: return_data = reqfp.read() # We're using this checksum as the etag to show file changes - checksum = hashlib.md5(return_data).hexdigest() + checksum = hashlib.sha256(return_data).hexdigest() if none_match == checksum: # Status code 304 Not Modified is returned if the file is unchanged status_code = 304 diff --git a/tests/pytests/functional/states/test_file.py b/tests/pytests/functional/states/test_file.py index 77643410621..007ce2b7fdc 100644 --- a/tests/pytests/functional/states/test_file.py +++ b/tests/pytests/functional/states/test_file.py @@ -41,7 +41,7 @@ class RequestHandler(http.server.SimpleHTTPRequestHandler): ) as reqfp: return_text = reqfp.read().encode("utf-8") # We're using this checksum as the etag to show file changes - checksum = hashlib.md5(return_text).hexdigest() + checksum = hashlib.sha256(return_text).hexdigest() if none_match == checksum: # Status code 304 Not Modified is returned if the file is unchanged status_code = 304 diff --git a/tests/pytests/functional/states/test_module.py b/tests/pytests/functional/states/test_module.py index b9afb4f0926..019c085c87b 100644 --- a/tests/pytests/functional/states/test_module.py +++ b/tests/pytests/functional/states/test_module.py @@ -10,21 +10,19 @@ log = logging.getLogger(__name__) @pytest.mark.core_test def test_issue_58763(tmp_path, modules, state_tree, caplog): - venv_dir = tmp_path / "issue-2028-pip-installed" - sls_contents = dedent( """ run_old: module.run: - 
name: test.random_hash - size: 10 - - hash_type: md5 + - hash_type: sha256 run_new: module.run: - test.random_hash: - size: 10 - - hash_type: md5 + - hash_type: sha256 """ ) with pytest.helpers.temp_file("issue-58763.sls", sls_contents, state_tree): @@ -42,14 +40,12 @@ def test_issue_58763(tmp_path, modules, state_tree, caplog): @pytest.mark.core_test def test_issue_58763_a(tmp_path, modules, state_tree, caplog): - venv_dir = tmp_path / "issue-2028-pip-installed" - sls_contents = dedent( """ test.random_hash: module.run: - size: 10 - - hash_type: md5 + - hash_type: sha256 """ ) with pytest.helpers.temp_file("issue-58763.sls", sls_contents, state_tree): @@ -68,8 +64,6 @@ def test_issue_58763_a(tmp_path, modules, state_tree, caplog): @pytest.mark.core_test def test_issue_58763_b(tmp_path, modules, state_tree, caplog): - venv_dir = tmp_path / "issue-2028-pip-installed" - sls_contents = dedent( """ test.ping: @@ -90,8 +84,6 @@ def test_issue_58763_b(tmp_path, modules, state_tree, caplog): @pytest.mark.core_test def test_issue_62988_a(tmp_path, modules, state_tree, caplog): - venv_dir = tmp_path / "issue-2028-pip-installed" - sls_contents = dedent( """ test_foo: @@ -101,7 +93,7 @@ def test_issue_62988_a(tmp_path, modules, state_tree, caplog): module.wait: - test.random_hash: - size: 10 - - hash_type: md5 + - hash_type: sha256 - watch: - test: test_foo """ @@ -120,8 +112,6 @@ def test_issue_62988_a(tmp_path, modules, state_tree, caplog): @pytest.mark.core_test def test_issue_62988_b(tmp_path, modules, state_tree, caplog): - venv_dir = tmp_path / "issue-2028-pip-installed" - sls_contents = dedent( """ test_foo: @@ -133,7 +123,7 @@ def test_issue_62988_b(tmp_path, modules, state_tree, caplog): module.wait: - test.random_hash: - size: 10 - - hash_type: md5 + - hash_type: sha256 """ ) with pytest.helpers.temp_file("issue-62988.sls", sls_contents, state_tree): diff --git a/tests/pytests/functional/states/test_pip_state.py b/tests/pytests/functional/states/test_pip_state.py 
index 3fc6ac7a1df..551c1472feb 100644 --- a/tests/pytests/functional/states/test_pip_state.py +++ b/tests/pytests/functional/states/test_pip_state.py @@ -25,6 +25,10 @@ except ImportError: log = logging.getLogger(__name__) +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + def _win_user_where(username, password, program): cmd = "cmd.exe /c where {}".format(program) diff --git a/tests/pytests/functional/states/test_pkg.py b/tests/pytests/functional/states/test_pkg.py index 04ff8eb51b2..f6e905c383a 100644 --- a/tests/pytests/functional/states/test_pkg.py +++ b/tests/pytests/functional/states/test_pkg.py @@ -43,7 +43,10 @@ def PKG_TARGETS(grains): if grains["os"] == "Windows": _PKG_TARGETS = ["vlc", "putty"] elif grains["os"] == "Amazon": - _PKG_TARGETS = ["lynx", "gnuplot"] + if grains["osfinger"] == "Amazon Linux-2023": + _PKG_TARGETS = ["lynx", "gnuplot-minimal"] + else: + _PKG_TARGETS = ["lynx", "gnuplot"] elif grains["os_family"] == "RedHat": if grains["os"] == "VMware Photon OS": if grains["osmajorrelease"] >= 5: diff --git a/tests/pytests/functional/states/test_virtualenv_mod.py b/tests/pytests/functional/states/test_virtualenv_mod.py index 7432152aced..af08c5dec21 100644 --- a/tests/pytests/functional/states/test_virtualenv_mod.py +++ b/tests/pytests/functional/states/test_virtualenv_mod.py @@ -9,6 +9,7 @@ log = logging.getLogger(__name__) pytestmark = [ pytest.mark.slow_test, + pytest.mark.skip_on_fips_enabled_platform, pytest.mark.skip_if_binaries_missing(*KNOWN_BINARY_NAMES, check_all=False), ] diff --git a/tests/pytests/functional/states/test_x509_v2.py b/tests/pytests/functional/states/test_x509_v2.py index 7409e6683ed..3cd09d7d840 100644 --- a/tests/pytests/functional/states/test_x509_v2.py +++ b/tests/pytests/functional/states/test_x509_v2.py @@ -1,5 +1,5 @@ import base64 -from pathlib import Path +import pathlib import pytest @@ -26,6 +26,7 @@ CRYPTOGRAPHY_VERSION = tuple(int(x) for x in cryptography.__version__.split(".") pytestmark = [ 
pytest.mark.slow_test, pytest.mark.skipif(HAS_LIBS is False, reason="Needs cryptography library"), + pytest.mark.skip_on_fips_enabled_platform, ] @@ -703,7 +704,7 @@ def existing_pk(x509, pk_args, request): @pytest.fixture(params=["existing_cert"]) def existing_symlink(request): existing = request.getfixturevalue(request.param) - test_file = Path(existing).with_name("symlink") + test_file = pathlib.Path(existing).with_name("symlink") test_file.symlink_to(existing) yield test_file # cleanup is done by tmp_path @@ -884,7 +885,7 @@ def test_certificate_managed_test_true(x509, cert_args, rsa_privkey, ca_key): ret = x509.certificate_managed(**cert_args) assert ret.result is None assert ret.changes - assert not Path(cert_args["name"]).exists() + assert not pathlib.Path(cert_args["name"]).exists() @pytest.mark.usefixtures("existing_cert") @@ -1324,7 +1325,7 @@ def test_certificate_managed_file_managed_create_false( ret = x509.certificate_managed(**cert_args) assert ret.result is True assert not ret.changes - assert not Path(cert_args["name"]).exists() + assert not pathlib.Path(cert_args["name"]).exists() @pytest.mark.usefixtures("existing_cert") @@ -1397,7 +1398,7 @@ def test_certificate_managed_follow_symlinks( """ cert_args["name"] = str(existing_symlink) cert_args["encoding"] = encoding - assert Path(cert_args["name"]).is_symlink() + assert pathlib.Path(cert_args["name"]).is_symlink() cert_args["follow_symlinks"] = follow ret = x509.certificate_managed(**cert_args) assert bool(ret.changes) == (not follow) @@ -1417,13 +1418,13 @@ def test_certificate_managed_follow_symlinks_changes( the checking of the existing file is performed by the x509 module """ cert_args["name"] = str(existing_symlink) - assert Path(cert_args["name"]).is_symlink() + assert pathlib.Path(cert_args["name"]).is_symlink() cert_args["follow_symlinks"] = follow cert_args["encoding"] = encoding cert_args["CN"] = "new" ret = x509.certificate_managed(**cert_args) assert ret.changes - assert 
Path(ret.name).is_symlink() == follow + assert pathlib.Path(ret.name).is_symlink() == follow @pytest.mark.parametrize("encoding", ["pem", "der"]) @@ -1436,7 +1437,7 @@ def test_certificate_managed_file_managed_error( cert_args["private_key"] = rsa_privkey cert_args["makedirs"] = False cert_args["encoding"] = encoding - cert_args["name"] = str(Path(cert_args["name"]).parent / "missing" / "cert") + cert_args["name"] = str(pathlib.Path(cert_args["name"]).parent / "missing" / "cert") ret = x509.certificate_managed(**cert_args) assert ret.result is False assert "Could not create file, see file.managed output" in ret.comment @@ -1504,7 +1505,7 @@ def test_crl_managed_test_true(x509, crl_args, crl_revoked): assert ret.result is None assert ret.changes assert ret.result is None - assert not Path(crl_args["name"]).exists() + assert not pathlib.Path(crl_args["name"]).exists() @pytest.mark.usefixtures("existing_crl") @@ -1708,7 +1709,7 @@ def test_crl_managed_file_managed_create_false(x509, crl_args): ret = x509.crl_managed(**crl_args) assert ret.result is True assert not ret.changes - assert not Path(crl_args["name"]).exists() + assert not pathlib.Path(crl_args["name"]).exists() @pytest.mark.usefixtures("existing_crl") @@ -1782,7 +1783,7 @@ def test_crl_managed_follow_symlinks( """ crl_args["name"] = str(existing_symlink) crl_args["encoding"] = encoding - assert Path(crl_args["name"]).is_symlink() + assert pathlib.Path(crl_args["name"]).is_symlink() crl_args["follow_symlinks"] = follow ret = x509.crl_managed(**crl_args) assert bool(ret.changes) == (not follow) @@ -1802,13 +1803,13 @@ def test_crl_managed_follow_symlinks_changes( the checking of the existing file is performed by the x509 module """ crl_args["name"] = str(existing_symlink) - assert Path(crl_args["name"]).is_symlink() + assert pathlib.Path(crl_args["name"]).is_symlink() crl_args["follow_symlinks"] = follow crl_args["encoding"] = encoding crl_args["revoked"] = crl_revoked ret = x509.crl_managed(**crl_args) 
assert ret.changes - assert Path(ret.name).is_symlink() == follow + assert pathlib.Path(ret.name).is_symlink() == follow @pytest.mark.parametrize("encoding", ["pem", "der"]) @@ -1818,7 +1819,7 @@ def test_crl_managed_file_managed_error(x509, crl_args, encoding): """ crl_args["makedirs"] = False crl_args["encoding"] = encoding - crl_args["name"] = str(Path(crl_args["name"]).parent / "missing" / "crl") + crl_args["name"] = str(pathlib.Path(crl_args["name"]).parent / "missing" / "crl") ret = x509.crl_managed(**crl_args) assert ret.result is False assert "Could not create file, see file.managed output" in ret.comment @@ -1866,7 +1867,7 @@ def test_csr_managed_test_true(x509, csr_args, rsa_privkey): ret = x509.csr_managed(**csr_args) assert ret.result is None assert ret.changes - assert not Path(csr_args["name"]).exists() + assert not pathlib.Path(csr_args["name"]).exists() @pytest.mark.usefixtures("existing_csr") @@ -2002,7 +2003,7 @@ def test_csr_managed_file_managed_create_false(x509, csr_args): ret = x509.csr_managed(**csr_args) assert ret.result is True assert not ret.changes - assert not Path(csr_args["name"]).exists() + assert not pathlib.Path(csr_args["name"]).exists() @pytest.mark.usefixtures("existing_csr") @@ -2066,12 +2067,12 @@ def test_csr_managed_follow_symlinks( the checking of the existing file is performed by the x509 module """ csr_args["name"] = str(existing_symlink) - assert Path(csr_args["name"]).is_symlink() + assert pathlib.Path(csr_args["name"]).is_symlink() csr_args["follow_symlinks"] = follow csr_args["encoding"] = encoding ret = x509.csr_managed(**csr_args) assert bool(ret.changes) == (not follow) - assert Path(ret.name).is_symlink() == follow + assert pathlib.Path(ret.name).is_symlink() == follow @pytest.mark.parametrize( @@ -2088,14 +2089,14 @@ def test_csr_managed_follow_symlinks_changes( the checking of the existing file is performed by the x509 module """ csr_args["name"] = str(existing_symlink) - assert 
Path(csr_args["name"]).is_symlink() + assert pathlib.Path(csr_args["name"]).is_symlink() csr_args["follow_symlinks"] = follow csr_args["encoding"] = encoding csr_args["CN"] = "new" ret = x509.csr_managed(**csr_args) assert ret.result assert ret.changes - assert Path(ret.name).is_symlink() == follow + assert pathlib.Path(ret.name).is_symlink() == follow @pytest.mark.parametrize("encoding", ["pem", "der"]) @@ -2105,7 +2106,7 @@ def test_csr_managed_file_managed_error(x509, csr_args, encoding): """ csr_args["makedirs"] = False csr_args["encoding"] = encoding - csr_args["name"] = str(Path(csr_args["name"]).parent / "missing" / "csr") + csr_args["name"] = str(pathlib.Path(csr_args["name"]).parent / "missing" / "csr") ret = x509.csr_managed(**csr_args) assert ret.result is False assert "Could not create file, see file.managed output" in ret.comment @@ -2312,7 +2313,7 @@ def test_private_key_managed_file_managed_create_false(x509, pk_args): ret = x509.private_key_managed(**pk_args) assert ret.result is True assert not ret.changes - assert not Path(pk_args["name"]).exists() + assert not pathlib.Path(pk_args["name"]).exists() @pytest.mark.usefixtures("existing_pk") @@ -2361,7 +2362,7 @@ def test_private_key_managed_follow_symlinks( """ pk_args["name"] = str(existing_symlink) pk_args["encoding"] = encoding - assert Path(pk_args["name"]).is_symlink() + assert pathlib.Path(pk_args["name"]).is_symlink() pk_args["follow_symlinks"] = follow ret = x509.private_key_managed(**pk_args) assert bool(ret.changes) == (not follow) @@ -2381,13 +2382,13 @@ def test_private_key_managed_follow_symlinks_changes( the checking of the existing file is performed by the x509 module """ pk_args["name"] = str(existing_symlink) - assert Path(pk_args["name"]).is_symlink() + assert pathlib.Path(pk_args["name"]).is_symlink() pk_args["follow_symlinks"] = follow pk_args["encoding"] = encoding pk_args["algo"] = "ec" ret = x509.private_key_managed(**pk_args) assert ret.changes - assert 
Path(ret.name).is_symlink() == follow + assert pathlib.Path(ret.name).is_symlink() == follow @pytest.mark.usefixtures("existing_pk") @@ -2415,7 +2416,7 @@ def test_private_key_managed_file_managed_error(x509, pk_args, encoding): """ pk_args["makedirs"] = False pk_args["encoding"] = encoding - pk_args["name"] = str(Path(pk_args["name"]).parent / "missing" / "pk") + pk_args["name"] = str(pathlib.Path(pk_args["name"]).parent / "missing" / "pk") ret = x509.private_key_managed(**pk_args) assert ret.result is False assert "Could not create file, see file.managed output" in ret.comment @@ -2693,7 +2694,7 @@ def _assert_cert_basic( def _get_cert(cert, encoding="pem", passphrase=None): try: - p = Path(cert) + p = pathlib.Path(cert) if p.exists(): cert = p.read_bytes() except Exception: # pylint: disable=broad-except @@ -2775,7 +2776,7 @@ def _assert_not_changed(ret): def _get_crl(crl, encoding="pem"): try: - p = Path(crl) + p = pathlib.Path(crl) if p.exists(): crl = p.read_bytes() except Exception: # pylint: disable=broad-except @@ -2793,7 +2794,7 @@ def _get_crl(crl, encoding="pem"): def _get_csr(csr, encoding="pem"): try: - p = Path(csr) + p = pathlib.Path(csr) if p.exists(): csr = p.read_bytes() except Exception: # pylint: disable=broad-except @@ -2811,7 +2812,7 @@ def _get_csr(csr, encoding="pem"): def _get_privkey(pk, encoding="pem", passphrase=None): try: - p = Path(pk) + p = pathlib.Path(pk) if p.exists(): pk = p.read_bytes() except Exception: # pylint: disable=broad-except diff --git a/tests/pytests/functional/states/win_lgpo/test_adv_audit_settings_state.py b/tests/pytests/functional/states/win_lgpo/test_adv_audit_settings_state.py index 5a0600bba57..70b1638a849 100644 --- a/tests/pytests/functional/states/win_lgpo/test_adv_audit_settings_state.py +++ b/tests/pytests/functional/states/win_lgpo/test_adv_audit_settings_state.py @@ -20,11 +20,13 @@ def configure_loader_modules(minion_opts, modules): "__opts__": minion_opts, "__salt__": modules, "__utils__": utils, + 
"__context__": {}, }, win_lgpo_module: { "__opts__": minion_opts, "__salt__": modules, "__utils__": utils, + "__context__": {}, }, } diff --git a/tests/pytests/functional/transport/ipc/test_pub_server_channel.py b/tests/pytests/functional/transport/ipc/test_pub_server_channel.py index 431830d4709..85d4cc70da1 100644 --- a/tests/pytests/functional/transport/ipc/test_pub_server_channel.py +++ b/tests/pytests/functional/transport/ipc/test_pub_server_channel.py @@ -14,9 +14,10 @@ log = logging.getLogger(__name__) pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, pytest.mark.skip_on_spawning_platform( reason="These tests are currently broken on spawning platforms. Need to be rewritten.", - ) + ), ] diff --git a/tests/pytests/functional/transport/tcp/test_load_balanced_server.py b/tests/pytests/functional/transport/tcp/test_load_balanced_server.py index 5b312d60e50..249d2eec7db 100644 --- a/tests/pytests/functional/transport/tcp/test_load_balanced_server.py +++ b/tests/pytests/functional/transport/tcp/test_load_balanced_server.py @@ -13,6 +13,7 @@ pytestmark = [ ] +@pytest.mark.skip_on_fips_enabled_platform def test_tcp_load_balancer_server(master_opts, io_loop): messages = [] @@ -27,7 +28,7 @@ def test_tcp_load_balancer_server(master_opts, io_loop): def run_loop(): try: io_loop.start() - except Exception as exc: + except Exception as exc: # pylint: disable=broad-except print(f"Caught exeption {exc}") thread = threading.Thread(target=server.run) @@ -50,7 +51,7 @@ def test_tcp_load_balancer_server(master_opts, io_loop): if time.monotonic() - start > 30: break - io_loop.run_sync(lambda: check_test()) + io_loop.run_sync(lambda: check_test()) # pylint: disable=unnecessary-lambda try: if time.monotonic() - start > 30: diff --git a/tests/pytests/functional/transport/zeromq/test_pub_server_channel.py b/tests/pytests/functional/transport/zeromq/test_pub_server_channel.py index 80e83087b84..596a7a0fb87 100644 --- 
a/tests/pytests/functional/transport/zeromq/test_pub_server_channel.py +++ b/tests/pytests/functional/transport/zeromq/test_pub_server_channel.py @@ -13,6 +13,7 @@ log = logging.getLogger(__name__) pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, pytest.mark.skip_on_freebsd(reason="Temporarily skipped on FreeBSD."), pytest.mark.skip_on_spawning_platform( reason="These tests are currently broken on spawning platforms. Need to be rewritten.", diff --git a/tests/pytests/functional/utils/pyinstaller/__init__.py b/tests/pytests/functional/utils/pyinstaller/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/tests/pytests/functional/utils/pyinstaller/rthooks/__init__.py b/tests/pytests/functional/utils/pyinstaller/rthooks/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/tests/pytests/functional/utils/pyinstaller/rthooks/test_salt_utils_vt_terminal.py b/tests/pytests/functional/utils/pyinstaller/rthooks/test_salt_utils_vt_terminal.py deleted file mode 100644 index c45b5730a8e..00000000000 --- a/tests/pytests/functional/utils/pyinstaller/rthooks/test_salt_utils_vt_terminal.py +++ /dev/null @@ -1,142 +0,0 @@ -import json -import os -import sys - -import pytest - -import salt.utils.pyinstaller.rthooks._overrides as overrides -from tests.support import mock -from tests.support.helpers import PatchedEnviron - - -@pytest.fixture(params=("LD_LIBRARY_PATH", "LIBPATH")) -def envvar(request): - return request.param - - -@pytest.fixture -def meipass(envvar): - with mock.patch("salt.utils.pyinstaller.rthooks._overrides.sys") as patched_sys: - patched_sys._MEIPASS = "{}_VALUE".format(envvar) - assert overrides.sys._MEIPASS == "{}_VALUE".format(envvar) - yield "{}_VALUE".format(envvar) - assert not hasattr(sys, "_MEIPASS") - assert not hasattr(overrides.sys, "_MEIPASS") - - -def test_vt_terminal_environ_cleanup_original(envvar, meipass): - orig_envvar = "{}_ORIG".format(envvar) - with PatchedEnviron(**{orig_envvar: 
meipass}): - original_env = dict(os.environ) - assert orig_envvar in original_env - instance = overrides.PyinstallerTerminal( - [ - sys.executable, - "-c", - "import os, json; print(json.dumps(dict(os.environ)))", - ], - stream_stdout=False, - stream_stderr=False, - ) - buffer_o = buffer_e = "" - while instance.has_unread_data: - stdout, stderr = instance.recv() - if stdout: - buffer_o += stdout - if stderr: - buffer_e += stderr - instance.terminate() - - assert instance.exitstatus == 0 - returned_env = json.loads(buffer_o) - assert returned_env != original_env - assert envvar in returned_env - assert orig_envvar not in returned_env - assert returned_env[envvar] == meipass - - -def test_vt_terminal_environ_cleanup_original_passed_directly(envvar, meipass): - orig_envvar = "{}_ORIG".format(envvar) - env = { - orig_envvar: meipass, - } - original_env = dict(os.environ) - - instance = overrides.PyinstallerTerminal( - [sys.executable, "-c", "import os, json; print(json.dumps(dict(os.environ)))"], - env=env.copy(), - stream_stdout=False, - stream_stderr=False, - ) - buffer_o = buffer_e = "" - while instance.has_unread_data: - stdout, stderr = instance.recv() - if stdout: - buffer_o += stdout - if stderr: - buffer_e += stderr - instance.terminate() - - assert instance.exitstatus == 0 - returned_env = json.loads(buffer_o) - assert returned_env != original_env - assert envvar in returned_env - assert orig_envvar not in returned_env - assert returned_env[envvar] == meipass - - -def test_vt_terminal_environ_cleanup(envvar, meipass): - with PatchedEnviron(**{envvar: meipass}): - original_env = dict(os.environ) - assert envvar in original_env - instance = overrides.PyinstallerTerminal( - [ - sys.executable, - "-c", - "import os, json; print(json.dumps(dict(os.environ)))", - ], - stream_stdout=False, - stream_stderr=False, - ) - buffer_o = buffer_e = "" - while instance.has_unread_data: - stdout, stderr = instance.recv() - if stdout: - buffer_o += stdout - if stderr: - buffer_e 
+= stderr - instance.terminate() - - assert instance.exitstatus == 0 - returned_env = json.loads(buffer_o) - assert returned_env != original_env - assert envvar in returned_env - assert returned_env[envvar] == "" - - -def test_vt_terminal_environ_cleanup_passed_directly_not_removed(envvar, meipass): - env = { - envvar: envvar, - } - original_env = dict(os.environ) - - instance = overrides.PyinstallerTerminal( - [sys.executable, "-c", "import os, json; print(json.dumps(dict(os.environ)))"], - env=env.copy(), - stream_stdout=False, - stream_stderr=False, - ) - buffer_o = buffer_e = "" - while instance.has_unread_data: - stdout, stderr = instance.recv() - if stdout: - buffer_o += stdout - if stderr: - buffer_e += stderr - instance.terminate() - - assert instance.exitstatus == 0 - returned_env = json.loads(buffer_o) - assert returned_env != original_env - assert envvar in returned_env - assert returned_env[envvar] == envvar diff --git a/tests/pytests/functional/utils/pyinstaller/rthooks/test_subprocess.py b/tests/pytests/functional/utils/pyinstaller/rthooks/test_subprocess.py deleted file mode 100644 index 836e392d016..00000000000 --- a/tests/pytests/functional/utils/pyinstaller/rthooks/test_subprocess.py +++ /dev/null @@ -1,111 +0,0 @@ -import json -import os -import subprocess -import sys - -import pytest - -import salt.utils.pyinstaller.rthooks._overrides as overrides -from tests.support import mock -from tests.support.helpers import PatchedEnviron - - -@pytest.fixture(params=("LD_LIBRARY_PATH", "LIBPATH")) -def envvar(request): - return request.param - - -@pytest.fixture -def meipass(envvar): - with mock.patch("salt.utils.pyinstaller.rthooks._overrides.sys") as patched_sys: - patched_sys._MEIPASS = "{}_VALUE".format(envvar) - assert overrides.sys._MEIPASS == "{}_VALUE".format(envvar) - yield "{}_VALUE".format(envvar) - assert not hasattr(sys, "_MEIPASS") - assert not hasattr(overrides.sys, "_MEIPASS") - - -def test_subprocess_popen_environ_cleanup_original(envvar, 
meipass): - orig_envvar = "{}_ORIG".format(envvar) - with PatchedEnviron(**{orig_envvar: meipass}): - original_env = dict(os.environ) - assert orig_envvar in original_env - instance = overrides.PyinstallerPopen( - [ - sys.executable, - "-c", - "import os, json; print(json.dumps(dict(os.environ)))", - ], - stdout=subprocess.PIPE, - universal_newlines=True, - ) - stdout, _ = instance.communicate() - assert instance.returncode == 0 - returned_env = json.loads(stdout) - assert returned_env != original_env - assert envvar in returned_env - assert orig_envvar not in returned_env - assert returned_env[envvar] == meipass - - -def test_subprocess_popen_environ_cleanup_original_passed_directly(envvar, meipass): - orig_envvar = "{}_ORIG".format(envvar) - env = { - orig_envvar: meipass, - } - original_env = dict(os.environ) - - instance = overrides.PyinstallerPopen( - [sys.executable, "-c", "import os, json; print(json.dumps(dict(os.environ)))"], - env=env.copy(), - stdout=subprocess.PIPE, - universal_newlines=True, - ) - stdout, _ = instance.communicate() - assert instance.returncode == 0 - returned_env = json.loads(stdout) - assert returned_env != original_env - assert envvar in returned_env - assert orig_envvar not in returned_env - assert returned_env[envvar] == meipass - - -def test_subprocess_popen_environ_cleanup(envvar, meipass): - with PatchedEnviron(**{envvar: meipass}): - original_env = dict(os.environ) - assert envvar in original_env - instance = overrides.PyinstallerPopen( - [ - sys.executable, - "-c", - "import os, json; print(json.dumps(dict(os.environ)))", - ], - stdout=subprocess.PIPE, - universal_newlines=True, - ) - stdout, _ = instance.communicate() - assert instance.returncode == 0 - returned_env = json.loads(stdout) - assert returned_env != original_env - assert envvar in returned_env - assert returned_env[envvar] == "" - - -def test_subprocess_popen_environ_cleanup_passed_directly_not_removed(envvar, meipass): - env = { - envvar: envvar, - } - 
original_env = dict(os.environ) - - instance = overrides.PyinstallerPopen( - [sys.executable, "-c", "import os, json; print(json.dumps(dict(os.environ)))"], - env=env.copy(), - stdout=subprocess.PIPE, - universal_newlines=True, - ) - stdout, _ = instance.communicate() - assert instance.returncode == 0 - returned_env = json.loads(stdout) - assert returned_env != original_env - assert envvar in returned_env - assert returned_env[envvar] == envvar diff --git a/tests/pytests/integration/cli/test_salt.py b/tests/pytests/integration/cli/test_salt.py index fa28e3bce3d..231e8b7dc4b 100644 --- a/tests/pytests/integration/cli/test_salt.py +++ b/tests/pytests/integration/cli/test_salt.py @@ -24,6 +24,19 @@ pytestmark = [ ] +@pytest.fixture +def salt_minion_2(salt_master): + """ + A running salt-minion fixture + """ + factory = salt_master.salt_minion_daemon( + "minion-2", + extra_cli_arguments_after_first_start_failure=["--log-level=info"], + ) + with factory.started(start_timeout=120): + yield factory + + def test_context_retcode_salt(salt_cli, salt_minion): """ Test that a nonzero retcode set in the context dunder will cause the @@ -234,3 +247,25 @@ def test_interrupt_on_long_running_job(salt_cli, salt_master, salt_minion): assert "Exiting gracefully on Ctrl-c" in ret.stderr assert "Exception ignored in" not in ret.stderr assert "This job's jid is" in ret.stderr + + +def test_minion_65400(salt_cli, salt_minion, salt_minion_2, salt_master): + """ + Ensure correct exit status when salt CLI starts correctly. 
+ + """ + state = f""" + custom_test_state: + test.configurable_test_state: + - name: example + - changes: True + - result: False + - comment: 65400 regression test + """ + with salt_master.state_tree.base.temp_file("test_65400.sls", state): + ret = salt_cli.run("state.sls", "test_65400", minion_tgt="*") + assert isinstance(ret.data, dict) + assert len(ret.data.keys()) == 2 + for minion_id in ret.data: + assert ret.data[minion_id] != "Error: test.configurable_test_state" + assert isinstance(ret.data[minion_id], dict) diff --git a/tests/pytests/integration/daemons/test_memory_leak.py b/tests/pytests/integration/daemons/test_memory_leak.py index 1b782760418..fb608fc1864 100644 --- a/tests/pytests/integration/daemons/test_memory_leak.py +++ b/tests/pytests/integration/daemons/test_memory_leak.py @@ -44,6 +44,7 @@ def file_add_delete_sls(testfile_path, base_env_state_tree_root_dir): yield sls_name +@pytest.mark.skip_on_fips_enabled_platform @pytest.mark.skip_on_darwin(reason="MacOS is a spawning platform, won't work") @pytest.mark.flaky(max_runs=4) def test_memory_leak(salt_cli, salt_minion, file_add_delete_sls): diff --git a/tests/pytests/integration/modules/test_jinja.py b/tests/pytests/integration/modules/test_jinja.py new file mode 100644 index 00000000000..0ae98dbf7dc --- /dev/null +++ b/tests/pytests/integration/modules/test_jinja.py @@ -0,0 +1,64 @@ +""" +Test the jinja module +""" + +import os + +import salt.utils.files +import salt.utils.json +import salt.utils.yaml +from tests.support.runtests import RUNTIME_VARS + + +def _path(name, absolute=False): + path = os.path.join("modules", "jinja", name) + if absolute: + return os.path.join(RUNTIME_VARS.BASE_FILES, path) + else: + return path + + +def test_import_json(salt_cli, salt_minion): + json_file = "osarchmap.json" + ret = salt_cli.run("jinja.import_json", _path(json_file), minion_tgt=salt_minion.id) + with salt.utils.files.fopen(_path(json_file, absolute=True)) as fh_: + assert salt.utils.json.load(fh_) == 
ret.data + + +def test_import_yaml(salt_cli, salt_minion): + yaml_file = "defaults.yaml" + ret = salt_cli.run("jinja.import_yaml", _path(yaml_file), minion_tgt=salt_minion.id) + with salt.utils.files.fopen(_path(yaml_file, absolute=True)) as fh_: + assert salt.utils.yaml.safe_load(fh_) == ret.data + + +def test_load_map(grains, salt_cli, salt_minion): + ret = salt_cli.run( + "jinja.load_map", _path("map.jinja"), "template", minion_tgt=salt_minion.id + ) + + assert isinstance( + ret.data, dict + ), "failed to return dictionary from jinja.load_map: {}".format(ret) + + with salt.utils.files.fopen(_path("defaults.yaml", absolute=True)) as fh_: + defaults = salt.utils.yaml.safe_load(fh_) + with salt.utils.files.fopen(_path("osarchmap.json", absolute=True)) as fh_: + osarchmap = salt.utils.json.load(fh_) + with salt.utils.files.fopen(_path("osfamilymap.yaml", absolute=True)) as fh_: + osfamilymap = salt.utils.yaml.safe_load(fh_) + with salt.utils.files.fopen(_path("osmap.yaml", absolute=True)) as fh_: + osmap = salt.utils.yaml.safe_load(fh_) + with salt.utils.files.fopen(_path("osfingermap.yaml", absolute=True)) as fh_: + osfingermap = salt.utils.yaml.safe_load(fh_) + + assert ret.data.get("arch") == osarchmap.get(grains["osarch"], {}).get("arch") + assert ret.data.get("config") == osfingermap.get(grains["osfinger"], {}).get( + "config", + osmap.get(grains["os"], {}).get( + "config", + osfamilymap.get(grains["os_family"], {}).get( + "config", defaults.get("template").get("config") + ), + ), + ) diff --git a/tests/pytests/integration/renderers/test_jinja.py b/tests/pytests/integration/renderers/test_jinja.py new file mode 100644 index 00000000000..1a902e2047e --- /dev/null +++ b/tests/pytests/integration/renderers/test_jinja.py @@ -0,0 +1,36 @@ +import pytest + +import salt.utils.files + +pytestmark = [ + pytest.mark.slow_test, +] + + +def test_issue_54765_salt(tmp_path, salt_cli, salt_minion): + file_path = str(tmp_path / "issue-54765") + ret = salt_cli.run( + 
"state.sls", + mods="issue-54765", + pillar={"file_path": file_path}, + minion_tgt=salt_minion.id, + ).data + key = "file_|-issue-54765_|-{}_|-managed".format(file_path) + assert key in ret + assert ret[key]["result"] is True + with salt.utils.files.fopen(file_path, "r") as fp: + assert fp.read().strip() == "bar" + + +def test_issue_54765_call(tmp_path, salt_call_cli): + file_path = str(tmp_path / "issue-54765") + ret = salt_call_cli.run( + "--local", + "state.apply", + "issue-54765", + pillar=f"{{'file_path': '{file_path}'}}", + ) + key = "file_|-issue-54765_|-{}_|-managed".format(file_path) + assert ret.data[key]["result"] is True + with salt.utils.files.fopen(file_path, "r") as fp: + assert fp.read().strip() == "bar" diff --git a/tests/pytests/integration/runners/test_saltutil.py b/tests/pytests/integration/runners/test_saltutil.py index c04ae1a519f..e13d38fb142 100644 --- a/tests/pytests/integration/runners/test_saltutil.py +++ b/tests/pytests/integration/runners/test_saltutil.py @@ -100,6 +100,34 @@ def world(): assert f"{module_type}.hello" in ret.stdout +def test_sync_refresh_false( + module_type, module_sync_functions, salt_run_cli, salt_minion, salt_master +): + """ + Ensure modules are synced when various sync functions are called + """ + module_name = f"hello_sync_{module_type}" + module_contents = """ +def __virtual__(): + return "hello" + +def world(): + return "world" +""" + + test_moduledir = salt_master.state_tree.base.write_path / f"_{module_type}" + test_moduledir.mkdir(parents=True, exist_ok=True) + module_tempfile = salt_master.state_tree.base.temp_file( + f"_{module_type}/{module_name}.py", module_contents + ) + + with module_tempfile: + salt_cmd = f"saltutil.sync_{module_sync_functions[module_type]}" + ret = salt_run_cli.run(salt_cmd, saltenv=None, refresh=False) + assert ret.returncode == 0 + assert f"saltutil.sync_{module_sync_functions[module_type]}" in ret.stdout + + def _write_module_dir_and_file(module_type, salt_minion, salt_master): 
""" Write out dummy module to appropriate module location diff --git a/tests/pytests/integration/ssh/test_pre_flight.py b/tests/pytests/integration/ssh/test_pre_flight.py index 09c65d29430..c2fc14094e8 100644 --- a/tests/pytests/integration/ssh/test_pre_flight.py +++ b/tests/pytests/integration/ssh/test_pre_flight.py @@ -19,7 +19,9 @@ from saltfactories.utils import random_string import salt.utils.files -pytestmark = pytest.mark.skip_on_windows(reason="Salt-ssh not available on Windows") +pytestmark = [ + pytest.mark.skip_on_windows(reason="Salt-ssh not available on Windows"), +] def _custom_roster(roster_file, roster_data): @@ -33,33 +35,39 @@ def _custom_roster(roster_file, roster_data): @pytest.fixture def _create_roster(salt_ssh_roster_file, tmp_path): - ret = {} - ret["roster"] = salt_ssh_roster_file - ret["data"] = {"ssh_pre_flight": str(tmp_path / "ssh_pre_flight.sh")} - ret["test_script"] = str(tmp_path / "test-pre-flight-script-worked.txt") - ret["thin_dir"] = tmp_path / "thin_dir" + thin_dir = tmp_path / "thin-dir" + ret = { + "roster": salt_ssh_roster_file, + "data": { + "ssh_pre_flight": str(tmp_path / "ssh_pre_flight.sh"), + }, + "test_script": str(tmp_path / "test-pre-flight-script-worked.txt"), + "thin_dir": str(thin_dir), + } with salt.utils.files.fopen(salt_ssh_roster_file, "r") as fp: data = salt.utils.yaml.safe_load(fp) + pre_flight_script = ret["data"]["ssh_pre_flight"] data["localhost"]["ssh_pre_flight"] = pre_flight_script - data["localhost"]["thin_dir"] = str(ret["thin_dir"]) + data["localhost"]["thin_dir"] = ret["thin_dir"] with salt.utils.files.fopen(salt_ssh_roster_file, "w") as fp: yaml.safe_dump(data, fp) with salt.utils.files.fopen(pre_flight_script, "w") as fp: fp.write("touch {}".format(ret["test_script"])) - yield ret - if ret["thin_dir"].exists(): - shutil.rmtree(ret["thin_dir"]) + try: + yield ret + finally: + if thin_dir.exists(): + shutil.rmtree(thin_dir) @pytest.mark.slow_test def test_ssh_pre_flight(salt_ssh_cli, caplog, 
_create_roster): """ - test ssh when ssh_pre_flight is set - ensure the script runs successfully + test ssh when ssh_pre_flight is set ensure the script runs successfully """ ret = salt_ssh_cli.run("test.ping") assert ret.returncode == 0 @@ -70,8 +78,7 @@ def test_ssh_pre_flight(salt_ssh_cli, caplog, _create_roster): @pytest.mark.slow_test def test_ssh_run_pre_flight(salt_ssh_cli, _create_roster): """ - test ssh when --pre-flight is passed to salt-ssh - to ensure the script runs successfully + test ssh when --pre-flight is passed to salt-ssh to ensure the script runs successfully """ # make sure we previously ran a command so the thin dir exists ret = salt_ssh_cli.run("test.ping") @@ -85,10 +92,7 @@ def test_ssh_run_pre_flight(salt_ssh_cli, _create_roster): assert not pathlib.Path(_create_roster["test_script"]).exists() # Now ensure - ret = salt_ssh_cli.run( - "test.ping", - "--pre-flight", - ) + ret = salt_ssh_cli.run("test.ping", "--pre-flight") assert ret.returncode == 0 assert pathlib.Path(_create_roster["test_script"]).exists() @@ -115,18 +119,15 @@ def test_ssh_run_pre_flight_args(salt_ssh_cli, _create_roster): assert ret.returncode == 0 assert test_script_1.exists() assert test_script_2.exists() - pathlib.Path(test_script_1).unlink() - pathlib.Path(test_script_2).unlink() + test_script_1.unlink() + test_script_2.unlink() ret = salt_ssh_cli.run("test.ping") assert ret.returncode == 0 assert not test_script_1.exists() assert not test_script_2.exists() - ret = salt_ssh_cli.run( - "test.ping", - "--pre-flight", - ) + ret = salt_ssh_cli.run("test.ping", "--pre-flight") assert ret.returncode == 0 assert test_script_1.exists() assert test_script_2.exists() @@ -166,17 +167,14 @@ def test_ssh_run_pre_flight_args_prevent_injection( test_script_2.unlink() assert not injected_file.is_file() - ret = salt_ssh_cli.run( - "test.ping", - "--pre-flight", - ) + ret = salt_ssh_cli.run("test.ping", "--pre-flight") assert ret.returncode == 0 assert test_script_1.exists() assert 
test_script_2.exists() - assert not pathlib.Path( - injected_file - ).is_file(), "File injection suceeded. This shouldn't happend" + assert ( + not injected_file.is_file() + ), "File injection suceeded. This shouldn't happend" @pytest.mark.flaky(max_runs=4) @@ -189,10 +187,7 @@ def test_ssh_run_pre_flight_failure(salt_ssh_cli, _create_roster): with salt.utils.files.fopen(_create_roster["data"]["ssh_pre_flight"], "w") as fp_: fp_.write("exit 2") - ret = salt_ssh_cli.run( - "test.ping", - "--pre-flight", - ) + ret = salt_ssh_cli.run("test.ping", "--pre-flight") assert ret.data["retcode"] == 2 @@ -255,7 +250,7 @@ def test_ssh_pre_flight_perms(salt_ssh_cli, caplog, _create_roster, account): x=1 while [ $x -le 200000 ]; do SCRIPT=`bash {str(tmp_preflight)} 2> /dev/null; echo $?` - if [ ${{SCRIPT}} == 0 ]; then + if [ ${{SCRIPT}} -eq 0 ]; then break fi x=$(( $x + 1 )) @@ -301,10 +296,7 @@ def test_ssh_run_pre_flight_target_file_perms(salt_ssh_cli, _create_roster, tmp_ """ ) - ret = salt_ssh_cli.run( - "test.ping", - "--pre-flight", - ) + ret = salt_ssh_cli.run("test.ping", "--pre-flight") assert ret.returncode == 0 with salt.utils.files.fopen(perms_file) as fp: data = fp.read() diff --git a/tests/pytests/integration/ssh/test_saltcheck.py b/tests/pytests/integration/ssh/test_saltcheck.py index 51068850265..a19fe9f1270 100644 --- a/tests/pytests/integration/ssh/test_saltcheck.py +++ b/tests/pytests/integration/ssh/test_saltcheck.py @@ -23,6 +23,7 @@ def test_saltcheck_run_test(salt_ssh_cli): assert ret.data["status"] == "Pass" +@pytest.mark.skip_on_aarch64 def test_saltcheck_state(salt_ssh_cli): """ saltcheck.run_state_tests diff --git a/tests/pytests/integration/states/test_x509_v2.py b/tests/pytests/integration/states/test_x509_v2.py index be01852919b..b13a2a8922a 100644 --- a/tests/pytests/integration/states/test_x509_v2.py +++ b/tests/pytests/integration/states/test_x509_v2.py @@ -666,6 +666,7 @@ def test_privkey_new_with_prereq(x509_salt_call_cli, tmp_path): assert 
not _belongs_to(cert_new, pk_cur) +@pytest.mark.skip_on_fips_enabled_platform @pytest.mark.usefixtures("privkey_new_pkcs12") @pytest.mark.skipif( CRYPTOGRAPHY_VERSION[0] < 36, diff --git a/tests/pytests/scenarios/compat/test_with_versions.py b/tests/pytests/scenarios/compat/test_with_versions.py index 75a2b87f24c..ecb3a73de1a 100644 --- a/tests/pytests/scenarios/compat/test_with_versions.py +++ b/tests/pytests/scenarios/compat/test_with_versions.py @@ -29,7 +29,7 @@ pytestmark = [ def _get_test_versions_ids(value): - return "SaltMinion~={}".format(value) + return f"SaltMinion~={value}" @pytest.fixture( @@ -41,13 +41,13 @@ def compat_salt_version(request): @pytest.fixture(scope="module") def minion_image_name(compat_salt_version): - return "salt-{}".format(compat_salt_version) + return f"salt-{compat_salt_version}" @pytest.fixture(scope="function") def minion_id(compat_salt_version): return random_string( - "salt-{}-".format(compat_salt_version), + f"salt-{compat_salt_version}-", uppercase=False, ) @@ -70,7 +70,10 @@ def salt_minion( config_overrides = { "master": salt_master.config["interface"], "user": False, - "pytest-minion": {"log": {"host": host_docker_network_ip_address}}, + "pytest-minion": { + "log": {"host": host_docker_network_ip_address}, + "returner_address": {"host": host_docker_network_ip_address}, + }, # We also want to scrutinize the key acceptance "open_mode": False, } diff --git a/tests/pytests/scenarios/performance/conftest.py b/tests/pytests/scenarios/performance/conftest.py index 728bde397b5..2485cf51fd3 100644 --- a/tests/pytests/scenarios/performance/conftest.py +++ b/tests/pytests/scenarios/performance/conftest.py @@ -5,17 +5,10 @@ import logging import shutil import pytest -from saltfactories.daemons.container import Container +from saltfactories.utils import random_string -import salt.utils.path from tests.support.sminion import create_sminion -docker = pytest.importorskip("docker") -# pylint: 
disable=3rd-party-module-not-gated,no-name-in-module -from docker.errors import DockerException # isort:skip - -# pylint: enable=3rd-party-module-not-gated,no-name-in-module - pytestmark = [ pytest.mark.slow_test, pytest.mark.skip_if_binaries_missing("docker"), @@ -26,36 +19,18 @@ log = logging.getLogger(__name__) @pytest.fixture(scope="session") -def docker_client(): - if docker is None: - pytest.skip("The docker python library is not available") - - if salt.utils.path.which("docker") is None: - pytest.skip("The docker binary is not available") - try: - client = docker.from_env() - connectable = Container.client_connectable(client) - if connectable is not True: # pragma: no cover - pytest.skip(connectable) - return client - except DockerException: - pytest.skip("Failed to get a connection to docker running on the system") +def docker_network_name(): + return random_string("salt-perf-", uppercase=False) @pytest.fixture(scope="session") -def network(): - return "salt-performance" - - -@pytest.fixture(scope="session") -def host_docker_network_ip_address(network): +def host_docker_network_ip_address(docker_network_name): sminion = create_sminion() - network_name = network network_subnet = "10.0.21.0/24" network_gateway = "10.0.21.1" try: ret = sminion.states.docker_network.present( - network_name, + docker_network_name, driver="bridge", ipam_pools=[{"subnet": network_subnet, "gateway": network_gateway}], ) @@ -66,7 +41,7 @@ def host_docker_network_ip_address(network): pytest.skip(f"Failed to create docker network: {ret}") yield network_gateway finally: - sminion.states.docker_network.absent(network_name) + sminion.states.docker_network.absent(docker_network_name) @pytest.fixture(scope="session") diff --git a/tests/pytests/scenarios/performance/test_performance.py b/tests/pytests/scenarios/performance/test_performance.py index 85b92ed986e..e9e0d0def65 100644 --- a/tests/pytests/scenarios/performance/test_performance.py +++ 
b/tests/pytests/scenarios/performance/test_performance.py @@ -1,40 +1,22 @@ +import logging import os import shutil -import time +import sys import pytest from pytestshellutils.utils import ports -from saltfactories.daemons import master -from saltfactories.daemons.container import SaltDaemon, SaltMinion +from saltfactories.daemons.container import SaltMaster, SaltMinion from saltfactories.utils import random_string -from salt.version import SaltVersionsInfo, __version__ +from salt.version import SaltVersionsInfo +from tests.conftest import CODE_DIR -pytestmark = [pytest.mark.skip_if_binaries_missing("docker")] +log = logging.getLogger(__name__) - -class ContainerMaster(SaltDaemon, master.SaltMaster): - """ - Containerized salt master that has no check events - """ - - def get_display_name(self): - return master.SaltMaster.get_display_name(self) - - def get_check_events(self): - return [] - - -class ContainerMinion(SaltMinion): - """ - Containerized salt minion that has no check events - """ - - def get_check_events(self): - return [] - - -# ---------------------- Previous Version Setup ---------------------- +pytestmark = [ + pytest.mark.skip_on_photonos, + pytest.mark.skip_if_binaries_missing("docker"), +] @pytest.fixture @@ -42,14 +24,32 @@ def prev_version(): return str(SaltVersionsInfo.previous_release().info[0]) +@pytest.fixture +def prev_container_image(shell, prev_version): + container = f"ghcr.io/saltstack/salt-ci-containers/salt:{prev_version}" + ret = shell.run("docker", "pull", container, check=False) + if ret.returncode: + pytest.skip(f"Failed to pull docker image '{container}':\n{ret}") + return container + + @pytest.fixture def curr_version(): return str(SaltVersionsInfo.current_release().info[0]) +@pytest.fixture +def curr_container_image(shell): + container = "ghcr.io/saltstack/salt-ci-containers/salt:latest" + ret = shell.run("docker", "pull", container, check=False) + if ret.returncode: + pytest.skip(f"Failed to pull docker image 
'{container}':\n{ret}") + return container + + @pytest.fixture def prev_master_id(): - return random_string("master-performance-prev-", uppercase=False) + return random_string("master-perf-prev-", uppercase=False) @pytest.fixture @@ -57,10 +57,10 @@ def prev_master( request, salt_factories, host_docker_network_ip_address, - network, + docker_network_name, prev_version, - docker_client, prev_master_id, + prev_container_image, ): root_dir = salt_factories.get_root_dir_for_daemon(prev_master_id) conf_dir = root_dir / "conf" @@ -69,35 +69,36 @@ def prev_master( config_defaults = { "root_dir": str(root_dir), "transport": request.config.getoption("--transport"), - "user": False, + "user": "root", } - publish_port = ports.get_unused_localhost_port() - ret_port = ports.get_unused_localhost_port() config_overrides = { + "open_mode": True, "interface": "0.0.0.0", - "publish_port": publish_port, - "ret_port": ret_port, + "publish_port": ports.get_unused_localhost_port(), + "ret_port": ports.get_unused_localhost_port(), "log_level_logfile": "quiet", "pytest-master": { "log": {"host": host_docker_network_ip_address}, + "returner_address": {"host": host_docker_network_ip_address}, }, } factory = salt_factories.salt_master_daemon( prev_master_id, + name=prev_master_id, defaults=config_defaults, overrides=config_overrides, - factory_class=ContainerMaster, - image="ghcr.io/saltstack/salt-ci-containers/salt:{}".format(prev_version), + factory_class=SaltMaster, base_script_args=["--log-level=debug"], + image=prev_container_image, container_run_kwargs={ - "network": network, + "network": docker_network_name, "hostname": prev_master_id, }, - docker_client=docker_client, - name=prev_master_id, start_timeout=120, - max_start_attempts=1, + max_start_attempts=3, + pull_before_start=False, + skip_on_pull_failure=True, skip_if_docker_client_not_connectable=True, ) with factory.started(): @@ -122,7 +123,7 @@ def prev_salt_run_cli(prev_master): @pytest.fixture def prev_minion_id(): return 
random_string( - "minion-performance-prev-", + "minion-perf-prev-", uppercase=False, ) @@ -131,34 +132,38 @@ def prev_minion_id(): def prev_minion( prev_minion_id, prev_master, - docker_client, prev_version, host_docker_network_ip_address, - network, - prev_master_id, + docker_network_name, + prev_container_image, ): config_overrides = { - "master": prev_master_id, - "user": False, - "pytest-minion": {"log": {"host": host_docker_network_ip_address}}, + "master": prev_master.id, + "open_mode": True, + "user": "root", + "pytest-minion": { + "log": {"host": host_docker_network_ip_address}, + "returner_address": {"host": host_docker_network_ip_address}, + }, } factory = prev_master.salt_minion_daemon( prev_minion_id, - overrides=config_overrides, - factory_class=ContainerMinion, - # SaltMinion kwargs name=prev_minion_id, - image="ghcr.io/saltstack/salt-ci-containers/salt:{}".format(prev_version), - docker_client=docker_client, - start_timeout=120, - pull_before_start=False, - skip_if_docker_client_not_connectable=True, + overrides=config_overrides, + factory_class=SaltMinion, + base_script_args=["--log-level=debug"], + image=prev_container_image, container_run_kwargs={ - "network": network, + "network": docker_network_name, "hostname": prev_minion_id, }, - max_start_attempts=1, + start_timeout=120, + max_start_attempts=3, + pull_before_start=False, + skip_on_pull_failure=True, + skip_if_docker_client_not_connectable=True, ) + factory.python_executable = "python3" factory.after_terminate( pytest.helpers.remove_stale_minion_key, prev_master, factory.id ) @@ -172,21 +177,38 @@ def prev_sls(sls_contents, state_tree, tmp_path): location = tmp_path / "prev" / "testfile" location.parent.mkdir() with pytest.helpers.temp_file( - "{}.sls".format(sls_name), sls_contents.format(path=str(location)), state_tree + f"{sls_name}.sls", sls_contents.format(path=str(location)), state_tree ): yield sls_name -# ---------------------- Current Version Setup ---------------------- +def 
_install_salt_in_container(container): + ret = container.run( + "python3", + "-c", + "import sys; sys.stderr.write('{}.{}'.format(*sys.version_info))", + ) + assert ret.returncode == 0 + if not ret.stdout: + requirements_py_version = "{}.{}".format(*sys.version_info) + else: + requirements_py_version = ret.stdout.strip() - -def _install_local_salt(factory): - factory.run("pip install /saltcode") + ret = container.run( + "python3", + "-m", + "pip", + "install", + f"--constraint=/salt/requirements/static/ci/py{requirements_py_version}/linux.txt", + "/salt", + ) + log.debug("Install Salt in the container: %s", ret) + assert ret.returncode == 0 @pytest.fixture def curr_master_id(): - return random_string("master-performance-", uppercase=False) + return random_string("master-perf-curr-", uppercase=False) @pytest.fixture @@ -194,9 +216,9 @@ def curr_master( request, salt_factories, host_docker_network_ip_address, - network, - docker_client, + docker_network_name, curr_master_id, + curr_container_image, ): root_dir = salt_factories.get_root_dir_for_daemon(curr_master_id) conf_dir = root_dir / "conf" @@ -205,43 +227,46 @@ def curr_master( config_defaults = { "root_dir": str(root_dir), "transport": request.config.getoption("--transport"), - "user": False, + "user": "root", } publish_port = ports.get_unused_localhost_port() ret_port = ports.get_unused_localhost_port() config_overrides = { + "open_mode": True, "interface": "0.0.0.0", "publish_port": publish_port, "ret_port": ret_port, "log_level_logfile": "quiet", "pytest-master": { "log": {"host": host_docker_network_ip_address}, + "returner_address": {"host": host_docker_network_ip_address}, }, } factory = salt_factories.salt_master_daemon( curr_master_id, + name=curr_master_id, defaults=config_defaults, overrides=config_overrides, - factory_class=ContainerMaster, - image="ghcr.io/saltstack/salt-ci-containers/salt:current", + factory_class=SaltMaster, base_script_args=["--log-level=debug"], + image=curr_container_image, 
container_run_kwargs={ - "network": network, + "network": docker_network_name, "hostname": curr_master_id, # Bind the current code to a directory for pip installing "volumes": { - os.environ["REPO_ROOT_DIR"]: {"bind": "/saltcode", "mode": "z"} + str(CODE_DIR): {"bind": "/salt", "mode": "z"}, }, }, - docker_client=docker_client, - name=curr_master_id, start_timeout=120, - max_start_attempts=1, + max_start_attempts=3, + pull_before_start=False, + skip_on_pull_failure=True, skip_if_docker_client_not_connectable=True, ) - factory.before_start(_install_local_salt, factory) + factory.before_start(_install_salt_in_container, factory) with factory.started(): yield factory @@ -264,7 +289,7 @@ def curr_salt_key_cli(curr_master): @pytest.fixture def curr_minion_id(): return random_string( - "minion-performance-curr-", + "minion-perf-curr-", uppercase=False, ) @@ -273,38 +298,41 @@ def curr_minion_id(): def curr_minion( curr_minion_id, curr_master, - docker_client, host_docker_network_ip_address, - network, - curr_master_id, + docker_network_name, + curr_container_image, ): config_overrides = { - "master": curr_master_id, - "user": False, - "pytest-minion": {"log": {"host": host_docker_network_ip_address}}, + "master": curr_master.id, + "open_mode": True, + "user": "root", + "pytest-minion": { + "log": {"host": host_docker_network_ip_address}, + "returner_address": {"host": host_docker_network_ip_address}, + }, } factory = curr_master.salt_minion_daemon( curr_minion_id, - overrides=config_overrides, - factory_class=ContainerMinion, - # SaltMinion kwargs name=curr_minion_id, - image="ghcr.io/saltstack/salt-ci-containers/salt:current", - docker_client=docker_client, - start_timeout=120, - pull_before_start=False, - skip_if_docker_client_not_connectable=True, + overrides=config_overrides, + factory_class=SaltMinion, + base_script_args=["--log-level=debug"], + image=curr_container_image, container_run_kwargs={ - "network": network, + "network": docker_network_name, "hostname": 
curr_minion_id, # Bind the current code to a directory for pip installing "volumes": { - os.environ["REPO_ROOT_DIR"]: {"bind": "/saltcode", "mode": "z"} + str(CODE_DIR): {"bind": "/salt", "mode": "z"}, }, }, - max_start_attempts=1, + start_timeout=120, + max_start_attempts=3, + pull_before_start=False, + skip_on_pull_failure=True, + skip_if_docker_client_not_connectable=True, ) - factory.before_start(_install_local_salt, factory) + factory.before_start(_install_salt_in_container, factory) factory.after_terminate( pytest.helpers.remove_stale_minion_key, curr_master, factory.id ) @@ -318,25 +346,25 @@ def curr_sls(sls_contents, state_tree, tmp_path): location = tmp_path / "curr" / "testfile" location.parent.mkdir() with pytest.helpers.temp_file( - "{}.sls".format(sls_name), sls_contents.format(path=str(location)), state_tree + f"{sls_name}.sls", sls_contents.format(path=str(location)), state_tree ): yield sls_name -def _wait_for_stdout(expected, func, *args, timeout=120, **kwargs): - start = time.time() - while time.time() < start + timeout: - ret = func(*args, **kwargs) - if ret and ret.stdout and expected in ret.stdout: - break - time.sleep(1) - else: - pytest.skip( - f"Skipping test, one or more daemons failed to start: {expected} not found in {ret}" - ) +@pytest.fixture +def perf_state_name(state_tree, curr_master, prev_master): + + # Copy all of the needed files to both master file roots directories + subdir = random_string("perf-state-") + shutil.copytree( + state_tree, os.path.join(curr_master.config["file_roots"]["base"][0], subdir) + ) + shutil.copytree( + state_tree, os.path.join(prev_master.config["file_roots"]["base"][0], subdir) + ) + return subdir -@pytest.mark.flaky(max_runs=4) def test_performance( prev_salt_cli, prev_minion, @@ -353,48 +381,8 @@ def test_performance( prev_sls, curr_sls, curr_version, + perf_state_name, ): - # Copy all of the needed files to both master file roots directories - subdir = random_string("performance-") - shutil.copytree( 
- state_tree, os.path.join(curr_master.config["file_roots"]["base"][0], subdir) - ) - shutil.copytree( - state_tree, os.path.join(prev_master.config["file_roots"]["base"][0], subdir) - ) - - # Wait for the old master and minion to start - _wait_for_stdout( - prev_version, prev_master.run, *prev_salt_run_cli.cmdline("--version") - ) - salt_key_cmd = [ - comp - for comp in prev_salt_key_cli.cmdline("-Ay") - if not comp.startswith("--log-level") - ] - _wait_for_stdout(prev_minion.id, prev_master.run, *salt_key_cmd) - _wait_for_stdout( - "Salt: {}".format(prev_version), - prev_master.run, - *prev_salt_cli.cmdline("test.versions", minion_tgt=prev_minion.id), - ) - - # Wait for the new master and minion to start - _wait_for_stdout( - curr_version, curr_master.run, *curr_salt_run_cli.cmdline("--version") - ) - curr_key_cmd = [ - comp - for comp in curr_salt_key_cli.cmdline("-Ay") - if not comp.startswith("--log-level") - ] - _wait_for_stdout(curr_minion.id, curr_master.run, *curr_key_cmd) - _wait_for_stdout( - "Salt: {}".format(curr_version), - curr_master.run, - *curr_salt_cli.cmdline("test.versions", minion_tgt=curr_minion.id), - ) - # Let's now apply the states applies = os.environ.get("SALT_PERFORMANCE_TEST_APPLIES", 3) @@ -423,7 +411,9 @@ def test_performance( for _ in range(applies): prev_state_ret = prev_master.run( *prev_salt_cli.cmdline( - "state.apply", f"{subdir}.{prev_sls}", minion_tgt=prev_minion.id + "state.apply", + f"{perf_state_name}.{prev_sls}", + minion_tgt=prev_minion.id, ) ) prev_duration += _gather_durations(prev_state_ret, prev_minion.id) @@ -431,7 +421,9 @@ def test_performance( for _ in range(applies): curr_state_ret = curr_master.run( *curr_salt_cli.cmdline( - "state.apply", f"{subdir}.{curr_sls}", minion_tgt=curr_minion.id + "state.apply", + f"{perf_state_name}.{curr_sls}", + minion_tgt=curr_minion.id, ) ) curr_duration += _gather_durations(curr_state_ret, curr_minion.id) diff --git a/tests/pytests/unit/client/ssh/test_single.py 
b/tests/pytests/unit/client/ssh/test_single.py index 3f5459a0842..a69c315c215 100644 --- a/tests/pytests/unit/client/ssh/test_single.py +++ b/tests/pytests/unit/client/ssh/test_single.py @@ -19,17 +19,13 @@ log = logging.getLogger(__name__) @pytest.fixture -def opts(tmp_path): - return { - "argv": [ - "ssh.set_auth_key", - "root", - "hobn+amNAXSBTiOXEqlBjGB...rsa root@master", - ], - "__role": "master", - "cachedir": str(tmp_path), - "extension_modules": str(tmp_path / "extmods"), - } +def opts(master_opts): + master_opts["argv"] = [ + "ssh.set_auth_key", + "root", + "hobn+amNAXSBTiOXEqlBjGB...rsa root@master", + ] + return master_opts @pytest.fixture @@ -411,6 +407,10 @@ def test_run_ssh_pre_flight_no_connect(opts, target, tmp_path, caplog): with caplog.at_level(logging.TRACE): with patch_send, patch_exec_cmd, patch_tmp: ret = single.run_ssh_pre_flight() + + # Flush the logging handler just to be sure + caplog.handler.flush() + assert "Copying the pre flight script" in caplog.text assert "Could not copy the pre flight script to target" in caplog.text assert ret == ret_send @@ -503,6 +503,9 @@ def test_run_ssh_pre_flight_connect(opts, target, tmp_path, caplog): with patch_send, patch_exec_cmd, patch_tmp: ret = single.run_ssh_pre_flight() + # Flush the logging handler just to be sure + caplog.handler.flush() + assert "Executing the pre flight script on target" in caplog.text assert ret == ret_exec_cmd assert send_mock.call_args_list[0][0][0] == tmp_file diff --git a/tests/pytests/unit/client/ssh/test_ssh_classes.py b/tests/pytests/unit/client/ssh/test_ssh_classes.py new file mode 100644 index 00000000000..cabd4ff1722 --- /dev/null +++ b/tests/pytests/unit/client/ssh/test_ssh_classes.py @@ -0,0 +1,82 @@ +import logging + +import pytest +from saltfactories.utils.tempfiles import temp_directory + +import salt.client.ssh.__init__ as dunder_ssh +from salt.exceptions import SaltClientError, SaltSystemExit +from tests.support.mock import MagicMock, patch + +pytestmark = 
[pytest.mark.skip_unless_on_linux(reason="Test ssh only run on Linux")] + + +log = logging.getLogger(__name__) + + +def test_salt_refs(): + data_strg_cats = "cats" + ret = dunder_ssh.salt_refs(data_strg_cats) + assert ret == [] + + data_strg_proto = "salt://test_salt_ref" + ret = dunder_ssh.salt_refs(data_strg_proto) + assert ret == [data_strg_proto] + + data_list_no_proto = ["cats"] + ret = dunder_ssh.salt_refs(data_list_no_proto) + assert ret == [] + + data_list_proto = ["salt://test_salt_ref1", "salt://test_salt_ref2", "cats"] + ret = dunder_ssh.salt_refs(data_list_proto) + assert ret == ["salt://test_salt_ref1", "salt://test_salt_ref2"] + + +def test_convert_args(): + test_args = [ + "arg1", + {"key1": "value1", "key2": "value2", "__kwarg__": "kwords"}, + "dog1", + ] + expected = ["arg1", "key1=value1", "key2=value2", "dog1"] + ret = dunder_ssh._convert_args(test_args) + assert ret == expected + + +def test_ssh_class(): + + with temp_directory() as temp_dir: + assert temp_dir.is_dir() + opts = { + "sock_dir": temp_dir, + "regen_thin": False, + "__master_opts__": {"pki_dir": "pki"}, + "selected_target_option": None, + "tgt": "*", + "tgt_type": "glob", + "fileserver_backend": ["roots"], + "cachedir": "/tmp", + "thin_extra_mods": "", + "ssh_ext_alternatives": None, + } + + with patch("salt.utils.path.which", return_value=""), pytest.raises( + SaltSystemExit + ) as err: + test_ssh = dunder_ssh.SSH(opts) + assert ( + "salt-ssh could not be run because it could not generate keys." + in str(err.value) + ) + + with patch("salt.utils.path.which", return_value="/usr/bin/ssh"), patch( + "os.path.isfile", return_value=False + ), patch( + "salt.client.ssh.shell.gen_key", MagicMock(side_effect=OSError()) + ), pytest.raises( + SaltClientError + ) as err: + test_ssh = dunder_ssh.SSH(opts) + assert ( + "salt-ssh could not be run because it could not generate keys." 
+ in err.value + ) diff --git a/tests/pytests/unit/client/test_init.py b/tests/pytests/unit/client/test_init.py new file mode 100644 index 00000000000..90fb91b0070 --- /dev/null +++ b/tests/pytests/unit/client/test_init.py @@ -0,0 +1,275 @@ +import pytest + +import salt.client +from salt.exceptions import SaltInvocationError + + +@pytest.fixture +def local_client(): + return salt.client.get_local_client() + + +def test_get_local_client(local_client): + """ + Test that a local client is created + """ + assert isinstance(local_client, salt.client.LocalClient) + + +def test_get_local_client_mopts(master_opts): + master_opts["rest_cherrypy"] = {"port": 8000} + local_client = salt.client.get_local_client(mopts=master_opts) + assert isinstance(local_client, salt.client.LocalClient) + assert local_client.opts == master_opts + + +@pytest.mark.parametrize( + "val, expected", + ((None, 5), (7, 7), ("9", 9), ("eleven", 5), (["13"], 5)), +) +def test_local_client_get_timeout(local_client, val, expected): + assert local_client._get_timeout(timeout=val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + ("group1", ["L@spongebob,patrick"]), + ("group2", ["G@os:squidward"]), + ("group3", ["(", "G@os:plankton", "and", "(", "L@spongebob,patrick", ")", ")"]), + ), +) +def test_resolve_nodegroup(master_opts, val, expected): + master_opts["nodegroups"] = { + "group1": "L@spongebob,patrick", + "group2": "G@os:squidward", + "group3": "G@os:plankton and N@group1", + } + local_client = salt.client.get_local_client(mopts=master_opts) + assert local_client._resolve_nodegroup(val) == expected + + +def test_resolve_nodegroup_error(master_opts): + master_opts["nodegroups"] = { + "group1": "L@spongebob,patrick", + "group2": "G@os:squidward", + "group3": "G@os:plankton and N@group1", + } + local_client = salt.client.get_local_client(mopts=master_opts) + with pytest.raises(SaltInvocationError): + local_client._resolve_nodegroup("missing") + + +def test_prep_pub(local_client): + 
result = local_client._prep_pub( + tgt="*", + fun="test.ping", + arg="", + tgt_type="glob", + ret="", + jid="123", + timeout=7, + ) + expected = { + "arg": "", + "cmd": "publish", + "fun": "test.ping", + "jid": "123", + "key": "", + "ret": "", + "tgt": "*", + "tgt_type": "glob", + "user": local_client.salt_user, + } + assert result == expected + + +def test_prep_pub_kwargs(local_client): + result = local_client._prep_pub( + tgt="*", + fun="test.ping", + arg="", + tgt_type="glob", + ret="", + jid="123", + timeout=7, + some_kwarg="spongebob", + ) + expected = { + "arg": "", + "cmd": "publish", + "fun": "test.ping", + "jid": "123", + "key": "", + "ret": "", + "tgt": "*", + "tgt_type": "glob", + "user": local_client.salt_user, + "kwargs": { + "some_kwarg": "spongebob", + }, + } + assert result == expected + + +def test_prep_pub_order_masters(master_opts): + master_opts["order_masters"] = True + local_client = salt.client.get_local_client(mopts=master_opts) + result = local_client._prep_pub( + tgt="*", + fun="test.ping", + arg="", + tgt_type="glob", + ret="", + jid="123", + timeout=7, + ) + expected = { + "arg": "", + "cmd": "publish", + "fun": "test.ping", + "jid": "123", + "key": "", + "ret": "", + "tgt": "*", + "tgt_type": "glob", + "to": 7, + "user": local_client.salt_user, + } + assert result == expected + + +def test_prep_pub_nodegroup(master_opts): + master_opts["nodegroups"] = { + "group1": "L@spongebob,patrick", + "group2": "G@os:squidward", + "group3": "G@os:plankton and N@group1", + } + local_client = salt.client.get_local_client(mopts=master_opts) + result = local_client._prep_pub( + tgt="group1", + fun="test.ping", + arg="", + tgt_type="nodegroup", + ret="", + jid="123", + timeout=7, + ) + expected = { + "arg": "", + "cmd": "publish", + "fun": "test.ping", + "jid": "123", + "key": "", + "ret": "", + "tgt": "L@spongebob,patrick", + "tgt_type": "compound", + "user": local_client.salt_user, + } + assert result == expected + + +def 
test_prep_pub_compound(local_client): + result = local_client._prep_pub( + tgt="spongebob,patrick", + fun="test.ping", + arg="", + tgt_type="compound", + ret="", + jid="123", + timeout=7, + ) + expected = { + "arg": "", + "cmd": "publish", + "fun": "test.ping", + "jid": "123", + "key": "", + "ret": "", + "tgt": "spongebob,patrick", + "tgt_type": "compound", + "user": local_client.salt_user, + } + assert result == expected + + +def test_prep_pub_compound_nodegroup(master_opts): + master_opts["nodegroups"] = { + "group1": "L@spongebob,patrick", + "group2": "G@os:squidward", + "group3": "G@os:plankton and N@group1", + } + local_client = salt.client.get_local_client(mopts=master_opts) + result = local_client._prep_pub( + tgt="N@group1", + fun="test.ping", + arg="", + tgt_type="compound", + ret="", + jid="123", + timeout=7, + ) + expected = { + "arg": "", + "cmd": "publish", + "fun": "test.ping", + "jid": "123", + "key": "", + "ret": "", + "tgt": "L@spongebob,patrick", + "tgt_type": "compound", + "user": local_client.salt_user, + } + assert result == expected + + +def test_prep_pub_ext_job_cache(master_opts): + master_opts["ext_job_cache"] = "mysql" + local_client = salt.client.get_local_client(mopts=master_opts) + result = local_client._prep_pub( + tgt="spongebob,patrick", + fun="test.ping", + arg="", + tgt_type="glob", + ret="", + jid="123", + timeout=7, + ) + expected = { + "arg": "", + "cmd": "publish", + "fun": "test.ping", + "jid": "123", + "key": "", + "ret": "mysql", + "tgt": "spongebob,patrick", + "tgt_type": "glob", + "user": local_client.salt_user, + } + assert result == expected + + +def test_prep_pub_ext_job_cache_existing(master_opts): + master_opts["ext_job_cache"] = "mysql" + local_client = salt.client.get_local_client(mopts=master_opts) + result = local_client._prep_pub( + tgt="spongebob,patrick", + fun="test.ping", + arg="", + tgt_type="glob", + ret="postgres", + jid="123", + timeout=7, + ) + expected = { + "arg": "", + "cmd": "publish", + "fun": 
"test.ping", + "jid": "123", + "key": "", + "ret": "postgres,mysql", + "tgt": "spongebob,patrick", + "tgt_type": "glob", + "user": local_client.salt_user, + } + assert result == expected diff --git a/tests/pytests/unit/cloud/test_cloud.py b/tests/pytests/unit/cloud/test_cloud.py index 510a0bb5eab..5d16c2d2cd8 100644 --- a/tests/pytests/unit/cloud/test_cloud.py +++ b/tests/pytests/unit/cloud/test_cloud.py @@ -192,6 +192,7 @@ def test_vm_config_merger_nooverridevalue(): assert expected == vm +@pytest.mark.skip_on_fips_enabled_platform def test_cloud_run_profile_create_returns_boolean(master_config): master_config["profiles"] = {"test_profile": {"provider": "test_provider:saltify"}} diff --git a/tests/pytests/unit/cloud/test_map.py b/tests/pytests/unit/cloud/test_map.py index 06f71b6d6e5..ce2999003e7 100644 --- a/tests/pytests/unit/cloud/test_map.py +++ b/tests/pytests/unit/cloud/test_map.py @@ -99,6 +99,8 @@ def salt_cloud_config_file(salt_master_factory): return os.path.join(salt_master_factory.config_dir, "cloud") +# The cloud map merge uses python's multiprocessing manager which authenticates using HMAC and MD5 +@pytest.mark.skip_on_fips_enabled_platform def test_cloud_map_merge_conf(salt_cloud_config_file, grains): """ Ensure that nested values can be selectivly overridden in a map file diff --git a/tests/pytests/unit/modules/test_hashutil.py b/tests/pytests/unit/modules/test_hashutil.py index d8f2195c174..c91e99ce6b7 100644 --- a/tests/pytests/unit/modules/test_hashutil.py +++ b/tests/pytests/unit/modules/test_hashutil.py @@ -61,6 +61,7 @@ def test_base64_decodestring(the_string, the_string_base64): assert hashutil.base64_decodestring(the_string_base64) == the_string +@pytest.mark.skip_on_fips_enabled_platform def test_md5_digest(the_string, the_string_md5): assert hashutil.md5_digest(the_string) == the_string_md5 diff --git a/tests/pytests/unit/modules/test_junos.py b/tests/pytests/unit/modules/test_junos.py new file mode 100644 index 00000000000..616f15f1d8a 
--- /dev/null +++ b/tests/pytests/unit/modules/test_junos.py @@ -0,0 +1,2898 @@ +""" + :codeauthor: Rajvi Dhimar +""" +import os + +import pytest +import yaml + +import salt.modules.junos as junos +from tests.support.mock import ANY, MagicMock, PropertyMock, call, mock_open, patch + +try: + from lxml import etree +except ImportError: + import xml.etree.ElementTree as etree + +try: + import jnpr.junos.op as tables_dir + import jxmlease # pylint: disable=unused-import + from jnpr.junos.device import Device + from jnpr.junos.exception import ConnectClosedError, LockError, UnlockError + from jnpr.junos.utils.config import Config + from jnpr.junos.utils.sw import SW + + HAS_JUNOS = True +except ImportError: + HAS_JUNOS = False + +pytestmark = [ + pytest.mark.skip_on_windows(reason="Not supported on Windows"), + pytest.mark.skipif( + not HAS_JUNOS, reason="The junos-eznc and jxmlease modules are required" + ), +] + + +@pytest.fixture +def get_facts(): + facts = { + "2RE": True, + "HOME": "/var/home/regress", + "RE0": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "master", + "model": "RE-VMX", + "status": "OK", + "up_time": "11 days, 23 hours, 16 minutes, 54 seconds", + }, + "RE1": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "backup", + "model": "RE-VMX", + "status": "OK", + "up_time": "11 days, 23 hours, 16 minutes, 41 seconds", + }, + "RE_hw_mi": False, + "current_re": ["re0", "master", "node", "fwdd", "member", "pfem"], + "domain": "englab.juniper.net", + "fqdn": "R1_re0.englab.juniper.net", + "hostname": "R1_re0", + "hostname_info": {"re0": "R1_re0", "re1": "R1_re01"}, + "ifd_style": "CLASSIC", + "junos_info": { + "re0": { + "object": { + "build": None, + "major": (16, 1), + "minor": "20160413_0837_aamish", + "type": "I", + }, + "text": "16.1I20160413_0837_aamish", + }, + "re1": { + "object": { + "build": None, + "major": (16, 1), + "minor": "20160413_0837_aamish", + "type": "I", + }, + "text": 
"16.1I20160413_0837_aamish", + }, + }, + "master": "RE0", + "model": "MX240", + "model_info": {"re0": "MX240", "re1": "MX240"}, + "personality": "MX", + "re_info": { + "default": { + "0": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "master", + "model": "RE-VMX", + "status": "OK", + }, + "1": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "backup", + "model": "RE-VMX", + "status": "OK", + }, + "default": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "master", + "model": "RE-VMX", + "status": "OK", + }, + } + }, + "re_master": {"default": "0"}, + "serialnumber": "VMX4eaf", + "srx_cluster": None, + "switch_style": "BRIDGE_DOMAIN", + "vc_capable": False, + "vc_fabric": None, + "vc_master": None, + "vc_mode": None, + "version": "16.1I20160413_0837_aamish", + "version_RE0": "16.1I20160413_0837_aamish", + "version_RE1": "16.1I20160413_0837_aamish", + "version_info": { + "build": None, + "major": (16, 1), + "minor": "20160413_0837_aamish", + "type": "I", + }, + "virtual": True, + } + return facts + + +@pytest.fixture +def make_connect(): + with patch("ncclient.manager.connect") as mock_connect: + dev = Device( + host="1.1.1.1", + user="test", + password="test123", + fact_style="old", + gather_facts=False, + ) + dev.open() + dev.timeout = 30 + dev.bind(cu=Config) + dev.bind(sw=SW) + yield dev + + +@pytest.fixture +def configure_loader_modules(get_facts, make_connect): + return { + junos: { + "__proxy__": { + "junos.conn": MagicMock(return_value=make_connect), + "junos.get_serialized_facts": MagicMock(return_value=get_facts), + "junos.reboot_active": MagicMock(return_value=True), + "junos.reboot_clear": MagicMock(return_value=True), + }, + "__salt__": { + "cp.get_template": MagicMock(return_value=True), + "cp.get_file": MagicMock(return_value=True), + "file.file_exists": MagicMock(return_value=True), + "slsutil.renderer": MagicMock( + return_value="set system host-name dummy" + ), + 
def raise_exception(*args, **kwargs):
    """Side-effect helper: always raise a generic test exception."""
    raise Exception("Test exception")


def test__timeout_decorator():
    """_timeout_decorator should read, override, then restore Device.timeout."""
    with patch(
        "jnpr.junos.Device.timeout", new_callable=PropertyMock
    ) as timeout_prop:
        timeout_prop.return_value = 30

        def passthrough(x):
            return x

        wrapped = junos._timeout_decorator(passthrough)
        wrapped("Test Mock", dev_timeout=10)
        # Sequence: getter (save original), setter(10) for the call,
        # setter(30) to restore the saved value afterwards.
        timeout_prop.assert_has_calls([call(), call(10), call(30)])


def test__timeout_cleankwargs_decorator():
    """The cleankwargs variant strips __pub_* kwargs but still manages the
    device timeout exactly like _timeout_decorator."""
    with patch(
        "jnpr.junos.Device.timeout", new_callable=PropertyMock
    ) as timeout_prop:
        timeout_prop.return_value = 30

        def passthrough(x):
            return x

        wrapped = junos._timeout_decorator_cleankwargs(passthrough)
        wrapped("Test Mock", dev_timeout=10, __pub_args="abc")
        # Same save/override/restore sequence as the plain decorator.
        timeout_prop.assert_has_calls([call(), call(10), call(30)])
{ + "build": None, + "major": (16, 1), + "minor": "20160413_0837_aamish", + "type": "I", + }, + "text": "16.1I20160413_0837_aamish", + }, + }, + "master": "RE0", + "model": "MX240", + "model_info": {"re0": "MX240", "re1": "MX240"}, + "personality": "MX", + "re_info": { + "default": { + "0": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "master", + "model": "RE-VMX", + "status": "OK", + }, + "1": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "backup", + "model": "RE-VMX", + "status": "OK", + }, + "default": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "master", + "model": "RE-VMX", + "status": "OK", + }, + } + }, + "re_master": {"default": "0"}, + "serialnumber": "VMX4eaf", + "srx_cluster": None, + "switch_style": "BRIDGE_DOMAIN", + "vc_capable": False, + "vc_fabric": None, + "vc_master": None, + "vc_mode": None, + "version": "16.1I20160413_0837_aamish", + "version_RE0": "16.1I20160413_0837_aamish", + "version_RE1": "16.1I20160413_0837_aamish", + "version_info": { + "build": None, + "major": (16, 1), + "minor": "20160413_0837_aamish", + "type": "I", + }, + "virtual": True, + }, + } + assert junos.facts_refresh() == ret + + +def test_facts_refresh_exception(): + with patch("jnpr.junos.device.Device.facts_refresh") as mock_facts_refresh: + mock_facts_refresh.side_effect = raise_exception + ret = { + "message": 'Execution failed due to "Test exception"', + "out": False, + } + assert junos.facts_refresh() == ret + + +def test_facts(): + ret = { + "out": True, + "facts": { + "2RE": True, + "HOME": "/var/home/regress", + "RE0": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "master", + "model": "RE-VMX", + "status": "OK", + "up_time": "11 days, 23 hours, 16 minutes, 54 seconds", + }, + "RE1": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "backup", + "model": "RE-VMX", + "status": "OK", + "up_time": "11 days, 23 hours, 16 minutes, 41 
seconds", + }, + "RE_hw_mi": False, + "current_re": ["re0", "master", "node", "fwdd", "member", "pfem"], + "domain": "englab.juniper.net", + "fqdn": "R1_re0.englab.juniper.net", + "hostname": "R1_re0", + "hostname_info": {"re0": "R1_re0", "re1": "R1_re01"}, + "ifd_style": "CLASSIC", + "junos_info": { + "re0": { + "object": { + "build": None, + "major": (16, 1), + "minor": "20160413_0837_aamish", + "type": "I", + }, + "text": "16.1I20160413_0837_aamish", + }, + "re1": { + "object": { + "build": None, + "major": (16, 1), + "minor": "20160413_0837_aamish", + "type": "I", + }, + "text": "16.1I20160413_0837_aamish", + }, + }, + "master": "RE0", + "model": "MX240", + "model_info": {"re0": "MX240", "re1": "MX240"}, + "personality": "MX", + "re_info": { + "default": { + "0": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "master", + "model": "RE-VMX", + "status": "OK", + }, + "1": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "backup", + "model": "RE-VMX", + "status": "OK", + }, + "default": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "master", + "model": "RE-VMX", + "status": "OK", + }, + } + }, + "re_master": {"default": "0"}, + "serialnumber": "VMX4eaf", + "srx_cluster": None, + "switch_style": "BRIDGE_DOMAIN", + "vc_capable": False, + "vc_fabric": None, + "vc_master": None, + "vc_mode": None, + "version": "16.1I20160413_0837_aamish", + "version_RE0": "16.1I20160413_0837_aamish", + "version_RE1": "16.1I20160413_0837_aamish", + "version_info": { + "build": None, + "major": (16, 1), + "minor": "20160413_0837_aamish", + "type": "I", + }, + "virtual": True, + }, + } + assert junos.facts() == ret + + +def test_facts_exception(): + with patch.dict(junos.__proxy__, {"junos.get_serialized_facts": raise_exception}): + ret = { + "message": 'Could not display facts due to "Test exception"', + "out": False, + } + assert junos.facts() == ret + + +def test_set_hostname_without_args(): + ret = { + 
def test_set_hostname_load_called_with_valid_name():
    # The hostname is applied via a "set" style config load.
    with patch("jnpr.junos.utils.config.Config.load") as mock_load:
        junos.set_hostname("test-name")
        mock_load.assert_called_with("set system host-name test-name", format="set")


def test_set_hostname_raise_exception_for_load():
    # A failing Config.load must be reported, not propagated.
    with patch("jnpr.junos.utils.config.Config.load") as mock_load:
        mock_load.side_effect = raise_exception
        ret = {
            "message": 'Could not load configuration due to error "Test exception"',
            "out": False,
        }
        assert junos.set_hostname("Test-name") == ret


def test_set_hostname_raise_exception_for_commit_check():
    # A failing Config.commit_check must be reported, not propagated.
    with patch("jnpr.junos.utils.config.Config.commit_check") as mock_commit_check:
        mock_commit_check.side_effect = raise_exception
        ret = {
            "message": 'Could not commit check due to error "Test exception"',
            "out": False,
        }
        assert junos.set_hostname("test-name") == ret


def test_set_hostname_one_arg_parsed_correctly():
    # Options supplied through __pub_arg (how the CLI delivers kwargs) must be
    # forwarded to Config.commit as keyword arguments.
    with patch("jnpr.junos.utils.config.Config.load") as mock_load, patch(
        "jnpr.junos.utils.config.Config.commit_check"
    ) as mock_commit_check, patch(
        "jnpr.junos.utils.config.Config.commit"
    ) as mock_commit:
        mock_commit_check.return_value = True
        args = {
            "comment": "Committed via salt",
            "__pub_user": "root",
            "__pub_arg": ["test-name", {"comment": "Committed via salt"}],
            "__pub_fun": "junos.set_hostname",
            "__pub_jid": "20170220210915624885",
            "__pub_tgt": "mac_min",
            "__pub_tgt_type": "glob",
            "__pub_ret": "",
        }

        junos.set_hostname("test-name", **args)
        mock_commit.assert_called_with(comment="Committed via salt")
mock_commit_check.return_value = True + args = { + "comment": "Committed via salt", + "__pub_user": "root", + "__pub_arg": [ + "test-name", + {"comment": "Committed via salt", "confirm": 5}, + ], + "__pub_fun": "junos.set_hostname", + "__pub_jid": "20170220210915624885", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + + junos.set_hostname("test-name", **args) + mock_commit.assert_called_with(comment="Committed via salt", confirm=5) + + +def test_set_hostname_successful_return_message(): + with patch("jnpr.junos.utils.config.Config.load") as mock_load, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit: + mock_commit_check.return_value = True + args = { + "comment": "Committed via salt", + "__pub_user": "root", + "__pub_arg": ["test-name", {"comment": "Committed via salt"}], + "__pub_fun": "junos.set_hostname", + "__pub_jid": "20170220210915624885", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + ret = { + "message": "Successfully changed hostname.", + "out": True, + } + assert junos.set_hostname("test-name", **args) == ret + + +def test_set_hostname_raise_exception_for_commit(): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit: + mock_commit.side_effect = raise_exception + ret = { + "message": 'Successfully loaded host-name but commit failed with "Test exception"', + "out": False, + } + assert junos.set_hostname("test-name") == ret + + +def test_set_hostname_fail_commit_check(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch("salt.modules.junos.rollback") as mock_rollback: + mock_commit_check.return_value = False + ret = { + "message": "Successfully loaded host-name but pre-commit check failed.", + "out": False, + } + assert junos.set_hostname("test") == ret + + +def test_commit_without_args(): + with patch( + 
"jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit: + mock_commit.return_value = True + mock_commit_check.return_value = True + ret = { + "message": "Commit Successful.", + "out": True, + } + assert junos.commit() == ret + + +def test_commit_raise_commit_check_exception(): + with patch("jnpr.junos.utils.config.Config.commit_check") as mock_commit_check: + mock_commit_check.side_effect = raise_exception + ret = { + "message": 'Could not perform commit check due to "Test exception"', + "out": False, + } + assert junos.commit() == ret + + +def test_commit_raise_commit_exception(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit: + mock_commit_check.return_value = True + mock_commit.side_effect = raise_exception + ret = { + "message": 'Commit check succeeded but actual commit failed with "Test exception"', + "out": False, + } + assert junos.commit() == ret + + +def test_commit_with_single_argument(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit: + mock_commit_check.return_value = True + args = { + "__pub_user": "root", + "__pub_arg": [{"sync": True}], + "sync": True, + "__pub_fun": "junos.commit", + "__pub_jid": "20170221182531323467", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.commit(**args) + mock_commit.assert_called_with(detail=False, sync=True) + + +def test_commit_with_multiple_arguments(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit: + mock_commit_check.return_value = True + args = { + "comment": "comitted via salt", + "__pub_user": "root", + "__pub_arg": [ + {"comment": "comitted via salt", "confirm": 3, 
"detail": True} + ], + "confirm": 3, + "detail": True, + "__pub_fun": "junos.commit", + "__pub_jid": "20170221182856987820", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.commit(**args) + mock_commit.assert_called_with( + comment="comitted via salt", detail=True, confirm=3 + ) + + +def test_commit_pyez_commit_returning_false(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit: + mock_commit.return_value = False + mock_commit_check.return_value = True + ret = { + "message": "Commit failed.", + "out": False, + } + assert junos.commit() == ret + + +def test_commit_pyez_commit_check_returns_false(): + with patch("jnpr.junos.utils.config.Config.commit_check") as mock_commit_check: + mock_commit_check.return_value = False + ret = { + "message": "Pre-commit check failed.", + "out": False, + } + assert junos.commit() == ret + + +def test_rollback_exception(): + with patch("jnpr.junos.utils.config.Config.rollback") as mock_rollback: + mock_rollback.side_effect = raise_exception + ret = { + "message": 'Rollback failed due to "Test exception"', + "out": False, + } + assert junos.rollback() == ret + + +def test_rollback_without_args_success(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback: + mock_commit_check.return_value = True + mock_rollback.return_value = True + ret = { + "message": "Rollback successful", + "out": True, + } + assert junos.rollback() == ret + + +def test_rollback_without_args_fail(): + with patch("jnpr.junos.utils.config.Config.rollback") as mock_rollback: + mock_rollback.return_value = False + ret = { + "message": "Rollback failed", + "out": False, + } + assert junos.rollback() == ret + + +def test_rollback_with_id(): + with patch( + 
"jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback: + mock_commit_check.return_value = True + junos.rollback(id=5) + mock_rollback.assert_called_with(5) + + +def test_rollback_with_id_and_single_arg(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback: + mock_commit_check.return_value = True + args = { + "__pub_user": "root", + "__pub_arg": [2, {"confirm": 2}], + "confirm": 2, + "__pub_fun": "junos.rollback", + "__pub_jid": "20170221184518526067", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.rollback(id=2, **args) + mock_rollback.assert_called_with(2) + mock_commit.assert_called_with(confirm=2) + + +def test_rollback_with_id_and_multiple_args(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback: + mock_commit_check.return_value = True + args = { + "comment": "Comitted via salt", + "__pub_user": "root", + "__pub_arg": [ + 2, + {"comment": "Comitted via salt", "dev_timeout": 40, "confirm": 1}, + ], + "confirm": 1, + "__pub_fun": "junos.rollback", + "__pub_jid": "20170221192708251721", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.rollback(id=2, **args) + mock_rollback.assert_called_with(2) + mock_commit.assert_called_with( + comment="Comitted via salt", confirm=1, dev_timeout=40 + ) + + +def test_rollback_with_only_single_arg(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as 
mock_commit, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback: + mock_commit_check.return_value = True + args = { + "__pub_user": "root", + "__pub_arg": [{"sync": True}], + "sync": True, + "__pub_fun": "junos.rollback", + "__pub_jid": "20170221193615696475", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.rollback(**args) + mock_rollback.assert_called_once_with(0) + mock_commit.assert_called_once_with(sync=True) + + +def test_rollback_with_only_multiple_args_no_id(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback: + mock_commit_check.return_value = True + args = { + "comment": "Comitted via salt", + "__pub_user": "root", + "__pub_arg": [{"comment": "Comitted via salt", "confirm": 3, "sync": True}], + "confirm": 3, + "sync": True, + "__pub_fun": "junos.rollback", + "__pub_jid": "20170221193945996362", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.rollback(**args) + mock_rollback.assert_called_with(0) + mock_commit.assert_called_once_with( + sync=True, confirm=3, comment="Comitted via salt" + ) + + +def test_rollback_with_diffs_file_option_when_diff_is_None(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback, patch( + "salt.utils.files.fopen" + ) as mock_fopen, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff: + mock_commit_check.return_value = True + mock_diff.return_value = "diff" + args = { + "__pub_user": "root", + "__pub_arg": [{"diffs_file": "/home/regress/diff", "confirm": 2}], + "confirm": 2, + "__pub_fun": "junos.rollback", + "__pub_jid": "20170221205153884009", + "__pub_tgt": "mac_min", + 
"__pub_tgt_type": "glob", + "__pub_ret": "", + "diffs_file": "/home/regress/diff", + } + junos.rollback(**args) + mock_fopen.assert_called_with("/home/regress/diff", "w") + + +def test_rollback_with_diffs_file_option(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback, patch( + "salt.utils.files.fopen" + ) as mock_fopen, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff: + mock_commit_check.return_value = True + mock_diff.return_value = None + args = { + "__pub_user": "root", + "__pub_arg": [{"diffs_file": "/home/regress/diff", "confirm": 2}], + "confirm": 2, + "__pub_fun": "junos.rollback", + "__pub_jid": "20170221205153884009", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + "diffs_file": "/home/regress/diff", + } + junos.rollback(**args) + assert not mock_fopen.called + + +def test_rollback_commit_check_exception(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback: + mock_commit_check.side_effect = raise_exception + ret = { + "message": 'Could not commit check due to "Test exception"', + "out": False, + } + assert junos.rollback() == ret + + +def test_rollback_commit_exception(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback: + mock_commit_check.return_value = True + mock_commit.side_effect = raise_exception + ret = { + "message": 'Rollback successful but commit failed with error "Test exception"', + "out": False, + } + assert junos.rollback() == ret + + +def test_rollback_commit_check_fails(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + 
) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback: + mock_commit_check.return_value = False + ret = { + "message": "Rollback successful but pre-commit check failed.", + "out": False, + } + assert junos.rollback() == ret + + +def test_diff_without_args(): + with patch("jnpr.junos.utils.config.Config.diff") as mock_diff: + junos.diff() + mock_diff.assert_called_with(rb_id=0) + + +def test_diff_with_arg(): + with patch("jnpr.junos.utils.config.Config.diff") as mock_diff: + junos.diff(id=2) + mock_diff.assert_called_with(rb_id=2) + + +def test_diff_exception(): + with patch("jnpr.junos.utils.config.Config.diff") as mock_diff: + mock_diff.side_effect = raise_exception + ret = { + "message": 'Could not get diff with error "Test exception"', + "out": False, + } + assert junos.diff() == ret + + +def test_ping_without_args(): + ret = { + "message": "Please specify the destination ip to ping.", + "out": False, + } + assert junos.ping() == ret + + +def test_ping(): + with patch("jnpr.junos.device.Device.execute") as mock_execute: + junos.ping("1.1.1.1") + args = mock_execute.call_args + rpc = b"1.1.1.15" + mydgm = etree.tostring(args[0][0]) + assert etree.tostring(args[0][0]) == rpc + + +def test_ping_ttl(): + with patch("jnpr.junos.device.Device.execute") as mock_execute: + args = { + "__pub_user": "sudo_drajvi", + "__pub_arg": ["1.1.1.1", {"ttl": 3}], + "__pub_fun": "junos.ping", + "__pub_jid": "20170306165237683279", + "__pub_tgt": "mac_min", + "ttl": 3, + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.ping("1.1.1.1", **args) + exec_args = mock_execute.call_args + rpc = b"1.1.1.135" + assert etree.tostring(exec_args[0][0]) == rpc + + +def test_ping_exception(): + with patch("jnpr.junos.device.Device.execute") as mock_execute: + mock_execute.side_effect = raise_exception + ret = { + "message": 'Execution failed due to "Test exception"', + "out": False, + } + assert junos.ping("1.1.1.1") == ret + + +def 
def test_cli_without_args():
    # No command supplied -> junos.cli() must refuse to execute anything.
    ret = {
        "message": "Please provide the CLI command to be executed.",
        "out": False,
    }
    assert junos.cli() == ret


def test_cli_with_format_as_empty_string():
    # An empty ``format`` argument must fall back to the default "text".
    with patch("jnpr.junos.device.Device.cli") as mock_cli:
        junos.cli("show version", format="")
        mock_cli.assert_called_with("show version", "text", warning=False)


def test_cli():
    # BUG FIX: the original set ``mock_cli.return_vale`` (typo for
    # ``return_value``, which MagicMock happily accepts as a plain attribute)
    # and never asserted on ``ret``, so the test verified nothing about the
    # module's return value. Fix the attribute name and assert the result.
    with patch("jnpr.junos.device.Device.cli") as mock_cli:
        mock_cli.return_value = "CLI result"
        ret = {
            "message": "CLI result",
            "out": True,
        }
        assert junos.cli("show version") == ret
        mock_cli.assert_called_with("show version", "xml" if False else "text", warning=False) if False else mock_cli.assert_called_with("show version", "text", warning=False)


def test_cli_format_xml():
    # format="xml" must request XML from the device and convert the reply via
    # etree.tostring + jxmlease.parse.
    with patch("salt.modules.junos.jxmlease.parse") as mock_jxml, patch(
        "salt.modules.junos.etree.tostring"
    ) as mock_to_string, patch("jnpr.junos.device.Device.cli") as mock_cli:
        mock_cli.return_value = "test"
        mock_jxml.return_value = "test"
        args = {
            "__pub_user": "root",
            "__pub_arg": [{"format": "xml"}],
            "format": "xml",
            "__pub_fun": "junos.cli",
            "__pub_jid": "20170221182531323467",
            "__pub_tgt": "mac_min",
            "__pub_tgt_type": "glob",
            "__pub_ret": "",
        }
        ret = {
            "message": "test",
            "out": True,
        }
        assert junos.cli("show version", **args) == ret
        mock_cli.assert_called_with("show version", "xml", warning=False)
        mock_to_string.assert_called_once_with("test")
        assert mock_jxml.called


def test_cli_exception_in_cli():
    # A failing Device.cli must be reported, not propagated.
    with patch("jnpr.junos.device.Device.cli") as mock_cli:
        mock_cli.side_effect = raise_exception
        ret = {
            "message": 'Execution failed due to "Test exception"',
            "out": False,
        }
        assert junos.cli("show version") == ret
"junos.cli", + "__pub_jid": "20170221182531323467", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + ret = { + "message": "Test return", + "out": True, + } + assert junos.cli("show version", **args) == ret + mock_fopen.assert_called_with("/path/to/file", "w") + mock_cli.assert_called_with("show version", "text", warning=False) + + +def test_cli_output_save_ioexception(): + with patch("jnpr.junos.device.Device.cli") as mock_cli, patch( + "salt.utils.files.fopen" + ) as mock_fopen: + mock_cli.return_value = "Test return" + mock_fopen.side_effect = IOError() + args = { + "__pub_user": "root", + "__pub_arg": [{"format": "text", "dest": "/path/to/file"}], + "format": "text", + "dest": "/path/to/file", + "__pub_fun": "junos.cli", + "__pub_jid": "20170221182531323467", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + ret = { + "message": 'Unable to open "/path/to/file" to write', + "out": False, + } + assert junos.cli("show version", **args) == ret + + +def test_shutdown_without_args(): + ret = { + "message": "Provide either one of the arguments: shutdown or reboot.", + "out": False, + } + assert junos.shutdown() == ret + + +def test_shutdown_with_reboot_args(): + with patch("salt.modules.junos.SW.reboot") as mock_reboot: + ret = { + "message": "Successfully powered off/rebooted.", + "out": True, + } + args = { + "__pub_user": "root", + "__pub_arg": [{"reboot": True}], + "reboot": True, + "__pub_fun": "junos.shutdown", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + assert junos.shutdown(**args) == ret + assert mock_reboot.called + + +def test_shutdown_with_poweroff_args(): + with patch("salt.modules.junos.SW.poweroff") as mock_poweroff: + ret = { + "message": "Successfully powered off/rebooted.", + "out": True, + } + args = { + "__pub_user": "root", + "__pub_arg": [{"shutdown": True}], + "reboot": True, + "__pub_fun": "junos.shutdown", + 
"__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + assert junos.shutdown(**args) == ret + assert mock_poweroff.called + + +def test_shutdown_with_shutdown_as_false(): + ret = { + "message": "Nothing to be done.", + "out": False, + } + args = { + "__pub_user": "root", + "__pub_arg": [{"shutdown": False}], + "reboot": True, + "__pub_fun": "junos.shutdown", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + assert junos.shutdown(**args) == ret + + +def test_shutdown_with_in_min_arg(): + with patch("salt.modules.junos.SW.poweroff") as mock_poweroff: + args = { + "__pub_user": "root", + "in_min": 10, + "__pub_arg": [{"in_min": 10, "shutdown": True}], + "reboot": True, + "__pub_fun": "junos.shutdown", + "__pub_jid": "20170222231445709212", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.shutdown(**args) + mock_poweroff.assert_called_with(in_min=10) + + +def test_shutdown_with_at_arg(): + with patch("salt.modules.junos.SW.reboot") as mock_reboot: + args = { + "__pub_user": "root", + "__pub_arg": [{"at": "12:00 pm", "reboot": True}], + "reboot": True, + "__pub_fun": "junos.shutdown", + "__pub_jid": "201702276857", + "at": "12:00 pm", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.shutdown(**args) + mock_reboot.assert_called_with(at="12:00 pm") + + +def test_shutdown_fail_with_exception(): + with patch("salt.modules.junos.SW.poweroff") as mock_poweroff: + mock_poweroff.side_effect = raise_exception + args = { + "__pub_user": "root", + "__pub_arg": [{"shutdown": True}], + "shutdown": True, + "__pub_fun": "junos.shutdown", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + ret = { + "message": 'Could not poweroff/reboot because "Test exception"', + "out": False, + } + assert junos.shutdown(**args) == ret + 
+ +def test_install_config_without_args(): + ret = { + "message": "Please provide the salt path where the configuration is present", + "out": False, + } + assert junos.install_config() == ret + + +def test_install_config_cp_fails(): + with patch.dict( + junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} + ): + ret = { + "message": "Invalid file path.", + "out": False, + } + assert junos.install_config("path") == ret + + +def test_install_config_file_cp_fails(): + with patch.dict( + junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} + ): + ret = { + "message": "Invalid file path.", + "out": False, + } + assert junos.install_config("path") == ret + + +def test_install_config(): + with patch.dict( + junos.__salt__, + { + "cp.is_cached": MagicMock(return_value="test/path/config"), + "cp.hash_file": MagicMock( + return_value={"hash_type": "sha256", "hsum": "a386e49c17"} + ), + "file.get_hash": MagicMock(return_value="a386e49c17"), + }, + ): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + + ret = { + "message": "Successfully loaded and committed!", + "out": True, + } + assert junos.install_config("salt://actual/path/config.set") == ret + mock_load.assert_called_with(path="test/path/config", format="set") + + +def 
test_install_config_xml_file(): + with patch.dict( + junos.__salt__, + { + "cp.is_cached": MagicMock(return_value="test/path/config"), + "cp.hash_file": MagicMock( + return_value={"hash_type": "sha256", "hsum": "a386e49c17"} + ), + "file.get_hash": MagicMock(return_value="a386e49c17"), + }, + ): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + + ret = { + "message": "Successfully loaded and committed!", + "out": True, + } + assert junos.install_config("salt://actual/path/config.xml") == ret + mock_load.assert_called_with(path="test/path/config", format="xml") + + +def test_install_config_text_file(): + with patch.dict( + junos.__salt__, + { + "cp.is_cached": MagicMock(return_value="test/path/config"), + "cp.hash_file": MagicMock( + return_value={"hash_type": "sha256", "hsum": "a386e49c17"} + ), + "file.get_hash": MagicMock(return_value="a386e49c17"), + }, + ): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + 
"os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + + ret = { + "message": "Successfully loaded and committed!", + "out": True, + } + assert junos.install_config("salt://actual/path/config") == ret + mock_load.assert_called_with(path="test/path/config", format="text") + + +def test_install_config_cache_not_exists(): + with patch.dict( + junos.__salt__, + { + "cp.is_cached": MagicMock(return_value=None), + "file.rmdir": MagicMock(return_value="True"), + }, + ): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "tempfile.mkdtemp" + ) as mock_mkdtemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + mock_mkdtemp.return_value = "/tmp/argr5351afd" + + ret = { + "message": "Successfully loaded and committed!", + "out": True, + } + assert ( + junos.install_config("salt://actual/path/config", template_vars=True) + == ret + ) + mock_mkstemp.assert_called_with() + + +def test_install_config_replace(): + with patch.dict( + junos.__salt__, + { + "cp.is_cached": MagicMock(return_value="test/path/config"), + "cp.hash_file": 
MagicMock( + return_value={"hash_type": "sha256", "hsum": "a386e49c17"} + ), + "file.get_hash": MagicMock(return_value="a386e49c17"), + }, + ): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + + args = { + "__pub_user": "root", + "__pub_arg": [{"replace": True}], + "replace": True, + "__pub_fun": "junos.install_config", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + + ret = { + "message": "Successfully loaded and committed!", + "out": True, + } + assert junos.install_config("salt://actual/path/config.set", **args) == ret + mock_load.assert_called_with( + path="test/path/config", format="set", merge=False + ) + + +def test_install_config_overwrite(): + with patch.dict( + junos.__salt__, + { + "cp.is_cached": MagicMock(return_value="test/path/config"), + "cp.hash_file": MagicMock( + return_value={"hash_type": "sha256", "hsum": "a386e49c17"} + ), + "file.get_hash": MagicMock(return_value="a386e49c17"), + }, + ): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + 
"salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + + args = { + "__pub_user": "root", + "__pub_arg": [{"overwrite": True}], + "overwrite": True, + "__pub_fun": "junos.install_config", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + + ret = { + "message": "Successfully loaded and committed!", + "out": True, + } + assert junos.install_config("salt://actual/path/config.xml", **args) == ret + mock_load.assert_called_with( + path="test/path/config", format="xml", overwrite=True + ) + + +def test_install_config_overwrite_false(): + with patch.dict( + junos.__salt__, + { + "cp.is_cached": MagicMock(return_value="test/path/config"), + "cp.hash_file": MagicMock( + return_value={"hash_type": "sha256", "hsum": "a386e49c17"} + ), + "file.get_hash": MagicMock(return_value="a386e49c17"), + }, + ): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + 
mock_commit_check.return_value = True + + args = { + "__pub_user": "root", + "__pub_arg": [{"overwrite": False}], + "overwrite": False, + "__pub_fun": "junos.install_config", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + + ret = { + "message": "Successfully loaded and committed!", + "out": True, + } + assert junos.install_config("salt://actual/path/config", **args) == ret + mock_load.assert_called_with( + path="test/path/config", format="text", merge=True + ) + + +def test_install_config_load_causes_exception(): + with patch("jnpr.junos.utils.config.Config.diff") as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch("salt.utils.files.safe_rm") as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_load.side_effect = raise_exception + ret = { + "message": 'Could not load configuration due to : "Test exception"', + "out": False, + "format": "set", + } + assert junos.install_config(path="actual/path/config.set") == ret + + +def test_install_config_no_diff(): + with patch("jnpr.junos.utils.config.Config.diff") as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch("salt.utils.files.safe_rm") as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = None + ret = { + "message": "Configuration already applied!", + 
"out": True, + } + assert junos.install_config("actual/path/config") == ret + + +def test_install_config_write_diff(): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen" + ) as mock_fopen, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + + args = { + "__pub_user": "root", + "__pub_arg": [{"diffs_file": "copy/config/here"}], + "diffs_file": "copy/config/here", + "__pub_fun": "junos.install_config", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + + ret = { + "message": "Successfully loaded and committed!", + "out": True, + } + assert junos.install_config("actual/path/config", **args) == ret + mock_fopen.assert_called_with("copy/config/here", "w") + + +def test_install_config_write_diff_exception(): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as mock_fopen, patch( + "salt.utils.stringutils.to_str" + ) as mock_strgutils, patch( + 
"os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + mock_strgutils.side_effect = raise_exception + + args = { + "__pub_user": "root", + "__pub_arg": [{"diffs_file": "copy/config/here"}], + "diffs_file": "copy/config/here", + "__pub_fun": "junos.install_config", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + + ret = { + "message": "Could not write into diffs_file due to: 'Test exception'", + "out": False, + } + assert junos.install_config("actual/path/config", **args) == ret + + +def test_install_config_commit_params(): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + args = { + "comment": "comitted via salt", + "__pub_user": "root", + "__pub_arg": [{"comment": "comitted via salt", "confirm": 3}], + "confirm": 3, + "__pub_fun": "junos.commit", + "__pub_jid": "20170221182856987820", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + ret = { + "message": "Successfully loaded and committed!", + "out": True, + } + assert junos.install_config("actual/path/config", **args) == ret + 
mock_commit.assert_called_with(comment="comitted via salt", confirm=3) + + +def test_install_config_commit_check_fails(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = False + + ret = { + "message": "Loaded configuration but commit check failed, hence rolling back configuration.", + "out": False, + } + assert junos.install_config("actual/path/config.xml") == ret + + +def test_install_config_commit_exception(): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + mock_commit.side_effect = raise_exception + ret = { + "message": 'Commit check successful but commit failed with "Test exception"', + "out": False, + } + assert 
junos.install_config("actual/path/config") == ret + + +def test_install_config_test_mode(): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + ret = { + "message": "Commit check passed, but skipping commit for dry-run and rolling back configuration.", + "out": True, + } + assert junos.install_config("actual/path/config", test=True) == ret + mock_commit.assert_not_called() + + +def test_install_config_write_diff_dynamic_mode(): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + ret = { + "message": "Write diff is not supported with dynamic/ephemeral configuration mode", + "out": 
False, + } + assert ( + junos.install_config( + "actual/path/config", mode="dynamic", diffs_file="/path/to/dif" + ) + == ret + ) + mock_commit.assert_not_called() + + +def test_install_config_unknown_mode(): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + ret = { + "message": "install_config failed due to: unsupported action: abcdef", + "out": False, + } + assert junos.install_config("actual/path/config", mode="abcdef") == ret + mock_commit.assert_not_called() + + +def test_zeroize(): + with patch("jnpr.junos.device.Device.cli") as mock_cli: + result = junos.zeroize() + mock_cli.assert_called_once_with("request system zeroize") + ret = { + "message": "Completed zeroize and rebooted", + "out": True, + } + assert result == ret + + +def test_zeroize_throw_exception(): + with patch("jnpr.junos.device.Device.cli") as mock_cli: + mock_cli.side_effect = raise_exception + ret = { + "message": 'Could not zeroize due to : "Test exception"', + "out": False, + } + assert junos.zeroize() == ret + + +def test_install_os_without_args(): + ret = { + "message": "Please provide the salt path where the junos image is present.", + "out": False, + } + assert junos.install_os() == ret + + +def test_install_os_cp_fails(): + with patch.dict( + junos.__salt__, + { + "cp.is_cached": 
MagicMock(return_value="/pat/to/tmp/file"), + "cp.hash_file": MagicMock( + return_value={"hash_type": "sha256", "hsum": "a386e49c17"} + ), + "file.get_hash": MagicMock(return_value="xxxx"), + "file.rmdir": MagicMock(return_value="True"), + }, + ): + with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "os.path.getsize" + ) as mock_getsize: + mock_getsize.return_value = 10 + mock_isfile.return_value = False + mock_install.return_value = ( + False, + "Invalid path. Please provide a valid image path", + ) + ret = { + "message": "Installation failed. Reason: Invalid path. Please provide a valid image path", + "out": False, + } + assert junos.install_os("salt://image/path/") == ret + + +def test_install_os_image_cp_fails(): + with patch.dict( + junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} + ): + ret = { + "message": "Invalid path. 
Please provide a valid image path", + "out": False, + } + assert junos.install_os("/image/path/") == ret + + +def test_install_os(): + with patch.dict( + junos.__salt__, + { + "cp.is_cached": MagicMock(return_value="test/path/config"), + "cp.hash_file": MagicMock( + return_value={"hash_type": "sha256", "hsum": "a386e49c17"} + ), + "file.get_hash": MagicMock(return_value="a386e49c17"), + }, + ): + with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "os.path.getsize" + ) as mock_getsize: + mock_getsize.return_value = 10 + mock_isfile.return_value = True + mock_install.return_value = True, "installed" + ret = { + "message": "Installed the os.", + "out": True, + } + assert junos.install_os("path") == ret + + +def test_install_os_failure(): + with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "os.path.getsize" + ) as mock_getsize: + mock_getsize.return_value = 10 + mock_isfile.return_value = True + mock_install.return_value = False, "because we are testing failure" + ret = { + "message": "Installation failed. 
Reason: because we are testing failure", + "out": False, + } + assert junos.install_os("path") == ret + + +def test_install_os_with_reboot_arg(): + with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( + "jnpr.junos.utils.sw.SW.reboot" + ) as mock_reboot, patch("salt.utils.files.safe_rm") as mock_safe_rm, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "os.path.getsize" + ) as mock_getsize: + mock_getsize.return_value = 10 + mock_isfile.return_value = True + mock_install.return_value = True, "installed" + args = { + "__pub_user": "root", + "__pub_arg": [{"reboot": True}], + "reboot": True, + "__pub_fun": "junos.install_os", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + ret = { + "message": "Successfully installed and rebooted!", + "out": True, + } + assert junos.install_os("path", **args) == ret + + +def test_install_os_pyez_install_throws_exception(): + with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "os.path.getsize" + ) as mock_getsize: + mock_getsize.return_value = 10 + mock_isfile.return_value = True + mock_install.side_effect = raise_exception + ret = { + "message": 'Installation failed due to: "Test exception"', + "out": False, + } + assert junos.install_os("path") == ret + + +def test_install_os_with_reboot_raises_exception(): + with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( + "jnpr.junos.utils.sw.SW.reboot" + ) as mock_reboot, patch("salt.utils.files.safe_rm") as mock_safe_rm, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + 
"salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "os.path.getsize" + ) as mock_getsize: + mock_getsize.return_value = 10 + mock_isfile.return_value = True + mock_install.return_value = True, "installed" + mock_reboot.side_effect = raise_exception + args = { + "__pub_user": "root", + "__pub_arg": [{"reboot": True}], + "reboot": True, + "__pub_fun": "junos.install_os", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + ret = { + "message": 'Installation successful but reboot failed due to : "Test exception"', + "out": False, + } + assert junos.install_os("path", **args) == ret + + +def test_install_os_no_copy(): + with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "os.path.getsize" + ) as mock_getsize: + mock_getsize.return_value = 10 + mock_isfile.return_value = True + mock_install.return_value = True, "installed" + ret = { + "message": "Installed the os.", + "out": True, + } + assert junos.install_os("path", no_copy=True) == ret + mock_install.assert_called_with( + "path", no_copy=True, progress=True, timeout=1800 + ) + mock_mkstemp.assert_not_called() + mock_safe_rm.assert_not_called() + + +def test_install_os_issu(): + with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "os.path.getsize" + ) as mock_getsize: + mock_getsize.return_value = 10 + mock_isfile.return_value = True + mock_install.return_value = True, "installed" + ret = { + "message": 
"Installed the os.", + "out": True, + } + assert junos.install_os("path", issu=True) == ret + mock_install.assert_called_with(ANY, issu=True, progress=True, timeout=1800) + + +def test_install_os_add_params(): + with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "os.path.getsize" + ) as mock_getsize: + mock_getsize.return_value = 10 + mock_isfile.return_value = True + mock_install.return_value = True, "installed" + ret = { + "message": "Installed the os.", + "out": True, + } + remote_path = "/path/to/file" + assert ( + junos.install_os("path", remote_path=remote_path, nssu=True, validate=True) + == ret + ) + mock_install.assert_called_with( + ANY, + nssu=True, + remote_path=remote_path, + progress=True, + validate=True, + timeout=1800, + ) + + +def test_file_copy_without_args(): + pytest.raises(TypeError, junos.file_copy) + + +@patch("paramiko.SSHClient") +@patch("scp.SCPClient.put") +@patch("scp.SCPClient.__init__") +def test_file_copy_invalid_src(mock_scpclient, mock_put, mock_ssh): + mock_scpclient.return_value = None + invalid_path = "invalid/file/path" + mock_put.side_effect = Exception(invalid_path) + with patch("os.path.isfile") as mock_isfile: + mock_isfile.return_value = False + ret = { + "message": 'Could not copy file : "invalid/file/path"', + "out": False, + } + assert junos.file_copy(invalid_path, "file") == ret + + +def test_file_copy_without_dest(): + pytest.raises(TypeError, junos.file_copy, src="/home/user/config.set") + + +def test_file_copy(): + with patch("salt.modules.junos.SCP") as mock_scp, patch( + "os.path.isfile" + ) as mock_isfile: + mock_isfile.return_value = True + ret = { + "message": "Successfully copied file from test/src/file to file", + "out": True, + } + assert 
junos.file_copy(dest="file", src="test/src/file") == ret + + +def test_file_copy_exception(): + with patch("salt.modules.junos.SCP") as mock_scp, patch( + "os.path.isfile" + ) as mock_isfile: + mock_isfile.return_value = True + mock_scp.side_effect = raise_exception + ret = { + "message": 'Could not copy file : "Test exception"', + "out": False, + } + assert junos.file_copy(dest="file", src="test/src/file") == ret + + +# These test cases test the __virtual__ function, used internally by salt +# to check if the given module is loadable. This function is not used by +# an external user. + + +def test_virtual_proxy_unavailable(): + with patch.dict(junos.__opts__, {}): + res = ( + False, + "The junos or dependent module could not be loaded: " + "junos-eznc or jxmlease or yamlordereddictloader or " + "proxy could not be loaded.", + ) + assert junos.__virtual__() == res + + +def test_virtual_all_true(): + with patch.dict(junos.__opts__, {"proxy": "test"}): + assert junos.__virtual__() == "junos" + + +def test_rpc_without_args(): + ret = { + "message": "Please provide the rpc to execute.", + "out": False, + } + assert junos.rpc() == ret + + +def test_rpc_get_config_exception(): + with patch("jnpr.junos.device.Device.execute") as mock_execute: + mock_execute.side_effect = raise_exception + ret = { + "message": 'RPC execution failed due to "Test exception"', + "out": False, + } + assert junos.rpc("get_config") == ret + + +def test_rpc_get_config_filter(): + with patch("jnpr.junos.device.Device.execute") as mock_execute: + mock_execute.return_value = etree.XML("") + args = { + "__pub_user": "root", + "__pub_arg": [ + "get-config", + {"filter": ""}, + ], + "__pub_fun": "junos.rpc", + "__pub_jid": "20170314162715866528", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "filter": "", + "__pub_ret": "", + } + junos.rpc("get-config", **args) + exec_args = mock_execute.call_args + expected_rpc = b'' + assert etree.tostring(exec_args[0][0]) == expected_rpc + + +def 
test_rpc_get_interface_information(): + with patch("jnpr.junos.device.Device.execute") as mock_execute: + junos.rpc("get-interface-information", format="json") + args = mock_execute.call_args + expected_rpc = b'' + assert etree.tostring(args[0][0]) == expected_rpc + + +def test_rpc_get_interface_information_with_kwargs(): + with patch("jnpr.junos.device.Device.execute") as mock_execute: + args = { + "__pub_user": "root", + "__pub_arg": [ + "get-interface-information", + "", + "text", + {"terse": True, "interface_name": "lo0", "format": "text"}, + ], + "format": "text", + "terse": True, + "__pub_fun": "junos.rpc", + "__pub_jid": "20170314160943363563", + "__pub_tgt": "mac_min", + "interface_name": "lo0", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.rpc("get-interface-information", **args) + args = mock_execute.call_args + expected_rpc = b'lo0' + assert etree.tostring(args[0][0]) == expected_rpc + + +def test_rpc_get_chassis_inventory_filter_as_arg(): + with patch("salt.modules.junos.jxmlease.parse") as mock_jxmlease, patch( + "salt.modules.junos.etree.tostring" + ) as mock_tostring, patch( + "salt.modules.junos.logging.Logger.warning" + ) as mock_warning, patch( + "jnpr.junos.device.Device.execute" + ) as mock_execute: + junos.rpc( + "get-chassis-inventory", + filter="", + ) + mock_warning.assert_called_with( + 'Filter ignored as it is only used with "get-config" rpc' + ) + + +def test_rpc_get_interface_information_exception(): + with patch("jnpr.junos.device.Device.execute") as mock_execute: + mock_execute.side_effect = raise_exception + ret = { + "message": 'RPC execution failed due to "Test exception"', + "out": False, + } + assert junos.rpc("get_interface_information") == ret + + +def test_rpc_write_file_format_text(): + with patch("jnpr.junos.device.Device.execute") as mock_execute: + mock_execute.return_value = etree.XML("text rpc reply") + with patch("salt.utils.files.fopen", mock_open(), create=True) as m_open: + 
junos.rpc("get-chassis-inventory", dest="/path/to/file", format="text") + writes = m_open.write_calls() + assert writes == ["text rpc reply"], writes + + +def test_rpc_write_file_format_json(): + with patch("jnpr.junos.device.Device.execute") as mock_execute, patch( + "salt.utils.json.dumps" + ) as mock_dumps: + mock_dumps.return_value = "json rpc reply" + with patch("salt.utils.files.fopen", mock_open(), create=True) as m_open: + junos.rpc("get-chassis-inventory", dest="/path/to/file", format="json") + writes = m_open.write_calls() + assert writes == ["json rpc reply"], writes + + +def test_rpc_write_file(): + with patch("salt.modules.junos.jxmlease.parse") as mock_parse, patch( + "salt.modules.junos.etree.tostring" + ) as mock_tostring, patch("jnpr.junos.device.Device.execute") as mock_execute: + mock_tostring.return_value = "xml rpc reply" + with patch("salt.utils.files.fopen", mock_open(), create=True) as m_open: + junos.rpc("get-chassis-inventory", dest="/path/to/file") + writes = m_open.write_calls() + assert writes == ["xml rpc reply"], writes + + +def test_lock_success(): + ret_exp = {"out": True, "message": "Successfully locked the configuration."} + ret = junos.lock() + assert ret == ret_exp + + +def test_lock_error(): + ret_exp = {"out": False, "message": 'Could not gain lock due to : "LockError"'} + with patch("jnpr.junos.utils.config.Config.lock") as mock_lock: + mock_lock.side_effect = LockError(None) + ret = junos.lock() + assert ret == ret_exp + + +def test_unlock_success(): + ret_exp = {"out": True, "message": "Successfully unlocked the configuration."} + ret = junos.unlock() + assert ret == ret_exp + + +def test_unlock_error(): + ret_exp = { + "out": False, + "message": 'Could not unlock configuration due to : "UnlockError"', + } + with patch("jnpr.junos.utils.config.Config.unlock") as mock_unlock: + mock_unlock.side_effect = UnlockError(None) + ret = junos.unlock() + assert ret == ret_exp + + +def test_load_none_path(): + ret_exp = { + "out": 
False, + "message": ("Please provide the salt path where the configuration is present"), + } + ret = junos.load() + assert ret == ret_exp + + +def test_load_wrong_tmp_file(): + ret_exp = { + "out": False, + "message": ( + 'Could not load configuration due to : "[Errno 2] No such file or' + " directory: '/pat/to/tmp/file'\"" + ), + "format": "text", + } + with patch.dict( + junos.__salt__, + { + "cp.is_cached": MagicMock(return_value="/pat/to/tmp/file"), + "cp.hash_file": MagicMock( + return_value={"hash_type": "sha256", "hsum": "a386e49c17"} + ), + "file.get_hash": MagicMock(return_value="a386e49c17"), + }, + ): + with patch("salt.utils.files.fopen", mock_open(), create=True) as m_open, patch( + "os.path.getsize" + ) as mock_getsize, patch("salt.utils.files.mkstemp") as mock_mkstmp: + mock_mkstmp.return_value = "/pat/to/tmp/file" + mock_getsize.return_value = 1000 + ret = junos.load("salt://path/to/file") + assert ret == ret_exp + + +def test_load_invalid_path(): + with patch("salt.utils.files.mkstemp") as mock_mkstmp: + mock_mkstmp.return_value = "/path/to/file" + pytest.raises(FileNotFoundError, junos.load, path="/path/to/file") + + +def test_load_no_extension(): + ret_exp = {"out": True, "message": "Successfully loaded the configuration."} + with patch("os.path.getsize") as mock_getsize, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstmp, patch( + "os.path.isfile" + ) as mock_isfile: + mock_getsize.return_value = 1000 + mock_mkstmp.return_value = "/path/to/file" + mock_isfile.return_value = True + ret = junos.load("/path/to/file") + mock_load.assert_called_with(format="text", path="/path/to/file") + assert ret == ret_exp + + +def test_load_xml_extension(): + ret_exp = {"out": True, "message": "Successfully loaded the configuration."} + with patch("os.path.getsize") as mock_getsize, patch( + 
"jnpr.junos.utils.config.Config.load" + ) as mock_load, patch("os.path.isfile") as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstmp: + mock_getsize.return_value = 1000 + mock_mkstmp.return_value = "/path/to/file.xml" + mock_isfile.return_value = True + ret = junos.load("/path/to/file.xml") + mock_load.assert_called_with(format="xml", path="/path/to/file.xml") + assert ret == ret_exp + + +def test_load_xml_extension_with_kwargs(): + ret_exp = {"out": True, "message": "Successfully loaded the configuration."} + with patch("os.path.getsize") as mock_getsize, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen" + ) as fopen, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstmp: + mock_mkstmp.return_value = "/path/to/file" + mock_isfile.return_value = True + ret = junos.load("/path/to/file.xml", template_vars=dict(hostname="test")) + mock_load.assert_called_with( + format="xml", path="/path/to/file", template_vars={"hostname": "test"} + ) + assert ret == ret_exp + + +def test_load_set_extension(): + ret_exp = {"out": True, "message": "Successfully loaded the configuration."} + with patch("os.path.getsize") as mock_getsize, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.isfile" + ) as mock_isfile: + mock_getsize.return_value = 1000 + mock_mkstmp.return_value = "/path/to/file.set" + mock_isfile.return_value = True + ret = junos.load("/path/to/file.set") + mock_load.assert_called_with(format="set", path="/path/to/file.set") + assert ret == ret_exp + + +def test_load_replace_true(): + ret_exp = {"out": True, "message": "Successfully loaded the configuration."} + with 
patch("os.path.getsize") as mock_getsize, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.isfile" + ) as mock_isfile: + mock_getsize.return_value = 1000 + mock_mkstmp.return_value = "/path/to/file" + mock_isfile.return_value = True + ret = junos.load("/path/to/file", replace=True) + mock_load.assert_called_with(format="text", merge=False, path="/path/to/file") + assert ret == ret_exp + + +def test_load_replace_false(): + ret_exp = {"out": True, "message": "Successfully loaded the configuration."} + with patch("os.path.getsize") as mock_getsize, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.isfile" + ) as mock_isfile: + mock_getsize.return_value = 1000 + mock_mkstmp.return_value = "/path/to/file" + mock_isfile.return_value = True + ret = junos.load("/path/to/file", replace=False) + mock_load.assert_called_with(format="text", replace=False, path="/path/to/file") + assert ret == ret_exp + + +def test_load_overwrite_true(): + ret_exp = {"out": True, "message": "Successfully loaded the configuration."} + with patch("os.path.getsize") as mock_getsize, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.isfile" + ) as mock_isfile: + mock_getsize.return_value = 1000 + mock_mkstmp.return_value = "/path/to/file" + mock_isfile.return_value = True + ret = junos.load("/path/to/file", overwrite=True) + mock_load.assert_called_with( + format="text", overwrite=True, path="/path/to/file" + ) + assert ret == ret_exp + + +def test_load_overwrite_false(): + ret_exp = {"out": True, "message": "Successfully loaded the 
configuration."} + with patch("os.path.getsize") as mock_getsize, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstmp, patch( + "os.path.isfile" + ) as mock_isfile: + mock_getsize.return_value = 1000 + mock_mkstmp.return_value = "/path/to/file" + mock_isfile.return_value = True + ret = junos.load("/path/to/file", overwrite=False) + mock_load.assert_called_with(format="text", merge=True, path="/path/to/file") + assert ret == ret_exp + + +def test_load_error(): + ret_exp = { + "out": False, + "format": "text", + "message": 'Could not load configuration due to : "Test Error"', + } + with patch("os.path.getsize") as mock_getsize, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.isfile" + ) as mock_isfile: + mock_getsize.return_value = 1000 + mock_mkstmp.return_value = "/path/to/file" + mock_isfile.return_value = True + mock_load.side_effect = Exception("Test Error") + ret = junos.load("/path/to/file") + assert ret == ret_exp + + +def test_load_template(): + ret_exp = { + "out": True, + "message": "Successfully loaded the configuration.", + } + with patch("os.path.getsize") as mock_getsize, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load: + ret = junos.load("tests/unit/modules/templates/basic2.j2", test=True) + assert ret == ret_exp + + +def test_commit_check_success(): + ret_exp = {"out": True, "message": "Commit check succeeded."} + ret = junos.commit_check() + assert ret == ret_exp + + +def test_commit_check_error(): + ret_exp = {"out": False, "message": "Commit check failed with "} + with patch("jnpr.junos.utils.config.Config.commit_check") as mock_check: + mock_check.side_effect = Exception + ret = junos.commit_check() + assert ret == ret_exp + + +def 
test_get_table_wrong_path(): + table = "ModuleTable" + file = "sample.yml" + path = "/path/to/file" + ret_exp = { + "out": False, + "hostname": "1.1.1.1", + "tablename": "ModuleTable", + "message": "Given table file {} cannot be located".format(file), + } + with patch.dict( + junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} + ): + with patch("jnpr.junos.factory.FactoryLoader.load") as mock_load, patch( + "salt.utils.files.fopen" + ) as mock_fopen, patch("jnpr.junos.factory.FactoryLoader.load") as mock_load: + ret = junos.get_table(table, file, path) + assert ret == ret_exp + mock_load.assert_not_called() + + +def test_get_table_no_path_no_file(): + table = "ModuleTable" + file = "inventory.yml" + ret_exp = { + "out": False, + "hostname": "1.1.1.1", + "tablename": "ModuleTable", + "message": "Given table file {} cannot be located".format(file), + } + with patch.dict( + junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} + ): + with patch("jnpr.junos.factory.FactoryLoader.load") as mock_load, patch( + "glob.glob" + ) as mock_fopen: + mock_fopen.return_value = [] + ret = junos.get_table(table, file) + assert ret == ret_exp + mock_load.assert_not_called() + + +def test_get_table_yaml_load_error(): + table = "ModuleTable" + file = "inventory.yml" + path = "/path/to/file" + message = "File not located test" + ret_exp = { + "out": False, + "hostname": "1.1.1.1", + "tablename": "ModuleTable", + "message": "Uncaught exception during YAML Load - please report: {}".format( + message + ), + } + with patch("salt.utils.files.fopen", mock_open(), create=True) as mock_file, patch( + "glob.glob" + ) as mock_fopen, patch.object(yaml, "load") as mock_yamlload: + mock_fopen.return_value = ["/path/to/file"] + mock_yamlload.side_effect = OSError(message) + ret = junos.get_table(table, file, path) + assert ret == ret_exp + + +def test_get_table_api_error(): + table = "sample" + file = "inventory.yml" + table_yamlload = { + "ModuleTable": { + "item": ( + 
".//chassis-sub-module|.//chassis-module|.//chassis-sub-sub-module" + ), + "key": "name", + "rpc": "get-chassis-inventory", + "view": "ModuleTableView", + }, + "ModuleTableView": { + "fields": { + "jname": "name", + "pn": "part-number", + "sn": "serial-number", + "type": "description", + "ver": "version", + }, + }, + } + ret_exp = { + "out": False, + "hostname": "1.1.1.1", + "tablename": "sample", + "message": ( + "Uncaught exception during get API call - please report: '{}'".format( + str(table) + ) + ), + } + with patch("jnpr.junos.device.Device.execute") as mock_execute, patch( + "yaml.load" + ) as mock_yamlload, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open: + mock_yamlload.return_value = table_yamlload + ret = junos.get_table(table, file) + assert ret["out"] == ret_exp["out"] + assert ret["tablename"] == ret_exp["tablename"] + assert ret["message"] == ret_exp["message"] + + +def test_get_table_connect_closed_error(): + table = "ModuleTable" + file = "inventory.yml" + table_yamlload = { + "ModuleTable": { + "item": ( + ".//chassis-sub-module|.//chassis-module|.//chassis-sub-sub-module" + ), + "key": "name", + "rpc": "get-chassis-inventory", + "view": "ModuleTableView", + }, + "ModuleTableView": { + "fields": { + "jname": "name", + "pn": "part-number", + "sn": "serial-number", + "type": "description", + "ver": "version", + }, + }, + } + ret_exp = { + "out": False, + "hostname": "1.1.1.1", + "tablename": "ModuleTable", + "message": ( + "Got ConnectClosedError exception. 
Connection lost with Device(1.1.1.1)" + ), + } + with patch("jnpr.junos.factory.optable.OpTable.get") as mock_load, patch( + "yaml.load" + ) as mock_yamlload, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open: + dev = Device(host="1.1.1.1", user="rick") + mock_load.side_effect = ConnectClosedError(dev) + mock_yamlload.return_value = table_yamlload + ret = junos.get_table(table, file) + assert ret["out"] == ret_exp["out"] + assert ret["tablename"] == ret_exp["tablename"] + assert ret["message"] == ret_exp["message"] + + +def test_get_table_inventory(): + table = "ModuleTable" + file = "inventory.yml" + pyez_tables_path = os.path.dirname(os.path.abspath(tables_dir.__file__)) + path = pyez_tables_path + table_yamlload = { + "ModuleTable": { + "item": ( + ".//chassis-sub-module|.//chassis-module|.//chassis-sub-sub-module" + ), + "key": "name", + "rpc": "get-chassis-inventory", + "view": "ModuleTableView", + }, + "ModuleTableView": { + "fields": { + "jname": "name", + "pn": "part-number", + "sn": "serial-number", + "type": "description", + "ver": "version", + }, + }, + } + with patch("jnpr.junos.device.Device.execute") as mock_execute, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch("yaml.load") as mock_yamlload, patch( + "salt.utils.json.dumps" + ) as mock_dumps: + mock_dumps.return_value = "json rpc reply" + mock_yamlload.return_value = table_yamlload + ret = junos.get_table(table, file, path) + assert ret["out"] + + +def test_get_table_no_path_inventory(): + table = "ModuleTable" + file = "inventory.yml" + table_yamlload = { + "ModuleTable": { + "item": ( + ".//chassis-sub-module|.//chassis-module|.//chassis-sub-sub-module" + ), + "key": "name", + "rpc": "get-chassis-inventory", + "view": "ModuleTableView", + }, + "ModuleTableView": { + "fields": { + "jname": "name", + "pn": "part-number", + "sn": "serial-number", + "type": "description", + "ver": "version", + }, + }, + } + with 
patch("jnpr.junos.device.Device.execute") as mock_execute, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch("yaml.load") as mock_yamlload, patch( + "salt.utils.json.dumps" + ) as mock_dumps: + mock_dumps.return_value = "json rpc reply" + mock_yamlload.return_value = table_yamlload + ret = junos.get_table(table, file) + assert ret["out"] diff --git a/tests/pytests/unit/modules/test_postgres.py b/tests/pytests/unit/modules/test_postgres.py index b9178fa038e..b828e8204b9 100644 --- a/tests/pytests/unit/modules/test_postgres.py +++ b/tests/pytests/unit/modules/test_postgres.py @@ -2,6 +2,7 @@ import datetime import re import pytest +from pytestskipmarkers.utils import platform import salt.modules.config as configmod import salt.modules.postgres as postgres @@ -117,6 +118,8 @@ def idfn(val): ids=idfn, ) def test_verify_password(role, password, verifier, method, result): + if platform.is_fips_enabled() and (method == "md5" or verifier == md5_pw): + pytest.skip("Test cannot run on a FIPS enabled platform") assert postgres._verify_password(role, password, verifier, method) == result @@ -971,6 +974,7 @@ def test_user_update3(): ) +@pytest.mark.skip_on_fips_enabled_platform def test_user_update_encrypted_passwd(): with patch( "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) @@ -1226,6 +1230,7 @@ def test_create_extension_newerthan(): assert not postgres.create_extension("foo", ext_version="a", schema="b") +@pytest.mark.skip_on_fips_enabled_platform def test_encrypt_passwords(): assert postgres._maybe_encrypt_password("foo", "bar", False) == "bar" assert ( diff --git a/tests/pytests/unit/modules/test_saltutil.py b/tests/pytests/unit/modules/test_saltutil.py index 97527d3dc24..42986c464e1 100644 --- a/tests/pytests/unit/modules/test_saltutil.py +++ b/tests/pytests/unit/modules/test_saltutil.py @@ -1,3 +1,5 @@ +import pathlib + import pytest import salt.modules.saltutil as saltutil @@ -7,8 +9,14 @@ from tests.support.mock 
import sentinel as s @pytest.fixture -def configure_loader_modules(): - return {saltutil: {"__opts__": {"file_client": "local"}}} +def configure_loader_modules(minion_opts): + minion_opts["file_client"] = "local" + minion_opts["master_uri"] = "tcp://127.0.0.1:4505" + return { + saltutil: { + "__opts__": minion_opts, + } + } def test_exec_kwargs(): @@ -90,12 +98,24 @@ def test_refresh_grains_default_clean_pillar_cache(): refresh_pillar.assert_called_with(clean_cache=False) +def test_refresh_grains_default_clean_pillar_cache_with_refresh_false(): + with patch("salt.modules.saltutil.refresh_modules") as refresh_modules: + saltutil.refresh_grains(refresh_pillar=False) + refresh_modules.assert_called() + + def test_refresh_grains_clean_pillar_cache(): with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: saltutil.refresh_grains(clean_pillar_cache=True) refresh_pillar.assert_called_with(clean_cache=True) +def test_refresh_grains_clean_pillar_cache_with_refresh_false(): + with patch("salt.modules.saltutil.refresh_modules") as refresh_modules: + saltutil.refresh_grains(clean_pillar_cache=True, refresh_pillar=False) + refresh_modules.assert_called() + + def test_sync_grains_default_clean_pillar_cache(): with patch("salt.modules.saltutil._sync"): with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: @@ -136,3 +156,42 @@ def test_sync_all_clean_pillar_cache(): with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: saltutil.sync_all(clean_pillar_cache=True) refresh_pillar.assert_called_with(clean_cache=True) + + +def test_list_extmods(salt_call_cli, minion_opts): + pathlib.Path(minion_opts["cachedir"], "extmods", "dummydir").mkdir( + parents=True, exist_ok=True + ) + ret = saltutil.list_extmods() + assert "dummydir" in ret + assert ret["dummydir"] == [] + + +def test_refresh_beacons(): + ret = saltutil.refresh_beacons() + assert ret is False + + +def test_refresh_matchers(): + ret = saltutil.refresh_matchers() + assert ret is 
False + + +def test_refresh_modules_async_false(): + kwargs = {"async": False} + ret = saltutil.refresh_modules(**kwargs) + assert ret is False + + +def test_clear_job_cache(salt_call_cli, minion_opts): + pathlib.Path(minion_opts["cachedir"], "minion_jobs", "dummydir").mkdir( + parents=True, exist_ok=True + ) + ret = saltutil.clear_job_cache(hours=1) + assert ret is True + + +@pytest.mark.destructive_test +def test_regen_keys(salt_call_cli, minion_opts): + pathlib.Path(minion_opts["pki_dir"], "dummydir").mkdir(parents=True, exist_ok=True) + saltutil.regen_keys() diff --git a/tests/pytests/unit/modules/test_selinux.py b/tests/pytests/unit/modules/test_selinux.py index 05d3ca25e24..b67a1b52577 100644 --- a/tests/pytests/unit/modules/test_selinux.py +++ b/tests/pytests/unit/modules/test_selinux.py @@ -1,3 +1,5 @@ +import re + import pytest import salt.modules.selinux as selinux @@ -376,3 +378,35 @@ SELINUXTYPE=targeted for line in writes: if line.startswith("SELINUX="): assert line == "SELINUX=disabled" + + +@pytest.mark.parametrize( + "name,sel_type", + ( + ("/srv/ssl/ldap/.*[.]key", "slapd_cert_t"), + ("/srv/ssl/ldap(/.*[.](pem|crt))?", "cert_t"), + ), +) +def test_selinux_add_policy_regex(name, sel_type): + """ + Test adding policy with regex components parsing the stdout response of restorecon used in fcontext_policy_applied, new style. 
+ """ + mock_cmd_shell = MagicMock(return_value={"retcode": 0}) + mock_cmd_run_all = MagicMock(return_value={"retcode": 0}) + + with patch.dict(selinux.__salt__, {"cmd.shell": mock_cmd_shell}), patch.dict( + selinux.__salt__, {"cmd.run_all": mock_cmd_run_all} + ): + selinux.fcontext_add_policy(name, sel_type=sel_type) + filespec = re.escape(name) + expected_cmd_shell = f"semanage fcontext -l | egrep '{filespec}'" + mock_cmd_shell.assert_called_once_with( + expected_cmd_shell, + ignore_retcode=True, + ) + expected_cmd_run_all = ( + f"semanage fcontext --modify --type {sel_type} {filespec}" + ) + mock_cmd_run_all.assert_called_once_with( + expected_cmd_run_all, + ) diff --git a/tests/pytests/unit/modules/test_yumpkg.py b/tests/pytests/unit/modules/test_yumpkg.py index f19dac835bf..35f545ad72b 100644 --- a/tests/pytests/unit/modules/test_yumpkg.py +++ b/tests/pytests/unit/modules/test_yumpkg.py @@ -1,3 +1,4 @@ +import configparser import logging import os @@ -8,11 +9,15 @@ import salt.modules.pkg_resource as pkg_resource import salt.modules.rpm_lowpkg as rpm import salt.modules.yumpkg as yumpkg import salt.utils.platform -from salt.exceptions import CommandExecutionError, SaltInvocationError +from salt.exceptions import CommandExecutionError, MinionError, SaltInvocationError from tests.support.mock import MagicMock, Mock, call, patch log = logging.getLogger(__name__) +pytestmark = [ + pytest.mark.skip_unless_on_linux, +] + @pytest.fixture def configure_loader_modules(): @@ -28,7 +33,9 @@ def configure_loader_modules(): "os_family": "RedHat", "osmajorrelease": 7, }, - "__salt__": {"pkg_resource.add_pkg": _add_data}, + "__salt__": { + "pkg_resource.add_pkg": _add_data, + }, }, pkg_resource: {}, } @@ -36,7 +43,6 @@ def configure_loader_modules(): @pytest.fixture(scope="module") def list_repos_var(): - return { "base": { "file": "/etc/yum.repos.d/CentOS-Base.repo", @@ -98,6 +104,71 @@ def yum_and_dnf(request): yield request.param["cmd"] +def test__virtual_normal(): + 
assert yumpkg.__virtual__() == "pkg" + + +def test__virtual_yumpkg_api(): + with patch.dict(yumpkg.__opts__, {"yum_provider": "yumpkg_api"}): + assert yumpkg.__virtual__() == ( + False, + "Module yumpkg: yumpkg_api provider not available", + ) + + +def test__virtual_exception(): + with patch.dict(yumpkg.__grains__, {"os": 1}): + assert yumpkg.__virtual__() == ( + False, + "Module yumpkg: no yum based system detected", + ) + + +def test__virtual_no_yum(): + with patch.object(yumpkg, "_yum", MagicMock(return_value=None)): + assert yumpkg.__virtual__() == (False, "DNF nor YUM found") + + +def test__virtual_non_yum_system(): + with patch.dict(yumpkg.__grains__, {"os_family": "ubuntu"}): + assert yumpkg.__virtual__() == ( + False, + "Module yumpkg: no yum based system detected", + ) + + +def test_strip_headers(): + output = os.linesep.join(["spongebob", "squarepants", "squidward"]) + args = ("spongebob", "squarepants") + assert yumpkg._strip_headers(output, *args) == "squidward\n" + + +def test_get_copr_repo(): + result = yumpkg._get_copr_repo("copr:spongebob/squarepants") + assert result == "copr:copr.fedorainfracloud.org:spongebob:squarepants" + + +def test_get_hold(): + line = "vim-enhanced-2:7.4.827-1.fc22" + with patch.object(yumpkg, "_yum", MagicMock(return_value="dnf")): + assert yumpkg._get_hold(line) == "vim-enhanced-2:7.4.827-1.fc22" + + +def test_get_options(): + result = yumpkg._get_options( + repo="spongebob", + disableexcludes="squarepants", + __dunder_keyword="this is skipped", + stringvalue="string_value", + boolvalue=True, + get_extra_options=True, + ) + assert "--enablerepo=spongebob" in result + assert "--disableexcludes=squarepants" in result + assert "--stringvalue=string_value" in result + assert "--boolvalue" in result + + def test_list_pkgs(): """ Test packages listing. 
@@ -473,6 +544,16 @@ def test_list_patches(): assert _patch in patches["my-fake-patch-installed-1234"]["summary"] +def test_list_patches_refresh(): + expected = ["spongebob"] + mock_get_patches = MagicMock(return_value=expected) + patch_get_patches = patch.object(yumpkg, "_get_patches", mock_get_patches) + patch_refresh_db = patch.object(yumpkg, "refresh_db", MagicMock()) + with patch_refresh_db, patch_get_patches: + result = yumpkg.list_patches(refresh=True) + assert result == expected + + def test_latest_version_with_options(): with patch.object(yumpkg, "list_pkgs", MagicMock(return_value={})): @@ -564,6 +645,66 @@ def test_latest_version_with_options(): ) +def test_list_repo_pkgs_attribute_error(): + patch_get_options = patch.object(yumpkg, "_get_options", MagicMock()) + mock_run = MagicMock(return_value="3.4.5") + patch_run = patch.dict(yumpkg.__salt__, {"cmd.run": mock_run}) + mock_yum = MagicMock(return_value={"retcode": 0, "stdout": ""}) + patch_yum = patch.object(yumpkg, "_call_yum", mock_yum) + with patch_get_options, patch_run, patch_yum: + assert yumpkg.list_repo_pkgs(fromrepo=1, disablerepo=2, enablerepo=3) == {} + + +def test_list_repo_pkgs_byrepo(list_repos_var): + patch_get_options = patch.object(yumpkg, "_get_options", MagicMock()) + stdout_installed = """\ +Installed Packages +spongebob.x86_64 1.1.el9_1 @bikini-bottom-rpms +squarepants.x86_64 1.2.el9_1 @bikini-bottom-rpms +patrick.noarch 1.3.el9_1 @rock-bottom-rpms +squidward.x86_64 1.4.el9_1 @rock-bottom-rpms""" + stdout_available = """\ +Available Packages +plankton.noarch 2.1-1.el9_2 bikini-bottom-rpms +dennis.x86_64 2.2-2.el9 bikini-bottom-rpms +man-ray.x86_64 2.3-1.el9_2 bikini-bottom-rpms +doodlebob.x86_64 2.4-1.el9_2 bikini-bottom-rpms""" + run_all_side_effect = ( + {"retcode": 0, "stdout": stdout_installed}, + {"retcode": 0, "stdout": stdout_available}, + ) + patch_salt = patch.dict( + yumpkg.__salt__, + { + "cmd.run": MagicMock(return_value="3.4.5"), + "cmd.run_all": 
MagicMock(side_effect=run_all_side_effect), + "config.get": MagicMock(return_value=False), + }, + ) + patch_list_repos = patch.object( + yumpkg, + "list_repos", + MagicMock(return_value=list_repos_var), + ) + with patch_get_options, patch_salt, patch_list_repos: + expected = { + "bikini-bottom-rpms": { + "dennis": ["2.2-2.el9"], + "doodlebob": ["2.4-1.el9_2"], + "man-ray": ["2.3-1.el9_2"], + "plankton": ["2.1-1.el9_2"], + "spongebob": ["1.1.el9_1"], + "squarepants": ["1.2.el9_1"], + }, + "rock-bottom-rpms": { + "patrick": ["1.3.el9_1"], + "squidward": ["1.4.el9_1"], + }, + } + result = yumpkg.list_repo_pkgs(byrepo=True) + assert result == expected + + def test_list_repo_pkgs_with_options(list_repos_var): """ Test list_repo_pkgs with and without fromrepo @@ -767,6 +908,87 @@ def test_list_upgrades_dnf(): ) +def test_list_upgrades_refresh(): + mock_call_yum = MagicMock(return_value={"retcode": 0, "stdout": ""}) + with patch.object(yumpkg, "refresh_db", MagicMock()): + with patch.object(yumpkg, "_call_yum", mock_call_yum): + assert yumpkg.list_upgrades(refresh=True) == {} + + +def test_list_upgrades_error(): + mock_return = {"retcode": 1, "Error:": "Error"} + mock_call_yum = MagicMock(return_value=mock_return) + with patch.object(yumpkg, "_call_yum", mock_call_yum): + assert yumpkg.list_upgrades(refresh=False) == {} + + +def test_list_downloaded(): + mock_walk = MagicMock( + return_value=[ + ( + "/var/cache/yum", + [], + ["pkg1-3.1-16.1.x86_64.rpm", "pkg2-1.2-13.2.x86_64.rpm"], + ) + ] + ) + mock_pkginfo = MagicMock( + side_effect=[ + { + "name": "pkg1", + "version": "3.1", + }, + { + "name": "pkg2", + "version": "1.2", + }, + ] + ) + mock_getctime = MagicMock(return_value=1696536082.861206) + mock_getsize = MagicMock(return_value=75701688) + with patch.dict(yumpkg.__salt__, {"lowpkg.bin_pkg_info": mock_pkginfo}), patch( + "salt.utils.path.os_walk", mock_walk + ), patch("os.path.getctime", mock_getctime), patch("os.path.getsize", mock_getsize): + result = 
yumpkg.list_downloaded() + expected = { + "pkg1": { + "3.1": { + "creation_date_time": "2023-10-05T14:01:22", + "creation_date_time_t": 1696536082, + "path": "/var/cache/yum/pkg1-3.1-16.1.x86_64.rpm", + "size": 75701688, + }, + }, + "pkg2": { + "1.2": { + "creation_date_time": "2023-10-05T14:01:22", + "creation_date_time_t": 1696536082, + "path": "/var/cache/yum/pkg2-1.2-13.2.x86_64.rpm", + "size": 75701688, + }, + }, + } + assert ( + result["pkg1"]["3.1"]["creation_date_time_t"] + == expected["pkg1"]["3.1"]["creation_date_time_t"] + ) + assert result["pkg1"]["3.1"]["path"] == expected["pkg1"]["3.1"]["path"] + assert result["pkg1"]["3.1"]["size"] == expected["pkg1"]["3.1"]["size"] + assert ( + result["pkg2"]["1.2"]["creation_date_time_t"] + == expected["pkg2"]["1.2"]["creation_date_time_t"] + ) + assert result["pkg2"]["1.2"]["path"] == expected["pkg2"]["1.2"]["path"] + assert result["pkg2"]["1.2"]["size"] == expected["pkg2"]["1.2"]["size"] + + +def test_list_installed_patches(): + mock_get_patches = MagicMock(return_value="spongebob") + with patch.object(yumpkg, "_get_patches", mock_get_patches): + result = yumpkg.list_installed_patches() + assert result == "spongebob" + + def test_list_upgrades_yum(): """ The subcommand should be "updates" with yum @@ -820,6 +1042,213 @@ def test_list_upgrades_yum(): ) +def test_modified(): + mock = MagicMock() + with patch.dict(yumpkg.__salt__, {"lowpkg.modified": mock}): + yumpkg.modified("spongebob", "squarepants") + mock.assert_called_once_with("spongebob", "squarepants") + + +def test_clean_metadata_with_options(): + + with patch("salt.utils.pkg.clear_rtag", Mock()): + + # With check_update=True we will do a cmd.run to run the clean_cmd, and + # then a separate cmd.retcode to check for updates. 
+ + # with fromrepo + yum_call = MagicMock() + with patch.dict( + yumpkg.__salt__, + {"cmd.run_all": yum_call, "config.get": MagicMock(return_value=False)}, + ): + yumpkg.clean_metadata(check_update=True, fromrepo="good", branch="foo") + + assert yum_call.call_count == 2 + yum_call.assert_any_call( + [ + "yum", + "--quiet", + "--assumeyes", + "clean", + "expire-cache", + "--disablerepo=*", + "--enablerepo=good", + "--branch=foo", + ], + env={}, + ignore_retcode=True, + output_loglevel="trace", + python_shell=False, + ) + yum_call.assert_any_call( + [ + "yum", + "--quiet", + "--assumeyes", + "check-update", + "--setopt=autocheck_running_kernel=false", + "--disablerepo=*", + "--enablerepo=good", + "--branch=foo", + ], + output_loglevel="trace", + env={}, + ignore_retcode=True, + python_shell=False, + ) + + +def test_del_repo_error(): + basedir = "/mr/krabs" + ret_dict = { + "spongebob": {"file": "/square/pants"}, + "patrick": {"file": "/squid/ward"}, + } + mock_list = MagicMock(return_value=ret_dict) + patch_list = patch.object(yumpkg, "list_repos", mock_list) + with patch_list: + result = yumpkg.del_repo("plankton", basedir=basedir) + expected = "Error: the plankton repo does not exist in ['/mr/krabs']" + assert result == expected + + result = yumpkg.del_repo("copr:plankton/karen", basedir=basedir) + expected = "Error: the copr:copr.fedorainfracloud.org:plankton:karen repo does not exist in ['/mr/krabs']" + assert result == expected + + +def test_del_repo_single_file(): + basedir = "/mr/krabs" + ret_dict = { + "spongebob": {"file": "/square/pants"}, + "patrick": {"file": "/squid/ward"}, + } + mock_list = MagicMock(return_value=ret_dict) + patch_list = patch.object(yumpkg, "list_repos", mock_list) + with patch_list, patch("os.remove"): + result = yumpkg.del_repo("spongebob", basedir=basedir) + expected = "File /square/pants containing repo spongebob has been removed" + assert result == expected + + +def test_download_error_no_packages(): + patch_which = 
patch("salt.utils.path.which", MagicMock(return_value="path.exe")) + with patch_which, pytest.raises(SaltInvocationError): + yumpkg.download() + + +def test_download(): + patch_which = patch("salt.utils.path.which", MagicMock(return_value="path.exe")) + patch_exists = patch("os.path.exists", MagicMock(return_value=False)) + patch_makedirs = patch("os.makedirs") + mock_listdir = MagicMock(side_effect=([], ["spongebob-1.2.rpm"])) + patch_listdir = patch("os.listdir", mock_listdir) + mock_run = MagicMock() + dict_salt = { + "cmd.run": mock_run, + } + patch_salt = patch.dict(yumpkg.__salt__, dict_salt) + with patch_which, patch_exists, patch_makedirs, patch_listdir, patch_salt: + result = yumpkg.download("spongebob") + cmd = ["yumdownloader", "-q", "--destdir=/var/cache/yum/packages", "spongebob"] + mock_run.assert_called_once_with( + cmd, output_loglevel="trace", python_shell=False + ) + expected = {"spongebob": "/var/cache/yum/packages/spongebob-1.2.rpm"} + assert result == expected + + +def test_download_failed(): + patch_which = patch("salt.utils.path.which", MagicMock(return_value="path.exe")) + patch_exists = patch("os.path.exists", MagicMock(return_value=True)) + mock_listdir = MagicMock(return_value=["spongebob-1.2.rpm", "junk.txt"]) + patch_listdir = patch("os.listdir", mock_listdir) + patch_unlink = patch("os.unlink") + mock_run = MagicMock() + dict_salt = { + "cmd.run": mock_run, + } + patch_salt = patch.dict(yumpkg.__salt__, dict_salt) + with patch_which, patch_exists, patch_listdir, patch_unlink, patch_salt: + result = yumpkg.download("spongebob", "patrick") + cmd = [ + "yumdownloader", + "-q", + "--destdir=/var/cache/yum/packages", + "spongebob", + "patrick", + ] + mock_run.assert_called_once_with( + cmd, output_loglevel="trace", python_shell=False + ) + expected = { + "_error": "The following package(s) failed to download: patrick", + "spongebob": "/var/cache/yum/packages/spongebob-1.2.rpm", + } + assert result == expected + + +def 
test_download_missing_yumdownloader(): + patch_which = patch("salt.utils.path.which", MagicMock(return_value=None)) + with patch_which, pytest.raises(CommandExecutionError): + yumpkg.download("spongebob") + + +def test_download_to_purge(): + patch_which = patch("salt.utils.path.which", MagicMock(return_value="path.exe")) + patch_exists = patch("os.path.exists", MagicMock(return_value=True)) + mock_listdir = MagicMock(return_value=["spongebob-1.2.rpm", "junk.txt"]) + patch_listdir = patch("os.listdir", mock_listdir) + patch_unlink = patch("os.unlink") + mock_run = MagicMock() + dict_salt = { + "cmd.run": mock_run, + } + patch_salt = patch.dict(yumpkg.__salt__, dict_salt) + with patch_which, patch_exists, patch_listdir, patch_unlink, patch_salt: + result = yumpkg.download("spongebob") + cmd = ["yumdownloader", "-q", "--destdir=/var/cache/yum/packages", "spongebob"] + mock_run.assert_called_once_with( + cmd, output_loglevel="trace", python_shell=False + ) + expected = {"spongebob": "/var/cache/yum/packages/spongebob-1.2.rpm"} + assert result == expected + + +def test_download_unlink_error(): + patch_which = patch("salt.utils.path.which", MagicMock(return_value="path.exe")) + patch_exists = patch("os.path.exists", MagicMock(return_value=True)) + se_listdir = ( + ["spongebob-1.2.rpm", "junk.txt"], + ["spongebob1.2.rpm", "junk.txt"], + ) + mock_listdir = MagicMock(side_effect=se_listdir) + patch_listdir = patch("os.listdir", mock_listdir) + patch_unlink = patch("os.unlink", MagicMock(side_effect=OSError)) + mock_run = MagicMock() + dict_salt = { + "cmd.run": mock_run, + } + patch_salt = patch.dict(yumpkg.__salt__, dict_salt) + with patch_which, patch_exists, patch_listdir, patch_unlink, patch_salt: + with pytest.raises(CommandExecutionError): + yumpkg.download("spongebob") + + +def test_file_dict(): + mock = MagicMock() + with patch.dict(yumpkg.__salt__, {"lowpkg.file_dict": mock}): + yumpkg.file_dict("spongebob", "squarepants") + 
mock.assert_called_once_with("spongebob", "squarepants") + + +def test_file_list(): + mock = MagicMock() + with patch.dict(yumpkg.__salt__, {"lowpkg.file_list": mock}): + yumpkg.file_list("spongebob", "squarepants") + mock.assert_called_once_with("spongebob", "squarepants") + + def test_refresh_db_with_options(): with patch("salt.utils.pkg.clear_rtag", Mock()): @@ -1045,6 +1474,36 @@ def test_install_with_options(): ) +def test_remove_retcode_error(): + """ + Tests that we throw an error if retcode isn't 0 + """ + name = "foo" + installed = "8:3.8.12-4.n.el7" + list_pkgs_mock = MagicMock( + side_effect=lambda **kwargs: { + name: [installed] if kwargs.get("versions_as_list", False) else installed + } + ) + cmd_mock = MagicMock( + return_value={"pid": 12345, "retcode": 1, "stdout": "", "stderr": "error"} + ) + salt_mock = { + "cmd.run_all": cmd_mock, + "lowpkg.version_cmp": rpm.version_cmp, + "pkg_resource.parse_targets": MagicMock( + return_value=({name: installed}, "repository") + ), + } + with patch.object(yumpkg, "list_pkgs", list_pkgs_mock), patch( + "salt.utils.systemd.has_scope", MagicMock(return_value=False) + ), patch.dict(yumpkg.__salt__, salt_mock), patch.dict( + yumpkg.__grains__, {"os": "CentOS", "osrelease": 7} + ): + with pytest.raises(CommandExecutionError): + yumpkg.remove("spongebob") + + def test_remove_with_epoch(): """ Tests that we properly identify a version containing an epoch for @@ -1233,6 +1692,54 @@ def test_install_with_epoch(): assert call == expected, call +def test_install_minion_error(): + patch_get_options = patch.object(yumpkg, "_get_options", MagicMock()) + patch_salt = patch.dict( + yumpkg.__salt__, + { + "pkg_resource.parse_targets": MagicMock(side_effect=MinionError), + }, + ) + with patch_get_options, patch_salt: + with pytest.raises(CommandExecutionError): + yumpkg.install("spongebob") + + +def test_install_no_pkg_params(): + patch_get_options = patch.object(yumpkg, "_get_options", MagicMock()) + parse_return = ("", "junk") + 
patch_salt = patch.dict( + yumpkg.__salt__, + { + "pkg_resource.parse_targets": MagicMock(return_value=parse_return), + }, + ) + with patch_get_options, patch_salt: + assert yumpkg.install("spongebob") == {} + + +# My dufus attempt... but I gave up +# def test_install_repo_fancy_versions(): +# patch_get_options = patch.object(yumpkg, "_get_options", MagicMock()) +# packages = { +# "spongbob": "1*", +# "squarepants": ">1.2", +# } +# parse_return = (packages, "repository") +# patch_salt = patch.dict( +# yumpkg.__salt__, +# { +# "pkg_resource.parse_targets": MagicMock(return_value=parse_return), +# }, +# ) +# list_pkgs = {"vim": "1.1,1.2", "git": "2.1,2.2"} +# list_pkgs_list = {"vim": ["1.1", "1.2"], "git": ["2.1", "2.2"]} +# mock_list_pkgs = MagicMock(side_effect=(list_pkgs, list_pkgs_list)) +# patch_list_pkgs = patch.object(yumpkg, "list_pkgs", mock_list_pkgs) +# with patch_get_options, patch_salt, patch_list_pkgs: +# assert yumpkg.install("spongebob") == {} + + @pytest.mark.skipif(not salt.utils.platform.is_linux(), reason="Only run on Linux") def test_install_error_reporting(): """ @@ -1271,6 +1778,13 @@ def test_install_error_reporting(): assert exc_info.value.info == expected, exc_info.value.info +def test_remove_error(): + mock_salt = {"pkg_resource.parse_targets": MagicMock(side_effect=MinionError)} + with patch.dict(yumpkg.__salt__, mock_salt): + with pytest.raises(CommandExecutionError): + yumpkg.remove("spongebob") + + def test_remove_not_installed(): """ Tests that no exception raised on removing not installed package @@ -1308,6 +1822,17 @@ def test_remove_not_installed(): cmd_mock.assert_not_called() +def test_upgrade_error(): + patch_yum = patch.object(yumpkg, "_yum", return_value="yum") + patch_get_options = patch.object(yumpkg, "_get_options") + patch_list_pkgs = patch.object(yumpkg, "list_pkgs") + salt_dict = {"pkg_resource.parse_targets": MagicMock(side_effect=MinionError)} + patch_salt = patch.dict(yumpkg.__salt__, salt_dict) + with patch_yum, 
patch_get_options, patch_list_pkgs, patch_salt: + with pytest.raises(CommandExecutionError): + yumpkg.upgrade("spongebob", refresh=False) + + def test_upgrade_with_options(): with patch.object(yumpkg, "list_pkgs", MagicMock(return_value={})), patch( "salt.utils.systemd.has_scope", MagicMock(return_value=False) @@ -1322,6 +1847,7 @@ def test_upgrade_with_options(): exclude="kernel*", branch="foo", setopt="obsoletes=0,plugins=0", + skip_verify=True, ) cmd.assert_called_once_with( [ @@ -1336,6 +1862,7 @@ def test_upgrade_with_options(): "--setopt", "plugins=0", "--exclude=kernel*", + "--nogpgcheck", "upgrade", ], env={}, @@ -1343,6 +1870,19 @@ def test_upgrade_with_options(): python_shell=False, ) + # with fromrepo + cmd = MagicMock(return_value={"retcode": 1}) + with patch.dict(yumpkg.__salt__, {"cmd.run_all": cmd}): + with pytest.raises(CommandExecutionError): + yumpkg.upgrade( + refresh=False, + fromrepo="good", + exclude="kernel*", + branch="foo", + setopt="obsoletes=0,plugins=0", + skip_verify=True, + ) + # without fromrepo cmd = MagicMock(return_value={"retcode": 0}) with patch.dict(yumpkg.__salt__, {"cmd.run_all": cmd}): @@ -1375,6 +1915,64 @@ def test_upgrade_with_options(): ) +def test_upgrade_available(): + mock_return = MagicMock(return_value="non-empty value") + patch_latest_version = patch.object(yumpkg, "latest_version", mock_return) + with patch_latest_version: + assert yumpkg.upgrade_available("foo") is True + + +def test_verify_args(): + mock_verify = MagicMock() + with patch.dict(yumpkg.__salt__, {"lowpkg.verify": mock_verify}): + yumpkg.verify("spongebob") + mock_verify.assert_called_once_with("spongebob") + + +def test_verify_kwargs(): + mock_verify = MagicMock() + with patch.dict(yumpkg.__salt__, {"lowpkg.verify": mock_verify}): + yumpkg.verify(spongebob="squarepants") + mock_verify.assert_called_once_with(spongebob="squarepants") + + +def test_purge_not_installed(): + """ + Tests that no exception raised on purging not installed package + """ + 
name = "foo" + list_pkgs_mock = MagicMock(return_value={}) + cmd_mock = MagicMock( + return_value={"pid": 12345, "retcode": 0, "stdout": "", "stderr": ""} + ) + salt_mock = { + "cmd.run_all": cmd_mock, + "lowpkg.version_cmp": rpm.version_cmp, + "pkg_resource.parse_targets": MagicMock( + return_value=({name: None}, "repository") + ), + } + with patch.object(yumpkg, "list_pkgs", list_pkgs_mock), patch( + "salt.utils.systemd.has_scope", MagicMock(return_value=False) + ), patch.dict(yumpkg.__salt__, salt_mock): + + # Test yum + with patch.dict(yumpkg.__context__, {"yum_bin": "yum"}), patch.dict( + yumpkg.__grains__, {"os": "CentOS", "osrelease": 7} + ): + yumpkg.purge(name) + cmd_mock.assert_not_called() + + # Test dnf + yumpkg.__context__.pop("yum_bin") + cmd_mock.reset_mock() + with patch.dict(yumpkg.__context__, {"yum_bin": "dnf"}), patch.dict( + yumpkg.__grains__, {"os": "Fedora", "osrelease": 27} + ): + yumpkg.purge(name) + cmd_mock.assert_not_called() + + def test_info_installed_with_all_versions(): """ Test the return information of all versions for the named package(s), installed on the system. 
@@ -1530,6 +2128,260 @@ def test_pkg_hold_tdnf(): yumpkg.hold("foo") +def test_hold_empty(): + """ + Tests that we raise a SaltInvocationError if nothing is passed + """ + with patch.object(yumpkg, "_check_versionlock", MagicMock()): + with pytest.raises(SaltInvocationError): + yumpkg.hold() + + +def test_hold_pkgs_and_sources_error(): + """ + Tests that we raise a SaltInvocationError if both pkgs and sources is passed + """ + with patch.object(yumpkg, "_check_versionlock", MagicMock()): + with pytest.raises(SaltInvocationError): + yumpkg.hold(pkgs=["foo", "bar"], sources=["src1", "src2"]) + + +def test_hold_pkgs_sources(): + patch_versionlock = patch.object(yumpkg, "_check_versionlock", MagicMock()) + patch_list_holds = patch.object(yumpkg, "list_holds", MagicMock()) + mock_call_yum = MagicMock(return_value={"retcode": 0}) + patch_call_yum = patch.object(yumpkg, "_call_yum", mock_call_yum) + patch_opts = patch.dict(yumpkg.__opts__, {"test": False}) + expected = { + "foo": { + "name": "foo", + "changes": { + "new": "hold", + "old": "", + }, + "result": True, + "comment": "Package foo is now being held.", + }, + "bar": { + "name": "bar", + "changes": { + "new": "hold", + "old": "", + }, + "result": True, + "comment": "Package bar is now being held.", + }, + } + sources = [{"foo": "salt://foo.rpm"}, {"bar": "salt://bar.rpm"}] + pkgs = ["foo", "bar"] + with patch_versionlock, patch_list_holds, patch_call_yum, patch_opts: + result = yumpkg.hold(sources=sources) + assert result == expected + with patch_versionlock, patch_list_holds, patch_call_yum, patch_opts: + result = yumpkg.hold(pkgs=pkgs) + assert result == expected + + +def test_hold_test_true(): + patch_versionlock = patch.object(yumpkg, "_check_versionlock", MagicMock()) + patch_list_holds = patch.object(yumpkg, "list_holds", MagicMock()) + mock_call_yum = MagicMock(return_value={"retcode": 0}) + patch_call_yum = patch.object(yumpkg, "_call_yum", mock_call_yum) + patch_opts = patch.dict(yumpkg.__opts__, {"test": 
True}) + with patch_versionlock, patch_list_holds, patch_call_yum, patch_opts: + result = yumpkg.hold(name="foo") + expected = { + "foo": { + "name": "foo", + "changes": {}, + "result": None, + "comment": "Package foo is set to be held.", + }, + } + assert result == expected + + +def test_hold_fails(): + patch_versionlock = patch.object(yumpkg, "_check_versionlock", MagicMock()) + patch_list_holds = patch.object(yumpkg, "list_holds", MagicMock()) + mock_call_yum = MagicMock(return_value={"retcode": 1}) + patch_call_yum = patch.object(yumpkg, "_call_yum", mock_call_yum) + patch_opts = patch.dict(yumpkg.__opts__, {"test": False}) + with patch_versionlock, patch_list_holds, patch_call_yum, patch_opts: + result = yumpkg.hold(name="foo") + expected = { + "foo": { + "name": "foo", + "changes": {}, + "result": False, + "comment": "Package foo was unable to be held.", + }, + } + assert result == expected + + +def test_hold_already_held(): + patch_versionlock = patch.object(yumpkg, "_check_versionlock", MagicMock()) + mock_list_holds = MagicMock(return_value=["foo"]) + patch_list_holds = patch.object(yumpkg, "list_holds", mock_list_holds) + with patch_versionlock, patch_list_holds: + result = yumpkg.hold(name="foo") + expected = { + "foo": { + "name": "foo", + "changes": {}, + "result": True, + "comment": "Package foo is already set to be held.", + }, + } + assert result == expected + + +def test_unhold_empty(): + """ + Tests that we raise a SaltInvocationError if nothing is passed + """ + with patch.object(yumpkg, "_check_versionlock", MagicMock()): + with pytest.raises(SaltInvocationError): + yumpkg.unhold() + + +def test_unhold_pkgs_and_sources_error(): + """ + Tests that we raise a SaltInvocationError if both pkgs and sources is passed + """ + with patch.object(yumpkg, "_check_versionlock", MagicMock()): + with pytest.raises(SaltInvocationError): + yumpkg.unhold(pkgs=["foo", "bar"], sources=["src1", "src2"]) + + +def test_unhold_pkgs_sources(): + patch_versionlock = 
patch.object(yumpkg, "_check_versionlock", MagicMock()) + mock_list_holds = MagicMock(return_value=["foo", "bar"]) + patch_list_holds = patch.object(yumpkg, "list_holds", mock_list_holds) + mock_call_yum = MagicMock(return_value={"retcode": 0}) + patch_call_yum = patch.object(yumpkg, "_call_yum", mock_call_yum) + patch_opts = patch.dict(yumpkg.__opts__, {"test": False}) + patch_yum = patch.object(yumpkg, "_yum", MagicMock(return_value="dnf")) + expected = { + "foo": { + "name": "foo", + "changes": { + "new": "", + "old": "hold", + }, + "result": True, + "comment": "Package foo is no longer held.", + }, + "bar": { + "name": "bar", + "changes": { + "new": "", + "old": "hold", + }, + "result": True, + "comment": "Package bar is no longer held.", + }, + } + sources = [{"foo": "salt://foo.rpm"}, {"bar": "salt://bar.rpm"}] + pkgs = ["foo", "bar"] + with patch_versionlock, patch_list_holds, patch_call_yum, patch_opts, patch_yum: + result = yumpkg.unhold(sources=sources) + assert result == expected + + with patch_versionlock, patch_list_holds, patch_call_yum, patch_opts, patch_yum: + result = yumpkg.unhold(pkgs=pkgs) + assert result == expected + + +def test_unhold_test_true(): + patch_versionlock = patch.object(yumpkg, "_check_versionlock", MagicMock()) + mock_list_holds = MagicMock(return_value=["foo"]) + patch_list_holds = patch.object(yumpkg, "list_holds", mock_list_holds) + patch_opts = patch.dict(yumpkg.__opts__, {"test": True}) + patch_yum = patch.object(yumpkg, "_yum", MagicMock(return_value="dnf")) + with patch_versionlock, patch_list_holds, patch_opts, patch_yum: + result = yumpkg.unhold(name="foo") + expected = { + "foo": { + "name": "foo", + "changes": {}, + "result": None, + "comment": "Package foo is set to be unheld.", + }, + } + assert result == expected + + +def test_unhold_fails(): + patch_versionlock = patch.object(yumpkg, "_check_versionlock", MagicMock()) + mock_list_holds = MagicMock(return_value=["foo"]) + patch_list_holds = patch.object(yumpkg, 
"list_holds", mock_list_holds) + mock_call_yum = MagicMock(return_value={"retcode": 1}) + patch_call_yum = patch.object(yumpkg, "_call_yum", mock_call_yum) + patch_opts = patch.dict(yumpkg.__opts__, {"test": False}) + patch_yum = patch.object(yumpkg, "_yum", MagicMock(return_value="dnf")) + with patch_versionlock, patch_list_holds, patch_call_yum, patch_opts, patch_yum: + result = yumpkg.unhold(name="foo") + expected = { + "foo": { + "name": "foo", + "changes": {}, + "result": False, + "comment": "Package foo was unable to be unheld.", + }, + } + assert result == expected + + +def test_unhold_already_unheld(): + patch_versionlock = patch.object(yumpkg, "_check_versionlock", MagicMock()) + mock_list_holds = MagicMock(return_value=[]) + patch_list_holds = patch.object(yumpkg, "list_holds", mock_list_holds) + with patch_versionlock, patch_list_holds: + result = yumpkg.unhold(name="foo") + expected = { + "foo": { + "name": "foo", + "changes": {}, + "result": True, + "comment": "Package foo is not being held.", + }, + } + assert result == expected + + +def test_owner_empty(): + assert yumpkg.owner() == "" + + +def test_owner_not_owned(): + mock_stdout = MagicMock(return_value="not owned") + expected = { + "/fake/path1": "", + "/fake/path2": "", + } + with patch.dict(yumpkg.__salt__, {"cmd.run_stdout": mock_stdout}): + result = yumpkg.owner(*expected.keys()) + assert result == expected + + +def test_owner_not_owned_single(): + mock_stdout = MagicMock(return_value="not owned") + with patch.dict(yumpkg.__salt__, {"cmd.run_stdout": mock_stdout}): + result = yumpkg.owner("/fake/path") + assert result == "" + + +def test_parse_repo_file_error(): + mock_read = MagicMock( + side_effect=configparser.MissingSectionHeaderError("spongebob", 101, "test2") + ) + with patch.object(configparser.ConfigParser, "read", mock_read): + result = yumpkg._parse_repo_file("spongebob") + assert result == ("", {}) + + def test_pkg_hold_dnf(): """ Tests that we properly identify versionlock plugin 
when using dnf @@ -1611,14 +2463,84 @@ def test_pkg_hold_dnf(): ) -@pytest.mark.skipif(not yumpkg.HAS_YUM, reason="Could not import yum") -def test_yum_base_error(): - with patch("yum.YumBase") as mock_yum_yumbase: - mock_yum_yumbase.side_effect = CommandExecutionError +def test_get_yum_config_no_config(): + with patch("os.path.exists", MagicMock(return_value=False)): with pytest.raises(CommandExecutionError): yumpkg._get_yum_config() +def test_get_yum_config(grains): + os_family = grains["os_family"] + if os_family in ("Arch", "Debian", "Suse"): + pytest.skip(f"{os_family} does not have yum.conf") + setting = "cache_dir" + if os_family == "RedHat": + # This one seems to be in all of them... + # If this ever breaks in the future, we'll need to get more specific + # than os_family + setting = "installonly_limit" + result = yumpkg._get_yum_config() + assert setting in result + + +def test_get_yum_config_value_none(grains): + os_family = grains["os_family"] + if os_family in ("Arch", "Debian", "Suse"): + pytest.skip(f"{os_family} does not have yum.conf") + result = yumpkg._get_yum_config_value("spongebob") + assert result is None + + +def test_get_yum_config_unreadable(): + with patch.object( + configparser.ConfigParser, "read", MagicMock(side_effect=OSError) + ): + with pytest.raises(CommandExecutionError): + yumpkg._get_yum_config() + + +def test_get_yum_config_no_main(caplog): + mock_false = MagicMock(return_value=False) + with patch.object(configparser.ConfigParser, "read"), patch.object( + configparser.ConfigParser, "has_section", mock_false + ), patch("os.path.exists", MagicMock(return_value=True)): + yumpkg._get_yum_config() + assert "Could not find [main] section" in caplog.text + + +def test_normalize_basedir_str(): + basedir = "/etc/yum/yum.conf,/etc/yum.conf" + result = yumpkg._normalize_basedir(basedir) + assert result == ["/etc/yum/yum.conf", "/etc/yum.conf"] + + +def test_normalize_basedir_error(): + basedir = 1 + with pytest.raises(SaltInvocationError): 
+ yumpkg._normalize_basedir(basedir) + + +def test_normalize_name_noarch(): + assert yumpkg.normalize_name("zsh.noarch") == "zsh" + + +def test_latest_version_no_names(): + assert yumpkg.latest_version() == "" + + +def test_latest_version_nonzero_retcode(): + yum_ret = {"retcode": 1, "stderr": "some error"} + mock_call_yum = MagicMock(return_value=yum_ret) + patch_call_yum = patch.object(yumpkg, "_call_yum", mock_call_yum) + list_pkgs_ret = {"foo": "1.1", "bar": "2.2"} + mock_list_pkgs = MagicMock(return_value=list_pkgs_ret) + patch_list_pkgs = patch.object(yumpkg, "list_pkgs", mock_list_pkgs) + patch_get_options = patch.object(yumpkg, "_get_options", MagicMock()) + patch_refresh_db = patch.object(yumpkg, "refresh_db", MagicMock()) + with patch_list_pkgs, patch_call_yum, patch_get_options, patch_refresh_db: + assert yumpkg.latest_version("foo", "bar") == {"foo": "", "bar": ""} + + def test_group_info(): """ Test yumpkg.group_info parsing @@ -1860,6 +2782,180 @@ def test_group_info(): assert info == expected +def test_group_install(): + group_info = ( + { + "default": ["spongebob", "gary", "patrick"], + "mandatory": ["spongebob", "gary"], + }, + { + "default": ["mr_krabs", "pearl_krabs", "plankton"], + "mandatory": ["mr_krabs", "pearl_krabs"], + }, + ) + mock_info = MagicMock(side_effect=group_info) + patch_info = patch.object(yumpkg, "group_info", mock_info) + mock_list_pkgs = MagicMock(return_value=[]) + patch_list_pkgs = patch.object(yumpkg, "list_pkgs", mock_list_pkgs) + patch_install = patch.object(yumpkg, "install", MagicMock()) + expected = [ + "mr_krabs", + "gary", + "pearl_krabs", + "plankton", + "spongebob", + "patrick", + ] + with patch_info, patch_list_pkgs, patch_install: + yumpkg.group_install("spongebob,mr_krabs") + _, kwargs = yumpkg.install.call_args + assert kwargs["pkgs"].sort() == expected.sort() + + +def test_group_install_include(): + group_info = ( + { + "default": ["spongebob", "gary", "patrick"], + "mandatory": ["spongebob", "gary"], + }, + 
{ + "default": ["mr_krabs", "pearl_krabs", "plankton"], + "mandatory": ["mr_krabs", "pearl_krabs"], + }, + ) + mock_info = MagicMock(side_effect=group_info) + patch_info = patch.object(yumpkg, "group_info", mock_info) + mock_list_pkgs = MagicMock(return_value=[]) + patch_list_pkgs = patch.object(yumpkg, "list_pkgs", mock_list_pkgs) + patch_install = patch.object(yumpkg, "install", MagicMock()) + expected = [ + "mr_krabs", + "gary", + "pearl_krabs", + "plankton", + "spongebob", + "patrick", + ] + with patch_info, patch_list_pkgs, patch_install: + yumpkg.group_install("spongebob,mr_krabs", include="napoleon") + _, kwargs = yumpkg.install.call_args + expected.append("napoleon") + assert kwargs["pkgs"].sort() == expected.sort() + + +def test_group_install_skip(): + group_info = ( + { + "default": ["spongebob", "gary", "patrick"], + "mandatory": ["spongebob", "gary"], + }, + { + "default": ["mr_krabs", "pearl_krabs", "plankton"], + "mandatory": ["mr_krabs", "pearl_krabs"], + }, + ) + mock_info = MagicMock(side_effect=group_info) + patch_info = patch.object(yumpkg, "group_info", mock_info) + mock_list_pkgs = MagicMock(return_value=[]) + patch_list_pkgs = patch.object(yumpkg, "list_pkgs", mock_list_pkgs) + patch_install = patch.object(yumpkg, "install", MagicMock()) + expected = [ + "mr_krabs", + "gary", + "pearl_krabs", + "spongebob", + "patrick", + ] + with patch_info, patch_list_pkgs, patch_install: + yumpkg.group_install("spongebob,mr_krabs", skip="plankton") + _, kwargs = yumpkg.install.call_args + assert kwargs["pkgs"].sort() == expected.sort() + + +def test_group_install_already_present(): + group_info = ( + { + "default": ["spongebob", "gary", "patrick"], + "mandatory": ["spongebob", "gary"], + }, + { + "default": ["mr_krabs", "pearl_krabs", "plankton"], + "mandatory": ["mr_krabs", "pearl_krabs"], + }, + ) + mock_info = MagicMock(side_effect=group_info) + patch_info = patch.object(yumpkg, "group_info", mock_info) + patch_install = patch.object(yumpkg, "install", 
MagicMock()) + expected = [ + "mr_krabs", + "gary", + "pearl_krabs", + "plankton", + "spongebob", + "patrick", + ] + mock_list_pkgs = MagicMock(return_value=expected) + patch_list_pkgs = patch.object(yumpkg, "list_pkgs", mock_list_pkgs) + with patch_info, patch_list_pkgs, patch_install: + assert yumpkg.group_install("spongebob,mr_krabs") == {} + + +def test_group_install_no_groups(): + with pytest.raises(SaltInvocationError): + yumpkg.group_install(None) + + +def test_group_install_non_list_groups(): + with pytest.raises(SaltInvocationError): + yumpkg.group_install(1) + + +def test_group_install_non_list_skip(): + with pytest.raises(SaltInvocationError): + yumpkg.group_install(name="string", skip=1) + + +def test_group_install_non_list_include(): + with pytest.raises(SaltInvocationError): + yumpkg.group_install(name="string", include=1) + + +def test_group_list(): + mock_out = MagicMock( + return_value="""\ +Available Environment Groups: + Spongebob + Squarepants +Installed Environment Groups: + Patrick +Installed Groups: + Squidward + Sandy +Available Groups: + Mr. Krabs + Plankton +Available Language Groups: + Gary the Snail [sb]\ + """ + ) + patch_grplist = patch.dict(yumpkg.__salt__, {"cmd.run_stdout": mock_out}) + with patch_grplist: + result = yumpkg.group_list() + expected = { + "installed": ["Squidward", "Sandy"], + "available": ["Mr. 
Krabs", "Plankton"], + "installed environments": ["Patrick"], + "available environments": ["Spongebob", "Squarepants"], + "available languages": { + "Gary the Snail [sb]": { + "language": "sb", + "name": "Gary the Snail", + }, + }, + } + assert result == expected + + def test_get_repo_with_existent_repo(list_repos_var): """ Test get_repo with an existent repository @@ -2068,6 +3164,15 @@ def test_services_need_restart_requires_dnf(): pytest.raises(CommandExecutionError, yumpkg.services_need_restart) +def test_services_need_restart_no_dnf_output(): + patch_yum = patch("salt.modules.yumpkg._yum", Mock(return_value="dnf")) + patch_booted = patch("salt.utils.systemd.booted", Mock(return_value=True)) + mock_run_stdout = MagicMock(return_value="") + patch_run_stdout = patch.dict(yumpkg.__salt__, {"cmd.run_stdout": mock_run_stdout}) + with patch_yum, patch_booted, patch_run_stdout: + assert yumpkg.services_need_restart() == [] + + def test_61003_pkg_should_not_fail_when_target_not_in_old_pkgs(): patch_list_pkgs = patch( "salt.modules.yumpkg.list_pkgs", return_value={}, autospec=True diff --git a/tests/pytests/unit/modules/win_lgpo/test__policy_info.py b/tests/pytests/unit/modules/win_lgpo/test__policy_info.py new file mode 100644 index 00000000000..0b9e25ee4d5 --- /dev/null +++ b/tests/pytests/unit/modules/win_lgpo/test__policy_info.py @@ -0,0 +1,410 @@ +import socket + +import pytest + +import salt.modules.cmdmod +import salt.modules.win_file +import salt.modules.win_lgpo as win_lgpo +from salt.exceptions import CommandExecutionError +from tests.support.mock import patch + +try: + import win32security as ws + + HAS_WIN32 = True +except ImportError: + HAS_WIN32 = False + +pytestmark = [ + pytest.mark.windows_whitelisted, + pytest.mark.skip_unless_on_windows, + pytest.mark.slow_test, + pytest.mark.skipif(not HAS_WIN32, reason="Failed to import win32security"), +] + + +@pytest.fixture +def configure_loader_modules(): + return { + win_lgpo: { + "__salt__": { + "cmd.run": 
salt.modules.cmdmod.run, + "file.file_exists": salt.modules.win_file.file_exists, + "file.remove": salt.modules.win_file.remove, + }, + }, + } + + +@pytest.fixture(scope="module") +def pol_info(): + return win_lgpo._policy_info() + + +@pytest.mark.parametrize( + "val, expected", + ( + (0, False), + (1, True), + ("", False), + ("text", True), + ([], False), + ([1, 2, 3], True), + ), +) +def test_notEmpty(pol_info, val, expected): + assert pol_info._notEmpty(val) is expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, "Not Defined"), + (0, 0), + (86400, 1), + ), +) +def test_seconds_to_days(pol_info, val, expected): + assert pol_info._seconds_to_days(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, "Not Defined"), + (0, 0), + (1, 86400), + ), +) +def test_days_to_seconds(pol_info, val, expected): + assert pol_info._days_to_seconds(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, "Not Defined"), + (0, 0), + (60, 1), + ), +) +def test_seconds_to_minutes(pol_info, val, expected): + assert pol_info._seconds_to_minutes(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, "Not Defined"), + (0, 0), + (1, 60), + ), +) +def test_minutes_to_seconds(pol_info, val, expected): + assert pol_info._minutes_to_seconds(val) == expected + + +def test_strip_quotes(pol_info): + assert pol_info._strip_quotes('"spongebob"') == "spongebob" + + +def test_add_quotes(pol_info): + assert pol_info._add_quotes("squarepants") == '"squarepants"' + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, "Not Defined"), + (chr(0), "Disabled"), + (chr(1), "Enabled"), + (chr(2), "Invalid Value: {!r}".format(chr(2))), + ("patrick", "Invalid Value"), + ), +) +def test_binary_enable_zero_disable_one_conversion(pol_info, val, expected): + assert pol_info._binary_enable_zero_disable_one_conversion(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, None), + 
("Disabled", chr(0)), + ("Enabled", chr(1)), + ("Junk", None), + ), +) +def test_binary_enable_zero_disable_one_reverse_conversion(pol_info, val, expected): + assert pol_info._binary_enable_zero_disable_one_reverse_conversion(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, "Not Defined"), + ("0", "Administrators"), + (0, "Administrators"), + ("", "Administrators"), + ("1", "Administrators and Power Users"), + (1, "Administrators and Power Users"), + ("2", "Administrators and Interactive Users"), + (2, "Administrators and Interactive Users"), + (3, "Not Defined"), + ), +) +def test_dasd_conversion(pol_info, val, expected): + assert pol_info._dasd_conversion(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, "Not Defined"), + ("Administrators", "0"), + ("Administrators and Power Users", "1"), + ("Administrators and Interactive Users", "2"), + ("Not Defined", "9999"), + ("Plankton", "Invalid Value"), + ), +) +def test_dasd_reverse_conversion(pol_info, val, expected): + assert pol_info._dasd_reverse_conversion(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + ("Not Defined", True), + (None, False), + (1, True), + (3, False), + ("spongebob", False), + ), +) +def test_in_range_inclusive(pol_info, val, expected): + assert pol_info._in_range_inclusive(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, "Not Defined"), + ("3,1,2", "Not Defined"), + ("3,0", "Silently Succeed"), + ("3,1", "Warn but allow installation"), + ("3,2", "Do not allow installation"), + ("3,Not Defined", "Not Defined"), + ("3,spongebob", "Invalid Value"), + ), +) +def test_driver_signing_reg_conversion(pol_info, val, expected): + assert pol_info._driver_signing_reg_conversion(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, "Not Defined"), + ("Silently Succeed", "3,0"), + ("Warn but allow installation", f"3,{chr(1)}"), + ("Do not allow installation", 
f"3,{chr(2)}"), + ("spongebob", "Invalid Value"), + ), +) +def test_driver_signing_reg_reverse_conversion(pol_info, val, expected): + assert pol_info._driver_signing_reg_reverse_conversion(val) == expected + + +# For the next 3 tests we can't use the parametrized decorator because the +# decorator is evaluated before the imports happen, so the HAS_WIN32 is ignored +# and the decorator tries to evaluate the win32security library on systems +# without pyWin32 +def test_sidConversion_no_conversion(pol_info): + val = ws.ConvertStringSidToSid("S-1-5-0") + expected = ["S-1-5-0"] + assert pol_info._sidConversion([val]) == expected + + +def test_sidConversion_everyone(pol_info): + val = ws.ConvertStringSidToSid("S-1-1-0") + expected = ["Everyone"] + assert pol_info._sidConversion([val]) == expected + + +def test_sidConversion_administrator(pol_info): + val = ws.LookupAccountName("", "Administrator")[0] + expected = [f"{socket.gethostname()}\\Administrator"] + assert pol_info._sidConversion([val]) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, None), + ("", ""), + ), +) +def test_usernamesToSidObjects_empty_value(pol_info, val, expected): + assert pol_info._usernamesToSidObjects(val) == expected + + +def test_usernamesToSidObjects_string_list(pol_info): + val = "Administrator,Guest" + admin_sid = ws.LookupAccountName("", "Administrator")[0] + guest_sid = ws.LookupAccountName("", "Guest")[0] + expected = [admin_sid, guest_sid] + assert pol_info._usernamesToSidObjects(val) == expected + + +def test_usernamesToSidObjects_string_list_error(pol_info): + val = "spongebob,squarepants" + with pytest.raises(CommandExecutionError): + pol_info._usernamesToSidObjects(val) + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, "Not Configured"), + ("None", "Not Configured"), + ("true", "Run Windows PowerShell scripts first"), + ("false", "Run Windows PowerShell scripts last"), + ("spongebob", "Invalid Value"), + ), +) +def 
test_powershell_script_order_conversion(pol_info, val, expected): + assert pol_info._powershell_script_order_conversion(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + ("Not Configured", None), + ("Run Windows PowerShell scripts first", "true"), + ("Run Windows PowerShell scripts last", "false"), + ("spongebob", "Invalid Value"), + ), +) +def test_powershell_script_order_reverse_conversion(pol_info, val, expected): + assert pol_info._powershell_script_order_reverse_conversion(val) == expected + + +def test_dict_lookup(pol_info): + lookup = { + "spongebob": "squarepants", + "patrick": "squidward", + "plankton": "mr.crabs", + } + assert pol_info._dict_lookup("spongebob", lookup=lookup) == "squarepants" + assert ( + pol_info._dict_lookup("squarepants", lookup=lookup, value_lookup=True) + == "spongebob" + ) + assert pol_info._dict_lookup("homer", lookup=lookup) == "Invalid Value" + assert ( + pol_info._dict_lookup("homer", lookup=lookup, value_lookup=True) + == "Invalid Value" + ) + assert pol_info._dict_lookup("homer") == "Invalid Value" + + +def test_dict_lookup_bitwise_add(pol_info): + lookup = { + 0: "spongebob", + 1: "squarepants", + 2: "patrick", + } + assert pol_info._dict_lookup_bitwise_add("Not Defined") is None + assert ( + pol_info._dict_lookup_bitwise_add("not a list", value_lookup=True) + == "Invalid Value: Not a list" + ) + assert ( + pol_info._dict_lookup_bitwise_add([], value_lookup=True) + == "Invalid Value: No lookup passed" + ) + assert ( + pol_info._dict_lookup_bitwise_add("not an int") == "Invalid Value: Not an int" + ) + assert pol_info._dict_lookup_bitwise_add(0, lookup=lookup) == [] + assert ( + pol_info._dict_lookup_bitwise_add( + ["spongebob", "squarepants"], lookup=lookup, value_lookup=True + ) + == 1 + ) + assert pol_info._dict_lookup_bitwise_add(1, lookup=lookup) == ["squarepants"] + assert pol_info._dict_lookup_bitwise_add(0, lookup=lookup) == [] + assert pol_info._dict_lookup_bitwise_add(0, lookup=lookup, 
test_zero=True) == [ + "spongebob" + ] + + +@pytest.mark.parametrize( + "val, expected", + ( + (["list", "of", "items"], ["list", "of", "items"]), + ("Not Defined", None), + ("list,of,items", ["list", "of", "items"]), + (7, "Invalid Value"), + ), +) +def test_multi_string_put_transform(pol_info, val, expected): + assert pol_info._multi_string_put_transform(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (["list", "of", "items"], ["list", "of", "items"]), + (None, "Not Defined"), + ("list,of,items", "Invalid Value"), + (7, "Invalid Value"), + ), +) +def test_multi_string_get_transform(pol_info, val, expected): + assert pol_info._multi_string_get_transform(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + ("String Item", "String Item"), + ("Not Defined", None), + (7, None), + ), +) +def test_string_put_transform(pol_info, val, expected): + assert pol_info._string_put_transform(val) == expected + + +def test__virtual__(pol_info): + assert win_lgpo.__virtual__() == "lgpo" + with patch("salt.utils.platform.is_windows", return_value=False): + assert win_lgpo.__virtual__() == ( + False, + "win_lgpo: Not a Windows System", + ) + + with patch.object(win_lgpo, "HAS_WINDOWS_MODULES", False): + assert win_lgpo.__virtual__() == ( + False, + "win_lgpo: Required modules failed to load", + ) + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, b"\x00\x00"), + ("spongebob", b"s\x00p\x00o\x00n\x00g\x00e\x00b\x00o\x00b\x00\x00\x00"), + ), +) +def test_encode_string(val, expected): + assert win_lgpo._encode_string(val) == expected + + +def test_encode_string_error(): + with pytest.raises(TypeError): + win_lgpo._encode_string(1) diff --git a/tests/pytests/unit/modules/win_lgpo/test_adv_audit_settings.py b/tests/pytests/unit/modules/win_lgpo/test_adv_audit.py similarity index 68% rename from tests/pytests/unit/modules/win_lgpo/test_adv_audit_settings.py rename to tests/pytests/unit/modules/win_lgpo/test_adv_audit.py index 
c31641ec1d8..1f8e83eeab3 100644 --- a/tests/pytests/unit/modules/win_lgpo/test_adv_audit_settings.py +++ b/tests/pytests/unit/modules/win_lgpo/test_adv_audit.py @@ -4,6 +4,8 @@ import salt.modules.win_file as win_file import salt.modules.win_lgpo as win_lgpo import salt.utils.win_dacl as win_dacl import salt.utils.win_lgpo_auditpol as auditpol +from salt.exceptions import CommandExecutionError +from tests.support.mock import MagicMock, patch pytestmark = [ pytest.mark.windows_whitelisted, @@ -110,7 +112,16 @@ def set_policy(): ) -def _test_adv_auditing(setting, expected): +@pytest.mark.parametrize( + "setting, expected", + [ + ("No Auditing", "0"), + ("Success", "1"), + ("Failure", "2"), + ("Success and Failure", "3"), + ], +) +def test_get_value(setting, expected): """ Helper function to set an audit setting and assert that it was successful """ @@ -120,17 +131,38 @@ def _test_adv_auditing(setting, expected): assert result == expected -def test_no_auditing(disable_legacy_auditing, set_policy): - _test_adv_auditing("No Auditing", "0") +def test_get_defaults(): + patch_context = patch.dict(win_lgpo.__context__, {}) + patch_salt = patch.dict( + win_lgpo.__utils__, {"auditpol.get_auditpol_dump": auditpol.get_auditpol_dump} + ) + with patch_context, patch_salt: + assert "Machine Name" in win_lgpo._get_advaudit_defaults("fieldnames") + + audit_defaults = {"junk": "defaults"} + patch_context = patch.dict( + win_lgpo.__context__, {"lgpo.audit_defaults": audit_defaults} + ) + with patch_context, patch_salt: + assert win_lgpo._get_advaudit_defaults() == audit_defaults -def test_success(disable_legacy_auditing, clear_policy): - _test_adv_auditing("Success", "1") +def test_set_value_error(): + mock_set_file_data = MagicMock(return_value=False) + with patch.object(win_lgpo, "_set_advaudit_file_data", mock_set_file_data): + with pytest.raises(CommandExecutionError): + win_lgpo._set_advaudit_value("Audit User Account Management", "None") -def 
test_failure(disable_legacy_auditing, clear_policy): - _test_adv_auditing("Failure", "2") - - -def test_success_and_failure(disable_legacy_auditing, clear_policy): - _test_adv_auditing("Success and Failure", "3") +def test_set_value_log_messages(caplog): + mock_set_file_data = MagicMock(return_value=True) + mock_set_pol_data = MagicMock(return_value=False) + mock_context = {"lgpo.adv_audit_data": {"test_option": "test_value"}} + with patch.object( + win_lgpo, "_set_advaudit_file_data", mock_set_file_data + ), patch.object(win_lgpo, "_set_advaudit_pol_data", mock_set_pol_data), patch.dict( + win_lgpo.__context__, mock_context + ): + win_lgpo._set_advaudit_value("test_option", None) + assert "Failed to apply audit setting:" in caplog.text + assert "LGPO: Removing Advanced Audit data:" in caplog.text diff --git a/tests/pytests/unit/modules/win_lgpo/test_netsh.py b/tests/pytests/unit/modules/win_lgpo/test_netsh.py new file mode 100644 index 00000000000..f3b4aef63eb --- /dev/null +++ b/tests/pytests/unit/modules/win_lgpo/test_netsh.py @@ -0,0 +1,135 @@ +import pytest + +import salt.modules.win_lgpo as win_lgpo +from tests.support.mock import MagicMock, patch + +pytestmark = [ + pytest.mark.windows_whitelisted, + pytest.mark.skip_unless_on_windows, + pytest.mark.destructive_test, + pytest.mark.slow_test, +] + + +@pytest.fixture +def configure_loader_modules(): + return {win_lgpo: {}} + + +def test_get_netsh_value(): + with patch.dict(win_lgpo.__context__, {"lgpo.netsh_data": {"domain": {}}}): + win_lgpo._set_netsh_value("domain", "state", "State", "NotConfigured") + with patch.dict(win_lgpo.__context__, {}): + assert win_lgpo._get_netsh_value("domain", "State") == "NotConfigured" + + context = { + "lgpo.netsh_data": { + "domain": { + "State": "ONContext", + "Inbound": "NotConfigured", + "Outbound": "NotConfigured", + "LocalFirewallRules": "NotConfigured", + }, + }, + } + with patch.dict(win_lgpo.__context__, context): + assert win_lgpo._get_netsh_value("domain", "State") 
== "ONContext" + + +def test_set_value_error(): + with pytest.raises(ValueError): + win_lgpo._set_netsh_value("domain", "bad_section", "junk", "junk") + + +def test_set_value_firewall(): + mock_context = {"lgpo.netsh_data": {"domain": "junk"}} + with patch( + "salt.utils.win_lgpo_netsh.set_firewall_settings", MagicMock() + ) as mock, patch.dict(win_lgpo.__context__, mock_context): + win_lgpo._set_netsh_value( + profile="domain", + section="firewallpolicy", + option="Inbound", + value="spongebob", + ) + mock.assert_called_once_with( + profile="domain", + inbound="spongebob", + outbound=None, + store="lgpo", + ) + + +def test_set_value_settings(): + mock_context = {"lgpo.netsh_data": {"domain": "junk"}} + with patch( + "salt.utils.win_lgpo_netsh.set_settings", MagicMock() + ) as mock, patch.dict(win_lgpo.__context__, mock_context): + win_lgpo._set_netsh_value( + profile="domain", + section="settings", + option="spongebob", + value="squarepants", + ) + mock.assert_called_once_with( + profile="domain", + setting="spongebob", + value="squarepants", + store="lgpo", + ) + + +def test_set_value_state(): + mock_context = {"lgpo.netsh_data": {"domain": "junk"}} + with patch("salt.utils.win_lgpo_netsh.set_state", MagicMock()) as mock, patch.dict( + win_lgpo.__context__, mock_context + ): + win_lgpo._set_netsh_value( + profile="domain", + section="state", + option="junk", + value="spongebob", + ) + mock.assert_called_once_with( + profile="domain", + state="spongebob", + store="lgpo", + ) + + +def test_set_value_logging_filename(): + mock_context = {"lgpo.netsh_data": {"domain": "junk"}} + with patch( + "salt.utils.win_lgpo_netsh.set_logging_settings", MagicMock() + ) as mock, patch.dict(win_lgpo.__context__, mock_context): + win_lgpo._set_netsh_value( + profile="domain", + section="logging", + option="FileName", + value="Not configured", + ) + mock.assert_called_once_with( + profile="domain", + setting="FileName", + value="notconfigured", + store="lgpo", + ) + + +def 
test_set_value_logging_log(): + mock_context = {"lgpo.netsh_data": {"domain": "junk"}} + with patch( + "salt.utils.win_lgpo_netsh.set_logging_settings", MagicMock() + ) as mock, patch.dict(win_lgpo.__context__, mock_context): + win_lgpo._set_netsh_value( + profile="domain", + section="logging", + option="LogSpongebob", + value="Junk", + ) + mock.assert_called_once_with( + profile="domain", + setting="Spongebob", + value="Junk", + store="lgpo", + ) diff --git a/tests/pytests/unit/modules/win_lgpo/test_policy_info.py b/tests/pytests/unit/modules/win_lgpo/test_policy_info.py index b728ab3de89..d0ed3c911a3 100644 --- a/tests/pytests/unit/modules/win_lgpo/test_policy_info.py +++ b/tests/pytests/unit/modules/win_lgpo/test_policy_info.py @@ -5,6 +5,7 @@ import pytest import salt.modules.win_file as win_file import salt.modules.win_lgpo as win_lgpo +from tests.support.mock import MagicMock, patch pytestmark = [ pytest.mark.windows_whitelisted, @@ -42,6 +43,18 @@ def test_get_policy_name(): assert result == expected +def test_get_adml_display_name_bad_name(): + result = win_lgpo._getAdmlDisplayName("junk", "spongbob") + assert result is None + + +def test_get_adml_display_name_no_results(): + patch_xpath = patch.object(win_lgpo, "ADML_DISPLAY_NAME_XPATH", return_value=[]) + with patch_xpath: + result = win_lgpo._getAdmlDisplayName("junk", "$(spongbob.squarepants)") + assert result is None + + def test_get_policy_id(): result = win_lgpo.get_policy( policy_name="WfwPublicSettingsNotification", @@ -156,3 +169,78 @@ def test_get_policy_id_full_return_full_names_hierarchical(): } } assert result == expected + + +def test_transform_value_missing_type(): + policy = {"Transform": {"some_type": "junk"}} + result = win_lgpo._transform_value( + value="spongebob", + policy=policy, + transform_type="different_type", + ) + assert result == "spongebob" + + +def test_transform_value_registry(): + policy = {"Registry": {}} + result = win_lgpo._transform_value( + value="spongebob", + 
policy=policy, + transform_type="different_type", + ) + assert result == "spongebob" + + +def test_transform_value_registry_not_set(): + policy = {"Registry": {}} + result = win_lgpo._transform_value( + value="(value not set)", + policy=policy, + transform_type="different_type", + ) + assert result == "Not Defined" + + +def test_validate_setting_not_in_list(): + policy = {"Settings": ["junk"]} + result = win_lgpo._validateSetting(value="spongebob", policy=policy) + assert not result + + +def test_validate_setting_in_list(): + policy = {"Settings": ["spongebob"]} + result = win_lgpo._validateSetting(value="spongebob", policy=policy) + assert result + + +def test_validate_setting_not_list_or_dict(): + policy = {"Settings": "spongebob"} + result = win_lgpo._validateSetting(value="spongebob", policy=policy) + assert result + + +def test_add_account_rights_error(): + patch_w32sec = patch( + "win32security.LsaOpenPolicy", MagicMock(side_effect=Exception) + ) + with patch_w32sec: + assert win_lgpo._addAccountRights("spongebob", "junk") is False + + +def test_del_account_rights_error(): + patch_w32sec = patch( + "win32security.LsaOpenPolicy", MagicMock(side_effect=Exception) + ) + with patch_w32sec: + assert win_lgpo._delAccountRights("spongebob", "junk") is False + + +def test_validate_setting_no_function(): + policy = { + "Settings": { + "Function": "_in_range_inclusive", + "Args": {"min": 0, "max": 24}, + }, + } + result = win_lgpo._validateSetting(value="spongebob", policy=policy) + assert not result diff --git a/tests/pytests/unit/modules/win_lgpo/test_reg_pol.py b/tests/pytests/unit/modules/win_lgpo/test_reg_pol.py new file mode 100644 index 00000000000..79c8a10393c --- /dev/null +++ b/tests/pytests/unit/modules/win_lgpo/test_reg_pol.py @@ -0,0 +1,53 @@ +""" +:codeauthor: Shane Lee +""" +import pytest + +import salt.modules.win_lgpo as win_lgpo + +pytestmark = [ + pytest.mark.windows_whitelisted, + pytest.mark.skip_unless_on_windows, + pytest.mark.slow_test, +] + + 
+@pytest.fixture +def reg_pol_dword(): + data = ( + b"PReg\x01\x00\x00\x00" # Header + b"[\x00" # Opening list of policies + b"S\x00o\x00m\x00e\x00\\\x00K\x00e\x00y\x00\x00\x00;\x00" # Key + b"V\x00a\x00l\x00u\x00e\x00N\x00a\x00m\x00e\x00\x00\x00;\x00" # Value + b"\x04\x00\x00\x00;\x00" # Reg DWord Type + b"\x04\x00\x00\x00;\x00" # Size + # b"\x01\x00\x00\x00" # Reg Dword Data + b"\x00\x00\x00\x00" # No Data + b"]\x00" # Closing list of policies + ) + yield data + + +def test_get_data_from_reg_pol_data(reg_pol_dword): + encoded_name = "ValueName".encode("utf-16-le") + encoded_null = chr(0).encode("utf-16-le") + encoded_semicolon = ";".encode("utf-16-le") + encoded_type = chr(4).encode("utf-16-le") + encoded_size = chr(4).encode("utf-16-le") + search_string = b"".join( + [ + encoded_semicolon, + encoded_name, + encoded_null, + encoded_semicolon, + encoded_type, + encoded_null, + encoded_semicolon, + encoded_size, + encoded_null, + ] + ) + result = win_lgpo._getDataFromRegPolData( + search_string, reg_pol_dword, return_value_name=True + ) + assert result == {"ValueName": 0} diff --git a/tests/pytests/unit/modules/win_lgpo/test_secedit.py b/tests/pytests/unit/modules/win_lgpo/test_secedit.py new file mode 100644 index 00000000000..47a39fb8250 --- /dev/null +++ b/tests/pytests/unit/modules/win_lgpo/test_secedit.py @@ -0,0 +1,83 @@ +import pytest + +import salt.modules.cmdmod as cmd +import salt.modules.win_file as win_file +import salt.modules.win_lgpo as win_lgpo +from tests.support.mock import MagicMock, patch + +pytestmark = [ + pytest.mark.windows_whitelisted, + pytest.mark.skip_unless_on_windows, + pytest.mark.destructive_test, + pytest.mark.slow_test, +] + + +@pytest.fixture +def configure_loader_modules(tmp_path): + cachedir = tmp_path / "__test_admx_policy_cache_dir" + cachedir.mkdir(parents=True, exist_ok=True) + return { + win_lgpo: { + "__salt__": { + "cmd.run": cmd.run, + "file.file_exists": win_file.file_exists, + "file.remove": win_file.remove, + }, + 
"__opts__": { + "cachedir": str(cachedir), + }, + }, + } + + +def test_load_secedit_data(): + result = win_lgpo._load_secedit_data() + result = [x.strip() for x in result] + assert "[Unicode]" in result + assert "[System Access]" in result + + +def test_get_secedit_data(): + with patch.dict(win_lgpo.__context__, {}): + result = win_lgpo._get_secedit_data() + result = [x.strip() for x in result] + assert "[Unicode]" in result + assert "[System Access]" in result + + +def test_get_secedit_data_existing_context(): + mock_context = {"lgpo.secedit_data": ["spongebob", "squarepants"]} + with patch.dict(win_lgpo.__context__, mock_context): + result = win_lgpo._get_secedit_data() + result = [x.strip() for x in result] + assert "spongebob" in result + assert "squarepants" in result + + +def test_get_secedit_value(): + result = win_lgpo._get_secedit_value("AuditDSAccess") + assert result == "0" + + +def test_get_secedit_value_not_defined(): + result = win_lgpo._get_secedit_value("Spongebob") + assert result == "Not Defined" + + +def test_write_secedit_data_import_fail(caplog): + patch_cmd_retcode = patch.dict( + win_lgpo.__salt__, {"cmd.retcode": MagicMock(return_value=1)} + ) + with patch_cmd_retcode: + assert win_lgpo._write_secedit_data("spongebob") is False + assert "Secedit failed to import template data" in caplog.text + + +def test_write_secedit_data_configure_fail(caplog): + patch_cmd_retcode = patch.dict( + win_lgpo.__salt__, {"cmd.retcode": MagicMock(side_effect=[0, 1])} + ) + with patch_cmd_retcode: + assert win_lgpo._write_secedit_data("spongebob") is False + assert "Secedit failed to apply security database" in caplog.text diff --git a/tests/pytests/unit/states/postgresql/test_group.py b/tests/pytests/unit/states/postgresql/test_group.py index 2eb77bf4c0f..6957ce54540 100644 --- a/tests/pytests/unit/states/postgresql/test_group.py +++ b/tests/pytests/unit/states/postgresql/test_group.py @@ -1,4 +1,5 @@ import pytest +from pytestskipmarkers.utils import platform 
import salt.modules.postgres as postgres import salt.states.postgres_group as postgres_group @@ -19,6 +20,8 @@ def fixture_db_args(): @pytest.fixture(name="md5_pw") def fixture_md5_pw(): + if platform.is_fips_enabled(): + pytest.skip("Test cannot run on a FIPS enabled platform") # 'md5' + md5('password' + 'groupname') return "md58b14c378fab8ef0dc227f4e6d6787a87" @@ -79,6 +82,7 @@ def configure_loader_modules(mocks): # ========== +@pytest.mark.skip_on_fips_enabled_platform def test_present_create_basic(mocks, db_args): assert postgres_group.present("groupname") == { "name": "groupname", @@ -343,6 +347,7 @@ def test_present_update_md5_password(mocks, existing_group, md5_pw, db_args): ) +@pytest.mark.skip_on_fips_enabled_platform def test_present_update_error(mocks, existing_group): existing_group["password"] = "md500000000000000000000000000000000" mocks["postgres.role_get"].return_value = existing_group diff --git a/tests/pytests/unit/states/postgresql/test_user.py b/tests/pytests/unit/states/postgresql/test_user.py index 46d76535144..1d5dba9b1bb 100644 --- a/tests/pytests/unit/states/postgresql/test_user.py +++ b/tests/pytests/unit/states/postgresql/test_user.py @@ -1,4 +1,5 @@ import pytest +from pytestskipmarkers.utils import platform import salt.modules.postgres as postgres import salt.states.postgres_user as postgres_user @@ -25,6 +26,8 @@ def fixture_db_args(): @pytest.fixture(name="md5_pw") def fixture_md5_pw(): # 'md5' + md5('password' + 'username') + if platform.is_fips_enabled(): + pytest.skip("Test cannot run on a FIPS enabled platform") return "md55a231fcdb710d73268c4f44283487ba2" diff --git a/tests/pytests/unit/states/test_boto_cloudwatch_event.py b/tests/pytests/unit/states/test_boto_cloudwatch_event.py index 2974947e60e..684744464e7 100644 --- a/tests/pytests/unit/states/test_boto_cloudwatch_event.py +++ b/tests/pytests/unit/states/test_boto_cloudwatch_event.py @@ -17,6 +17,7 @@ log = logging.getLogger(__name__) pytestmark = [ pytest.mark.slow_test, + 
pytest.mark.skip_on_fips_enabled_platform, ] diff --git a/tests/pytests/unit/states/test_boto_iot.py b/tests/pytests/unit/states/test_boto_iot.py index 594cd9982bb..6da6628b655 100644 --- a/tests/pytests/unit/states/test_boto_iot.py +++ b/tests/pytests/unit/states/test_boto_iot.py @@ -18,6 +18,7 @@ log = logging.getLogger(__name__) pytestmark = [ pytest.mark.slow_test, + pytest.mark.skip_on_fips_enabled_platform, ] diff --git a/tests/pytests/unit/states/test_http.py b/tests/pytests/unit/states/test_http.py index a672845e5c3..85150b4a2a8 100644 --- a/tests/pytests/unit/states/test_http.py +++ b/tests/pytests/unit/states/test_http.py @@ -42,26 +42,124 @@ def test_query(): with patch.dict(http.__salt__, {"http.query": mock}): assert http.query("salt", "Dude", "stack") == ret[1] + with patch.dict(http.__opts__, {"test": False}): + mock = MagicMock(return_value={"body": "http body", "status": 200}) + expected = { + "name": "http://example.com/", + "result": True, + "comment": "Status 200 was found.", + "changes": {}, + "data": {"body": "http body", "status": 200}, + } + + with patch.dict(http.__salt__, {"http.query": mock}): + assert ( + http.query(name="http://example.com/", status=200, decode=False) + == expected + ) + + with patch.dict(http.__opts__, {"test": False}): + mock = MagicMock(return_value={"body": "http body", "status": 200}) + expected = { + "name": "http://example.com/", + "result": True, + "comment": "Status 200 was found.", + "changes": {}, + "data": {"body": "http body", "status": 200}, + } + + with patch.dict(http.__salt__, {"http.wait_for_successful_query": mock}): + assert ( + http.query(name="http://example.com/", status=200, wait_for=300) + == expected + ) + + with patch.dict(http.__opts__, {"test": True}): + mock = MagicMock(return_value={"body": "http body", "status": 200}) + expected = { + "name": "http://example.com/", + "result": None, + "comment": "Status 200 was found. 
(TEST MODE, TEST URL WAS: http://status.example.com)", + "changes": {}, + "data": {"body": "http body", "status": 200}, + } + + with patch.dict(http.__salt__, {"http.query": mock}): + assert ( + http.query( + name="http://example.com/", + status=200, + test_url="http://status.example.com", + ) + == expected + ) + def test_query_pcre_statustype(): """ Test to perform an HTTP query with a regex used to match the status code and statefully return the result """ testurl = "salturl" - http_result = {"text": "This page returned a 201 status code", "status": "201"} - state_return = { - "changes": {}, - "comment": ( - 'Match text "This page returned" was found. Status pattern "200|201" was' - " found." - ), - "data": {"status": "201", "text": "This page returned a 201 status code"}, - "name": testurl, - "result": True, - } with patch.dict(http.__opts__, {"test": False}): + http_result = {"text": "This page returned a 201 status code", "status": "201"} mock = MagicMock(return_value=http_result) + + state_return = { + "changes": {}, + "comment": ( + 'Match text "This page returned" was found. Status pattern "200|201" was' + " found." 
+ ), + "data": {"status": "201", "text": "This page returned a 201 status code"}, + "name": testurl, + "result": True, + } + + with patch.dict(http.__salt__, {"http.query": mock}): + assert ( + http.query( + testurl, + match="This page returned", + status="200|201", + status_type="pcre", + ) + == state_return + ) + + with patch.dict(http.__opts__, {"test": False}): + http_result = {"text": "This page returned a 201 status code", "status": "201"} + mock = MagicMock(return_value=http_result) + + state_return = { + "changes": {}, + "comment": ('Status pattern "200|201" was found.'), + "data": {"status": "201", "text": "This page returned a 201 status code"}, + "name": testurl, + "result": True, + } + + with patch.dict(http.__salt__, {"http.query": mock}): + assert ( + http.query( + testurl, + status="200|201", + status_type="pcre", + ) + == state_return + ) + + http_result = {"text": "This page returned a 403 status code", "status": "403"} + mock = MagicMock(return_value=http_result) + + state_return = { + "name": "salturl", + "result": False, + "comment": 'Match text "This page returned" was found. 
Status pattern "200|201" was not found.', + "changes": {}, + "data": {"text": "This page returned a 403 status code", "status": "403"}, + } + with patch.dict(http.__salt__, {"http.query": mock}): assert ( http.query( @@ -74,23 +172,109 @@ def test_query_pcre_statustype(): ) +def test_query_pcre_matchtype(): + """ + Test to perform an HTTP query with a regex used to match the returned text and statefully return the result + """ + testurl = "salturl" + + with patch.dict(http.__opts__, {"test": False}): + http_result = {"text": "This page returned a 201 status code", "status": "201"} + mock = MagicMock(return_value=http_result) + + state_return = { + "changes": {}, + "comment": ('Match pattern "This page returned" was found.'), + "data": {"status": "201", "text": "This page returned a 201 status code"}, + "name": testurl, + "result": True, + } + + with patch.dict(http.__salt__, {"http.query": mock}): + assert ( + http.query( + testurl, + match="This page returned", + match_type="pcre", + ) + == state_return + ) + + http_result = { + "text": "This page did not return a 201 status code", + "status": "403", + } + mock = MagicMock(return_value=http_result) + + state_return = { + "changes": {}, + "comment": ('Match pattern "This page returned" was not found.'), + "data": { + "status": "403", + "text": "This page did not return a 201 status code", + }, + "name": testurl, + "result": False, + } + + with patch.dict(http.__salt__, {"http.query": mock}): + assert ( + http.query( + testurl, + match="This page returned", + match_type="pcre", + ) + == state_return + ) + + def test_query_stringstatustype(): """ Test to perform an HTTP query with a string status code and statefully return the result """ testurl = "salturl" - http_result = {"text": "This page returned a 201 status code", "status": "201"} - state_return = { - "changes": {}, - "comment": 'Match text "This page returned" was found. 
Status 201 was found.', - "data": {"status": "201", "text": "This page returned a 201 status code"}, - "name": testurl, - "result": True, - } with patch.dict(http.__opts__, {"test": False}): + http_result = {"text": "This page returned a 201 status code", "status": "201"} mock = MagicMock(return_value=http_result) + with patch.dict(http.__salt__, {"http.query": mock}): + state_return = { + "changes": {}, + "comment": 'Match text "This page returned" was found. Status 201 was found.', + "data": { + "status": "201", + "text": "This page returned a 201 status code", + }, + "name": testurl, + "result": True, + } + + assert ( + http.query( + testurl, + match="This page returned", + status="201", + status_type="string", + ) + == state_return + ) + + http_result = {"text": "This page returned a 403 status code", "status": "403"} + mock = MagicMock(return_value=http_result) + + with patch.dict(http.__salt__, {"http.query": mock}): + state_return = { + "name": "salturl", + "result": False, + "comment": 'Match text "This page returned" was found. 
Status 201 was not found.', + "changes": {}, + "data": { + "text": "This page returned a 403 status code", + "status": "403", + }, + } + assert ( http.query( testurl, @@ -102,21 +286,54 @@ def test_query_stringstatustype(): ) +def test_query_invalidstatustype(): + """ + Test to perform an HTTP query with a string status code and statefully return the result + """ + testurl = "salturl" + + with patch.dict(http.__opts__, {"test": False}): + http_result = {"text": "This page returned a 201 status code", "status": "201"} + mock = MagicMock(return_value=http_result) + + with patch.dict(http.__salt__, {"http.query": mock}): + state_return = { + "name": "salturl", + "result": None, + "comment": "", + "changes": {}, + "data": { + "text": "This page returned a 201 status code", + "status": "201", + }, + } + + assert ( + http.query( + testurl, + status="201", + status_type="invalid", + ) + == state_return + ) + + def test_query_liststatustype(): """ Test to perform an HTTP query with a list of status codes and statefully return the result """ testurl = "salturl" - http_result = {"text": "This page returned a 201 status code", "status": "201"} - state_return = { - "changes": {}, - "comment": 'Match text "This page returned" was found. Status 201 was found.', - "data": {"status": "201", "text": "This page returned a 201 status code"}, - "name": testurl, - "result": True, - } with patch.dict(http.__opts__, {"test": False}): + http_result = {"text": "This page returned a 201 status code", "status": "201"} + state_return = { + "changes": {}, + "comment": 'Match text "This page returned" was found. 
Status 201 was found.', + "data": {"status": "201", "text": "This page returned a 201 status code"}, + "name": testurl, + "result": True, + } + mock = MagicMock(return_value=http_result) with patch.dict(http.__salt__, {"http.query": mock}): assert ( @@ -129,6 +346,48 @@ def test_query_liststatustype(): == state_return ) + with patch.dict(http.__opts__, {"test": False}): + http_result = {"text": "This page returned a 201 status code", "status": "201"} + state_return = { + "changes": {}, + "comment": "Status 201 was found.", + "data": {"status": "201", "text": "This page returned a 201 status code"}, + "name": testurl, + "result": True, + } + + mock = MagicMock(return_value=http_result) + with patch.dict(http.__salt__, {"http.query": mock}): + assert ( + http.query( + testurl, + status=["200", "201"], + status_type="list", + ) + == state_return + ) + + http_result = {"text": "This page returned a 403 status code", "status": "403"} + state_return = { + "name": "salturl", + "result": False, + "comment": "Match text \"This page returned a 200\" was not found. 
Statuses ['200', '201'] were not found.", + "changes": {}, + "data": {"text": "This page returned a 403 status code", "status": "403"}, + } + + mock = MagicMock(return_value=http_result) + with patch.dict(http.__salt__, {"http.query": mock}): + assert ( + http.query( + testurl, + match="This page returned a 200", + status=["200", "201"], + status_type="list", + ) + == state_return + ) + def test_wait_for_with_interval(): """ @@ -156,3 +415,22 @@ def test_wait_for_without_interval(): with patch("time.sleep", MagicMock()) as sleep_mock: assert http.wait_for_successful_query("url", status=200) == {"result": True} sleep_mock.assert_not_called() + + query_mock = MagicMock(return_value={"result": False}) + + with patch.object(http, "query", query_mock): + with patch( + "time.time", MagicMock(side_effect=[1697564521.9640958, 1697564822.9640958]) + ): + assert http.wait_for_successful_query("url", status=200) == { + "result": False + } + + query_mock = MagicMock(side_effect=Exception()) + + with patch.object(http, "query", query_mock): + with patch( + "time.time", MagicMock(side_effect=[1697564521.9640958, 1697564822.9640958]) + ): + with pytest.raises(Exception): + http.wait_for_successful_query("url", status=200) diff --git a/tests/pytests/unit/transport/test_base.py b/tests/pytests/unit/transport/test_base.py new file mode 100644 index 00000000000..da5a6fa2615 --- /dev/null +++ b/tests/pytests/unit/transport/test_base.py @@ -0,0 +1,21 @@ +""" +Unit tests for salt.transport.base. 
+""" +import pytest + +import salt.transport.base + +pytestmark = [ + pytest.mark.core_test, +] + + +def test_unclosed_warning(): + + transport = salt.transport.base.Transport() + assert transport._closing is False + assert transport._connect_called is False + transport.connect() + assert transport._connect_called is True + with pytest.warns(salt.transport.base.TransportWarning): + del transport diff --git a/tests/pytests/unit/transport/test_zeromq.py b/tests/pytests/unit/transport/test_zeromq.py index 739aeb573e1..766ca20c53c 100644 --- a/tests/pytests/unit/transport/test_zeromq.py +++ b/tests/pytests/unit/transport/test_zeromq.py @@ -53,3 +53,33 @@ def test_pub_client_init(minion_opts, io_loop): minion_opts, io_loop, host=minion_opts["master_ip"], port=121212 ) as client: client.send(b"asf") + + +async def test_unclosed_request_client(minion_opts, io_loop): + minion_opts["master_uri"] = "tcp://127.0.0.1:4506" + client = salt.transport.zeromq.RequestClient(minion_opts, io_loop) + await client.connect() + try: + assert client._closing is False + with pytest.warns(salt.transport.base.TransportWarning): + client.__del__() + finally: + client.close() + + +async def test_unclosed_publish_client(minion_opts, io_loop): + minion_opts["id"] = "minion" + minion_opts["__role"] = "minion" + minion_opts["master_ip"] = "127.0.0.1" + minion_opts["zmq_filtering"] = True + minion_opts["zmq_monitor"] = True + client = salt.transport.zeromq.PublishClient( + minion_opts, io_loop, host=minion_opts["master_ip"], port=121212 + ) + await client.connect() + try: + assert client._closing is False + with pytest.warns(salt.transport.base.TransportWarning): + client.__del__() + finally: + client.close() diff --git a/tests/pytests/unit/utils/jinja/test_custom_extensions.py b/tests/pytests/unit/utils/jinja/test_custom_extensions.py index 4d004230fcb..d213b69709d 100644 --- a/tests/pytests/unit/utils/jinja/test_custom_extensions.py +++ b/tests/pytests/unit/utils/jinja/test_custom_extensions.py 
@@ -46,7 +46,6 @@ def minion_opts(tmp_path, minion_opts): "file_roots": {"test": [str(tmp_path / "templates")]}, "pillar_roots": {"test": [str(tmp_path / "templates")]}, "fileserver_backend": ["roots"], - "hash_type": "md5", "extension_modules": os.path.join( os.path.dirname(os.path.abspath(__file__)), "extmods" ), @@ -1041,6 +1040,7 @@ def test_method_call(minion_opts, local_salt): assert rendered == "None" +@pytest.mark.skip_on_fips_enabled_platform def test_md5(minion_opts, local_salt): """ Test the `md5` Jinja filter. diff --git a/tests/pytests/unit/utils/jinja/test_get_template.py b/tests/pytests/unit/utils/jinja/test_get_template.py index 35fc188b812..cdba34fa171 100644 --- a/tests/pytests/unit/utils/jinja/test_get_template.py +++ b/tests/pytests/unit/utils/jinja/test_get_template.py @@ -61,7 +61,6 @@ def minion_opts(tmp_path, minion_opts): "file_roots": {"test": [str(tmp_path / "files" / "test")]}, "pillar_roots": {"test": [str(tmp_path / "files" / "test")]}, "fileserver_backend": ["roots"], - "hash_type": "md5", "extension_modules": os.path.join( os.path.dirname(os.path.abspath(__file__)), "extmods" ), diff --git a/tests/pytests/unit/utils/jinja/test_salt_cache_loader.py b/tests/pytests/unit/utils/jinja/test_salt_cache_loader.py index e0f5fa158ff..be68660bccf 100644 --- a/tests/pytests/unit/utils/jinja/test_salt_cache_loader.py +++ b/tests/pytests/unit/utils/jinja/test_salt_cache_loader.py @@ -2,11 +2,10 @@ Tests for salt.utils.jinja """ -import copy import os import pytest -from jinja2 import Environment, exceptions +from jinja2 import Environment, TemplateNotFound, exceptions # dateutils is needed so that the strftime jinja filter is loaded import salt.utils.dateutils # pylint: disable=unused-import @@ -15,7 +14,7 @@ import salt.utils.json # pylint: disable=unused-import import salt.utils.stringutils # pylint: disable=unused-import import salt.utils.yaml # pylint: disable=unused-import from salt.utils.jinja import SaltCacheLoader -from tests.support.mock 
import Mock, call, patch +from tests.support.mock import MagicMock, call, patch @pytest.fixture @@ -25,7 +24,7 @@ def minion_opts(tmp_path, minion_opts): "file_buffer_size": 1048576, "cachedir": str(tmp_path), "file_roots": {"test": [str(tmp_path / "files" / "test")]}, - "pillar_roots": {"test": [str(tmp_path / "files" / "test")]}, + "pillar_roots": {"test": [str(tmp_path / "pillar" / "test")]}, "extension_modules": os.path.join( os.path.dirname(os.path.abspath(__file__)), "extmods" ), @@ -108,7 +107,7 @@ def get_loader(mock_file_client, minion_opts): if opts is None: opts = minion_opts mock_file_client.opts = opts - loader = SaltCacheLoader(opts, saltenv, _file_client=mock_file_client) + loader = SaltCacheLoader(opts, saltenv, _file_client=mock_file_client, **kwargs) # Create a mock file client and attach it to the loader return loader @@ -128,10 +127,27 @@ def test_searchpath(minion_opts, get_loader, tmp_path): """ The searchpath is based on the cachedir option and the saltenv parameter """ - opts = copy.deepcopy(minion_opts) - opts.update({"cachedir": str(tmp_path)}) - loader = get_loader(opts=minion_opts, saltenv="test") - assert loader.searchpath == [str(tmp_path / "files" / "test")] + saltenv = "test" + loader = get_loader(opts=minion_opts, saltenv=saltenv) + assert loader.searchpath == minion_opts["file_roots"][saltenv] + + +def test_searchpath_pillar_rend(minion_opts, get_loader): + """ + The searchpath is based on the pillar_rend if it is True + """ + saltenv = "test" + loader = get_loader(opts=minion_opts, saltenv=saltenv, pillar_rend=True) + assert loader.searchpath == minion_opts["pillar_roots"][saltenv] + + +def test_searchpath_bad_pillar_rend(minion_opts, get_loader): + """ + The searchpath is based on the pillar_rend if it is True + """ + saltenv = "bad_env" + loader = get_loader(opts=minion_opts, saltenv=saltenv, pillar_rend=True) + assert loader.searchpath == [] def test_mockclient(minion_opts, template_dir, hello_simple, get_loader): @@ -208,7 
+224,7 @@ def test_cached_file_client(get_loader, minion_opts): """ Multiple instantiations of SaltCacheLoader use the cached file client """ - with patch("salt.channel.client.ReqChannel.factory", Mock()): + with patch("salt.channel.client.ReqChannel.factory", MagicMock()): loader_a = SaltCacheLoader(minion_opts) loader_b = SaltCacheLoader(minion_opts) assert loader_a._file_client is loader_b._file_client @@ -230,7 +246,7 @@ def test_cache_loader_passed_file_client(minion_opts, mock_file_client): file_client does not have a destroy method """ # Test SaltCacheLoader creating and destroying the file client created - file_client = Mock() + file_client = MagicMock() with patch("salt.fileclient.get_file_client", return_value=file_client): loader = SaltCacheLoader(minion_opts) assert loader._file_client is None @@ -240,9 +256,9 @@ def test_cache_loader_passed_file_client(minion_opts, mock_file_client): assert file_client.mock_calls == [call.destroy()] # Test SaltCacheLoader reusing the file client passed - file_client = Mock() + file_client = MagicMock() file_client.opts = {"file_roots": minion_opts["file_roots"]} - with patch("salt.fileclient.get_file_client", return_value=Mock()): + with patch("salt.fileclient.get_file_client", return_value=MagicMock()): loader = SaltCacheLoader(minion_opts, _file_client=file_client) assert loader._file_client is file_client with loader: @@ -254,9 +270,9 @@ def test_cache_loader_passed_file_client(minion_opts, mock_file_client): # passed because the "file_roots" option is different, and, as such, # the destroy method on the new file client is called, but not on the # file client passed in. 
- file_client = Mock() + file_client = MagicMock() file_client.opts = {"file_roots": ""} - new_file_client = Mock() + new_file_client = MagicMock() with patch("salt.fileclient.get_file_client", return_value=new_file_client): loader = SaltCacheLoader(minion_opts, _file_client=file_client) assert loader._file_client is file_client @@ -266,3 +282,65 @@ def test_cache_loader_passed_file_client(minion_opts, mock_file_client): assert loader._file_client is None assert file_client.mock_calls == [] assert new_file_client.mock_calls == [call.destroy()] + + +def test_check_cache_miss(get_loader, minion_opts, hello_simple): + saltenv = "test" + loader = get_loader(opts=minion_opts, saltenv=saltenv) + with patch.object(loader, "cached", []): + with patch.object(loader, "cache_file") as cache_mock: + loader.check_cache(str(hello_simple)) + cache_mock.assert_called_once() + + +def test_check_cache_hit(get_loader, minion_opts, hello_simple): + saltenv = "test" + loader = get_loader(opts=minion_opts, saltenv=saltenv) + with patch.object(loader, "cached", [str(hello_simple)]): + with patch.object(loader, "cache_file") as cache_mock: + loader.check_cache(str(hello_simple)) + cache_mock.assert_not_called() + + +def test_get_source_no_environment( + get_loader, minion_opts, relative_rhello, relative_dir +): + saltenv = "test" + loader = get_loader(opts=minion_opts, saltenv=saltenv) + with pytest.raises(TemplateNotFound): + loader.get_source(None, str(".." / relative_rhello.relative_to(relative_dir))) + + +def test_get_source_relative_no_tpldir( + get_loader, minion_opts, relative_rhello, relative_dir +): + saltenv = "test" + loader = get_loader(opts=minion_opts, saltenv=saltenv) + with pytest.raises(TemplateNotFound): + loader.get_source( + MagicMock(globals={}), str(".." 
/ relative_rhello.relative_to(relative_dir)) + ) + + +def test_get_source_template_doesnt_exist(get_loader, minion_opts): + saltenv = "test" + fake_path = "fake_path" + loader = get_loader(opts=minion_opts, saltenv=saltenv) + with pytest.raises(TemplateNotFound): + loader.get_source(None, fake_path) + + +def test_get_source_template_removed(get_loader, minion_opts, hello_simple): + saltenv = "test" + loader = get_loader(opts=minion_opts, saltenv=saltenv) + contents, filepath, uptodate = loader.get_source(None, str(hello_simple)) + hello_simple.unlink() + assert uptodate() is False + + +def test_no_destroy_method_on_file_client(get_loader, minion_opts): + saltenv = "test" + loader = get_loader(opts=minion_opts, saltenv=saltenv) + loader._close_file_client = True + # This should fail silently, thus no error catching + loader.destroy() diff --git a/tests/pytests/unit/utils/parsers/test_daemon_mixin.py b/tests/pytests/unit/utils/parsers/test_daemon_mixin.py new file mode 100644 index 00000000000..ea835d90e4a --- /dev/null +++ b/tests/pytests/unit/utils/parsers/test_daemon_mixin.py @@ -0,0 +1,78 @@ +""" +Tests the PIDfile deletion in the DaemonMixIn. +""" + +import logging + +import pytest + +import salt.utils.parsers +from tests.support.mock import ANY, MagicMock, patch + + +@pytest.fixture +def daemon_mixin(): + mixin = salt.utils.parsers.DaemonMixIn() + mixin.config = {"pidfile": "/some/fake.pid"} + return mixin + + +def test_pid_file_deletion(daemon_mixin): + """ + PIDfile deletion without exception. + """ + with patch("os.unlink", MagicMock()) as unlink_mock: + with patch("os.path.isfile", MagicMock(return_value=True)): + with patch("salt.utils.parsers.log", MagicMock()) as log_mock: + daemon_mixin._mixin_before_exit() + unlink_mock.assert_called_once() + log_mock.info.assert_not_called() + log_mock.debug.assert_not_called() + + +def test_pid_deleted_oserror_as_root(daemon_mixin): + """ + PIDfile deletion with exception, running as root. 
+ """ + with patch("os.unlink", MagicMock(side_effect=OSError())) as unlink_mock: + with patch("os.path.isfile", MagicMock(return_value=True)): + with patch("salt.utils.parsers.log", MagicMock()) as log_mock: + if salt.utils.platform.is_windows(): + patch_args = ( + "salt.utils.win_functions.is_admin", + MagicMock(return_value=True), + ) + else: + patch_args = ("os.getuid", MagicMock(return_value=0)) + + with patch(*patch_args): + daemon_mixin._mixin_before_exit() + assert unlink_mock.call_count == 1 + log_mock.info.assert_called_with( + "PIDfile(%s) could not be deleted: %s", + format(daemon_mixin.config["pidfile"], ""), + ANY, + exc_info_on_loglevel=logging.DEBUG, + ) + + +def test_pid_deleted_oserror_as_non_root(daemon_mixin): + """ + PIDfile deletion with exception, running as non-root. + """ + with patch("os.unlink", MagicMock(side_effect=OSError())) as unlink_mock: + with patch("os.path.isfile", MagicMock(return_value=True)): + with patch("salt.utils.parsers.log", MagicMock()) as log_mock: + if salt.utils.platform.is_windows(): + patch_args = ( + "salt.utils.win_functions.is_admin", + MagicMock(return_value=False), + ) + else: + patch_args = ("os.getuid", MagicMock(return_value=1000)) + + with patch(*patch_args): + daemon_mixin._mixin_before_exit() + assert unlink_mock.call_count == 1 + log_mock.info.assert_not_called() + log_mock.debug.assert_not_called() diff --git a/tests/pytests/unit/utils/parsers/test_log_parsers.py b/tests/pytests/unit/utils/parsers/test_log_parsers.py new file mode 100644 index 00000000000..2b56ccc0da4 --- /dev/null +++ b/tests/pytests/unit/utils/parsers/test_log_parsers.py @@ -0,0 +1,784 @@ +""" + :codeauthor: Denys Havrysh +""" + +import logging +import os +import pprint + +import pytest + +import salt._logging +import salt.config +import salt.syspaths +import salt.utils.jid +import salt.utils.parsers +import salt.utils.platform +from tests.support.mock import MagicMock, patch + +log = logging.getLogger(__name__) + + +class 
LogImplMock: + """ + Logger setup + """ + + def __init__(self): + """ + init + """ + self.log_level_console = None + self.log_file = None + self.log_level_logfile = None + self.config = self.original_config = None + logging_options = salt._logging.get_logging_options_dict() + if logging_options: + self.config = logging_options.copy() + self.original_config = self.config.copy() + self.temp_log_level = None + self._console_handler_configured = False + self._extended_logging_configured = False + self._logfile_handler_configured = False + self._real_set_logging_options_dict = salt._logging.set_logging_options_dict + self._real_get_logging_options_dict = salt._logging.get_logging_options_dict + self._real_setup_logfile_handler = salt._logging.setup_logfile_handler + + def _destroy(self): + salt._logging.set_logging_options_dict.__options_dict__ = self.original_config + salt._logging.shutdown_logfile_handler() + + def setup_temp_handler(self, log_level=None): + """ + Set temp handler loglevel + """ + log.debug("Setting temp handler log level to: %s", log_level) + self.temp_log_level = log_level + + def is_console_handler_configured(self): + log.debug("Calling is_console_handler_configured") + return self._console_handler_configured + + def setup_console_handler( + self, log_level="error", **kwargs + ): # pylint: disable=unused-argument + """ + Set console loglevel + """ + log.debug("Setting console handler log level to: %s", log_level) + self.log_level_console = log_level + self._console_handler_configured = True + + def shutdown_console_handler(self): + log.debug("Calling shutdown_console_handler") + self._console_handler_configured = False + + def is_extended_logging_configured(self): + log.debug("Calling is_extended_logging_configured") + return self._extended_logging_configured + + def setup_extended_logging(self, opts): + """ + Set opts + """ + log.debug("Calling setup_extended_logging") + self._extended_logging_configured = True + + def 
shutdown_extended_logging(self): + log.debug("Calling shutdown_extended_logging") + self._extended_logging_configured = False + + def is_logfile_handler_configured(self): + log.debug("Calling is_logfile_handler_configured") + return self._logfile_handler_configured + + def setup_logfile_handler( + self, log_path, log_level=None, **kwargs + ): # pylint: disable=unused-argument + """ + Set logfile and loglevel + """ + log.debug("Setting log file handler path to: %s", log_path) + log.debug("Setting log file handler log level to: %s", log_level) + self.log_file = log_path + self.log_level_logfile = log_level + self._real_setup_logfile_handler(log_path, log_level=log_level, **kwargs) + self._logfile_handler_configured = True + + def shutdown_logfile_handler(self): + log.debug("Calling shutdown_logfile_handler") + self._logfile_handler_configured = False + + def get_logging_options_dict(self): + log.debug("Calling get_logging_options_dict") + return self.config + + def set_logging_options_dict(self, opts): + log.debug("Calling set_logging_options_dict") + self._real_set_logging_options_dict(opts) + self.config = self._real_get_logging_options_dict() + log.debug("Logging options dict:\n%s", pprint.pformat(self.config)) + + def setup_log_granular_levels(self, opts): + log.debug("Calling setup_log_granular_levels") + + def setup_logging(self): + log.debug("Mocked setup_logging called") + # Wether daemonizing or not, either on the main process or on a separate process + # The log file is going to be configured. 
+ # The console is the only handler not configured if daemonizing + + # These routines are what happens on salt._logging.setup_logging + opts = self.get_logging_options_dict() + + if ( + opts.get("configure_console_logger", True) + and not self.is_console_handler_configured() + ): + self.setup_console_handler( + log_level=opts["log_level"], + log_format=opts["log_fmt_console"], + date_format=opts["log_datefmt"], + ) + if ( + opts.get("configure_file_logger", True) + and not self.is_logfile_handler_configured() + ): + log_file_level = opts["log_level_logfile"] or opts["log_level"] + if log_file_level != "quiet": + self.setup_logfile_handler( + log_path=opts[opts["log_file_key"]], + log_level=log_file_level, + log_format=opts["log_fmt_logfile"], + date_format=opts["log_datefmt_logfile"], + max_bytes=opts["log_rotate_max_bytes"], + backup_count=opts["log_rotate_backup_count"], + user=opts["user"], + ) + if not self.is_extended_logging_configured(): + self.setup_extended_logging(opts) + self.setup_log_granular_levels(opts["log_granular_levels"]) + + def __enter__(self): + return self + + def __exit__(self, *_): + self._destroy() + + +# <----------- START TESTS -----------> + + +@pytest.fixture +def root_dir(tmp_path): + yield tmp_path / "parsers_tests_root_dir" + + +@pytest.fixture( + params=[ + "master", + "minion", + "proxyminion", + "syndic", + "saltcmd", + "saltcp", + "saltkey", + "saltcall", + "saltrun", + "saltssh", + "saltcloud", + "spm", + "saltapi", + ] +) +def log_cli_parser(request): + return request.param + + +@pytest.fixture +def default_config(log_cli_parser): + if log_cli_parser == "master": + return salt.config.DEFAULT_MASTER_OPTS.copy() + elif log_cli_parser == "minion": + return salt.config.DEFAULT_MINION_OPTS.copy() + elif log_cli_parser == "proxyminion": + return { + **salt.config.DEFAULT_MINION_OPTS.copy(), + **salt.config.DEFAULT_PROXY_MINION_OPTS.copy(), + } + elif log_cli_parser == "syndic": + return salt.config.DEFAULT_MASTER_OPTS.copy() + elif 
log_cli_parser == "saltcmd": + return salt.config.DEFAULT_MASTER_OPTS.copy() + elif log_cli_parser == "saltcp": + return salt.config.DEFAULT_MASTER_OPTS.copy() + elif log_cli_parser == "saltkey": + return salt.config.DEFAULT_MASTER_OPTS.copy() + elif log_cli_parser == "saltcall": + return salt.config.DEFAULT_MINION_OPTS.copy() + elif log_cli_parser == "saltrun": + return salt.config.DEFAULT_MASTER_OPTS.copy() + elif log_cli_parser == "saltssh": + return salt.config.DEFAULT_MASTER_OPTS.copy() + elif log_cli_parser == "saltcloud": + return { + **salt.config.DEFAULT_MASTER_OPTS.copy(), + **salt.config.DEFAULT_CLOUD_OPTS.copy(), + } + elif log_cli_parser == "spm": + return { + **salt.config.DEFAULT_MASTER_OPTS.copy(), + **salt.config.DEFAULT_SPM_OPTS.copy(), + } + elif log_cli_parser == "saltapi": + return { + **salt.config.DEFAULT_MASTER_OPTS.copy(), + **salt.config.DEFAULT_API_OPTS.copy(), + } + + +@pytest.fixture +def parser(log_cli_parser): + param_map = { + "master": salt.utils.parsers.MasterOptionParser, + "minion": salt.utils.parsers.MinionOptionParser, + "proxyminion": salt.utils.parsers.ProxyMinionOptionParser, + "syndic": salt.utils.parsers.SyndicOptionParser, + "saltcmd": salt.utils.parsers.SaltCMDOptionParser, + "saltcp": salt.utils.parsers.SaltCPOptionParser, + "saltkey": salt.utils.parsers.SaltKeyOptionParser, + "saltcall": salt.utils.parsers.SaltCallOptionParser, + "saltrun": salt.utils.parsers.SaltRunOptionParser, + "saltssh": salt.utils.parsers.SaltSSHOptionParser, + "saltcloud": salt.utils.parsers.SaltCloudParser, + "spm": salt.utils.parsers.SPMParser, + "saltapi": salt.utils.parsers.SaltAPIParser, + } + return param_map[log_cli_parser] + + +@pytest.fixture +def config_func(log_cli_parser): + param_map = { + "master": "salt.config.master_config", + "minion": "salt.config.minion_config", + "proxyminion": "salt.config.proxy_config", + "syndic": "salt.config.syndic_config", + "saltcmd": "salt.config.client_config", + "saltcp": 
"salt.config.master_config", + "saltkey": "salt.config.client_config", + "saltcall": "salt.config.minion_config", + "saltrun": "salt.config.master_config", + "saltssh": "salt.config.master_config", + "saltcloud": "salt.config.cloud_config", + "spm": "salt.config.spm_config", + "saltapi": "salt.config.api_config", + } + return param_map[log_cli_parser] + + +@pytest.fixture +def log_file(tmp_path, logfile_config_setting_name): + return str(tmp_path / logfile_config_setting_name) + + +@pytest.fixture +def args(log_cli_parser): + if log_cli_parser in ("saltcmd", "saltssh"): + return ["foo", "bar.baz"] + elif log_cli_parser == "saltcp": + return ["foo", "bar", "baz"] + elif log_cli_parser in ("saltcall", "saltrun"): + return ["foo.bar"] + elif log_cli_parser == "saltcloud": + return ["-p", "foo", "bar"] + elif log_cli_parser == "spm": + return ["foo", "bar"] + return [] + + +@pytest.fixture +def loglevel_config_setting_name(): + return "log_level" + + +@pytest.fixture +def logfile_config_setting_name(log_cli_parser): + if log_cli_parser == "syndic": + return "syndic_log_file" + elif log_cli_parser == "saltkey": + return "key_logfile" + elif log_cli_parser == "saltssh": + return "ssh_log_file" + elif log_cli_parser == "spm": + return "spm_logfile" + elif log_cli_parser == "saltapi": + return "api_logfile" + return "log_file" + + +@pytest.fixture +def logfile_loglevel_config_setting_name(): + return "log_level_logfile" + + +@pytest.fixture +def testing_config(default_config, root_dir, logfile_config_setting_name, log_file): + _testing_config = default_config.copy() + _testing_config["root_dir"] = root_dir + for name in ("pki_dir", "cachedir"): + _testing_config[name] = name + _testing_config[logfile_config_setting_name] = log_file + return _testing_config + + +@pytest.fixture(autouse=True) +def log_impl(): + """ + Mock logger functions + """ + with LogImplMock() as _log_impl: + mocked_functions = {} + for name in dir(_log_impl): + if name.startswith("_"): + continue + 
func = getattr(_log_impl, name) + if not callable(func): + continue + mocked_functions[name] = func + + patcher = patch.multiple(salt._logging, **mocked_functions) + with patcher: + yield _log_impl + + +def test_get_log_level_cli( + testing_config, loglevel_config_setting_name, args, parser, config_func, log_impl +): + """ + Tests that log level match command-line specified value + """ + # Set defaults + default_log_level = testing_config[loglevel_config_setting_name] + + # Set log level in CLI + log_level = "critical" + args = ["--log-level", log_level] + args + + instance = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + instance.parse_args(args) + + console_log_level = getattr(instance.options, loglevel_config_setting_name) + + # Check console log level setting + assert console_log_level == log_level + # Check console logger log level + assert log_impl.log_level_console == log_level + assert log_impl.config[loglevel_config_setting_name] == log_level + assert log_impl.temp_log_level == log_level + # Check log file logger log level + assert log_impl.log_level_logfile == default_log_level + + +def test_get_log_level_config( + testing_config, loglevel_config_setting_name, args, parser, config_func, log_impl +): + """ + Tests that log level match the configured value + """ + # Set log level in config + log_level = "info" + testing_config.update({loglevel_config_setting_name: log_level}) + + instance = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + instance.parse_args(args) + + console_log_level = getattr(instance.options, loglevel_config_setting_name) + + # Check console log level setting + assert console_log_level == log_level + # Check console logger log level + assert log_impl.log_level_console == log_level + assert log_impl.config[loglevel_config_setting_name] == log_level + assert log_impl.temp_log_level == "error" + # Check log file logger log level + assert log_impl.log_level_logfile == log_level + + 
+def test_get_log_level_default( + testing_config, loglevel_config_setting_name, args, parser, config_func, log_impl +): + """ + Tests that log level match the default value + """ + # Set defaults + log_level = default_log_level = testing_config[loglevel_config_setting_name] + + instance = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + instance.parse_args(args) + + console_log_level = getattr(instance.options, loglevel_config_setting_name) + + # Check log level setting + assert console_log_level == log_level + # Check console logger log level + assert log_impl.log_level_console == log_level + # Check extended logger + assert log_impl.config[loglevel_config_setting_name] == log_level + assert log_impl.temp_log_level == "error" + # Check log file logger + assert log_impl.log_level_logfile == default_log_level + # Check help message + assert ( + "Default: '{}'.".format(default_log_level) + in instance.get_option("--log-level").help + ) + + +# log file configuration tests + + +def test_get_log_file_cli( + testing_config, + loglevel_config_setting_name, + args, + parser, + config_func, + log_impl, + log_file, + logfile_config_setting_name, +): + """ + Tests that log file match command-line specified value + """ + # Set defaults + log_level = testing_config[loglevel_config_setting_name] + + # Set log file in CLI + log_file = "{}_cli.log".format(log_file) + args = ["--log-file", log_file] + args + + instance = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + instance.parse_args(args) + + log_file_option = getattr(instance.options, logfile_config_setting_name) + + # Check console logger + assert log_impl.log_level_console == log_level + # Check extended logger + assert log_impl.config[loglevel_config_setting_name] == log_level + assert log_impl.config[logfile_config_setting_name] == log_file + # Check temp logger + assert log_impl.temp_log_level == "error" + # Check log file setting + assert log_file_option == 
log_file + # Check log file logger + assert log_impl.log_file == log_file + + +def test_get_log_file_config( + testing_config, + loglevel_config_setting_name, + args, + parser, + config_func, + log_impl, + logfile_config_setting_name, + log_file, +): + """ + Tests that log file match the configured value + """ + # Set defaults + log_level = testing_config[loglevel_config_setting_name] + + # Set log file in config + log_file = "{}_config.log".format(log_file) + testing_config.update({logfile_config_setting_name: log_file}) + + instance = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + instance.parse_args(args) + + log_file_option = getattr(instance.options, logfile_config_setting_name) + + # Check console logger + assert log_impl.log_level_console == log_level + # Check extended logger + assert log_impl.config[loglevel_config_setting_name] == log_level + assert log_impl.config[logfile_config_setting_name] == log_file + # Check temp logger + assert log_impl.temp_log_level == "error" + # Check log file setting + assert log_file_option == log_file + # Check log file logger + assert log_impl.log_file == log_file + + +def test_get_log_file_default( + testing_config, + loglevel_config_setting_name, + args, + parser, + config_func, + log_impl, + logfile_config_setting_name, + default_config, +): + """ + Tests that log file match the default value + """ + # Set defaults + log_level = testing_config[loglevel_config_setting_name] + log_file = testing_config[logfile_config_setting_name] + default_log_file = default_config[logfile_config_setting_name] + + instance = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + instance.parse_args(args) + + log_file_option = getattr(instance.options, logfile_config_setting_name) + + # Check console logger + assert log_impl.log_level_console == log_level + # Check extended logger + assert log_impl.config[loglevel_config_setting_name] == log_level + assert 
log_impl.config[logfile_config_setting_name] == log_file + # Check temp logger + assert log_impl.temp_log_level == "error" + # Check log file setting + assert log_file_option == log_file + # Check log file logger + assert log_impl.log_file == log_file + # Check help message + assert ( + "Default: '{}'.".format(default_log_file) + in instance.get_option("--log-file").help + ) + + +# log file log level configuration tests + + +def test_get_log_file_level_cli( + testing_config, + loglevel_config_setting_name, + args, + parser, + config_func, + log_impl, + logfile_loglevel_config_setting_name, +): + """ + Tests that file log level match command-line specified value + """ + # Set defaults + default_log_level = testing_config[loglevel_config_setting_name] + + # Set log file level in CLI + log_level_logfile = "error" + args = ["--log-file-level", log_level_logfile] + args + + instance = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + instance.parse_args(args) + + log_level_logfile_option = getattr( + instance.options, logfile_loglevel_config_setting_name + ) + + # Check console logger + assert log_impl.log_level_console == default_log_level + # Check extended logger + assert log_impl.config[loglevel_config_setting_name] == default_log_level + assert log_impl.config[logfile_loglevel_config_setting_name] == log_level_logfile + # Check temp logger + assert log_impl.temp_log_level == "error" + # Check log file level setting + assert log_level_logfile_option == log_level_logfile + # Check log file logger + assert log_impl.log_level_logfile == log_level_logfile + + +def test_get_log_file_level_config( + testing_config, + loglevel_config_setting_name, + args, + parser, + config_func, + log_impl, + logfile_loglevel_config_setting_name, +): + """ + Tests that log file level match the configured value + """ + # Set defaults + log_level = testing_config[loglevel_config_setting_name] + + # Set log file level in config + log_level_logfile = "info" + 
testing_config.update({logfile_loglevel_config_setting_name: log_level_logfile}) + + instance = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + instance.parse_args(args) + + log_level_logfile_option = getattr( + instance.options, logfile_loglevel_config_setting_name + ) + + # Check console logger + assert log_impl.log_level_console == log_level + # Check extended logger + assert log_impl.config[loglevel_config_setting_name] == log_level + assert log_impl.config[logfile_loglevel_config_setting_name] == log_level_logfile + # Check temp logger + assert log_impl.temp_log_level == "error" + # Check log file level setting + assert log_level_logfile_option == log_level_logfile + # Check log file logger + assert log_impl.log_level_logfile == log_level_logfile + + +def test_get_log_file_level_default( + testing_config, + loglevel_config_setting_name, + args, + parser, + config_func, + log_impl, + logfile_loglevel_config_setting_name, +): + """ + Tests that log file level match the default value + """ + # Set defaults + default_log_level = testing_config[loglevel_config_setting_name] + + log_level = default_log_level + log_level_logfile = default_log_level + + instance = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + instance.parse_args(args) + + log_level_logfile_option = getattr( + instance.options, logfile_loglevel_config_setting_name + ) + + # Check console logger + assert log_impl.log_level_console == log_level + # Check extended logger + assert log_impl.config[loglevel_config_setting_name] == log_level + assert log_impl.config[logfile_loglevel_config_setting_name] == log_level_logfile + # Check temp logger + assert log_impl.temp_log_level == "error" + # Check log file level setting + assert log_level_logfile_option == log_level_logfile + # Check log file logger + assert log_impl.log_level_logfile == log_level_logfile + # Check help message + assert ( + "Default: '{}'.".format(default_log_level) + in 
instance.get_option("--log-file-level").help + ) + + +def test_get_console_log_level_with_file_log_level( + testing_config, + loglevel_config_setting_name, + args, + parser, + config_func, + log_impl, + logfile_loglevel_config_setting_name, +): # pylint: disable=invalid-name + """ + Tests that both console log level and log file level setting are working together + """ + log_level = "critical" + log_level_logfile = "debug" + + args = ["--log-file-level", log_level_logfile] + args + + testing_config.update({loglevel_config_setting_name: log_level}) + + instance = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + instance.parse_args(args) + + log_level_logfile_option = getattr( + instance.options, logfile_loglevel_config_setting_name + ) + + # Check console logger + assert log_impl.log_level_console == log_level + # Check extended logger + assert log_impl.config[loglevel_config_setting_name] == log_level + assert log_impl.config[logfile_loglevel_config_setting_name] == log_level_logfile + # Check temp logger + assert log_impl.temp_log_level == "error" + # Check log file level setting + assert log_level_logfile_option == log_level_logfile + # Check log file logger + assert log_impl.log_level_logfile == log_level_logfile + + +def test_log_created( + testing_config, args, parser, config_func, logfile_config_setting_name, log_file +): + """ + Tests that log file is created + """ + testing_config.update({"log_file": str(log_file)}) + log_file_name = str(log_file) + if log_file_name.split(os.sep)[-1] != "log_file": + testing_config.update({log_file_name: str(log_file)}) + + instance = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + instance.parse_args(args) + + assert os.path.exists(str(log_file_name)) + + +def test_callbacks_uniqueness(parser): + """ + Test that the callbacks are only added once, no matter + how many instances of the parser we create + """ + mixin_container_names = ( + "_mixin_setup_funcs", + 
"_mixin_process_funcs", + "_mixin_after_parsed_funcs", + "_mixin_before_exit_funcs", + ) + instance = parser() + nums_1 = {} + for cb_container in mixin_container_names: + obj = getattr(instance, cb_container) + nums_1[cb_container] = len(obj) + + # The next time we instantiate the parser, the counts should be equal + instance = parser() + nums_2 = {} + for cb_container in mixin_container_names: + obj = getattr(instance, cb_container) + nums_2[cb_container] = len(obj) + assert nums_1 == nums_2 + + +def test_verify_log_warning_logged(args, config_func, testing_config, parser, caplog): + args = ["--log-level", "debug"] + args + with caplog.at_level(logging.DEBUG): + instance = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + instance.parse_args(args) + assert ( + "Insecure logging configuration detected! Sensitive data may be logged." + in caplog.messages + ) diff --git a/tests/pytests/unit/utils/parsers/test_saltfile_mixin.py b/tests/pytests/unit/utils/parsers/test_saltfile_mixin.py new file mode 100644 index 00000000000..fa99f26c081 --- /dev/null +++ b/tests/pytests/unit/utils/parsers/test_saltfile_mixin.py @@ -0,0 +1,216 @@ +""" +Tests the SaltfileMixIn. +""" + +import optparse +import shutil + +import pytest + +import salt.exceptions +import salt.utils.parsers +from tests.support.helpers import patched_environ +from tests.support.mock import patch + + +class MockSaltfileParser( + salt.utils.parsers.OptionParser, + salt.utils.parsers.SaltfileMixIn, + metaclass=salt.utils.parsers.OptionParserMeta, +): + def __init__(self, *args, **kwargs): + salt.utils.parsers.OptionParser.__init__(self, *args, **kwargs) + self.config = {} + + def _mixin_setup(self): + self.add_option( + "-l", + "--log-level", + dest="log_level", + default="warning", + help="The log level for salt.", + ) + group = self.output_options_group = optparse.OptionGroup( + self, "Output Options", "Configure your preferred output format." 
+ ) + self.add_option_group(group) + + group.add_option( + "--out", + "--output", + dest="output", + help=( + "Print the output from the '{}' command using the " + "specified outputter.".format( + self.get_prog_name(), + ) + ), + ) + group.add_option( + "--out-file", + "--output-file", + dest="output_file", + default=None, + help="Write the output to the specified file.", + ) + group.add_option( + "--version-arg", + action="version", + help="Option to test no dest", + ) + + +@pytest.fixture +def parser(): + return MockSaltfileParser() + + +@pytest.fixture +def saltfile(tmp_path): + fp = tmp_path / "Saltfile" + fp.touch() + return fp + + +def test_saltfile_in_environment(parser, saltfile): + """ + Test setting the SALT_SALTFILE environment variable + """ + with patched_environ(SALT_SALTFILE=str(saltfile)): + parser.parse_args([]) + assert parser.options.saltfile == str(saltfile) + + +def test_saltfile_option(parser, saltfile): + """ + Test setting the saltfile via the CLI + """ + parser.parse_args(["--saltfile", str(saltfile)]) + assert parser.options.saltfile == str(saltfile) + + +def test_bad_saltfile_option(parser, saltfile, tmp_path): + """ + Test setting a bad saltfile via the CLI + """ + with pytest.raises(SystemExit): + parser.parse_args(["--saltfile", str(tmp_path / "fake_dir")]) + + +def test_saltfile_cwd(parser, saltfile, tmp_path): + """ + Test using a saltfile in the cwd + """ + with patch("os.getcwd", return_value=str(tmp_path)) as cwd_mock: + parser.parse_args([]) + assert parser.options.saltfile == str(saltfile) + cwd_mock.assert_called_once() + + +def test_saltfile_cwd_doesnt_exist(parser, saltfile, tmp_path): + """ + Test using a saltfile in the cwd that doesn't exist + """ + with patch("os.getcwd", return_value=str(tmp_path / "fake_dir")) as cwd_mock: + parser.parse_args([]) + assert parser.options.saltfile is None + + +def test_saltfile_user_home(parser, saltfile, tmp_path): + """ + Test using a saltfile in ~/.salt/ + """ + fake_dir = tmp_path / 
"fake_dir" + fake_dir.mkdir() + with patch("os.getcwd", return_value=str(fake_dir)) as cwd_mock: + with patch("os.path.expanduser", return_value=str(tmp_path)) as eu_mock: + salt_subdir = tmp_path / ".salt" + salt_subdir.mkdir() + dest = str(salt_subdir / "Saltfile") + shutil.copy(str(saltfile), dest) + parser.parse_args([]) + assert parser.options.saltfile == dest + cwd_mock.assert_called_once() + eu_mock.assert_called_with("~") + + +def test_bad_saltfile(parser, saltfile): + """ + Test a saltfile with bad configuration + """ + contents = """ + bad "yaml": + - this is: bad yaml + - bad yaml=data: + - {"bad": yaml, "data": "yaml"} + """ + saltfile.write_text(contents) + # It raises two errors, let's catch them both + with pytest.raises(SystemExit): + with pytest.raises(salt.exceptions.SaltConfigurationError): + parser.parse_args(["--saltfile", str(saltfile)]) + + +def test_saltfile_without_prog_name(parser, saltfile): + """ + Test a saltfile with valid yaml but without the program name in it + """ + contents = "good: yaml" + saltfile.write_text(contents) + # This should just run cleanly + parser.parse_args(["--saltfile", str(saltfile)]) + + +def test_saltfile(parser, saltfile): + """ + Test a valid saltfile + """ + contents = """ + __main__.py: + log_level: debug + output: json + """ + saltfile.write_text(contents) + parser.parse_args(["--saltfile", str(saltfile)]) + print(parser.option_list) + assert parser.options.log_level == "debug" + assert parser.options.output == "json" + + +def test_saltfile_unusual_option(parser, saltfile): + """ + Test a valid saltfile + """ + contents = """ + __main__.py: + go: birds + """ + saltfile.write_text(contents) + parser.parse_args(["--saltfile", str(saltfile)]) + assert parser.options.go == "birds" + + +def test_saltfile_cli_override(parser, saltfile): + """ + Test a valid saltfile + """ + contents = """ + __main__.py: + log_level: debug + output: json + output_file: /fake/file + """ + saltfile.write_text(contents) + 
parser.parse_args( + [ + "--saltfile", + str(saltfile), + "--log-level", + "info", + "--out-file", + "/still/fake/file", + ] + ) + assert parser.options.log_level == "info" + assert parser.options.output == "json" + assert parser.options.output_file == "/still/fake/file" diff --git a/tests/pytests/unit/utils/test_cloud.py b/tests/pytests/unit/utils/test_cloud.py index 2d18d944b33..00c4798812e 100644 --- a/tests/pytests/unit/utils/test_cloud.py +++ b/tests/pytests/unit/utils/test_cloud.py @@ -13,6 +13,13 @@ import tempfile import pytest +try: + from smbprotocol.exceptions import CannotDelete + + HAS_PSEXEC = True +except ImportError: + HAS_PSEXEC = False + import salt.utils.cloud as cloud from salt.exceptions import SaltCloudException from salt.utils.cloud import __ssh_gateway_arguments as ssh_gateway_arguments @@ -208,7 +215,8 @@ def test_deploy_windows_custom_port(): mock.assert_called_once_with("test", "Administrator", None, 1234) -def test_run_psexec_command_cleanup_lingering_paexec(): +@pytest.mark.skipif(not HAS_PSEXEC, reason="Missing SMB Protocol Library") +def test_run_psexec_command_cleanup_lingering_paexec(caplog): pytest.importorskip("pypsexec.client", reason="Requires PyPsExec") mock_psexec = patch("salt.utils.cloud.PsExecClient", autospec=True) mock_scmr = patch("salt.utils.cloud.ScmrService", autospec=True) @@ -232,11 +240,32 @@ def test_run_psexec_command_cleanup_lingering_paexec(): ) mock_client.return_value.cleanup.assert_called_once() + # Testing handling an error when it can't delete the PAexec binary + with mock_scmr, mock_rm_svc, mock_psexec as mock_client: + mock_client.return_value.session = MagicMock(username="Gary") + mock_client.return_value.connection = MagicMock(server_name="Krabbs") + mock_client.return_value.run_executable.return_value = ( + "Sandy", + "MermaidMan", + "BarnicleBoy", + ) + mock_client.return_value.cleanup = MagicMock(side_effect=CannotDelete()) + + cloud.run_psexec_command( + "spongebob", + "squarepants", + "patrick", + 
"squidward", + "plankton", + ) + assert "Exception cleaning up PAexec:" in caplog.text + mock_client.return_value.disconnect.assert_called_once() + @pytest.mark.skip_unless_on_windows(reason="Only applicable for Windows.") def test_deploy_windows_programdata(): """ - Test deploy_windows with a custom port + Test deploy_windows to ProgramData """ mock_true = MagicMock(return_value=True) mock_tuple = MagicMock(return_value=(0, 0, 0)) diff --git a/tests/pytests/unit/utils/test_network.py b/tests/pytests/unit/utils/test_network.py index c5f976f6749..12d545b0154 100644 --- a/tests/pytests/unit/utils/test_network.py +++ b/tests/pytests/unit/utils/test_network.py @@ -1,8 +1,1597 @@ +import logging +import socket +import textwrap + +import pytest + +import salt.exceptions import salt.utils.network +import salt.utils.network as network +from salt._compat import ipaddress +from tests.support.mock import MagicMock, create_autospec, mock_open, patch + +pytestmark = [ + pytest.mark.skip_on_windows, +] + + +log = logging.getLogger(__name__) + +LINUX = """\ +eth0 Link encap:Ethernet HWaddr e0:3f:49:85:6a:af + inet addr:10.10.10.56 Bcast:10.10.10.255 Mask:255.255.252.0 + inet6 addr: fe80::e23f:49ff:fe85:6aaf/64 Scope:Link + UP BROADCAST RUNNING MULTICAST MTU:1500 Metric:1 + RX packets:643363 errors:0 dropped:0 overruns:0 frame:0 + TX packets:196539 errors:0 dropped:0 overruns:0 carrier:0 + collisions:0 txqueuelen:1000 + RX bytes:386388355 (368.4 MiB) TX bytes:25600939 (24.4 MiB) + +lo Link encap:Local Loopback + inet addr:127.0.0.1 Mask:255.0.0.0 + inet6 addr: ::1/128 Scope:Host + UP LOOPBACK RUNNING MTU:65536 Metric:1 + RX packets:548901 errors:0 dropped:0 overruns:0 frame:0 + TX packets:548901 errors:0 dropped:0 overruns:0 carrier:0 + collisions:0 txqueuelen:0 + RX bytes:613479895 (585.0 MiB) TX bytes:613479895 (585.0 MiB) +""" + +FREEBSD = """ +em0: flags=8843 metric 0 mtu 1500 + options=4219b + ether 00:30:48:ff:ff:ff + inet 10.10.10.250 netmask 0xffffffe0 broadcast 
10.10.10.255 + inet 10.10.10.56 netmask 0xffffffc0 broadcast 10.10.10.63 + media: Ethernet autoselect (1000baseT ) + status: active +em1: flags=8c02 metric 0 mtu 1500 + options=4219b + ether 00:30:48:aa:aa:aa + media: Ethernet autoselect + status: no carrier +plip0: flags=8810 metric 0 mtu 1500 +lo0: flags=8049 metric 0 mtu 16384 + options=3 + inet6 fe80::1%lo0 prefixlen 64 scopeid 0x8 + inet6 ::1 prefixlen 128 + inet 127.0.0.1 netmask 0xff000000 + nd6 options=3 +tun0: flags=8051 metric 0 mtu 1500 + options=80000 + inet 10.12.0.1 --> 10.12.0.2 netmask 0xffffffff + Opened by PID 1964 +""" + +SOLARIS = """\ +lo0: flags=2001000849 mtu 8232 index 1 + inet 127.0.0.1 netmask ff000000 +net0: flags=100001100943 mtu 1500 index 2 + inet 10.10.10.38 netmask ffffffe0 broadcast 10.10.10.63 +ilbint0: flags=110001100843 mtu 1500 index 3 + inet 10.6.0.11 netmask ffffff00 broadcast 10.6.0.255 +ilbext0: flags=110001100843 mtu 1500 index 4 + inet 10.10.11.11 netmask ffffffe0 broadcast 10.10.11.31 +ilbext0:1: flags=110001100843 mtu 1500 index 4 + inet 10.10.11.12 netmask ffffffe0 broadcast 10.10.11.31 +vpn0: flags=1000011008d1 mtu 1480 index 5 + inet tunnel src 10.10.11.12 tunnel dst 10.10.5.5 + tunnel hop limit 64 + inet 10.6.0.14 --> 10.6.0.15 netmask ff000000 +lo0: flags=2002000849 mtu 8252 index 1 + inet6 ::1/128 +net0: flags=120002004941 mtu 1500 index 2 + inet6 fe80::221:9bff:fefd:2a22/10 +ilbint0: flags=120002000840 mtu 1500 index 3 + inet6 ::/0 +ilbext0: flags=120002000840 mtu 1500 index 4 + inet6 ::/0 +vpn0: flags=120002200850 mtu 1480 index 5 + inet tunnel src 10.10.11.12 tunnel dst 10.10.5.5 + tunnel hop limit 64 + inet6 ::/0 --> fe80::b2d6:7c10 +""" + +NETBSD = """\ +vioif0: flags=0x8943 mtu 1500 + ec_capabilities=1 + ec_enabled=0 + address: 00:a0:98:e6:83:18 + inet 192.168.1.80/24 broadcast 192.168.1.255 flags 0x0 + inet6 fe80::2a0:98ff:fee6:8318%vioif0/64 flags 0x0 scopeid 0x1 +lo0: flags=0x8049 mtu 33624 + inet 127.0.0.1/8 flags 0x0 + inet6 ::1/128 flags 0x20 + inet6 
fe80::1%lo0/64 flags 0x0 scopeid 0x2 +""" + +FREEBSD_SOCKSTAT = """\ +USER COMMAND PID FD PROTO LOCAL ADDRESS FOREIGN ADDRESS +root python2.7 1294 41 tcp4 127.0.0.1:61115 127.0.0.1:4506 +""" + +FREEBSD_SOCKSTAT_WITH_FAT_PID = """\ +USER COMMAND PID FD PROTO LOCAL ADDRESS FOREIGN ADDRESS +salt-master python2.781106 35 tcp4 127.0.0.1:61115 127.0.0.1:4506 +""" + +OPENBSD_NETSTAT = """\ +Active Internet connections +Proto Recv-Q Send-Q Local Address Foreign Address (state) +tcp 0 0 127.0.0.1.61115 127.0.0.1.4506 ESTABLISHED +""" + +LINUX_NETLINK_SS_OUTPUT = """\ +State Recv-Q Send-Q Local Address:Port Peer Address:Port +TIME-WAIT 0 0 [::1]:8009 [::1]:40368 +LISTEN 0 128 127.0.0.1:5903 0.0.0.0:* +ESTAB 0 0 [::ffff:127.0.0.1]:4506 [::ffff:127.0.0.1]:32315 +ESTAB 0 0 192.168.122.1:4506 192.168.122.177:24545 +ESTAB 0 0 127.0.0.1:56726 127.0.0.1:4505 +ESTAB 0 0 ::ffff:1.2.3.4:5678 ::ffff:1.2.3.4:4505 +""" + +IPV4_SUBNETS = { + True: ("10.10.0.0/24",), + False: ("10.10.0.0", "10.10.0.0/33", "FOO", 9, "0.9.800.1000/24"), +} +IPV6_SUBNETS = { + True: ("::1/128",), + False: ("::1", "::1/129", "FOO", 9, "aj01::feac/64"), +} + + +_ip = ipaddress.ip_address + + +@pytest.fixture(scope="module") +def linux_interfaces_dict(): + return { + "eth0": { + "hwaddr": "e0:3f:49:85:6a:af", + "inet": [ + { + "address": "10.10.10.56", + "broadcast": "10.10.10.255", + "netmask": "255.255.252.0", + } + ], + "inet6": [ + { + "address": "fe80::e23f:49ff:fe85:6aaf", + "prefixlen": "64", + "scope": "link", + } + ], + "up": True, + }, + "lo": { + "inet": [{"address": "127.0.0.1", "netmask": "255.0.0.0"}], + "inet6": [{"address": "::1", "prefixlen": "128", "scope": "host"}], + "up": True, + }, + } + + +@pytest.fixture(scope="module") +def freebsd_interfaces_dict(): + return { + "": {"up": False}, + "em0": { + "hwaddr": "00:30:48:ff:ff:ff", + "inet": [ + { + "address": "10.10.10.250", + "broadcast": "10.10.10.255", + "netmask": "255.255.255.224", + }, + { + "address": "10.10.10.56", + "broadcast": 
"10.10.10.63", + "netmask": "255.255.255.192", + }, + ], + "up": True, + }, + "em1": {"hwaddr": "00:30:48:aa:aa:aa", "up": False}, + "lo0": { + "inet": [{"address": "127.0.0.1", "netmask": "255.0.0.0"}], + "inet6": [ + {"address": "fe80::1", "prefixlen": "64", "scope": "0x8"}, + {"address": "::1", "prefixlen": "128", "scope": None}, + ], + "up": True, + }, + "plip0": {"up": False}, + "tun0": { + "inet": [{"address": "10.12.0.1", "netmask": "255.255.255.255"}], + "up": True, + }, + } + + +def test_sanitize_host_ip(): + ret = network.sanitize_host("10.1./2.$3") + assert ret == "10.1.2.3" + + +def test_sanitize_host_name(): + """ + Should not remove the underscore + """ + ret = network.sanitize_host("foo_bar") + assert ret == "foo_bar" + + +def test_host_to_ips(): + """ + NOTE: When this test fails it's usually because the IP address has + changed. In these cases, we just need to update the IP address in the + assertion. + """ + + def getaddrinfo_side_effect(host, *args): + if host == "github.com": + return [ + (2, 1, 6, "", ("192.30.255.112", 0)), + (2, 1, 6, "", ("192.30.255.113", 0)), + ] + if host == "ipv6host.foo": + return [ + (socket.AF_INET6, 1, 6, "", ("2001:a71::1", 0, 0, 0)), + ] + raise socket.gaierror(-2, "Name or service not known") + + getaddrinfo_mock = MagicMock(side_effect=getaddrinfo_side_effect) + with patch.object(socket, "getaddrinfo", getaddrinfo_mock): + # Test host that can be resolved + ret = network.host_to_ips("github.com") + assert ret == ["192.30.255.112", "192.30.255.113"] + + # Test ipv6 + ret = network.host_to_ips("ipv6host.foo") + assert ret == ["2001:a71::1"] + # Test host that can't be resolved + ret = network.host_to_ips("someothersite.com") + assert ret is None + + +def test_generate_minion_id(): + assert network.generate_minion_id() + + +def test__generate_minion_id_with_unicode_in_etc_hosts(): + """ + Test that unicode in /etc/hosts doesn't raise an error when + _generate_minion_id() helper is called to gather the hosts. 
+ """ + content = textwrap.dedent( + """\ + # 以下为主机名解析 + ## ccc + 127.0.0.1 localhost thisismyhostname # 本机 + """ + ) + fopen_mock = mock_open(read_data={"/etc/hosts": content}) + with patch("salt.utils.files.fopen", fopen_mock): + assert "thisismyhostname" in network._generate_minion_id() + + +@pytest.mark.parametrize( + "addr,expected", + ( + ("10.10.0.3", True), + ("0.9.800.1000", False), + # Check 16-char-long unicode string + # https://github.com/saltstack/salt/issues/51258 + ("sixteen-char-str", False), + ), +) +def test_is_ip(addr, expected): + assert network.is_ip(addr) is expected + + +@pytest.mark.parametrize( + "addr,expected", + ( + ("10.10.0.3", True), + ("10.100.1", False), + ("2001:db8:0:1:1:1:1:1", False), + # Check 16-char-long unicode string + # https://github.com/saltstack/salt/issues/51258 + ("sixteen-char-str", False), + ), +) +def test_is_ipv4(addr, expected): + assert network.is_ipv4(addr) is expected + + +@pytest.mark.parametrize( + "addr,expected", + ( + ("2001:db8:0:1:1:1:1:1", True), + ("0:0:0:0:0:0:0:1", True), + ("::1", True), + ("::", True), + ("2001:0db8:85a3:0000:0000:8a2e:0370:7334", True), + ("2001:0db8:85a3::8a2e:0370:7334", True), + ("2001:0db8:0370:7334", False), + ("2001:0db8:::0370:7334", False), + ("10.0.1.2", False), + ("2001.0db8.85a3.0000.0000.8a2e.0370.7334", False), + # Check 16-char-long unicode string + # https://github.com/saltstack/salt/issues/51258 + ("sixteen-char-str", False), + ), +) +def test_is_ipv6(addr, expected): + assert network.is_ipv6(addr) is expected + + +@pytest.mark.parametrize( + "addr,expected", + ( + ("2001:db8:0:1:1:1:1:1", "2001:db8:0:1:1:1:1:1"), + ("0:0:0:0:0:0:0:1", "::1"), + ("::1", "::1"), + ("::", "::"), + ("2001:0db8:85a3:0000:0000:8a2e:0370:7334", "2001:db8:85a3::8a2e:370:7334"), + ("2001:0db8:85a3::8a2e:0370:7334", "2001:db8:85a3::8a2e:370:7334"), + ("2001:67c:2e8::/48", "2001:67c:2e8::/48"), + ), +) +def test_ipv6(addr, expected): + assert network.ipv6(addr) == expected + + 
+@pytest.mark.parametrize( + "addr,expected", + ( + ("127.0.1.1", True), + ("::1", True), + ("10.0.1.2", False), + ("2001:db8:0:1:1:1:1:1", False), + ), +) +def test_is_loopback(addr, expected): + assert network.is_loopback(addr) is expected + + +@pytest.mark.parametrize( + "addr,expected", + ( + ("10.10.0.3", (_ip("10.10.0.3").compressed, None)), + ("10.10.0.3:1234", (_ip("10.10.0.3").compressed, 1234)), + ( + "2001:0db8:85a3::8a2e:0370:7334", + ( + _ip("2001:0db8:85a3::8a2e:0370:7334").compressed, + None, + ), + ), + ( + "[2001:0db8:85a3::8a2e:0370:7334]:1234", + ( + _ip("2001:0db8:85a3::8a2e:0370:7334").compressed, + 1234, + ), + ), + ("2001:0db8:85a3::7334", (_ip("2001:0db8:85a3::7334").compressed, None)), + ( + "[2001:0db8:85a3::7334]:1234", + ( + _ip("2001:0db8:85a3::7334").compressed, + 1234, + ), + ), + ), +) +def test_parse_host_port_good(addr, expected): + assert network.parse_host_port(addr) == expected + + +@pytest.mark.parametrize( + "addr", + ( + "10.10.0.3/24", + "10.10.0.3::1234", + "2001:0db8:0370:7334", + "2001:0db8:0370::7334]:1234", + "2001:0db8:0370:0:a:b:c:d:1234", + "host name", + "host name:1234", + "10.10.0.3:abcd", + ), +) +def test_parse_host_port_bad_raises_value_error(addr): + with pytest.raises(ValueError): + network.parse_host_port(addr) + + +@pytest.mark.parametrize( + "host", + ( + ( + { + "host": "10.10.0.3", + "port": "", + "mocked": [(2, 1, 6, "", ("10.10.0.3", 0))], + "ret": "10.10.0.3", + } + ), + ( + { + "host": "10.10.0.3", + "port": "1234", + "mocked": [(2, 1, 6, "", ("10.10.0.3", 0))], + "ret": "10.10.0.3", + } + ), + ( + { + "host": "2001:0db8:85a3::8a2e:0370:7334", + "port": "", + "mocked": [(10, 1, 6, "", ("2001:db8:85a3::8a2e:370:7334", 0, 0, 0))], + "ret": "[2001:db8:85a3::8a2e:370:7334]", + } + ), + ( + { + "host": "2001:0db8:85a3::8a2e:370:7334", + "port": "1234", + "mocked": [(10, 1, 6, "", ("2001:db8:85a3::8a2e:370:7334", 0, 0, 0))], + "ret": "[2001:db8:85a3::8a2e:370:7334]", + } + ), + ( + { + "host": 
"salt-master", + "port": "1234", + "mocked": [(2, 1, 6, "", ("127.0.0.1", 0))], + "ret": "127.0.0.1", + } + ), + ), +) +def test_dns_check(host): + with patch.object( + socket, + "getaddrinfo", + create_autospec(socket.getaddrinfo, return_value=host["mocked"]), + ): + with patch("socket.socket", create_autospec(socket.socket)): + ret = network.dns_check(host["host"], host["port"]) + assert ret == host["ret"] + + +def test_dns_check_ipv6_filter(): + # raise exception to skip everything after the getaddrinfo call + with patch.object( + socket, + "getaddrinfo", + create_autospec(socket.getaddrinfo, side_effect=Exception), + ) as getaddrinfo: + for ipv6, param in [ + (None, socket.AF_UNSPEC), + (True, socket.AF_INET6), + (False, socket.AF_INET), + ]: + with pytest.raises(Exception): + network.dns_check("foo", "1", ipv6=ipv6) + getaddrinfo.assert_called_with("foo", "1", param, socket.SOCK_STREAM) + + +def test_dns_check_errors(): + with patch.object( + socket, "getaddrinfo", create_autospec(socket.getaddrinfo, return_value=[]) + ): + with pytest.raises( + salt.exceptions.SaltSystemExit, + match="DNS lookup or connection check of 'foo' failed.", + ) as exc_info: + network.dns_check("foo", "1") + + with patch.object( + socket, + "getaddrinfo", + create_autospec(socket.getaddrinfo, side_effect=TypeError), + ): + with pytest.raises( + salt.exceptions.SaltSystemExit, match="Invalid or unresolveable address" + ) as exc_info2: + network.dns_check("foo", "1") + + +def test_test_addrs(): + # subset of real data from getaddrinfo against saltstack.com + addrinfo = [ + (30, 2, 17, "", ("2600:9000:21eb:a800:8:1031:abc0:93a1", 0, 0, 0)), + (30, 1, 6, "", ("2600:9000:21eb:a800:8:1031:abc0:93a1", 0, 0, 0)), + (30, 2, 17, "", ("2600:9000:21eb:b400:8:1031:abc0:93a1", 0, 0, 0)), + (30, 1, 6, "", ("2600:9000:21eb:b400:8:1031:abc0:93a1", 0, 0, 0)), + (2, 1, 6, "", ("13.35.99.52", 0)), + (2, 2, 17, "", ("13.35.99.85", 0)), + (2, 1, 6, "", ("13.35.99.85", 0)), + (2, 2, 17, "", 
("13.35.99.122", 0)),
+    ]
+    with patch("socket.socket", create_autospec(socket.socket)) as s:
+        # we connect to the first address
+        addrs = network._test_addrs(addrinfo, 80)
+        assert len(addrs) == 1
+        assert addrs[0] == addrinfo[0][4][0]
+
+        # the first lookup fails, succeeds on next check
+        s.side_effect = [socket.error, MagicMock()]
+        addrs = network._test_addrs(addrinfo, 80)
+        assert len(addrs) == 1
+        assert addrs[0] == addrinfo[2][4][0]
+
+        # every connect attempt fails, so the resolved addresses are returned
+        s.side_effect = socket.error
+        addrs = network._test_addrs(addrinfo, 80)
+        assert not len(addrs) == 0
+
+        # nothing can connect, but we've eliminated duplicates
+        s.side_effect = socket.error
+        addrs = network._test_addrs(addrinfo, 80)
+        assert len(addrs) == 5
+
+
+def test_is_subnet():
+    for subnet_data in (IPV4_SUBNETS, IPV6_SUBNETS):
+        for item in subnet_data[True]:
+            log.debug("Testing that %s is a valid subnet", item)
+            assert network.is_subnet(item)
+        for item in subnet_data[False]:
+            log.debug("Testing that %s is not a valid subnet", item)
+            assert not network.is_subnet(item)
+
+
+def test_is_ipv4_subnet():
+    for item in IPV4_SUBNETS[True]:
+        log.debug("Testing that %s is a valid subnet", item)
+        assert network.is_ipv4_subnet(item)
+    for item in IPV4_SUBNETS[False]:
+        log.debug("Testing that %s is not a valid subnet", item)
+        assert not network.is_ipv4_subnet(item)
+
+
+def test_is_ipv6_subnet():
+    for item in IPV6_SUBNETS[True]:
+        log.debug("Testing that %s is a valid subnet", item)
+        assert network.is_ipv6_subnet(item) is True
+    for item in IPV6_SUBNETS[False]:
+        log.debug("Testing that %s is not a valid subnet", item)
+        assert network.is_ipv6_subnet(item) is False
+
+
+@pytest.mark.parametrize(
+    "addr,expected",
+    (
+        (24, "255.255.255.0"),
+        (21, "255.255.248.0"),
+        (17, "255.255.128.0"),
+        (9, "255.128.0.0"),
+        (36, ""),
+        ("lol", ""),
+    ),
+)
+def test_cidr_to_ipv4_netmask(addr, expected):
+    assert network.cidr_to_ipv4_netmask(addr) == expected
+
+
+def test_number_of_set_bits_to_ipv4_netmask(): + set_bits_to_netmask = network._number_of_set_bits_to_ipv4_netmask(0xFFFFFF00) + assert set_bits_to_netmask == "255.255.255.0" + set_bits_to_netmask = network._number_of_set_bits_to_ipv4_netmask(0xFFFF6400) + assert set_bits_to_netmask == "255.255.100.0" + + +@pytest.mark.parametrize( + "hex_num,inversion,expected", + ( + ("0x4A7D2B63", False, "74.125.43.99"), + ("0x4A7D2B63", True, "99.43.125.74"), + ("00000000000000000000FFFF7F000001", False, "127.0.0.1"), + ("0000000000000000FFFF00000100007F", True, "127.0.0.1"), + ("20010DB8000000000000000000000000", False, "2001:db8::"), + ("B80D0120000000000000000000000000", True, "2001:db8::"), + ), +) +def test_hex2ip(hex_num, inversion, expected): + assert network.hex2ip(hex_num, inversion) == expected + + +def test_interfaces_ifconfig_linux(linux_interfaces_dict): + interfaces = network._interfaces_ifconfig(LINUX) + assert interfaces == linux_interfaces_dict + + +def test_interfaces_ifconfig_freebsd(freebsd_interfaces_dict): + interfaces = network._interfaces_ifconfig(FREEBSD) + assert interfaces == freebsd_interfaces_dict + + +def test_interfaces_ifconfig_solaris(): + with patch("salt.utils.platform.is_sunos", return_value=True): + expected_interfaces = { + "ilbint0": { + "inet6": [], + "inet": [ + { + "broadcast": "10.6.0.255", + "netmask": "255.255.255.0", + "address": "10.6.0.11", + } + ], + "up": True, + }, + "lo0": { + "inet6": [{"prefixlen": "128", "address": "::1"}], + "inet": [{"netmask": "255.0.0.0", "address": "127.0.0.1"}], + "up": True, + }, + "ilbext0": { + "inet6": [], + "inet": [ + { + "broadcast": "10.10.11.31", + "netmask": "255.255.255.224", + "address": "10.10.11.11", + }, + { + "broadcast": "10.10.11.31", + "netmask": "255.255.255.224", + "address": "10.10.11.12", + }, + ], + "up": True, + }, + "vpn0": { + "inet6": [], + "inet": [{"netmask": "255.0.0.0", "address": "10.6.0.14"}], + "up": True, + }, + "net0": { + "inet6": [{"prefixlen": "10", "address": 
"fe80::221:9bff:fefd:2a22"}], + "inet": [ + { + "broadcast": "10.10.10.63", + "netmask": "255.255.255.224", + "address": "10.10.10.38", + } + ], + "up": True, + }, + } + interfaces = network._interfaces_ifconfig(SOLARIS) + assert interfaces == expected_interfaces + + +def test_interfaces_ifconfig_netbsd(): + expected_interfaces = { + "lo0": { + "inet": [{"address": "127.0.0.1", "netmask": "255.0.0.0"}], + "inet6": [{"address": "fe80::1", "prefixlen": "64", "scope": "lo0"}], + "up": True, + }, + "vioif0": { + "hwaddr": "00:a0:98:e6:83:18", + "inet": [ + { + "address": "192.168.1.80", + "broadcast": "192.168.1.255", + "netmask": "255.255.255.0", + } + ], + "inet6": [ + { + "address": "fe80::2a0:98ff:fee6:8318", + "prefixlen": "64", + "scope": "vioif0", + } + ], + "up": True, + }, + } + interfaces = network._netbsd_interfaces_ifconfig(NETBSD) + assert interfaces == expected_interfaces + + +def test_freebsd_remotes_on(): + with patch("salt.utils.platform.is_sunos", return_value=False): + with patch("salt.utils.platform.is_freebsd", return_value=True): + with patch("subprocess.check_output", return_value=FREEBSD_SOCKSTAT): + remotes = network._freebsd_remotes_on("4506", "remote") + assert remotes == {"127.0.0.1"} + + +def test_freebsd_remotes_on_with_fat_pid(): + with patch("salt.utils.platform.is_sunos", return_value=False): + with patch("salt.utils.platform.is_freebsd", return_value=True): + with patch( + "subprocess.check_output", + return_value=FREEBSD_SOCKSTAT_WITH_FAT_PID, + ): + remotes = network._freebsd_remotes_on("4506", "remote") + assert remotes == {"127.0.0.1"} + + +def test_netlink_tool_remote_on_a(): + with patch("salt.utils.platform.is_sunos", return_value=False): + with patch("salt.utils.platform.is_linux", return_value=True): + with patch("subprocess.check_output", return_value=LINUX_NETLINK_SS_OUTPUT): + remotes = network._netlink_tool_remote_on("4506", "local_port") + assert remotes == {"192.168.122.177", "::ffff:127.0.0.1"} + + +def 
test_netlink_tool_remote_on_b(): + with patch("subprocess.check_output", return_value=LINUX_NETLINK_SS_OUTPUT): + remotes = network._netlink_tool_remote_on("4505", "remote_port") + assert remotes == {"127.0.0.1", "::ffff:1.2.3.4"} + + +def test_openbsd_remotes_on(): + with patch("subprocess.check_output", return_value=OPENBSD_NETSTAT): + remotes = network._openbsd_remotes_on("4506", "remote") + assert remotes == {"127.0.0.1"} + + +def test_openbsd_remotes_on_issue_61966(): + """ + Test that the command output is correctly converted to string before + treating it as such + """ + with patch("subprocess.check_output", return_value=OPENBSD_NETSTAT.encode()): + remotes = network._openbsd_remotes_on("4506", "remote") + assert remotes == {"127.0.0.1"} + + +def test_generate_minion_id_distinct(): + """ + Test if minion IDs are distinct in the pool. + """ + with patch("platform.node", MagicMock(return_value="nodename")), patch( + "socket.gethostname", MagicMock(return_value="hostname") + ), patch( + "socket.getfqdn", MagicMock(return_value="hostname.domainname.blank") + ), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "attrname", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["1.2.3.4", "5.6.7.8"]), + ): + assert network._generate_minion_id() == [ + "hostname.domainname.blank", + "nodename", + "hostname", + "1.2.3.4", + "5.6.7.8", + ] + + +def test_generate_minion_id_127_name(): + """ + Test if minion IDs can be named 127.foo + """ + with patch("platform.node", MagicMock(return_value="127")), patch( + "socket.gethostname", MagicMock(return_value="127") + ), patch("socket.getfqdn", MagicMock(return_value="127.domainname.blank")), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "attrname", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["1.2.3.4", 
"5.6.7.8"]), + ): + assert network._generate_minion_id() == [ + "127.domainname.blank", + "127", + "1.2.3.4", + "5.6.7.8", + ] + + +def test_generate_minion_id_127_name_startswith(): + """ + Test if minion IDs can be named starting from "127" + """ + expected = [ + "127890.domainname.blank", + "127890", + "1.2.3.4", + "5.6.7.8", + ] + with patch("platform.node", MagicMock(return_value="127890")), patch( + "socket.gethostname", MagicMock(return_value="127890") + ), patch( + "socket.getfqdn", MagicMock(return_value="127890.domainname.blank") + ), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "attrname", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["1.2.3.4", "5.6.7.8"]), + ): + assert network._generate_minion_id() == expected + + +def test_generate_minion_id_duplicate(): + """ + Test if IP addresses in the minion IDs are distinct in the pool + """ + expected = ["hostname", "1.2.3.4"] + with patch("platform.node", MagicMock(return_value="hostname")), patch( + "socket.gethostname", MagicMock(return_value="hostname") + ), patch("socket.getfqdn", MagicMock(return_value="hostname")), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["1.2.3.4", "1.2.3.4", "1.2.3.4"]), + ): + assert network._generate_minion_id() == expected + + +def test_generate_minion_id_platform_used(): + """ + Test if platform.node is used for the first occurrence. + The platform.node is most common hostname resolver before anything else. 
+ """ + with patch( + "platform.node", MagicMock(return_value="very.long.and.complex.domain.name") + ), patch("socket.gethostname", MagicMock(return_value="hostname")), patch( + "socket.getfqdn", MagicMock(return_value="") + ), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["1.2.3.4", "1.2.3.4", "1.2.3.4"]), + ): + assert network.generate_minion_id() == "very.long.and.complex.domain.name" + + +def test_generate_minion_id_platform_localhost_filtered(): + """ + Test if localhost is filtered from the first occurrence. + """ + with patch("platform.node", MagicMock(return_value="localhost")), patch( + "socket.gethostname", MagicMock(return_value="pick.me") + ), patch( + "socket.getfqdn", MagicMock(return_value="hostname.domainname.blank") + ), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["1.2.3.4", "1.2.3.4", "1.2.3.4"]), + ): + assert network.generate_minion_id() == "hostname.domainname.blank" + + +def test_generate_minion_id_platform_localhost_filtered_all(): + """ + Test if any of the localhost is filtered from everywhere. 
+    """
+    with patch("platform.node", MagicMock(return_value="localhost")), patch(
+        "socket.gethostname", MagicMock(return_value="ip6-loopback")
+    ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch(
+        "socket.getaddrinfo",
+        MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]),
+    ), patch(
+        "salt.utils.files.fopen", mock_open()
+    ), patch(
+        "salt.utils.network.ip_addrs",
+        MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1", "1.2.3.4"]),
+    ):
+        assert network.generate_minion_id() == "1.2.3.4"
+
+
+def test_generate_minion_id_platform_localhost_only():
+    """
+    Test if there is no other choice but localhost.
+    """
+    with patch("platform.node", MagicMock(return_value="localhost")), patch(
+        "socket.gethostname", MagicMock(return_value="ip6-loopback")
+    ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch(
+        "socket.getaddrinfo",
+        MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]),
+    ), patch(
+        "salt.utils.files.fopen", mock_open()
+    ), patch(
+        "salt.utils.network.ip_addrs",
+        MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1"]),
+    ):
+        assert network.generate_minion_id() == "localhost"
+
+
+def test_generate_minion_id_platform_fqdn():
+    """
+    Test if fqdn is picked up.
+    """
+    with patch("platform.node", MagicMock(return_value="localhost")), patch(
+        "socket.gethostname", MagicMock(return_value="ip6-loopback")
+    ), patch("socket.getfqdn", MagicMock(return_value="pick.me")), patch(
+        "socket.getaddrinfo",
+        MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]),
+    ), patch(
+        "salt.utils.files.fopen", mock_open()
+    ), patch(
+        "salt.utils.network.ip_addrs",
+        MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1"]),
+    ):
+        assert network.generate_minion_id() == "pick.me"
+
+
+def test_generate_minion_id_platform_localhost_addrinfo():
+    """
+    Test if addrinfo is picked up.
+ """ + with patch("platform.node", MagicMock(return_value="localhost")), patch( + "socket.gethostname", MagicMock(return_value="ip6-loopback") + ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "pick.me", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1"]), + ): + assert network.generate_minion_id() == "pick.me" + + +def test_generate_minion_id_platform_ip_addr_only(): + """ + Test if IP address is the only what is used as a Minion ID in case no DNS name. + """ + with patch("platform.node", MagicMock(return_value="localhost")), patch( + "socket.gethostname", MagicMock(return_value="ip6-loopback") + ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1", "1.2.3.4"]), + ): + assert network.generate_minion_id() == "1.2.3.4" + + +def test_gen_mac(): + expected_mac = "00:16:3E:01:01:01" + with patch("random.randint", return_value=1) as random_mock: + assert random_mock.return_value == 1 + ret = network.gen_mac("00:16:3E") + assert ret == expected_mac + + +@pytest.mark.parametrize( + "mac_addr", + ( + ("31337"), + ("0001020304056"), + ("00:01:02:03:04:056"), + ("a0:b0:c0:d0:e0:fg"), + ), +) +def test_mac_str_to_bytes_exceptions(mac_addr): + with pytest.raises(ValueError): + network.mac_str_to_bytes(mac_addr) + + +def test_mac_str_to_bytes(): + assert network.mac_str_to_bytes("100806040200") == b"\x10\x08\x06\x04\x02\x00" + assert network.mac_str_to_bytes("f8e7d6c5b4a3") == b"\xf8\xe7\xd6\xc5\xb4\xa3" + + +@pytest.mark.slow_test +def test_generate_minion_id_with_long_hostname(): 
+ """ + Validate the fix for: + + https://github.com/saltstack/salt/issues/51160 + """ + long_name = "localhost-abcdefghijklmnopqrstuvwxyz-abcdefghijklmnopqrstuvwxyz" + with patch("socket.gethostname", MagicMock(return_value=long_name)): + # An exception is raised if unicode is passed to socket.getfqdn + minion_id = network.generate_minion_id() + assert minion_id != "" + + +def test_filter_by_networks_with_no_filter(): + ips = ["10.0.123.200", "10.10.10.10"] + with pytest.raises(TypeError): + network.filter_by_networks(ips) # pylint: disable=no-value-for-parameter + + +def test_filter_by_networks_empty_filter(): + ips = ["10.0.123.200", "10.10.10.10"] + assert network.filter_by_networks(ips, []) == [] + + +def test_filter_by_networks_ips_list(): + ips = [ + "10.0.123.200", + "10.10.10.10", + "193.124.233.5", + "fe80::d210:cf3f:64e7:5423", + ] + expected = [ + "10.0.123.200", + "10.10.10.10", + "fe80::d210:cf3f:64e7:5423", + ] + networks = ["10.0.0.0/8", "fe80::/64"] + assert network.filter_by_networks(ips, networks) == expected + + +def test_filter_by_networks_interfaces_dict(): + interfaces = { + "wlan0": ["192.168.1.100", "217.5.140.67", "2001:db8::ff00:42:8329"], + "eth0": [ + "2001:0DB8:0:CD30:123:4567:89AB:CDEF", + "192.168.1.101", + "10.0.123.201", + ], + } + expected = { + "wlan0": ["192.168.1.100", "2001:db8::ff00:42:8329"], + "eth0": ["2001:0DB8:0:CD30:123:4567:89AB:CDEF", "192.168.1.101"], + } + ret = network.filter_by_networks(interfaces, ["192.168.1.0/24", "2001:db8::/48"]) + assert ret == expected + + +def test_filter_by_networks_catch_all(): + ips = [ + "10.0.123.200", + "10.10.10.10", + "193.124.233.5", + "fe80::d210:cf3f:64e7:5423", + ] + assert network.filter_by_networks(ips, ["0.0.0.0/0", "::/0"]) == ips + + +def test_ip_networks(): + # We don't need to test with each platform's ifconfig/iproute2 output, + # since this test isn't testing getting the interfaces. We already have + # tests for that. 
+ interface_data = network._interfaces_ifconfig(LINUX) + + # Without loopback + ret = network.ip_networks(interface_data=interface_data) + assert ret == ["10.10.8.0/22"] + # Without loopback, specific interface + ret = network.ip_networks(interface="eth0", interface_data=interface_data) + assert ret == ["10.10.8.0/22"] + # Without loopback, multiple specific interfaces + ret = network.ip_networks(interface="eth0,lo", interface_data=interface_data) + assert ret == ["10.10.8.0/22"] + # Without loopback, specific interface (not present) + ret = network.ip_networks(interface="eth1", interface_data=interface_data) + assert ret == [] + # With loopback + ret = network.ip_networks(include_loopback=True, interface_data=interface_data) + assert ret == ["10.10.8.0/22", "127.0.0.0/8"] + # With loopback, specific interface + ret = network.ip_networks( + interface="eth0", include_loopback=True, interface_data=interface_data + ) + assert ret == ["10.10.8.0/22"] + # With loopback, multiple specific interfaces + ret = network.ip_networks( + interface="eth0,lo", include_loopback=True, interface_data=interface_data + ) + assert ret == ["10.10.8.0/22", "127.0.0.0/8"] + # With loopback, specific interface (not present) + ret = network.ip_networks( + interface="eth1", include_loopback=True, interface_data=interface_data + ) + assert ret == [] + + # Verbose, without loopback + ret = network.ip_networks(verbose=True, interface_data=interface_data) + expected_ret1 = { + "10.10.8.0/22": { + "prefixlen": 22, + "netmask": "255.255.252.0", + "num_addresses": 1024, + "address": "10.10.8.0", + }, + } + assert ret == expected_ret1 + + # Verbose, without loopback, specific interface + ret = network.ip_networks( + interface="eth0", verbose=True, interface_data=interface_data + ) + expected_ret2 = { + "10.10.8.0/22": { + "prefixlen": 22, + "netmask": "255.255.252.0", + "num_addresses": 1024, + "address": "10.10.8.0", + }, + } + assert ret == expected_ret2 + + # Verbose, without loopback, multiple 
specific interfaces + ret = network.ip_networks( + interface="eth0,lo", verbose=True, interface_data=interface_data + ) + expected_ret3 = { + "10.10.8.0/22": { + "prefixlen": 22, + "netmask": "255.255.252.0", + "num_addresses": 1024, + "address": "10.10.8.0", + }, + } + assert ret == expected_ret3 + + # Verbose, without loopback, specific interface (not present) + ret = network.ip_networks( + interface="eth1", verbose=True, interface_data=interface_data + ) + assert ret == {} + # Verbose, with loopback + ret = network.ip_networks( + include_loopback=True, verbose=True, interface_data=interface_data + ) + expected_ret4 = { + "10.10.8.0/22": { + "prefixlen": 22, + "netmask": "255.255.252.0", + "num_addresses": 1024, + "address": "10.10.8.0", + }, + "127.0.0.0/8": { + "prefixlen": 8, + "netmask": "255.0.0.0", + "num_addresses": 16777216, + "address": "127.0.0.0", + }, + } + assert ret == expected_ret4 + + # Verbose, with loopback, specific interface + ret = network.ip_networks( + interface="eth0", + include_loopback=True, + verbose=True, + interface_data=interface_data, + ) + expected_ret5 = { + "10.10.8.0/22": { + "prefixlen": 22, + "netmask": "255.255.252.0", + "num_addresses": 1024, + "address": "10.10.8.0", + }, + } + assert ret == expected_ret5 + + # Verbose, with loopback, multiple specific interfaces + ret = network.ip_networks( + interface="eth0,lo", + include_loopback=True, + verbose=True, + interface_data=interface_data, + ) + expected_ret6 = { + "10.10.8.0/22": { + "prefixlen": 22, + "netmask": "255.255.252.0", + "num_addresses": 1024, + "address": "10.10.8.0", + }, + "127.0.0.0/8": { + "prefixlen": 8, + "netmask": "255.0.0.0", + "num_addresses": 16777216, + "address": "127.0.0.0", + }, + } + assert ret == expected_ret6 + + # Verbose, with loopback, specific interface (not present) + ret = network.ip_networks( + interface="eth1", + include_loopback=True, + verbose=True, + interface_data=interface_data, + ) + assert ret == {} + + +def test_ip_networks6(): + 
# We don't need to test with each platform's ifconfig/iproute2 output, + # since this test isn't testing getting the interfaces. We already have + # tests for that. + interface_data = network._interfaces_ifconfig(LINUX) + + # Without loopback + ret = network.ip_networks6(interface_data=interface_data) + assert ret == ["fe80::/64"] + # Without loopback, specific interface + ret = network.ip_networks6(interface="eth0", interface_data=interface_data) + assert ret == ["fe80::/64"] + # Without loopback, multiple specific interfaces + ret = network.ip_networks6(interface="eth0,lo", interface_data=interface_data) + assert ret == ["fe80::/64"] + # Without loopback, specific interface (not present) + ret = network.ip_networks6(interface="eth1", interface_data=interface_data) + assert ret == [] + # With loopback + ret = network.ip_networks6(include_loopback=True, interface_data=interface_data) + assert ret == ["::1/128", "fe80::/64"] + # With loopback, specific interface + ret = network.ip_networks6( + interface="eth0", include_loopback=True, interface_data=interface_data + ) + assert ret == ["fe80::/64"] + # With loopback, multiple specific interfaces + ret = network.ip_networks6( + interface="eth0,lo", include_loopback=True, interface_data=interface_data + ) + assert ret == ["::1/128", "fe80::/64"] + # With loopback, specific interface (not present) + ret = network.ip_networks6( + interface="eth1", include_loopback=True, interface_data=interface_data + ) + assert ret == [] + + # Verbose, without loopback + ret = network.ip_networks6(verbose=True, interface_data=interface_data) + expected_ret1 = { + "fe80::/64": { + "prefixlen": 64, + "netmask": "ffff:ffff:ffff:ffff::", + "num_addresses": 18446744073709551616, + "address": "fe80::", + }, + } + assert ret == expected_ret1 + + # Verbose, without loopback, specific interface + ret = network.ip_networks6( + interface="eth0", verbose=True, interface_data=interface_data + ) + expected_ret2 = { + "fe80::/64": { + "prefixlen": 64, 
+ "netmask": "ffff:ffff:ffff:ffff::", + "num_addresses": 18446744073709551616, + "address": "fe80::", + }, + } + assert ret == expected_ret2 + + # Verbose, without loopback, multiple specific interfaces + ret = network.ip_networks6( + interface="eth0,lo", verbose=True, interface_data=interface_data + ) + expected_ret3 = { + "fe80::/64": { + "prefixlen": 64, + "netmask": "ffff:ffff:ffff:ffff::", + "num_addresses": 18446744073709551616, + "address": "fe80::", + }, + } + assert ret == expected_ret3 + + # Verbose, without loopback, specific interface (not present) + ret = network.ip_networks6( + interface="eth1", verbose=True, interface_data=interface_data + ) + assert ret == {} + + # Verbose, with loopback + ret = network.ip_networks6( + include_loopback=True, verbose=True, interface_data=interface_data + ) + expected_ret4 = { + "fe80::/64": { + "prefixlen": 64, + "netmask": "ffff:ffff:ffff:ffff::", + "num_addresses": 18446744073709551616, + "address": "fe80::", + }, + "::1/128": { + "prefixlen": 128, + "netmask": "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", + "num_addresses": 1, + "address": "::1", + }, + } + assert ret == expected_ret4 + + # Verbose, with loopback, specific interface + ret = network.ip_networks6( + interface="eth0", + include_loopback=True, + verbose=True, + interface_data=interface_data, + ) + expected_ret5 = { + "fe80::/64": { + "prefixlen": 64, + "netmask": "ffff:ffff:ffff:ffff::", + "num_addresses": 18446744073709551616, + "address": "fe80::", + }, + } + assert ret == expected_ret5 + + # Verbose, with loopback, multiple specific interfaces + ret = network.ip_networks6( + interface="eth0,lo", + include_loopback=True, + verbose=True, + interface_data=interface_data, + ) + expected_ret6 = { + "fe80::/64": { + "prefixlen": 64, + "netmask": "ffff:ffff:ffff:ffff::", + "num_addresses": 18446744073709551616, + "address": "fe80::", + }, + "::1/128": { + "prefixlen": 128, + "netmask": "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", + "num_addresses": 1, + 
"address": "::1", + }, + } + assert ret == expected_ret6 + + # Verbose, with loopback, specific interface (not present) + ret = network.ip_networks6( + interface="eth1", + include_loopback=True, + verbose=True, + interface_data=interface_data, + ) + assert ret == {} + + +def test_get_fqhostname_return(): + """ + Test if proper hostname is used when RevDNS differ from hostname + """ + with patch("socket.gethostname", MagicMock(return_value="hostname")), patch( + "socket.getfqdn", + MagicMock(return_value="very.long.and.complex.domain.name"), + ), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]), + ): + assert network.get_fqhostname() == "hostname" + + +def test_get_fqhostname_return_empty_hostname(): + """ + Test if proper hostname is used when hostname returns empty string + """ + host = "hostname" + with patch("socket.gethostname", MagicMock(return_value=host)), patch( + "socket.getfqdn", + MagicMock(return_value="very.long.and.complex.domain.name"), + ), patch( + "socket.getaddrinfo", + MagicMock( + return_value=[ + (2, 3, 0, host, ("127.0.1.1", 0)), + (2, 3, 0, "", ("127.0.1.1", 0)), + ] + ), + ): + assert network.get_fqhostname() == host + + +@pytest.mark.parametrize( + "addr,expected,strip", + ( + ("127.0.0.1", "127.0.0.1", False), + ("[::1]", "::1", True), + ("::1", "[::1]", False), + ("[::1]", "[::1]", False), + (ipaddress.ip_address("127.0.0.1"), "127.0.0.1", False), + ), +) +def test_ip_bracket(addr, expected, strip): + assert network.ip_bracket(addr, strip=strip) == expected def test_junos_ifconfig_output_parsing(): - ret = salt.utils.network._junos_interfaces_ifconfig( - "inet mtu 0 local=" + " " * 3456 - ) + ret = network._junos_interfaces_ifconfig("inet mtu 0 local=" + " " * 3456) assert ret == {"inet": {"up": False}} + + +def test_isportopen_false(): + ret = network.isportopen("127.0.0.1", "66000") + assert ret is False + + +def test_isportopen(): + ret = network.isportopen("127.0.0.1", "22") + assert 
ret == 0 + + +def test_get_socket(): + ret = network.get_socket("127.0.0.1") + assert ret.family == socket.AF_INET + assert ret.type == socket.SOCK_STREAM + + ret = network.get_socket("2001:a71::1") + assert ret.family == socket.AF_INET6 + assert ret.type == socket.SOCK_STREAM + + +def test_ip_to_host(grains): + ret = network.ip_to_host("127.0.0.1") + if grains["oscodename"] == "Photon": + # Photon returns this for IPv4 + assert ret == "ipv6-localhost" + else: + assert ret == "localhost" + + ret = network.ip_to_host("2001:a71::1") + assert ret is None + + ret = network.ip_to_host("::1") + if grains["os"] == "Amazon": + assert ret == "localhost6" + elif grains["os_family"] == "Debian": + if grains["osmajorrelease"] == 12: + assert ret == "localhost" + else: + assert ret == "ip6-localhost" + elif grains["os_family"] == "RedHat": + if grains["oscodename"] == "Photon": + assert ret == "ipv6-localhost" + else: + assert ret == "localhost" + elif grains["os_family"] == "Arch": + if grains.get("osmajorrelease", None) is None: + # running doesn't have osmajorrelease grains + assert ret == "localhost" + else: + assert ret == "ip6-localhost" + else: + assert ret == "localhost" + + +@pytest.mark.parametrize( + "addr,fmtr,expected", + ( + ("192.168.0.115", "prefixlen", "/24"), + ("192.168.1.80", "prefixlen", "/24"), + ("10.10.10.250", "prefixlen", "/8"), + ("192.168.0.115", "netmask", "255.255.255.0"), + ("192.168.1.80", "netmask", "255.255.255.0"), + ("10.10.10.250", "netmask", "255.0.0.0"), + ), +) +def test_natural_ipv4_netmask(addr, fmtr, expected): + assert network.natural_ipv4_netmask(addr, fmt=fmtr) == expected + + +@pytest.mark.parametrize( + "addr,expected", + ( + ("127.0", "127.0.0.0"), + ("192.168.3", "192.168.3.0"), + ("10.209", "10.209.0.0"), + ), +) +def test_rpad_ipv4_network(addr, expected): + assert network.rpad_ipv4_network(addr) == expected + + +def test_hw_addr(linux_interfaces_dict, freebsd_interfaces_dict): + + with patch( + 
"salt.utils.network.linux_interfaces", + MagicMock(return_value=linux_interfaces_dict), + ): + hw_addrs = network.hw_addr("eth0") + assert hw_addrs == "e0:3f:49:85:6a:af" + + with patch( + "salt.utils.network.interfaces", MagicMock(return_value=freebsd_interfaces_dict) + ), patch("salt.utils.platform.is_netbsd", MagicMock(return_value=True)): + hw_addrs = network.hw_addr("em0") + assert hw_addrs == "00:30:48:ff:ff:ff" + + hw_addrs = network.hw_addr("em1") + assert hw_addrs == "00:30:48:aa:aa:aa" + + hw_addrs = network.hw_addr("dog") + assert ( + hw_addrs + == 'Interface "dog" not in available interfaces: "", "em0", "em1", "lo0", "plip0", "tun0"' + ) + + +def test_interface_and_ip(linux_interfaces_dict): + + with patch( + "salt.utils.network.linux_interfaces", + MagicMock(return_value=linux_interfaces_dict), + ): + expected = [ + { + "address": "10.10.10.56", + "broadcast": "10.10.10.255", + "netmask": "255.255.252.0", + } + ] + ret = network.interface("eth0") + assert ret == expected + + ret = network.interface("dog") + assert ret == 'Interface "dog" not in available interfaces: "eth0", "lo"' + + ret = network.interface_ip("eth0") + assert ret == "10.10.10.56" + + ret = network.interface_ip("dog") + assert ret == 'Interface "dog" not in available interfaces: "eth0", "lo"' + + +def test_subnets(linux_interfaces_dict): + + with patch( + "salt.utils.network.linux_interfaces", + MagicMock(return_value=linux_interfaces_dict), + ): + ret = network.subnets() + assert ret == ["10.10.8.0/22"] + + ret = network.subnets6() + assert ret == ["fe80::/64"] + + +def test_in_subnet(caplog): + assert network.in_subnet("fe80::/64", "fe80::e23f:49ff:fe85:6aaf") + assert network.in_subnet("10.10.8.0/22", "10.10.10.56") + assert not network.in_subnet("10.10.8.0/22") + + caplog.clear() + ret = network.in_subnet("10.10.8.0/40") + assert "Invalid CIDR '10.10.8.0/40'" in caplog.text + assert not ret + + +def test_ip_addrs(linux_interfaces_dict): + with patch( + 
"salt.utils.network.linux_interfaces", + MagicMock(return_value=linux_interfaces_dict), + ): + ret = network.ip_addrs("eth0") + assert ret == ["10.10.10.56"] + + with patch( + "salt.utils.network.linux_interfaces", + MagicMock(return_value=linux_interfaces_dict), + ): + ret = network.ip_addrs6("eth0") + assert ret == ["fe80::e23f:49ff:fe85:6aaf"] diff --git a/tests/pytests/unit/utils/test_vt.py b/tests/pytests/unit/utils/test_vt.py index 438a6eb09c0..c31b25e623c 100644 --- a/tests/pytests/unit/utils/test_vt.py +++ b/tests/pytests/unit/utils/test_vt.py @@ -1,3 +1,4 @@ +import logging import os import signal @@ -43,10 +44,13 @@ def test_log_sanitize(test_cmd, caplog): cmd, log_stdout=True, log_stderr=True, + log_stdout_level="debug", + log_stderr_level="debug", log_sanitize=password, stream_stdout=False, stream_stderr=False, ) - ret = term.recv() + with caplog.at_level(logging.DEBUG): + ret = term.recv() assert password not in caplog.text assert "******" in caplog.text diff --git a/tests/support/helpers.py b/tests/support/helpers.py index d8b7f2915f8..32bdf043d75 100644 --- a/tests/support/helpers.py +++ b/tests/support/helpers.py @@ -34,6 +34,7 @@ import types import attr import pytest +import pytestskipmarkers.utils.platform import tornado.ioloop import tornado.web from pytestshellutils.exceptions import ProcessFailed @@ -1644,6 +1645,10 @@ class VirtualEnv: return pathlib.Path(self.venv_python).parent def __enter__(self): + if pytestskipmarkers.utils.platform.is_fips_enabled(): + pytest.skip( + "Test cannot currently create virtual environments on a FIPS enabled platform" + ) try: self._create_virtualenv() except subprocess.CalledProcessError: diff --git a/tests/support/pytest/mysql.py b/tests/support/pytest/mysql.py index 0dc52d938ca..20377e3453f 100644 --- a/tests/support/pytest/mysql.py +++ b/tests/support/pytest/mysql.py @@ -3,6 +3,7 @@ import time import attr import pytest +from pytestskipmarkers.utils import platform from saltfactories.utils import 
random_string # This `pytest.importorskip` here actually works because this module @@ -102,6 +103,10 @@ def mysql_image(request): @pytest.fixture(scope="module") def create_mysql_combo(mysql_image): + if platform.is_fips_enabled(): + if mysql_image.name in ("mysql-server", "percona") and mysql_image.tag == "8.0": + pytest.skip(f"These tests fail on {mysql_image.name}:{mysql_image.tag}") + return MySQLCombo( mysql_name=mysql_image.name, mysql_version=mysql_image.tag, diff --git a/tests/support/virt.py b/tests/support/virt.py index 13498a9f630..c651b01adcc 100644 --- a/tests/support/virt.py +++ b/tests/support/virt.py @@ -1,4 +1,5 @@ import logging +import sys import time import uuid @@ -11,11 +12,6 @@ from tests.conftest import CODE_DIR log = logging.getLogger(__name__) -def _install_salt_in_container(container): - ret = container.run("python3", "-m", "pip", "install", "/salt") - log.debug("Install Salt in the container: %s", ret) - - @attr.s(kw_only=True, slots=True) class SaltVirtMinionContainerFactory(SaltMinion): @@ -72,7 +68,7 @@ class SaltVirtMinionContainerFactory(SaltMinion): self.container_start_check(self._check_script_path_exists) for port in (self.sshd_port, self.libvirt_tcp_port, self.libvirt_tls_port): self.check_ports[port] = port - self.before_start(_install_salt_in_container, self, on_container=False) + self.before_start(self._install_salt_in_container, on_container=False) def _check_script_path_exists(self, timeout_at): while time.time() <= timeout_at: @@ -85,3 +81,37 @@ class SaltVirtMinionContainerFactory(SaltMinion): else: return False return True + + def _install_salt_in_container(self): + ret = self.run("bash", "-c", "echo $SALT_PY_VERSION") + assert ret.returncode == 0 + if not ret.stdout: + log.warning( + "The 'SALT_PY_VERSION' environment variable is not set on the container" + ) + salt_py_version = 3 + ret = self.run( + "python3", + "-c", + "import sys; sys.stderr.write('{}.{}'.format(*sys.version_info))", + ) + assert ret.returncode == 0 
+ if not ret.stdout: + requirements_py_version = "{}.{}".format(*sys.version_info) + else: + requirements_py_version = ret.stdout.strip() + else: + salt_py_version = requirements_py_version = ret.stdout.strip() + + self.python_executable = f"python{salt_py_version}" + + ret = self.run( + self.python_executable, + "-m", + "pip", + "install", + f"--constraint=/salt/requirements/static/ci/py{requirements_py_version}/linux.txt", + "/salt", + ) + log.debug("Install Salt in the container: %s", ret) + assert ret.returncode == 0 diff --git a/tests/unit/modules/test_boto3_elasticsearch.py b/tests/unit/modules/test_boto3_elasticsearch.py index 6b82c0abba7..0e60a9e0746 100644 --- a/tests/unit/modules/test_boto3_elasticsearch.py +++ b/tests/unit/modules/test_boto3_elasticsearch.py @@ -28,6 +28,10 @@ except ImportError: # https://github.com/boto/boto/commit/33ac26b416fbb48a60602542b4ce15dcc7029f12 REQUIRED_BOTO3_VERSION = "1.2.1" +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + def __virtual__(): """ diff --git a/tests/unit/modules/test_boto3_route53.py b/tests/unit/modules/test_boto3_route53.py index 9d421471942..5e7332fbb35 100644 --- a/tests/unit/modules/test_boto3_route53.py +++ b/tests/unit/modules/test_boto3_route53.py @@ -25,6 +25,10 @@ except ImportError: # https://github.com/boto/boto/commit/33ac26b416fbb48a60602542b4ce15dcc7029f12 REQUIRED_BOTO3_VERSION = "1.2.1" +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + def __virtual__(): """ diff --git a/tests/unit/modules/test_boto_apigateway.py b/tests/unit/modules/test_boto_apigateway.py index 5f3d2a49822..e6bb33a47dc 100644 --- a/tests/unit/modules/test_boto_apigateway.py +++ b/tests/unit/modules/test_boto_apigateway.py @@ -23,6 +23,10 @@ try: except ImportError: HAS_BOTO = False +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module diff --git a/tests/unit/modules/test_boto_cloudtrail.py 
b/tests/unit/modules/test_boto_cloudtrail.py index de31ff955a0..3b6488b3129 100644 --- a/tests/unit/modules/test_boto_cloudtrail.py +++ b/tests/unit/modules/test_boto_cloudtrail.py @@ -22,6 +22,10 @@ try: except ImportError: HAS_BOTO = False +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module,unused-import # the boto_cloudtrail module relies on the connect_to_region() method diff --git a/tests/unit/modules/test_boto_cloudwatch_event.py b/tests/unit/modules/test_boto_cloudwatch_event.py index 82d158104aa..4d37747b8f7 100644 --- a/tests/unit/modules/test_boto_cloudwatch_event.py +++ b/tests/unit/modules/test_boto_cloudwatch_event.py @@ -22,6 +22,10 @@ try: except ImportError: HAS_BOTO = False +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module,unused-import log = logging.getLogger(__name__) diff --git a/tests/unit/modules/test_boto_cognitoidentity.py b/tests/unit/modules/test_boto_cognitoidentity.py index 1e213a169ac..51ae9075a0b 100644 --- a/tests/unit/modules/test_boto_cognitoidentity.py +++ b/tests/unit/modules/test_boto_cognitoidentity.py @@ -21,6 +21,10 @@ try: except ImportError: HAS_BOTO = False +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module diff --git a/tests/unit/modules/test_boto_elasticsearch_domain.py b/tests/unit/modules/test_boto_elasticsearch_domain.py index 5c5845aa25b..e0329df5cec 100644 --- a/tests/unit/modules/test_boto_elasticsearch_domain.py +++ b/tests/unit/modules/test_boto_elasticsearch_domain.py @@ -21,6 +21,10 @@ try: except ImportError: HAS_BOTO = False +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module diff --git a/tests/unit/modules/test_boto_iot.py b/tests/unit/modules/test_boto_iot.py index 7c96244ce08..8c61d86dd9b 100644 --- a/tests/unit/modules/test_boto_iot.py +++ 
b/tests/unit/modules/test_boto_iot.py @@ -23,6 +23,10 @@ try: except ImportError: HAS_BOTO = False +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module,unused-import # the boto_iot module relies on the connect_to_region() method diff --git a/tests/unit/modules/test_boto_lambda.py b/tests/unit/modules/test_boto_lambda.py index d32dc9345b6..157e559207d 100644 --- a/tests/unit/modules/test_boto_lambda.py +++ b/tests/unit/modules/test_boto_lambda.py @@ -26,6 +26,10 @@ try: except ImportError: HAS_BOTO = False +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module # the boto_lambda module relies on the connect_to_region() method diff --git a/tests/unit/modules/test_boto_s3_bucket.py b/tests/unit/modules/test_boto_s3_bucket.py index 8e418a8293c..90d868d1141 100644 --- a/tests/unit/modules/test_boto_s3_bucket.py +++ b/tests/unit/modules/test_boto_s3_bucket.py @@ -22,6 +22,10 @@ try: except ImportError: HAS_BOTO = False +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module,unused-import # the boto_s3_bucket module relies on the connect_to_region() method diff --git a/tests/unit/modules/test_junos.py b/tests/unit/modules/test_junos.py deleted file mode 100644 index 8f23cb95f93..00000000000 --- a/tests/unit/modules/test_junos.py +++ /dev/null @@ -1,2766 +0,0 @@ -""" - :codeauthor: Rajvi Dhimar -""" -import os - -import pytest -import yaml - -import salt.modules.junos as junos -from tests.support.mixins import LoaderModuleMockMixin, XMLEqualityMixin -from tests.support.mock import ANY, MagicMock, PropertyMock, call, mock_open, patch -from tests.support.unit import TestCase - -try: - from lxml import etree -except ImportError: - import xml.etree.ElementTree as etree - -try: - import jnpr.junos.op as tables_dir - import jxmlease # pylint: disable=unused-import - from jnpr.junos.device import Device - 
from jnpr.junos.exception import ConnectClosedError, LockError, UnlockError - from jnpr.junos.utils.config import Config - from jnpr.junos.utils.sw import SW - - HAS_JUNOS = True -except ImportError: - HAS_JUNOS = False - - -@pytest.mark.skipif( - not HAS_JUNOS, reason="The junos-eznc and jxmlease modules are required" -) -class Test_Junos_Module(TestCase, LoaderModuleMockMixin, XMLEqualityMixin): - def setup_loader_modules(self): - return { - junos: { - "__proxy__": { - "junos.conn": self.make_connect, - "junos.get_serialized_facts": self.get_facts, - "junos.reboot_active": MagicMock(return_value=True), - "junos.reboot_clear": MagicMock(return_value=True), - }, - "__salt__": { - "cp.get_template": self.mock_cp, - "cp.get_file": self.mock_cp, - "file.file_exists": MagicMock(return_value=True), - "slsutil.renderer": MagicMock( - return_value="set system host-name dummy" - ), - "event.fire_master": MagicMock(return_value=None), - }, - "_restart_connection": MagicMock(return_value=None), - }, - } - - def mock_cp(self, *args, **kwargs): - pass - - def make_connect(self): - with patch("ncclient.manager.connect") as mock_connect: - self.dev = Device( - host="1.1.1.1", - user="test", - password="test123", - fact_style="old", - gather_facts=False, - ) - self.dev.open() - self.dev.timeout = 30 - self.dev.bind(cu=Config) - self.dev.bind(sw=SW) - self.addCleanup(delattr, self, "dev") - return self.dev - - def raise_exception(self, *args, **kwargs): - raise Exception("Test exception") - - def get_facts(self): - facts = { - "2RE": True, - "HOME": "/var/home/regress", - "RE0": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - "up_time": "11 days, 23 hours, 16 minutes, 54 seconds", - }, - "RE1": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "backup", - "model": "RE-VMX", - "status": "OK", - "up_time": "11 days, 23 hours, 16 minutes, 41 seconds", - }, - "RE_hw_mi": False, - 
"current_re": ["re0", "master", "node", "fwdd", "member", "pfem"], - "domain": "englab.juniper.net", - "fqdn": "R1_re0.englab.juniper.net", - "hostname": "R1_re0", - "hostname_info": {"re0": "R1_re0", "re1": "R1_re01"}, - "ifd_style": "CLASSIC", - "junos_info": { - "re0": { - "object": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "text": "16.1I20160413_0837_aamish", - }, - "re1": { - "object": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "text": "16.1I20160413_0837_aamish", - }, - }, - "master": "RE0", - "model": "MX240", - "model_info": {"re0": "MX240", "re1": "MX240"}, - "personality": "MX", - "re_info": { - "default": { - "0": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - }, - "1": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "backup", - "model": "RE-VMX", - "status": "OK", - }, - "default": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - }, - } - }, - "re_master": {"default": "0"}, - "serialnumber": "VMX4eaf", - "srx_cluster": None, - "switch_style": "BRIDGE_DOMAIN", - "vc_capable": False, - "vc_fabric": None, - "vc_master": None, - "vc_mode": None, - "version": "16.1I20160413_0837_aamish", - "version_RE0": "16.1I20160413_0837_aamish", - "version_RE1": "16.1I20160413_0837_aamish", - "version_info": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "virtual": True, - } - return facts - - def test__timeout_decorator(self): - with patch( - "jnpr.junos.Device.timeout", new_callable=PropertyMock - ) as mock_timeout: - mock_timeout.return_value = 30 - - def function(x): - return x - - decorator = junos._timeout_decorator(function) - decorator("Test Mock", dev_timeout=10) - calls = [call(), call(10), call(30)] - 
mock_timeout.assert_has_calls(calls) - - def test__timeout_cleankwargs_decorator(self): - with patch( - "jnpr.junos.Device.timeout", new_callable=PropertyMock - ) as mock_timeout: - mock_timeout.return_value = 30 - - def function(x): - return x - - decorator = junos._timeout_decorator_cleankwargs(function) - decorator("Test Mock", dev_timeout=10, __pub_args="abc") - calls = [call(), call(10), call(30)] - mock_timeout.assert_has_calls(calls) - - def test_facts_refresh(self): - with patch("salt.modules.saltutil.sync_grains") as mock_sync_grains: - ret = dict() - ret["facts"] = { - "2RE": True, - "HOME": "/var/home/regress", - "RE0": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - "up_time": "11 days, 23 hours, 16 minutes, 54 seconds", - }, - "RE1": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "backup", - "model": "RE-VMX", - "status": "OK", - "up_time": "11 days, 23 hours, 16 minutes, 41 seconds", - }, - "RE_hw_mi": False, - "current_re": ["re0", "master", "node", "fwdd", "member", "pfem"], - "domain": "englab.juniper.net", - "fqdn": "R1_re0.englab.juniper.net", - "hostname": "R1_re0", - "hostname_info": {"re0": "R1_re0", "re1": "R1_re01"}, - "ifd_style": "CLASSIC", - "junos_info": { - "re0": { - "object": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "text": "16.1I20160413_0837_aamish", - }, - "re1": { - "object": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "text": "16.1I20160413_0837_aamish", - }, - }, - "master": "RE0", - "model": "MX240", - "model_info": {"re0": "MX240", "re1": "MX240"}, - "personality": "MX", - "re_info": { - "default": { - "0": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - }, - "1": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "backup", - 
"model": "RE-VMX", - "status": "OK", - }, - "default": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - }, - } - }, - "re_master": {"default": "0"}, - "serialnumber": "VMX4eaf", - "srx_cluster": None, - "switch_style": "BRIDGE_DOMAIN", - "vc_capable": False, - "vc_fabric": None, - "vc_master": None, - "vc_mode": None, - "version": "16.1I20160413_0837_aamish", - "version_RE0": "16.1I20160413_0837_aamish", - "version_RE1": "16.1I20160413_0837_aamish", - "version_info": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "virtual": True, - } - ret["out"] = True - self.assertEqual(junos.facts_refresh(), ret) - - def test_facts_refresh_exception(self): - with patch("jnpr.junos.device.Device.facts_refresh") as mock_facts_refresh: - mock_facts_refresh.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Execution failed due to "Test exception"' - ret["out"] = False - self.assertEqual(junos.facts_refresh(), ret) - - def test_facts(self): - ret = dict() - ret["facts"] = { - "2RE": True, - "HOME": "/var/home/regress", - "RE0": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - "up_time": "11 days, 23 hours, 16 minutes, 54 seconds", - }, - "RE1": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "backup", - "model": "RE-VMX", - "status": "OK", - "up_time": "11 days, 23 hours, 16 minutes, 41 seconds", - }, - "RE_hw_mi": False, - "current_re": ["re0", "master", "node", "fwdd", "member", "pfem"], - "domain": "englab.juniper.net", - "fqdn": "R1_re0.englab.juniper.net", - "hostname": "R1_re0", - "hostname_info": {"re0": "R1_re0", "re1": "R1_re01"}, - "ifd_style": "CLASSIC", - "junos_info": { - "re0": { - "object": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "text": "16.1I20160413_0837_aamish", - }, - 
"re1": { - "object": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "text": "16.1I20160413_0837_aamish", - }, - }, - "master": "RE0", - "model": "MX240", - "model_info": {"re0": "MX240", "re1": "MX240"}, - "personality": "MX", - "re_info": { - "default": { - "0": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - }, - "1": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "backup", - "model": "RE-VMX", - "status": "OK", - }, - "default": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - }, - } - }, - "re_master": {"default": "0"}, - "serialnumber": "VMX4eaf", - "srx_cluster": None, - "switch_style": "BRIDGE_DOMAIN", - "vc_capable": False, - "vc_fabric": None, - "vc_master": None, - "vc_mode": None, - "version": "16.1I20160413_0837_aamish", - "version_RE0": "16.1I20160413_0837_aamish", - "version_RE1": "16.1I20160413_0837_aamish", - "version_info": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "virtual": True, - } - ret["out"] = True - self.assertEqual(junos.facts(), ret) - - def test_facts_exception(self): - with patch.dict( - junos.__proxy__, {"junos.get_serialized_facts": self.raise_exception} - ): - ret = dict() - ret["message"] = 'Could not display facts due to "Test exception"' - ret["out"] = False - self.assertEqual(junos.facts(), ret) - - def test_set_hostname_without_args(self): - ret = dict() - ret["message"] = "Please provide the hostname." 
- ret["out"] = False - self.assertEqual(junos.set_hostname(), ret) - - def test_set_hostname_load_called_with_valid_name(self): - with patch("jnpr.junos.utils.config.Config.load") as mock_load: - junos.set_hostname("test-name") - mock_load.assert_called_with("set system host-name test-name", format="set") - - def test_set_hostname_raise_exception_for_load(self): - with patch("jnpr.junos.utils.config.Config.load") as mock_load: - mock_load.side_effect = self.raise_exception - ret = dict() - ret[ - "message" - ] = 'Could not load configuration due to error "Test exception"' - ret["out"] = False - self.assertEqual(junos.set_hostname("Test-name"), ret) - - def test_set_hostname_raise_exception_for_commit_check(self): - with patch("jnpr.junos.utils.config.Config.commit_check") as mock_commit_check: - mock_commit_check.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Could not commit check due to error "Test exception"' - ret["out"] = False - self.assertEqual(junos.set_hostname("test-name"), ret) - - def test_set_hostname_one_arg_parsed_correctly(self): - with patch("jnpr.junos.utils.config.Config.load") as mock_load, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit: - mock_commit_check.return_value = True - args = { - "comment": "Committed via salt", - "__pub_user": "root", - "__pub_arg": ["test-name", {"comment": "Committed via salt"}], - "__pub_fun": "junos.set_hostname", - "__pub_jid": "20170220210915624885", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - - junos.set_hostname("test-name", **args) - mock_commit.assert_called_with(comment="Committed via salt") - - def test_set_hostname_more_than_one_args_parsed_correctly(self): - with patch("jnpr.junos.utils.config.Config.load") as mock_load, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - 
"jnpr.junos.utils.config.Config.commit" - ) as mock_commit: - mock_commit_check.return_value = True - args = { - "comment": "Committed via salt", - "__pub_user": "root", - "__pub_arg": [ - "test-name", - {"comment": "Committed via salt", "confirm": 5}, - ], - "__pub_fun": "junos.set_hostname", - "__pub_jid": "20170220210915624885", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - - junos.set_hostname("test-name", **args) - mock_commit.assert_called_with(comment="Committed via salt", confirm=5) - - def test_set_hostname_successful_return_message(self): - with patch("jnpr.junos.utils.config.Config.load") as mock_load, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit: - mock_commit_check.return_value = True - args = { - "comment": "Committed via salt", - "__pub_user": "root", - "__pub_arg": ["test-name", {"comment": "Committed via salt"}], - "__pub_fun": "junos.set_hostname", - "__pub_jid": "20170220210915624885", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - ret = dict() - ret["message"] = "Successfully changed hostname." 
- ret["out"] = True - self.assertEqual(junos.set_hostname("test-name", **args), ret) - - def test_set_hostname_raise_exception_for_commit(self): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit: - mock_commit.side_effect = self.raise_exception - ret = dict() - ret[ - "message" - ] = 'Successfully loaded host-name but commit failed with "Test exception"' - ret["out"] = False - self.assertEqual(junos.set_hostname("test-name"), ret) - - def test_set_hostname_fail_commit_check(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch("salt.modules.junos.rollback") as mock_rollback: - mock_commit_check.return_value = False - ret = dict() - ret["out"] = False - ret[ - "message" - ] = "Successfully loaded host-name but pre-commit check failed." - self.assertEqual(junos.set_hostname("test"), ret) - - def test_commit_without_args(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit: - mock_commit.return_value = True - mock_commit_check.return_value = True - ret = dict() - ret["message"] = "Commit Successful." 
- ret["out"] = True - self.assertEqual(junos.commit(), ret) - - def test_commit_raise_commit_check_exception(self): - with patch("jnpr.junos.utils.config.Config.commit_check") as mock_commit_check: - mock_commit_check.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Could not perform commit check due to "Test exception"' - ret["out"] = False - self.assertEqual(junos.commit(), ret) - - def test_commit_raise_commit_exception(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit: - mock_commit_check.return_value = True - mock_commit.side_effect = self.raise_exception - ret = dict() - ret["out"] = False - ret[ - "message" - ] = 'Commit check succeeded but actual commit failed with "Test exception"' - self.assertEqual(junos.commit(), ret) - - def test_commit_with_single_argument(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit: - mock_commit_check.return_value = True - args = { - "__pub_user": "root", - "__pub_arg": [{"sync": True}], - "sync": True, - "__pub_fun": "junos.commit", - "__pub_jid": "20170221182531323467", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - junos.commit(**args) - mock_commit.assert_called_with(detail=False, sync=True) - - def test_commit_with_multiple_arguments(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit: - mock_commit_check.return_value = True - args = { - "comment": "comitted via salt", - "__pub_user": "root", - "__pub_arg": [ - {"comment": "comitted via salt", "confirm": 3, "detail": True} - ], - "confirm": 3, - "detail": True, - "__pub_fun": "junos.commit", - "__pub_jid": "20170221182856987820", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - 
"__pub_ret": "", - } - junos.commit(**args) - mock_commit.assert_called_with( - comment="comitted via salt", detail=True, confirm=3 - ) - - def test_commit_pyez_commit_returning_false(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit: - mock_commit.return_value = False - mock_commit_check.return_value = True - ret = dict() - ret["message"] = "Commit failed." - ret["out"] = False - self.assertEqual(junos.commit(), ret) - - def test_commit_pyez_commit_check_returns_false(self): - with patch("jnpr.junos.utils.config.Config.commit_check") as mock_commit_check: - mock_commit_check.return_value = False - ret = dict() - ret["out"] = False - ret["message"] = "Pre-commit check failed." - self.assertEqual(junos.commit(), ret) - - def test_rollback_exception(self): - with patch("jnpr.junos.utils.config.Config.rollback") as mock_rollback: - mock_rollback.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Rollback failed due to "Test exception"' - ret["out"] = False - self.assertEqual(junos.rollback(), ret) - - def test_rollback_without_args_success(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback: - mock_commit_check.return_value = True - mock_rollback.return_value = True - ret = dict() - ret["message"] = "Rollback successful" - ret["out"] = True - self.assertEqual(junos.rollback(), ret) - - def test_rollback_without_args_fail(self): - with patch("jnpr.junos.utils.config.Config.rollback") as mock_rollback: - mock_rollback.return_value = False - ret = dict() - ret["message"] = "Rollback failed" - ret["out"] = False - self.assertEqual(junos.rollback(), ret) - - def test_rollback_with_id(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as 
mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback: - mock_commit_check.return_value = True - junos.rollback(id=5) - mock_rollback.assert_called_with(5) - - def test_rollback_with_id_and_single_arg(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback: - mock_commit_check.return_value = True - args = { - "__pub_user": "root", - "__pub_arg": [2, {"confirm": 2}], - "confirm": 2, - "__pub_fun": "junos.rollback", - "__pub_jid": "20170221184518526067", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - junos.rollback(id=2, **args) - mock_rollback.assert_called_with(2) - mock_commit.assert_called_with(confirm=2) - - def test_rollback_with_id_and_multiple_args(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback: - mock_commit_check.return_value = True - args = { - "comment": "Comitted via salt", - "__pub_user": "root", - "__pub_arg": [ - 2, - {"comment": "Comitted via salt", "dev_timeout": 40, "confirm": 1}, - ], - "confirm": 1, - "__pub_fun": "junos.rollback", - "__pub_jid": "20170221192708251721", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - junos.rollback(id=2, **args) - mock_rollback.assert_called_with(2) - mock_commit.assert_called_with( - comment="Comitted via salt", confirm=1, dev_timeout=40 - ) - - def test_rollback_with_only_single_arg(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit, patch( - 
"jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback: - mock_commit_check.return_value = True - args = { - "__pub_user": "root", - "__pub_arg": [{"sync": True}], - "sync": True, - "__pub_fun": "junos.rollback", - "__pub_jid": "20170221193615696475", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - junos.rollback(**args) - mock_rollback.assert_called_once_with(0) - mock_commit.assert_called_once_with(sync=True) - - def test_rollback_with_only_multiple_args_no_id(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback: - mock_commit_check.return_value = True - args = { - "comment": "Comitted via salt", - "__pub_user": "root", - "__pub_arg": [ - {"comment": "Comitted via salt", "confirm": 3, "sync": True} - ], - "confirm": 3, - "sync": True, - "__pub_fun": "junos.rollback", - "__pub_jid": "20170221193945996362", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - junos.rollback(**args) - mock_rollback.assert_called_with(0) - mock_commit.assert_called_once_with( - sync=True, confirm=3, comment="Comitted via salt" - ) - - def test_rollback_with_diffs_file_option_when_diff_is_None(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback, patch( - "salt.utils.files.fopen" - ) as mock_fopen, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff: - mock_commit_check.return_value = True - mock_diff.return_value = "diff" - args = { - "__pub_user": "root", - "__pub_arg": [{"diffs_file": "/home/regress/diff", "confirm": 2}], - "confirm": 2, - "__pub_fun": "junos.rollback", - "__pub_jid": "20170221205153884009", - "__pub_tgt": "mac_min", - 
"__pub_tgt_type": "glob", - "__pub_ret": "", - "diffs_file": "/home/regress/diff", - } - junos.rollback(**args) - mock_fopen.assert_called_with("/home/regress/diff", "w") - - def test_rollback_with_diffs_file_option(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback, patch( - "salt.utils.files.fopen" - ) as mock_fopen, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff: - mock_commit_check.return_value = True - mock_diff.return_value = None - args = { - "__pub_user": "root", - "__pub_arg": [{"diffs_file": "/home/regress/diff", "confirm": 2}], - "confirm": 2, - "__pub_fun": "junos.rollback", - "__pub_jid": "20170221205153884009", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - "diffs_file": "/home/regress/diff", - } - junos.rollback(**args) - assert not mock_fopen.called - - def test_rollback_commit_check_exception(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback: - mock_commit_check.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Could not commit check due to "Test exception"' - ret["out"] = False - self.assertEqual(junos.rollback(), ret) - - def test_rollback_commit_exception(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback: - mock_commit_check.return_value = True - mock_commit.side_effect = self.raise_exception - ret = dict() - ret[ - "message" - ] = 'Rollback successful but commit failed with error "Test exception"' - ret["out"] = False - self.assertEqual(junos.rollback(), ret) - - def 
test_rollback_commit_check_fails(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback: - mock_commit_check.return_value = False - ret = dict() - ret["message"] = "Rollback successful but pre-commit check failed." - ret["out"] = False - self.assertEqual(junos.rollback(), ret) - - def test_diff_without_args(self): - with patch("jnpr.junos.utils.config.Config.diff") as mock_diff: - junos.diff() - mock_diff.assert_called_with(rb_id=0) - - def test_diff_with_arg(self): - with patch("jnpr.junos.utils.config.Config.diff") as mock_diff: - junos.diff(id=2) - mock_diff.assert_called_with(rb_id=2) - - def test_diff_exception(self): - with patch("jnpr.junos.utils.config.Config.diff") as mock_diff: - mock_diff.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Could not get diff with error "Test exception"' - ret["out"] = False - self.assertEqual(junos.diff(), ret) - - def test_ping_without_args(self): - ret = dict() - ret["message"] = "Please specify the destination ip to ping." 
- ret["out"] = False - self.assertEqual(junos.ping(), ret) - - def test_ping(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute: - junos.ping("1.1.1.1") - args = mock_execute.call_args - rpc = "51.1.1.1" - self.assertEqualXML(args[0][0], rpc) - - def test_ping_ttl(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute: - args = { - "__pub_user": "sudo_drajvi", - "__pub_arg": ["1.1.1.1", {"ttl": 3}], - "__pub_fun": "junos.ping", - "__pub_jid": "20170306165237683279", - "__pub_tgt": "mac_min", - "ttl": 3, - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - junos.ping("1.1.1.1", **args) - exec_args = mock_execute.call_args - rpc = "51.1.1.13" - self.assertEqualXML(exec_args[0][0], rpc) - - def test_ping_exception(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute: - mock_execute.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Execution failed due to "Test exception"' - ret["out"] = False - self.assertEqual(junos.ping("1.1.1.1"), ret) - - def test_cli_without_args(self): - ret = dict() - ret["message"] = "Please provide the CLI command to be executed." 
- ret["out"] = False - self.assertEqual(junos.cli(), ret) - - def test_cli_with_format_as_empty_string(self): - with patch("jnpr.junos.device.Device.cli") as mock_cli: - junos.cli("show version", format="") - mock_cli.assert_called_with("show version", "text", warning=False) - - def test_cli(self): - with patch("jnpr.junos.device.Device.cli") as mock_cli: - mock_cli.return_vale = "CLI result" - ret = dict() - ret["message"] = "CLI result" - ret["out"] = True - junos.cli("show version") - mock_cli.assert_called_with("show version", "text", warning=False) - - def test_cli_format_xml(self): - with patch("salt.modules.junos.jxmlease.parse") as mock_jxml, patch( - "salt.modules.junos.etree.tostring" - ) as mock_to_string, patch("jnpr.junos.device.Device.cli") as mock_cli: - mock_cli.return_value = "test" - mock_jxml.return_value = "test" - args = { - "__pub_user": "root", - "__pub_arg": [{"format": "xml"}], - "format": "xml", - "__pub_fun": "junos.cli", - "__pub_jid": "20170221182531323467", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - ret = dict() - ret["message"] = "test" - ret["out"] = True - self.assertEqual(junos.cli("show version", **args), ret) - mock_cli.assert_called_with("show version", "xml", warning=False) - mock_to_string.assert_called_once_with("test") - assert mock_jxml.called - - def test_cli_exception_in_cli(self): - with patch("jnpr.junos.device.Device.cli") as mock_cli: - mock_cli.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Execution failed due to "Test exception"' - ret["out"] = False - self.assertEqual(junos.cli("show version"), ret) - - def test_cli_output_save(self): - with patch("jnpr.junos.device.Device.cli") as mock_cli, patch( - "salt.utils.files.fopen" - ) as mock_fopen: - mock_cli.return_value = "Test return" - args = { - "__pub_user": "root", - "__pub_arg": [{"format": "text", "dest": "/path/to/file"}], - "format": "text", - "dest": "/path/to/file", - "__pub_fun": "junos.cli", - 
"__pub_jid": "20170221182531323467", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - ret = dict() - ret["message"] = "Test return" - ret["out"] = True - self.assertEqual(junos.cli("show version", **args), ret) - mock_fopen.assert_called_with("/path/to/file", "w") - mock_cli.assert_called_with("show version", "text", warning=False) - - def test_cli_output_save_ioexception(self): - with patch("jnpr.junos.device.Device.cli") as mock_cli, patch( - "salt.utils.files.fopen" - ) as mock_fopen: - mock_cli.return_value = "Test return" - mock_fopen.side_effect = IOError() - args = { - "__pub_user": "root", - "__pub_arg": [{"format": "text", "dest": "/path/to/file"}], - "format": "text", - "dest": "/path/to/file", - "__pub_fun": "junos.cli", - "__pub_jid": "20170221182531323467", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - ret = dict() - ret["message"] = 'Unable to open "/path/to/file" to write' - ret["out"] = False - self.assertEqual(junos.cli("show version", **args), ret) - - def test_shutdown_without_args(self): - ret = dict() - ret["message"] = "Provide either one of the arguments: shutdown or reboot." - ret["out"] = False - self.assertEqual(junos.shutdown(), ret) - - def test_shutdown_with_reboot_args(self): - with patch("salt.modules.junos.SW.reboot") as mock_reboot: - ret = dict() - ret["message"] = "Successfully powered off/rebooted." - ret["out"] = True - args = { - "__pub_user": "root", - "__pub_arg": [{"reboot": True}], - "reboot": True, - "__pub_fun": "junos.shutdown", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - self.assertEqual(junos.shutdown(**args), ret) - assert mock_reboot.called - - def test_shutdown_with_poweroff_args(self): - with patch("salt.modules.junos.SW.poweroff") as mock_poweroff: - ret = dict() - ret["message"] = "Successfully powered off/rebooted." 
- ret["out"] = True - args = { - "__pub_user": "root", - "__pub_arg": [{"shutdown": True}], - "reboot": True, - "__pub_fun": "junos.shutdown", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - self.assertEqual(junos.shutdown(**args), ret) - assert mock_poweroff.called - - def test_shutdown_with_shutdown_as_false(self): - ret = dict() - ret["message"] = "Nothing to be done." - ret["out"] = False - args = { - "__pub_user": "root", - "__pub_arg": [{"shutdown": False}], - "reboot": True, - "__pub_fun": "junos.shutdown", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - self.assertEqual(junos.shutdown(**args), ret) - - def test_shutdown_with_in_min_arg(self): - with patch("salt.modules.junos.SW.poweroff") as mock_poweroff: - args = { - "__pub_user": "root", - "in_min": 10, - "__pub_arg": [{"in_min": 10, "shutdown": True}], - "reboot": True, - "__pub_fun": "junos.shutdown", - "__pub_jid": "20170222231445709212", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - junos.shutdown(**args) - mock_poweroff.assert_called_with(in_min=10) - - def test_shutdown_with_at_arg(self): - with patch("salt.modules.junos.SW.reboot") as mock_reboot: - args = { - "__pub_user": "root", - "__pub_arg": [{"at": "12:00 pm", "reboot": True}], - "reboot": True, - "__pub_fun": "junos.shutdown", - "__pub_jid": "201702276857", - "at": "12:00 pm", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - junos.shutdown(**args) - mock_reboot.assert_called_with(at="12:00 pm") - - def test_shutdown_fail_with_exception(self): - with patch("salt.modules.junos.SW.poweroff") as mock_poweroff: - mock_poweroff.side_effect = self.raise_exception - args = { - "__pub_user": "root", - "__pub_arg": [{"shutdown": True}], - "shutdown": True, - "__pub_fun": "junos.shutdown", - "__pub_jid": "20170222213858582619", - "__pub_tgt": 
"mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - ret = dict() - ret["message"] = 'Could not poweroff/reboot because "Test exception"' - ret["out"] = False - self.assertEqual(junos.shutdown(**args), ret) - - def test_install_config_without_args(self): - ret = dict() - ret[ - "message" - ] = "Please provide the salt path where the configuration is present" - ret["out"] = False - self.assertEqual(junos.install_config(), ret) - - def test_install_config_cp_fails(self): - with patch.dict( - junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} - ): - ret = dict() - ret = dict() - ret["message"] = "Invalid file path." - ret["out"] = False - self.assertEqual(junos.install_config("path"), ret) - - def test_install_config_file_cp_fails(self): - with patch.dict( - junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} - ): - ret = dict() - ret = dict() - ret["message"] = "Invalid file path." - ret["out"] = False - self.assertEqual(junos.install_config("path"), ret) - - def test_install_config(self): - with patch.dict( - junos.__salt__, - { - "cp.is_cached": MagicMock(return_value="test/path/config"), - "cp.hash_file": MagicMock( - return_value={"hash_type": "sha256", "hsum": "a386e49c17"} - ), - "file.get_hash": MagicMock(return_value="a386e49c17"), - }, - ): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - 
mock_diff.return_value = "diff" - mock_commit_check.return_value = True - - ret = dict() - ret["message"] = "Successfully loaded and committed!" - ret["out"] = True - self.assertEqual( - junos.install_config("salt://actual/path/config.set"), ret - ) - mock_load.assert_called_with(path="test/path/config", format="set") - - def test_install_config_xml_file(self): - with patch.dict( - junos.__salt__, - { - "cp.is_cached": MagicMock(return_value="test/path/config"), - "cp.hash_file": MagicMock( - return_value={"hash_type": "sha256", "hsum": "a386e49c17"} - ), - "file.get_hash": MagicMock(return_value="a386e49c17"), - }, - ): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - - ret = dict() - ret["message"] = "Successfully loaded and committed!" 
- ret["out"] = True - self.assertEqual( - junos.install_config("salt://actual/path/config.xml"), ret - ) - mock_load.assert_called_with(path="test/path/config", format="xml") - - def test_install_config_text_file(self): - with patch.dict( - junos.__salt__, - { - "cp.is_cached": MagicMock(return_value="test/path/config"), - "cp.hash_file": MagicMock( - return_value={"hash_type": "sha256", "hsum": "a386e49c17"} - ), - "file.get_hash": MagicMock(return_value="a386e49c17"), - }, - ): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - - ret = dict() - ret["message"] = "Successfully loaded and committed!" 
- ret["out"] = True - self.assertEqual(junos.install_config("salt://actual/path/config"), ret) - mock_load.assert_called_with(path="test/path/config", format="text") - - def test_install_config_cache_not_exists(self): - with patch.dict( - junos.__salt__, - { - "cp.is_cached": MagicMock(return_value=None), - "file.rmdir": MagicMock(return_value="True"), - }, - ): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "tempfile.mkdtemp" - ) as mock_mkdtemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - mock_mkdtemp.return_value = "/tmp/argr5351afd" - - ret = dict() - ret["message"] = "Successfully loaded and committed!" 
- ret["out"] = True - self.assertEqual( - junos.install_config( - "salt://actual/path/config", template_vars=True - ), - ret, - ) - mock_mkstemp.assert_called_with() - - def test_install_config_replace(self): - with patch.dict( - junos.__salt__, - { - "cp.is_cached": MagicMock(return_value="test/path/config"), - "cp.hash_file": MagicMock( - return_value={"hash_type": "sha256", "hsum": "a386e49c17"} - ), - "file.get_hash": MagicMock(return_value="a386e49c17"), - }, - ): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - - args = { - "__pub_user": "root", - "__pub_arg": [{"replace": True}], - "replace": True, - "__pub_fun": "junos.install_config", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - - ret = dict() - ret["message"] = "Successfully loaded and committed!" 
- ret["out"] = True - self.assertEqual( - junos.install_config("salt://actual/path/config.set", **args), ret - ) - mock_load.assert_called_with( - path="test/path/config", format="set", merge=False - ) - - def test_install_config_overwrite(self): - with patch.dict( - junos.__salt__, - { - "cp.is_cached": MagicMock(return_value="test/path/config"), - "cp.hash_file": MagicMock( - return_value={"hash_type": "sha256", "hsum": "a386e49c17"} - ), - "file.get_hash": MagicMock(return_value="a386e49c17"), - }, - ): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - - args = { - "__pub_user": "root", - "__pub_arg": [{"overwrite": True}], - "overwrite": True, - "__pub_fun": "junos.install_config", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - - ret = dict() - ret["message"] = "Successfully loaded and committed!" 
- ret["out"] = True - self.assertEqual( - junos.install_config("salt://actual/path/config.xml", **args), ret - ) - mock_load.assert_called_with( - path="test/path/config", format="xml", overwrite=True - ) - - def test_install_config_overwrite_false(self): - with patch.dict( - junos.__salt__, - { - "cp.is_cached": MagicMock(return_value="test/path/config"), - "cp.hash_file": MagicMock( - return_value={"hash_type": "sha256", "hsum": "a386e49c17"} - ), - "file.get_hash": MagicMock(return_value="a386e49c17"), - }, - ): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - - args = { - "__pub_user": "root", - "__pub_arg": [{"overwrite": False}], - "overwrite": False, - "__pub_fun": "junos.install_config", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - - ret = dict() - ret["message"] = "Successfully loaded and committed!" 
- ret["out"] = True - self.assertEqual( - junos.install_config("salt://actual/path/config", **args), ret - ) - mock_load.assert_called_with( - path="test/path/config", format="text", merge=True - ) - - def test_install_config_load_causes_exception(self): - with patch("jnpr.junos.utils.config.Config.diff") as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch("salt.utils.files.safe_rm") as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_load.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Could not load configuration due to : "Test exception"' - ret["format"] = "set" - ret["out"] = False - self.assertEqual(junos.install_config(path="actual/path/config.set"), ret) - - def test_install_config_no_diff(self): - with patch("jnpr.junos.utils.config.Config.diff") as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch("salt.utils.files.safe_rm") as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = None - ret = dict() - ret["message"] = "Configuration already applied!" 
- ret["out"] = True - self.assertEqual(junos.install_config("actual/path/config"), ret) - - def test_install_config_write_diff(self): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen" - ) as mock_fopen, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - - args = { - "__pub_user": "root", - "__pub_arg": [{"diffs_file": "copy/config/here"}], - "diffs_file": "copy/config/here", - "__pub_fun": "junos.install_config", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - - ret = dict() - ret["message"] = "Successfully loaded and committed!" 
- ret["out"] = True - self.assertEqual(junos.install_config("actual/path/config", **args), ret) - mock_fopen.assert_called_with("copy/config/here", "w") - - def test_install_config_write_diff_exception(self): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as mock_fopen, patch( - "salt.utils.stringutils.to_str" - ) as mock_strgutils, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - mock_strgutils.side_effect = self.raise_exception - - args = { - "__pub_user": "root", - "__pub_arg": [{"diffs_file": "copy/config/here"}], - "diffs_file": "copy/config/here", - "__pub_fun": "junos.install_config", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - - ret = dict() - ret["message"] = "Could not write into diffs_file due to: 'Test exception'" - ret["out"] = False - self.assertEqual(junos.install_config("actual/path/config", **args), ret) - - def test_install_config_commit_params(self): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, 
patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - args = { - "comment": "comitted via salt", - "__pub_user": "root", - "__pub_arg": [{"comment": "comitted via salt", "confirm": 3}], - "confirm": 3, - "__pub_fun": "junos.commit", - "__pub_jid": "20170221182856987820", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - ret = dict() - ret["message"] = "Successfully loaded and committed!" - ret["out"] = True - self.assertEqual(junos.install_config("actual/path/config", **args), ret) - mock_commit.assert_called_with(comment="comitted via salt", confirm=3) - - def test_install_config_commit_check_fails(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = False - - ret = dict() - ret["message"] = ( - "Loaded configuration but commit check failed, hence rolling back" - " configuration." 
- ) - ret["out"] = False - self.assertEqual(junos.install_config("actual/path/config.xml"), ret) - - def test_install_config_commit_exception(self): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - mock_commit.side_effect = self.raise_exception - ret = dict() - ret[ - "message" - ] = 'Commit check successful but commit failed with "Test exception"' - ret["out"] = False - self.assertEqual(junos.install_config("actual/path/config"), ret) - - def test_install_config_test_mode(self): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - ret = dict() - ret["message"] = ( - "Commit check 
passed, but skipping commit for dry-run and rolling back" - " configuration." - ) - ret["out"] = True - self.assertEqual(junos.install_config("actual/path/config", test=True), ret) - mock_commit.assert_not_called() - - def test_install_config_write_diff_dynamic_mode(self): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - ret = dict() - ret[ - "message" - ] = "Write diff is not supported with dynamic/ephemeral configuration mode" - ret["out"] = False - self.assertEqual( - junos.install_config( - "actual/path/config", mode="dynamic", diffs_file="/path/to/dif" - ), - ret, - ) - mock_commit.assert_not_called() - - def test_install_config_unknown_mode(self): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 
- mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - ret = dict() - ret["message"] = "install_config failed due to: unsupported action: abcdef" - ret["out"] = False - self.assertEqual( - junos.install_config("actual/path/config", mode="abcdef"), ret - ) - mock_commit.assert_not_called() - - def test_zeroize(self): - with patch("jnpr.junos.device.Device.cli") as mock_cli: - result = junos.zeroize() - ret = dict() - ret["out"] = True - ret["message"] = "Completed zeroize and rebooted" - mock_cli.assert_called_once_with("request system zeroize") - self.assertEqual(result, ret) - - def test_zeroize_throw_exception(self): - with patch("jnpr.junos.device.Device.cli") as mock_cli: - mock_cli.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Could not zeroize due to : "Test exception"' - ret["out"] = False - self.assertEqual(junos.zeroize(), ret) - - def test_install_os_without_args(self): - ret = dict() - ret[ - "message" - ] = "Please provide the salt path where the junos image is present." - ret["out"] = False - self.assertEqual(junos.install_os(), ret) - - def test_install_os_cp_fails(self): - with patch.dict( - junos.__salt__, - { - "cp.is_cached": MagicMock(return_value="/pat/to/tmp/file"), - "cp.hash_file": MagicMock( - return_value={"hash_type": "sha256", "hsum": "a386e49c17"} - ), - "file.get_hash": MagicMock(return_value="xxxx"), - "file.rmdir": MagicMock(return_value="True"), - }, - ): - with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_getsize.return_value = 10 - mock_isfile.return_value = False - mock_install.return_value = ( - False, - "Invalid path. 
Please provide a valid image path", - ) - ret = dict() - ret["message"] = ( - "Installation failed. Reason: Invalid path. Please provide a valid" - " image path" - ) - ret["out"] = False - self.assertEqual(junos.install_os("salt://image/path/"), ret) - - def test_install_os_image_cp_fails(self): - with patch.dict( - junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} - ): - ret = dict() - ret["message"] = "Invalid path. Please provide a valid image path" - ret["out"] = False - self.assertEqual(junos.install_os("/image/path/"), ret) - - def test_install_os(self): - with patch.dict( - junos.__salt__, - { - "cp.is_cached": MagicMock(return_value="test/path/config"), - "cp.hash_file": MagicMock( - return_value={"hash_type": "sha256", "hsum": "a386e49c17"} - ), - "file.get_hash": MagicMock(return_value="a386e49c17"), - }, - ): - with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_getsize.return_value = 10 - mock_isfile.return_value = True - mock_install.return_value = True, "installed" - ret = dict() - ret["out"] = True - ret["message"] = "Installed the os." 
- self.assertEqual(junos.install_os("path"), ret) - - def test_install_os_failure(self): - with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_getsize.return_value = 10 - mock_isfile.return_value = True - mock_install.return_value = False, "because we are testing failure" - ret = dict() - ret["out"] = False - ret[ - "message" - ] = "Installation failed. Reason: because we are testing failure" - self.assertEqual(junos.install_os("path"), ret) - - def test_install_os_with_reboot_arg(self): - with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( - "jnpr.junos.utils.sw.SW.reboot" - ) as mock_reboot, patch("salt.utils.files.safe_rm") as mock_safe_rm, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_getsize.return_value = 10 - mock_isfile.return_value = True - mock_install.return_value = True, "installed" - args = { - "__pub_user": "root", - "__pub_arg": [{"reboot": True}], - "reboot": True, - "__pub_fun": "junos.install_os", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - ret = dict() - ret["message"] = "Successfully installed and rebooted!" 
- ret["out"] = True - self.assertEqual(junos.install_os("path", **args), ret) - - def test_install_os_pyez_install_throws_exception(self): - with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_getsize.return_value = 10 - mock_isfile.return_value = True - mock_install.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Installation failed due to: "Test exception"' - ret["out"] = False - self.assertEqual(junos.install_os("path"), ret) - - def test_install_os_with_reboot_raises_exception(self): - with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( - "jnpr.junos.utils.sw.SW.reboot" - ) as mock_reboot, patch("salt.utils.files.safe_rm") as mock_safe_rm, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_getsize.return_value = 10 - mock_isfile.return_value = True - mock_install.return_value = True, "installed" - mock_reboot.side_effect = self.raise_exception - args = { - "__pub_user": "root", - "__pub_arg": [{"reboot": True}], - "reboot": True, - "__pub_fun": "junos.install_os", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - ret = dict() - ret[ - "message" - ] = 'Installation successful but reboot failed due to : "Test exception"' - ret["out"] = False - self.assertEqual(junos.install_os("path", **args), ret) - - def test_install_os_no_copy(self): - with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - 
"salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_getsize.return_value = 10 - mock_isfile.return_value = True - mock_install.return_value = True, "installed" - ret = dict() - ret["out"] = True - ret["message"] = "Installed the os." - self.assertEqual(junos.install_os("path", no_copy=True), ret) - mock_install.assert_called_with( - "path", no_copy=True, progress=True, timeout=1800 - ) - mock_mkstemp.assert_not_called() - mock_safe_rm.assert_not_called() - - def test_install_os_issu(self): - with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_getsize.return_value = 10 - mock_isfile.return_value = True - mock_install.return_value = True, "installed" - ret = dict() - ret["out"] = True - ret["message"] = "Installed the os." - self.assertEqual(junos.install_os("path", issu=True), ret) - mock_install.assert_called_with(ANY, issu=True, progress=True, timeout=1800) - - def test_install_os_add_params(self): - with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_getsize.return_value = 10 - mock_isfile.return_value = True - mock_install.return_value = True, "installed" - ret = dict() - ret["out"] = True - ret["message"] = "Installed the os." 
- remote_path = "/path/to/file" - self.assertEqual( - junos.install_os( - "path", remote_path=remote_path, nssu=True, validate=True - ), - ret, - ) - mock_install.assert_called_with( - ANY, - nssu=True, - remote_path=remote_path, - progress=True, - validate=True, - timeout=1800, - ) - - def test_file_copy_without_args(self): - self.assertRaises(TypeError, junos.file_copy) - - @patch("paramiko.SSHClient") - @patch("scp.SCPClient.put") - @patch("scp.SCPClient.__init__") - def test_file_copy_invalid_src(self, mock_scpclient, mock_put, mock_ssh): - mock_scpclient.return_value = None - invalid_path = "invalid/file/path" - mock_put.side_effect = Exception(invalid_path) - with patch("os.path.isfile") as mock_isfile: - mock_isfile.return_value = False - ret = dict() - ret["message"] = 'Could not copy file : "invalid/file/path"' - ret["out"] = False - self.assertEqual(junos.file_copy(invalid_path, "file"), ret) - - def test_file_copy_without_dest(self): - self.assertRaises(TypeError, junos.file_copy, src="/home/user/config.set") - - def test_file_copy(self): - with patch("salt.modules.junos.SCP") as mock_scp, patch( - "os.path.isfile" - ) as mock_isfile: - mock_isfile.return_value = True - ret = dict() - ret["message"] = "Successfully copied file from test/src/file to file" - ret["out"] = True - self.assertEqual(junos.file_copy(dest="file", src="test/src/file"), ret) - - def test_file_copy_exception(self): - with patch("salt.modules.junos.SCP") as mock_scp, patch( - "os.path.isfile" - ) as mock_isfile: - mock_isfile.return_value = True - mock_scp.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Could not copy file : "Test exception"' - ret["out"] = False - self.assertEqual(junos.file_copy(dest="file", src="test/src/file"), ret) - - # These test cases test the __virtual__ function, used internally by salt - # to check if the given module is loadable. This function is not used by - # an external user. 
- - def test_virtual_proxy_unavailable(self): - with patch.dict(junos.__opts__, {}): - res = ( - False, - "The junos or dependent module could not be loaded: " - "junos-eznc or jxmlease or yamlordereddictloader or " - "proxy could not be loaded.", - ) - self.assertEqual(junos.__virtual__(), res) - - def test_virtual_all_true(self): - with patch.dict(junos.__opts__, {"proxy": "test"}): - self.assertEqual(junos.__virtual__(), "junos") - - def test_rpc_without_args(self): - ret = dict() - ret["message"] = "Please provide the rpc to execute." - ret["out"] = False - self.assertEqual(junos.rpc(), ret) - - def test_rpc_get_config_exception(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute: - mock_execute.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'RPC execution failed due to "Test exception"' - ret["out"] = False - self.assertEqual(junos.rpc("get_config"), ret) - - def test_rpc_get_config_filter(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute: - mock_execute.return_value = etree.XML("") - args = { - "__pub_user": "root", - "__pub_arg": [ - "get-config", - {"filter": ""}, - ], - "__pub_fun": "junos.rpc", - "__pub_jid": "20170314162715866528", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "filter": "", - "__pub_ret": "", - } - junos.rpc("get-config", **args) - exec_args = mock_execute.call_args - expected_rpc = ( - "' - ) - self.assertEqualXML(exec_args[0][0], expected_rpc) - - def test_rpc_get_interface_information(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute: - junos.rpc("get-interface-information", format="json") - args = mock_execute.call_args - expected_rpc = '' - self.assertEqualXML(args[0][0], expected_rpc) - - def test_rpc_get_interface_information_with_kwargs(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute: - args = { - "__pub_user": "root", - "__pub_arg": [ - "get-interface-information", - "", - "text", - {"terse": True, 
"interface_name": "lo0", "format": "text"}, - ], - "format": "text", - "terse": True, - "__pub_fun": "junos.rpc", - "__pub_jid": "20170314160943363563", - "__pub_tgt": "mac_min", - "interface_name": "lo0", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - junos.rpc("get-interface-information", **args) - args = mock_execute.call_args - expected_rpc = ( - '' - "lo0" - ) - self.assertEqualXML(etree.tostring(args[0][0]), expected_rpc) - - def test_rpc_get_chassis_inventory_filter_as_arg(self): - with patch("salt.modules.junos.jxmlease.parse") as mock_jxmlease, patch( - "salt.modules.junos.etree.tostring" - ) as mock_tostring, patch( - "salt.modules.junos.logging.Logger.warning" - ) as mock_warning, patch( - "jnpr.junos.device.Device.execute" - ) as mock_execute: - junos.rpc( - "get-chassis-inventory", - filter="", - ) - mock_warning.assert_called_with( - 'Filter ignored as it is only used with "get-config" rpc' - ) - - def test_rpc_get_interface_information_exception(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute: - mock_execute.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'RPC execution failed due to "Test exception"' - ret["out"] = False - self.assertEqual(junos.rpc("get_interface_information"), ret) - - def test_rpc_write_file_format_text(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute: - mock_execute.return_value = etree.XML( - "text rpc reply" - ) - with patch("salt.utils.files.fopen", mock_open(), create=True) as m_open: - junos.rpc("get-chassis-inventory", dest="/path/to/file", format="text") - writes = m_open.write_calls() - assert writes == ["text rpc reply"], writes - - def test_rpc_write_file_format_json(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute, patch( - "salt.utils.json.dumps" - ) as mock_dumps: - mock_dumps.return_value = "json rpc reply" - with patch("salt.utils.files.fopen", mock_open(), create=True) as m_open: - junos.rpc("get-chassis-inventory", 
dest="/path/to/file", format="json") - writes = m_open.write_calls() - assert writes == ["json rpc reply"], writes - - def test_rpc_write_file(self): - with patch("salt.modules.junos.jxmlease.parse") as mock_parse, patch( - "salt.modules.junos.etree.tostring" - ) as mock_tostring, patch("jnpr.junos.device.Device.execute") as mock_execute: - mock_tostring.return_value = "xml rpc reply" - with patch("salt.utils.files.fopen", mock_open(), create=True) as m_open: - junos.rpc("get-chassis-inventory", dest="/path/to/file") - writes = m_open.write_calls() - assert writes == ["xml rpc reply"], writes - - def test_lock_success(self): - ret_exp = {"out": True, "message": "Successfully locked the configuration."} - ret = junos.lock() - self.assertEqual(ret, ret_exp) - - def test_lock_error(self): - ret_exp = {"out": False, "message": 'Could not gain lock due to : "LockError"'} - with patch("jnpr.junos.utils.config.Config.lock") as mock_lock: - mock_lock.side_effect = LockError(None) - ret = junos.lock() - self.assertEqual(ret, ret_exp) - - def test_unlock_success(self): - ret_exp = {"out": True, "message": "Successfully unlocked the configuration."} - ret = junos.unlock() - self.assertEqual(ret, ret_exp) - - def test_unlock_error(self): - ret_exp = { - "out": False, - "message": 'Could not unlock configuration due to : "UnlockError"', - } - with patch("jnpr.junos.utils.config.Config.unlock") as mock_unlock: - mock_unlock.side_effect = UnlockError(None) - ret = junos.unlock() - self.assertEqual(ret, ret_exp) - - def test_load_none_path(self): - ret_exp = { - "out": False, - "message": ( - "Please provide the salt path where the configuration is present" - ), - } - ret = junos.load() - self.assertEqual(ret, ret_exp) - - def test_load_wrong_tmp_file(self): - ret_exp = { - "out": False, - "message": ( - 'Could not load configuration due to : "[Errno 2] No such file or' - " directory: '/pat/to/tmp/file'\"" - ), - "format": "text", - } - with patch.dict( - junos.__salt__, - { - 
"cp.is_cached": MagicMock(return_value="/pat/to/tmp/file"), - "cp.hash_file": MagicMock( - return_value={"hash_type": "sha256", "hsum": "a386e49c17"} - ), - "file.get_hash": MagicMock(return_value="a386e49c17"), - }, - ): - with patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch("os.path.getsize") as mock_getsize, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstmp: - mock_mkstmp.return_value = "/pat/to/tmp/file" - mock_getsize.return_value = 1000 - ret = junos.load("salt://path/to/file") - self.assertEqual(ret, ret_exp) - - def test_load_invalid_path(self): - with patch("salt.utils.files.mkstemp") as mock_mkstmp: - mock_mkstmp.return_value = "/path/to/file" - self.assertRaises(FileNotFoundError, junos.load, path="/path/to/file") - - def test_load_no_extension(self): - ret_exp = {"out": True, "message": "Successfully loaded the configuration."} - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstmp, patch( - "os.path.isfile" - ) as mock_isfile: - mock_getsize.return_value = 1000 - mock_mkstmp.return_value = "/path/to/file" - mock_isfile.return_value = True - ret = junos.load("/path/to/file") - mock_load.assert_called_with(format="text", path="/path/to/file") - self.assertEqual(ret, ret_exp) - - def test_load_xml_extension(self): - ret_exp = {"out": True, "message": "Successfully loaded the configuration."} - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch("os.path.isfile") as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstmp: - mock_getsize.return_value = 1000 - mock_mkstmp.return_value = "/path/to/file.xml" - mock_isfile.return_value = True - ret = junos.load("/path/to/file.xml") - 
mock_load.assert_called_with(format="xml", path="/path/to/file.xml") - self.assertEqual(ret, ret_exp) - - def test_load_xml_extension_with_kwargs(self): - ret_exp = {"out": True, "message": "Successfully loaded the configuration."} - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen" - ) as fopen, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstmp: - mock_mkstmp.return_value = "/path/to/file" - mock_isfile.return_value = True - ret = junos.load("/path/to/file.xml", template_vars=dict(hostname="test")) - mock_load.assert_called_with( - format="xml", path="/path/to/file", template_vars={"hostname": "test"} - ) - self.assertEqual(ret, ret_exp) - - def test_load_set_extension(self): - ret_exp = {"out": True, "message": "Successfully loaded the configuration."} - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.isfile" - ) as mock_isfile: - mock_getsize.return_value = 1000 - mock_mkstmp.return_value = "/path/to/file.set" - mock_isfile.return_value = True - ret = junos.load("/path/to/file.set") - mock_load.assert_called_with(format="set", path="/path/to/file.set") - self.assertEqual(ret, ret_exp) - - def test_load_replace_true(self): - ret_exp = {"out": True, "message": "Successfully loaded the configuration."} - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.isfile" - ) as mock_isfile: - mock_getsize.return_value = 1000 - mock_mkstmp.return_value = "/path/to/file" - 
mock_isfile.return_value = True - ret = junos.load("/path/to/file", replace=True) - mock_load.assert_called_with( - format="text", merge=False, path="/path/to/file" - ) - self.assertEqual(ret, ret_exp) - - def test_load_replace_false(self): - ret_exp = {"out": True, "message": "Successfully loaded the configuration."} - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.isfile" - ) as mock_isfile: - mock_getsize.return_value = 1000 - mock_mkstmp.return_value = "/path/to/file" - mock_isfile.return_value = True - ret = junos.load("/path/to/file", replace=False) - mock_load.assert_called_with( - format="text", replace=False, path="/path/to/file" - ) - self.assertEqual(ret, ret_exp) - - def test_load_overwrite_true(self): - ret_exp = {"out": True, "message": "Successfully loaded the configuration."} - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.isfile" - ) as mock_isfile: - mock_getsize.return_value = 1000 - mock_mkstmp.return_value = "/path/to/file" - mock_isfile.return_value = True - ret = junos.load("/path/to/file", overwrite=True) - mock_load.assert_called_with( - format="text", overwrite=True, path="/path/to/file" - ) - self.assertEqual(ret, ret_exp) - - def test_load_overwrite_false(self): - ret_exp = {"out": True, "message": "Successfully loaded the configuration."} - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstmp, patch( - "os.path.isfile" - ) as mock_isfile: - 
mock_getsize.return_value = 1000 - mock_mkstmp.return_value = "/path/to/file" - mock_isfile.return_value = True - ret = junos.load("/path/to/file", overwrite=False) - mock_load.assert_called_with( - format="text", merge=True, path="/path/to/file" - ) - self.assertEqual(ret, ret_exp) - - def test_load_error(self): - ret_exp = { - "out": False, - "format": "text", - "message": 'Could not load configuration due to : "Test Error"', - } - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.isfile" - ) as mock_isfile: - mock_getsize.return_value = 1000 - mock_mkstmp.return_value = "/path/to/file" - mock_isfile.return_value = True - mock_load.side_effect = Exception("Test Error") - ret = junos.load("/path/to/file") - self.assertEqual(ret, ret_exp) - - def test_load_template(self): - ret_exp = { - "out": True, - "message": "Successfully loaded the configuration.", - } - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load: - ret = junos.load("tests/unit/modules/templates/basic2.j2", test=True) - self.assertEqual(ret, ret_exp) - - def test_commit_check_success(self): - ret_exp = {"out": True, "message": "Commit check succeeded."} - ret = junos.commit_check() - self.assertEqual(ret, ret_exp) - - def test_commit_check_error(self): - ret_exp = {"out": False, "message": "Commit check failed with "} - with patch("jnpr.junos.utils.config.Config.commit_check") as mock_check: - mock_check.side_effect = Exception - ret = junos.commit_check() - self.assertEqual(ret, ret_exp) - - def test_get_table_wrong_path(self): - table = "ModuleTable" - file = "sample.yml" - path = "/path/to/file" - ret_exp = { - "out": False, - "hostname": "1.1.1.1", - "tablename": "ModuleTable", - "message": "Given table file {} cannot be 
located".format(file), - } - with patch.dict( - junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} - ): - with patch("jnpr.junos.factory.FactoryLoader.load") as mock_load, patch( - "salt.utils.files.fopen" - ) as mock_fopen, patch( - "jnpr.junos.factory.FactoryLoader.load" - ) as mock_load: - ret = junos.get_table(table, file, path) - self.assertEqual(ret, ret_exp) - mock_load.assert_not_called() - - def test_get_table_no_path_no_file(self): - table = "ModuleTable" - file = "inventory.yml" - ret_exp = { - "out": False, - "hostname": "1.1.1.1", - "tablename": "ModuleTable", - "message": "Given table file {} cannot be located".format(file), - } - with patch.dict( - junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} - ): - with patch("jnpr.junos.factory.FactoryLoader.load") as mock_load, patch( - "glob.glob" - ) as mock_fopen: - mock_fopen.return_value = [] - ret = junos.get_table(table, file) - self.assertEqual(ret, ret_exp) - mock_load.assert_not_called() - - def test_get_table_yaml_load_error(self): - table = "ModuleTable" - file = "inventory.yml" - path = "/path/to/file" - message = "File not located test" - ret_exp = { - "out": False, - "hostname": "1.1.1.1", - "tablename": "ModuleTable", - "message": "Uncaught exception during YAML Load - please report: {}".format( - message - ), - } - with patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as mock_file, patch("glob.glob") as mock_fopen, patch.object( - yaml, "load" - ) as mock_yamlload: - mock_fopen.return_value = ["/path/to/file"] - mock_yamlload.side_effect = OSError(message) - ret = junos.get_table(table, file, path) - self.assertEqual(ret, ret_exp) - - def test_get_table_api_error(self): - table = "sample" - file = "inventory.yml" - table_yamlload = { - "ModuleTable": { - "item": ( - ".//chassis-sub-module|.//chassis-module|.//chassis-sub-sub-module" - ), - "key": "name", - "rpc": "get-chassis-inventory", - "view": "ModuleTableView", - }, - "ModuleTableView": { 
- "fields": { - "jname": "name", - "pn": "part-number", - "sn": "serial-number", - "type": "description", - "ver": "version", - }, - }, - } - ret_exp = { - "out": False, - "hostname": "1.1.1.1", - "tablename": "sample", - "message": ( - "Uncaught exception during get API call - please report: '{}'".format( - str(table) - ) - ), - } - with patch("jnpr.junos.device.Device.execute") as mock_execute, patch( - "yaml.load" - ) as mock_yamlload, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open: - mock_yamlload.return_value = table_yamlload - ret = junos.get_table(table, file) - self.assertEqual(ret["out"], ret_exp["out"]) - self.assertEqual(ret["tablename"], ret_exp["tablename"]) - self.assertEqual(ret["message"], ret_exp["message"]) - - def test_get_table_connect_closed_error(self): - table = "ModuleTable" - file = "inventory.yml" - table_yamlload = { - "ModuleTable": { - "item": ( - ".//chassis-sub-module|.//chassis-module|.//chassis-sub-sub-module" - ), - "key": "name", - "rpc": "get-chassis-inventory", - "view": "ModuleTableView", - }, - "ModuleTableView": { - "fields": { - "jname": "name", - "pn": "part-number", - "sn": "serial-number", - "type": "description", - "ver": "version", - }, - }, - } - ret_exp = { - "out": False, - "hostname": "1.1.1.1", - "tablename": "ModuleTable", - "message": ( - "Got ConnectClosedError exception. 
Connection lost with Device(1.1.1.1)" - ), - } - with patch("jnpr.junos.factory.optable.OpTable.get") as mock_load, patch( - "yaml.load" - ) as mock_yamlload, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open: - dev = Device(host="1.1.1.1", user="rick") - mock_load.side_effect = ConnectClosedError(dev) - mock_yamlload.return_value = table_yamlload - ret = junos.get_table(table, file) - self.assertEqual(ret["out"], ret_exp["out"]) - self.assertEqual(ret["tablename"], ret_exp["tablename"]) - self.assertEqual(ret["message"], ret_exp["message"]) - - def test_get_table_inventory(self): - table = "ModuleTable" - file = "inventory.yml" - pyez_tables_path = os.path.dirname(os.path.abspath(tables_dir.__file__)) - path = pyez_tables_path - table_yamlload = { - "ModuleTable": { - "item": ( - ".//chassis-sub-module|.//chassis-module|.//chassis-sub-sub-module" - ), - "key": "name", - "rpc": "get-chassis-inventory", - "view": "ModuleTableView", - }, - "ModuleTableView": { - "fields": { - "jname": "name", - "pn": "part-number", - "sn": "serial-number", - "type": "description", - "ver": "version", - }, - }, - } - with patch("jnpr.junos.device.Device.execute") as mock_execute, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch("yaml.load") as mock_yamlload, patch( - "salt.utils.json.dumps" - ) as mock_dumps: - mock_dumps.return_value = "json rpc reply" - mock_yamlload.return_value = table_yamlload - ret = junos.get_table(table, file, path) - self.assertEqual(ret["out"], True) - - def test_get_table_no_path_inventory(self): - table = "ModuleTable" - file = "inventory.yml" - table_yamlload = { - "ModuleTable": { - "item": ( - ".//chassis-sub-module|.//chassis-module|.//chassis-sub-sub-module" - ), - "key": "name", - "rpc": "get-chassis-inventory", - "view": "ModuleTableView", - }, - "ModuleTableView": { - "fields": { - "jname": "name", - "pn": "part-number", - "sn": "serial-number", - "type": "description", - "ver": "version", - 
}, - }, - } - with patch("jnpr.junos.device.Device.execute") as mock_execute, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch("yaml.load") as mock_yamlload, patch( - "salt.utils.json.dumps" - ) as mock_dumps: - mock_dumps.return_value = "json rpc reply" - mock_yamlload.return_value = table_yamlload - ret = junos.get_table(table, file) - self.assertEqual(ret["out"], True) diff --git a/tests/unit/modules/test_virt.py b/tests/unit/modules/test_virt.py index 7e72d07b8e7..2fee41f8bd9 100644 --- a/tests/unit/modules/test_virt.py +++ b/tests/unit/modules/test_virt.py @@ -2,32 +2,27 @@ virt execution module unit tests """ -# pylint: disable=3rd-party-module-not-gated - - import datetime import os import shutil import tempfile import xml.etree.ElementTree as ET +import pytest + import salt.config import salt.modules.config as config import salt.modules.virt as virt import salt.syspaths import salt.utils.yaml from salt.exceptions import CommandExecutionError, SaltInvocationError - -# pylint: disable=import-error from tests.support.helpers import dedent from tests.support.mixins import LoaderModuleMockMixin from tests.support.mock import MagicMock, patch from tests.support.unit import TestCase -# pylint: disable=invalid-name,protected-access,attribute-defined-outside-init,too-many-public-methods,unused-argument - -class LibvirtMock(MagicMock): # pylint: disable=too-many-ancestors +class LibvirtMock(MagicMock): """ Libvirt library mock """ @@ -1882,6 +1877,7 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin): ], ) + @pytest.mark.skip_on_fips_enabled_platform def test_init(self): """ Test init() function diff --git a/tests/unit/modules/test_zcbuildout.py b/tests/unit/modules/test_zcbuildout.py index ac98435ffa0..db7a862f727 100644 --- a/tests/unit/modules/test_zcbuildout.py +++ b/tests/unit/modules/test_zcbuildout.py @@ -20,12 +20,13 @@ from tests.support.runtests import RUNTIME_VARS from tests.support.unit import TestCase pytestmark = [ + 
pytest.mark.skip_on_fips_enabled_platform, pytest.mark.skip_on_windows( reason=( "Special steps are required for proper SSL validation because " "`easy_install` is too old(and deprecated)." ) - ) + ), ] KNOWN_VIRTUALENV_BINARY_NAMES = ( diff --git a/tests/unit/states/test_boto_apigateway.py b/tests/unit/states/test_boto_apigateway.py index 51c85d6058a..7cf95a43442 100644 --- a/tests/unit/states/test_boto_apigateway.py +++ b/tests/unit/states/test_boto_apigateway.py @@ -28,6 +28,10 @@ try: except ImportError: HAS_BOTO = False +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module diff --git a/tests/unit/states/test_boto_cognitoidentity.py b/tests/unit/states/test_boto_cognitoidentity.py index 4354df0546f..f84a055dd2d 100644 --- a/tests/unit/states/test_boto_cognitoidentity.py +++ b/tests/unit/states/test_boto_cognitoidentity.py @@ -25,6 +25,10 @@ try: except ImportError: HAS_BOTO = False +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module diff --git a/tests/unit/states/test_zcbuildout.py b/tests/unit/states/test_zcbuildout.py index b5f919ac6b2..7cafbba6a62 100644 --- a/tests/unit/states/test_zcbuildout.py +++ b/tests/unit/states/test_zcbuildout.py @@ -11,12 +11,13 @@ from tests.support.runtests import RUNTIME_VARS from tests.unit.modules.test_zcbuildout import KNOWN_VIRTUALENV_BINARY_NAMES, Base pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, pytest.mark.skip_on_windows( reason=( "Special steps are required for proper SSL validation because " "`easy_install` is too old(and deprecated)." 
) - ) + ), ] diff --git a/tests/unit/utils/test_boto3mod.py b/tests/unit/utils/test_boto3mod.py index 74f6478e272..0a9509ab598 100644 --- a/tests/unit/utils/test_boto3mod.py +++ b/tests/unit/utils/test_boto3mod.py @@ -24,6 +24,10 @@ except ImportError: REQUIRED_BOTO3_VERSION = "1.2.1" +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + @pytest.mark.skipif(HAS_BOTO3 is False, reason="The boto module must be installed.") @pytest.mark.skipif( diff --git a/tests/unit/utils/test_botomod.py b/tests/unit/utils/test_botomod.py index bf3ca37a837..3e67cbec698 100644 --- a/tests/unit/utils/test_botomod.py +++ b/tests/unit/utils/test_botomod.py @@ -53,6 +53,11 @@ except ImportError: return stub_function +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + + required_boto_version = "2.0.0" required_boto3_version = "1.2.1" region = "us-east-1" diff --git a/tests/unit/utils/test_find.py b/tests/unit/utils/test_find.py index bc81c48554d..1960d4a3510 100644 --- a/tests/unit/utils/test_find.py +++ b/tests/unit/utils/test_find.py @@ -332,6 +332,7 @@ class TestPrintOption(TestCase): option = salt.utils.find.PrintOption("print", "path user") self.assertEqual(option.requires(), salt.utils.find._REQUIRES_STAT) + @pytest.mark.skip_on_fips_enabled_platform def test_print_option_execute(self): hello_file = os.path.join(self.tmpdir, "hello.txt") with salt.utils.files.fopen(hello_file, "w") as fp_: diff --git a/tests/unit/utils/test_hashutils.py b/tests/unit/utils/test_hashutils.py index 5cf11c114ef..b9a685957a5 100644 --- a/tests/unit/utils/test_hashutils.py +++ b/tests/unit/utils/test_hashutils.py @@ -1,3 +1,5 @@ +import pytest + import salt.utils.hashutils from tests.support.unit import TestCase @@ -87,6 +89,7 @@ class HashutilsTestCase(TestCase): self.bytes, ) + @pytest.mark.skip_on_fips_enabled_platform def test_md5_digest(self): """ Ensure that this function converts the value passed to bytes before diff --git a/tests/unit/utils/test_network.py 
b/tests/unit/utils/test_network.py deleted file mode 100644 index f7d39729300..00000000000 --- a/tests/unit/utils/test_network.py +++ /dev/null @@ -1,1313 +0,0 @@ -import logging -import socket -import textwrap -import time - -import pytest - -import salt.exceptions -import salt.utils.network as network -from salt._compat import ipaddress -from tests.support.mock import MagicMock, create_autospec, mock_open, patch -from tests.support.unit import TestCase - -log = logging.getLogger(__name__) - -LINUX = """\ -eth0 Link encap:Ethernet HWaddr e0:3f:49:85:6a:af - inet addr:10.10.10.56 Bcast:10.10.10.255 Mask:255.255.252.0 - inet6 addr: fe80::e23f:49ff:fe85:6aaf/64 Scope:Link - UP BROADCAST RUNNING MULTICAST MTU:1500 Metric:1 - RX packets:643363 errors:0 dropped:0 overruns:0 frame:0 - TX packets:196539 errors:0 dropped:0 overruns:0 carrier:0 - collisions:0 txqueuelen:1000 - RX bytes:386388355 (368.4 MiB) TX bytes:25600939 (24.4 MiB) - -lo Link encap:Local Loopback - inet addr:127.0.0.1 Mask:255.0.0.0 - inet6 addr: ::1/128 Scope:Host - UP LOOPBACK RUNNING MTU:65536 Metric:1 - RX packets:548901 errors:0 dropped:0 overruns:0 frame:0 - TX packets:548901 errors:0 dropped:0 overruns:0 carrier:0 - collisions:0 txqueuelen:0 - RX bytes:613479895 (585.0 MiB) TX bytes:613479895 (585.0 MiB) -""" - -FREEBSD = """ -em0: flags=8843 metric 0 mtu 1500 - options=4219b - ether 00:30:48:ff:ff:ff - inet 10.10.10.250 netmask 0xffffffe0 broadcast 10.10.10.255 - inet 10.10.10.56 netmask 0xffffffc0 broadcast 10.10.10.63 - media: Ethernet autoselect (1000baseT ) - status: active -em1: flags=8c02 metric 0 mtu 1500 - options=4219b - ether 00:30:48:aa:aa:aa - media: Ethernet autoselect - status: no carrier -plip0: flags=8810 metric 0 mtu 1500 -lo0: flags=8049 metric 0 mtu 16384 - options=3 - inet6 fe80::1%lo0 prefixlen 64 scopeid 0x8 - inet6 ::1 prefixlen 128 - inet 127.0.0.1 netmask 0xff000000 - nd6 options=3 -tun0: flags=8051 metric 0 mtu 1500 - options=80000 - inet 10.12.0.1 --> 10.12.0.2 netmask 
0xffffffff - Opened by PID 1964 -""" - -SOLARIS = """\ -lo0: flags=2001000849 mtu 8232 index 1 - inet 127.0.0.1 netmask ff000000 -net0: flags=100001100943 mtu 1500 index 2 - inet 10.10.10.38 netmask ffffffe0 broadcast 10.10.10.63 -ilbint0: flags=110001100843 mtu 1500 index 3 - inet 10.6.0.11 netmask ffffff00 broadcast 10.6.0.255 -ilbext0: flags=110001100843 mtu 1500 index 4 - inet 10.10.11.11 netmask ffffffe0 broadcast 10.10.11.31 -ilbext0:1: flags=110001100843 mtu 1500 index 4 - inet 10.10.11.12 netmask ffffffe0 broadcast 10.10.11.31 -vpn0: flags=1000011008d1 mtu 1480 index 5 - inet tunnel src 10.10.11.12 tunnel dst 10.10.5.5 - tunnel hop limit 64 - inet 10.6.0.14 --> 10.6.0.15 netmask ff000000 -lo0: flags=2002000849 mtu 8252 index 1 - inet6 ::1/128 -net0: flags=120002004941 mtu 1500 index 2 - inet6 fe80::221:9bff:fefd:2a22/10 -ilbint0: flags=120002000840 mtu 1500 index 3 - inet6 ::/0 -ilbext0: flags=120002000840 mtu 1500 index 4 - inet6 ::/0 -vpn0: flags=120002200850 mtu 1480 index 5 - inet tunnel src 10.10.11.12 tunnel dst 10.10.5.5 - tunnel hop limit 64 - inet6 ::/0 --> fe80::b2d6:7c10 -""" - -NETBSD = """\ -vioif0: flags=0x8943 mtu 1500 - ec_capabilities=1 - ec_enabled=0 - address: 00:a0:98:e6:83:18 - inet 192.168.1.80/24 broadcast 192.168.1.255 flags 0x0 - inet6 fe80::2a0:98ff:fee6:8318%vioif0/64 flags 0x0 scopeid 0x1 -lo0: flags=0x8049 mtu 33624 - inet 127.0.0.1/8 flags 0x0 - inet6 ::1/128 flags 0x20 - inet6 fe80::1%lo0/64 flags 0x0 scopeid 0x2 -""" - -FREEBSD_SOCKSTAT = """\ -USER COMMAND PID FD PROTO LOCAL ADDRESS FOREIGN ADDRESS -root python2.7 1294 41 tcp4 127.0.0.1:61115 127.0.0.1:4506 -""" - -FREEBSD_SOCKSTAT_WITH_FAT_PID = """\ -USER COMMAND PID FD PROTO LOCAL ADDRESS FOREIGN ADDRESS -salt-master python2.781106 35 tcp4 127.0.0.1:61115 127.0.0.1:4506 -""" - -OPENBSD_NETSTAT = """\ -Active Internet connections -Proto Recv-Q Send-Q Local Address Foreign Address (state) -tcp 0 0 127.0.0.1.61115 127.0.0.1.4506 ESTABLISHED -""" - -LINUX_NETLINK_SS_OUTPUT = 
"""\ -State Recv-Q Send-Q Local Address:Port Peer Address:Port -TIME-WAIT 0 0 [::1]:8009 [::1]:40368 -LISTEN 0 128 127.0.0.1:5903 0.0.0.0:* -ESTAB 0 0 [::ffff:127.0.0.1]:4506 [::ffff:127.0.0.1]:32315 -ESTAB 0 0 192.168.122.1:4506 192.168.122.177:24545 -ESTAB 0 0 127.0.0.1:56726 127.0.0.1:4505 -ESTAB 0 0 ::ffff:1.2.3.4:5678 ::ffff:1.2.3.4:4505 -""" - -IPV4_SUBNETS = { - True: ("10.10.0.0/24",), - False: ("10.10.0.0", "10.10.0.0/33", "FOO", 9, "0.9.800.1000/24"), -} -IPV6_SUBNETS = { - True: ("::1/128",), - False: ("::1", "::1/129", "FOO", 9, "aj01::feac/64"), -} - - -class NetworkTestCase(TestCase): - def test_sanitize_host_ip(self): - ret = network.sanitize_host("10.1./2.$3") - self.assertEqual(ret, "10.1.2.3") - - def test_sanitize_host_name(self): - """ - Should not remove the underscore - """ - ret = network.sanitize_host("foo_bar") - self.assertEqual(ret, "foo_bar") - - def test_host_to_ips(self): - """ - NOTE: When this test fails it's usually because the IP address has - changed. In these cases, we just need to update the IP address in the - assertion. 
- """ - - def _side_effect(host, *args): - try: - return { - "github.com": [ - (2, 1, 6, "", ("192.30.255.112", 0)), - (2, 1, 6, "", ("192.30.255.113", 0)), - ], - "ipv6host.foo": [ - (socket.AF_INET6, 1, 6, "", ("2001:a71::1", 0, 0, 0)), - ], - }[host] - except KeyError: - raise socket.gaierror(-2, "Name or service not known") - - getaddrinfo_mock = MagicMock(side_effect=_side_effect) - with patch.object(socket, "getaddrinfo", getaddrinfo_mock): - # Test host that can be resolved - ret = network.host_to_ips("github.com") - self.assertEqual(ret, ["192.30.255.112", "192.30.255.113"]) - # Test ipv6 - ret = network.host_to_ips("ipv6host.foo") - self.assertEqual(ret, ["2001:a71::1"]) - # Test host that can't be resolved - ret = network.host_to_ips("someothersite.com") - self.assertEqual(ret, None) - - def test_generate_minion_id(self): - self.assertTrue(network.generate_minion_id()) - - def test__generate_minion_id_with_unicode_in_etc_hosts(self): - """ - Test that unicode in /etc/hosts doesn't raise an error when - _generate_minion_id() helper is called to gather the hosts. 
- """ - content = textwrap.dedent( - """\ - # 以下为主机名解析 - ## ccc - 127.0.0.1 localhost thisismyhostname # 本机 - """ - ) - fopen_mock = mock_open(read_data={"/etc/hosts": content}) - with patch("salt.utils.files.fopen", fopen_mock): - assert "thisismyhostname" in network._generate_minion_id() - - def test_is_ip(self): - self.assertTrue(network.is_ip("10.10.0.3")) - self.assertFalse(network.is_ip("0.9.800.1000")) - # Check 16-char-long unicode string - # https://github.com/saltstack/salt/issues/51258 - self.assertFalse(network.is_ipv6("sixteen-char-str")) - - def test_is_ipv4(self): - self.assertTrue(network.is_ipv4("10.10.0.3")) - self.assertFalse(network.is_ipv4("10.100.1")) - self.assertFalse(network.is_ipv4("2001:db8:0:1:1:1:1:1")) - # Check 16-char-long unicode string - # https://github.com/saltstack/salt/issues/51258 - self.assertFalse(network.is_ipv4("sixteen-char-str")) - - def test_is_ipv6(self): - self.assertTrue(network.is_ipv6("2001:db8:0:1:1:1:1:1")) - self.assertTrue(network.is_ipv6("0:0:0:0:0:0:0:1")) - self.assertTrue(network.is_ipv6("::1")) - self.assertTrue(network.is_ipv6("::")) - self.assertTrue(network.is_ipv6("2001:0db8:85a3:0000:0000:8a2e:0370:7334")) - self.assertTrue(network.is_ipv6("2001:0db8:85a3::8a2e:0370:7334")) - self.assertFalse(network.is_ipv6("2001:0db8:0370:7334")) - self.assertFalse(network.is_ipv6("2001:0db8:::0370:7334")) - self.assertFalse(network.is_ipv6("10.0.1.2")) - self.assertFalse(network.is_ipv6("2001.0db8.85a3.0000.0000.8a2e.0370.7334")) - # Check 16-char-long unicode string - # https://github.com/saltstack/salt/issues/51258 - self.assertFalse(network.is_ipv6("sixteen-char-str")) - - def test_ipv6(self): - self.assertTrue(network.ipv6("2001:db8:0:1:1:1:1:1")) - self.assertTrue(network.ipv6("0:0:0:0:0:0:0:1")) - self.assertTrue(network.ipv6("::1")) - self.assertTrue(network.ipv6("::")) - self.assertTrue(network.ipv6("2001:0db8:85a3:0000:0000:8a2e:0370:7334")) - self.assertTrue(network.ipv6("2001:0db8:85a3::8a2e:0370:7334")) 
- self.assertTrue(network.ipv6("2001:67c:2e8::/48")) - - def test_is_loopback(self): - self.assertTrue(network.is_loopback("127.0.1.1")) - self.assertTrue(network.is_loopback("::1")) - self.assertFalse(network.is_loopback("10.0.1.2")) - self.assertFalse(network.is_loopback("2001:db8:0:1:1:1:1:1")) - # Check 16-char-long unicode string - # https://github.com/saltstack/salt/issues/51258 - self.assertFalse(network.is_ipv6("sixteen-char-str")) - - def test_parse_host_port(self): - _ip = ipaddress.ip_address - good_host_ports = { - "10.10.0.3": (_ip("10.10.0.3").compressed, None), - "10.10.0.3:1234": (_ip("10.10.0.3").compressed, 1234), - "2001:0db8:85a3::8a2e:0370:7334": ( - _ip("2001:0db8:85a3::8a2e:0370:7334").compressed, - None, - ), - "[2001:0db8:85a3::8a2e:0370:7334]:1234": ( - _ip("2001:0db8:85a3::8a2e:0370:7334").compressed, - 1234, - ), - "2001:0db8:85a3::7334": (_ip("2001:0db8:85a3::7334").compressed, None), - "[2001:0db8:85a3::7334]:1234": ( - _ip("2001:0db8:85a3::7334").compressed, - 1234, - ), - } - bad_host_ports = [ - "10.10.0.3/24", - "10.10.0.3::1234", - "2001:0db8:0370:7334", - "2001:0db8:0370::7334]:1234", - "2001:0db8:0370:0:a:b:c:d:1234", - "host name", - "host name:1234", - "10.10.0.3:abcd", - ] - for host_port, assertion_value in good_host_ports.items(): - host = port = None - host, port = network.parse_host_port(host_port) - self.assertEqual((host, port), assertion_value) - - for host_port in bad_host_ports: - try: - self.assertRaises(ValueError, network.parse_host_port, host_port) - except AssertionError as _e_: - log.error( - 'bad host_port value: "%s" failed to trigger ValueError exception', - host_port, - ) - raise _e_ - - def test_dns_check(self): - hosts = [ - { - "host": "10.10.0.3", - "port": "", - "mocked": [(2, 1, 6, "", ("10.10.0.3", 0))], - "ret": "10.10.0.3", - }, - { - "host": "10.10.0.3", - "port": "1234", - "mocked": [(2, 1, 6, "", ("10.10.0.3", 0))], - "ret": "10.10.0.3", - }, - { - "host": "2001:0db8:85a3::8a2e:0370:7334", - 
"port": "", - "mocked": [(10, 1, 6, "", ("2001:db8:85a3::8a2e:370:7334", 0, 0, 0))], - "ret": "[2001:db8:85a3::8a2e:370:7334]", - }, - { - "host": "2001:0db8:85a3::8a2e:370:7334", - "port": "1234", - "mocked": [(10, 1, 6, "", ("2001:db8:85a3::8a2e:370:7334", 0, 0, 0))], - "ret": "[2001:db8:85a3::8a2e:370:7334]", - }, - { - "host": "salt-master", - "port": "1234", - "mocked": [(2, 1, 6, "", ("127.0.0.1", 0))], - "ret": "127.0.0.1", - }, - ] - for host in hosts: - with patch.object( - socket, - "getaddrinfo", - create_autospec(socket.getaddrinfo, return_value=host["mocked"]), - ): - with patch("socket.socket", create_autospec(socket.socket)): - ret = network.dns_check(host["host"], host["port"]) - self.assertEqual(ret, host["ret"]) - - def test_dns_check_ipv6_filter(self): - # raise exception to skip everything after the getaddrinfo call - with patch.object( - socket, - "getaddrinfo", - create_autospec(socket.getaddrinfo, side_effect=Exception), - ) as getaddrinfo: - for ipv6, param in [ - (None, socket.AF_UNSPEC), - (True, socket.AF_INET6), - (False, socket.AF_INET), - ]: - with self.assertRaises(Exception): - network.dns_check("foo", "1", ipv6=ipv6) - getaddrinfo.assert_called_with("foo", "1", param, socket.SOCK_STREAM) - - def test_dns_check_errors(self): - with patch.object( - socket, "getaddrinfo", create_autospec(socket.getaddrinfo, return_value=[]) - ): - with self.assertRaisesRegex( - salt.exceptions.SaltSystemExit, - "DNS lookup or connection check of 'foo' failed", - ): - network.dns_check("foo", "1") - - with patch.object( - socket, - "getaddrinfo", - create_autospec(socket.getaddrinfo, side_effect=TypeError), - ): - with self.assertRaisesRegex( - salt.exceptions.SaltSystemExit, "Invalid or unresolveable address" - ): - network.dns_check("foo", "1") - - def test_test_addrs(self): - # subset of real data from getaddrinfo against saltstack.com - addrinfo = [ - (30, 2, 17, "", ("2600:9000:21eb:a800:8:1031:abc0:93a1", 0, 0, 0)), - (30, 1, 6, "", 
("2600:9000:21eb:a800:8:1031:abc0:93a1", 0, 0, 0)), - (30, 2, 17, "", ("2600:9000:21eb:b400:8:1031:abc0:93a1", 0, 0, 0)), - (30, 1, 6, "", ("2600:9000:21eb:b400:8:1031:abc0:93a1", 0, 0, 0)), - (2, 1, 6, "", ("13.35.99.52", 0)), - (2, 2, 17, "", ("13.35.99.85", 0)), - (2, 1, 6, "", ("13.35.99.85", 0)), - (2, 2, 17, "", ("13.35.99.122", 0)), - ] - with patch("socket.socket", create_autospec(socket.socket)) as s: - # we connect to the first address - addrs = network._test_addrs(addrinfo, 80) - self.assertTrue(len(addrs) == 1) - self.assertTrue(addrs[0] == addrinfo[0][4][0]) - - # the first lookup fails, succeeds on next check - s.side_effect = [socket.error, MagicMock()] - addrs = network._test_addrs(addrinfo, 80) - self.assertTrue(len(addrs) == 1) - self.assertTrue(addrs[0] == addrinfo[2][4][0]) - - # attempt to connect to resolved address with default timeout - s.side_effect = socket.error - addrs = network._test_addrs(addrinfo, 80) - time.sleep(2) - self.assertFalse(len(addrs) == 0) - - # nothing can connect, but we've eliminated duplicates - s.side_effect = socket.error - addrs = network._test_addrs(addrinfo, 80) - self.assertTrue(len(addrs) == 5) - - def test_is_subnet(self): - for subnet_data in (IPV4_SUBNETS, IPV6_SUBNETS): - for item in subnet_data[True]: - log.debug("Testing that %s is a valid subnet", item) - self.assertTrue(network.is_subnet(item)) - for item in subnet_data[False]: - log.debug("Testing that %s is not a valid subnet", item) - self.assertFalse(network.is_subnet(item)) - - def test_is_ipv4_subnet(self): - for item in IPV4_SUBNETS[True]: - log.debug("Testing that %s is a valid subnet", item) - self.assertTrue(network.is_ipv4_subnet(item)) - for item in IPV4_SUBNETS[False]: - log.debug("Testing that %s is not a valid subnet", item) - self.assertFalse(network.is_ipv4_subnet(item)) - - def test_is_ipv6_subnet(self): - for item in IPV6_SUBNETS[True]: - log.debug("Testing that %s is a valid subnet", item) - 
self.assertTrue(network.is_ipv6_subnet(item)) - for item in IPV6_SUBNETS[False]: - log.debug("Testing that %s is not a valid subnet", item) - self.assertFalse(network.is_ipv6_subnet(item)) - - def test_cidr_to_ipv4_netmask(self): - self.assertEqual(network.cidr_to_ipv4_netmask(24), "255.255.255.0") - self.assertEqual(network.cidr_to_ipv4_netmask(21), "255.255.248.0") - self.assertEqual(network.cidr_to_ipv4_netmask(17), "255.255.128.0") - self.assertEqual(network.cidr_to_ipv4_netmask(9), "255.128.0.0") - self.assertEqual(network.cidr_to_ipv4_netmask(36), "") - self.assertEqual(network.cidr_to_ipv4_netmask("lol"), "") - - def test_number_of_set_bits_to_ipv4_netmask(self): - set_bits_to_netmask = network._number_of_set_bits_to_ipv4_netmask(0xFFFFFF00) - self.assertEqual(set_bits_to_netmask, "255.255.255.0") - set_bits_to_netmask = network._number_of_set_bits_to_ipv4_netmask(0xFFFF6400) - - def test_hex2ip(self): - self.assertEqual(network.hex2ip("0x4A7D2B63"), "74.125.43.99") - self.assertEqual(network.hex2ip("0x4A7D2B63", invert=True), "99.43.125.74") - self.assertEqual( - network.hex2ip("00000000000000000000FFFF7F000001"), "127.0.0.1" - ) - self.assertEqual( - network.hex2ip("0000000000000000FFFF00000100007F", invert=True), "127.0.0.1" - ) - self.assertEqual( - network.hex2ip("20010DB8000000000000000000000000"), "2001:db8::" - ) - self.assertEqual( - network.hex2ip("B80D0120000000000000000000000000", invert=True), - "2001:db8::", - ) - - def test_interfaces_ifconfig_linux(self): - interfaces = network._interfaces_ifconfig(LINUX) - self.assertEqual( - interfaces, - { - "eth0": { - "hwaddr": "e0:3f:49:85:6a:af", - "inet": [ - { - "address": "10.10.10.56", - "broadcast": "10.10.10.255", - "netmask": "255.255.252.0", - } - ], - "inet6": [ - { - "address": "fe80::e23f:49ff:fe85:6aaf", - "prefixlen": "64", - "scope": "link", - } - ], - "up": True, - }, - "lo": { - "inet": [{"address": "127.0.0.1", "netmask": "255.0.0.0"}], - "inet6": [{"address": "::1", "prefixlen": 
"128", "scope": "host"}], - "up": True, - }, - }, - ) - - def test_interfaces_ifconfig_freebsd(self): - interfaces = network._interfaces_ifconfig(FREEBSD) - self.assertEqual( - interfaces, - { - "": {"up": False}, - "em0": { - "hwaddr": "00:30:48:ff:ff:ff", - "inet": [ - { - "address": "10.10.10.250", - "broadcast": "10.10.10.255", - "netmask": "255.255.255.224", - }, - { - "address": "10.10.10.56", - "broadcast": "10.10.10.63", - "netmask": "255.255.255.192", - }, - ], - "up": True, - }, - "em1": {"hwaddr": "00:30:48:aa:aa:aa", "up": False}, - "lo0": { - "inet": [{"address": "127.0.0.1", "netmask": "255.0.0.0"}], - "inet6": [ - {"address": "fe80::1", "prefixlen": "64", "scope": "0x8"}, - {"address": "::1", "prefixlen": "128", "scope": None}, - ], - "up": True, - }, - "plip0": {"up": False}, - "tun0": { - "inet": [{"address": "10.12.0.1", "netmask": "255.255.255.255"}], - "up": True, - }, - }, - ) - - def test_interfaces_ifconfig_solaris(self): - with patch("salt.utils.platform.is_sunos", lambda: True): - interfaces = network._interfaces_ifconfig(SOLARIS) - expected_interfaces = { - "ilbint0": { - "inet6": [], - "inet": [ - { - "broadcast": "10.6.0.255", - "netmask": "255.255.255.0", - "address": "10.6.0.11", - } - ], - "up": True, - }, - "lo0": { - "inet6": [{"prefixlen": "128", "address": "::1"}], - "inet": [{"netmask": "255.0.0.0", "address": "127.0.0.1"}], - "up": True, - }, - "ilbext0": { - "inet6": [], - "inet": [ - { - "broadcast": "10.10.11.31", - "netmask": "255.255.255.224", - "address": "10.10.11.11", - }, - { - "broadcast": "10.10.11.31", - "netmask": "255.255.255.224", - "address": "10.10.11.12", - }, - ], - "up": True, - }, - "vpn0": { - "inet6": [], - "inet": [{"netmask": "255.0.0.0", "address": "10.6.0.14"}], - "up": True, - }, - "net0": { - "inet6": [ - {"prefixlen": "10", "address": "fe80::221:9bff:fefd:2a22"} - ], - "inet": [ - { - "broadcast": "10.10.10.63", - "netmask": "255.255.255.224", - "address": "10.10.10.38", - } - ], - "up": True, - }, 
- } - self.assertEqual(interfaces, expected_interfaces) - - def test_interfaces_ifconfig_netbsd(self): - interfaces = network._netbsd_interfaces_ifconfig(NETBSD) - self.assertEqual( - interfaces, - { - "lo0": { - "inet": [{"address": "127.0.0.1", "netmask": "255.0.0.0"}], - "inet6": [ - {"address": "fe80::1", "prefixlen": "64", "scope": "lo0"} - ], - "up": True, - }, - "vioif0": { - "hwaddr": "00:a0:98:e6:83:18", - "inet": [ - { - "address": "192.168.1.80", - "broadcast": "192.168.1.255", - "netmask": "255.255.255.0", - } - ], - "inet6": [ - { - "address": "fe80::2a0:98ff:fee6:8318", - "prefixlen": "64", - "scope": "vioif0", - } - ], - "up": True, - }, - }, - ) - - def test_freebsd_remotes_on(self): - with patch("salt.utils.platform.is_sunos", lambda: False): - with patch("salt.utils.platform.is_freebsd", lambda: True): - with patch("subprocess.check_output", return_value=FREEBSD_SOCKSTAT): - remotes = network._freebsd_remotes_on("4506", "remote") - self.assertEqual(remotes, {"127.0.0.1"}) - - def test_freebsd_remotes_on_with_fat_pid(self): - with patch("salt.utils.platform.is_sunos", lambda: False): - with patch("salt.utils.platform.is_freebsd", lambda: True): - with patch( - "subprocess.check_output", - return_value=FREEBSD_SOCKSTAT_WITH_FAT_PID, - ): - remotes = network._freebsd_remotes_on("4506", "remote") - self.assertEqual(remotes, {"127.0.0.1"}) - - def test_netlink_tool_remote_on_a(self): - with patch("salt.utils.platform.is_sunos", lambda: False): - with patch("salt.utils.platform.is_linux", lambda: True): - with patch( - "subprocess.check_output", return_value=LINUX_NETLINK_SS_OUTPUT - ): - remotes = network._netlink_tool_remote_on("4506", "local_port") - self.assertEqual(remotes, {"192.168.122.177", "::ffff:127.0.0.1"}) - - def test_netlink_tool_remote_on_b(self): - with patch("subprocess.check_output", return_value=LINUX_NETLINK_SS_OUTPUT): - remotes = network._netlink_tool_remote_on("4505", "remote_port") - self.assertEqual(remotes, {"127.0.0.1", 
"::ffff:1.2.3.4"}) - - def test_openbsd_remotes_on(self): - with patch("subprocess.check_output", return_value=OPENBSD_NETSTAT): - remotes = network._openbsd_remotes_on("4506", "remote") - self.assertEqual(remotes, {"127.0.0.1"}) - - def test_openbsd_remotes_on_issue_61966(self): - """ - Test that the command output is correctly converted to string before - treating it as such - """ - with patch("subprocess.check_output", return_value=OPENBSD_NETSTAT.encode()): - remotes = network._openbsd_remotes_on("4506", "remote") - self.assertEqual(remotes, {"127.0.0.1"}) - - def test_generate_minion_id_distinct(self): - """ - Test if minion IDs are distinct in the pool. - - :return: - """ - with patch("platform.node", MagicMock(return_value="nodename")), patch( - "socket.gethostname", MagicMock(return_value="hostname") - ), patch( - "socket.getfqdn", MagicMock(return_value="hostname.domainname.blank") - ), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "attrname", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock(return_value=["1.2.3.4", "5.6.7.8"]), - ): - self.assertEqual( - network._generate_minion_id(), - [ - "hostname.domainname.blank", - "nodename", - "hostname", - "1.2.3.4", - "5.6.7.8", - ], - ) - - def test_generate_minion_id_127_name(self): - """ - Test if minion IDs can be named 127.foo - - :return: - """ - with patch("platform.node", MagicMock(return_value="127")), patch( - "socket.gethostname", MagicMock(return_value="127") - ), patch( - "socket.getfqdn", MagicMock(return_value="127.domainname.blank") - ), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "attrname", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock(return_value=["1.2.3.4", "5.6.7.8"]), - ): - self.assertEqual( - network._generate_minion_id(), - ["127.domainname.blank", "127", "1.2.3.4", "5.6.7.8"], - ) 
- - def test_generate_minion_id_127_name_startswith(self): - """ - Test if minion IDs can be named starting from "127" - - :return: - """ - with patch("platform.node", MagicMock(return_value="127890")), patch( - "socket.gethostname", MagicMock(return_value="127890") - ), patch( - "socket.getfqdn", MagicMock(return_value="127890.domainname.blank") - ), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "attrname", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock(return_value=["1.2.3.4", "5.6.7.8"]), - ): - self.assertEqual( - network._generate_minion_id(), - ["127890.domainname.blank", "127890", "1.2.3.4", "5.6.7.8"], - ) - - def test_generate_minion_id_duplicate(self): - """ - Test if IP addresses in the minion IDs are distinct in the pool - - :return: - """ - with patch("platform.node", MagicMock(return_value="hostname")), patch( - "socket.gethostname", MagicMock(return_value="hostname") - ), patch("socket.getfqdn", MagicMock(return_value="hostname")), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock(return_value=["1.2.3.4", "1.2.3.4", "1.2.3.4"]), - ): - self.assertEqual(network._generate_minion_id(), ["hostname", "1.2.3.4"]) - - def test_generate_minion_id_platform_used(self): - """ - Test if platform.node is used for the first occurrence. - The platform.node is most common hostname resolver before anything else. 
- - :return: - """ - with patch( - "platform.node", MagicMock(return_value="very.long.and.complex.domain.name") - ), patch("socket.gethostname", MagicMock(return_value="hostname")), patch( - "socket.getfqdn", MagicMock(return_value="") - ), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock(return_value=["1.2.3.4", "1.2.3.4", "1.2.3.4"]), - ): - self.assertEqual( - network.generate_minion_id(), "very.long.and.complex.domain.name" - ) - - def test_generate_minion_id_platform_localhost_filtered(self): - """ - Test if localhost is filtered from the first occurrence. - - :return: - """ - with patch("platform.node", MagicMock(return_value="localhost")), patch( - "socket.gethostname", MagicMock(return_value="pick.me") - ), patch( - "socket.getfqdn", MagicMock(return_value="hostname.domainname.blank") - ), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock(return_value=["1.2.3.4", "1.2.3.4", "1.2.3.4"]), - ): - self.assertEqual(network.generate_minion_id(), "hostname.domainname.blank") - - def test_generate_minion_id_platform_localhost_filtered_all(self): - """ - Test if any of the localhost is filtered from everywhere. 
- - :return: - """ - with patch("platform.node", MagicMock(return_value="localhost")), patch( - "socket.gethostname", MagicMock(return_value="ip6-loopback") - ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock( - return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1", "1.2.3.4"] - ), - ): - self.assertEqual(network.generate_minion_id(), "1.2.3.4") - - def test_generate_minion_id_platform_localhost_only(self): - """ - Test if there is no other choice but localhost. - - :return: - """ - with patch("platform.node", MagicMock(return_value="localhost")), patch( - "socket.gethostname", MagicMock(return_value="ip6-loopback") - ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1"]), - ): - self.assertEqual(network.generate_minion_id(), "localhost") - - def test_generate_minion_id_platform_fqdn(self): - """ - Test if fqdn is picked up. 
- - :return: - """ - with patch("platform.node", MagicMock(return_value="localhost")), patch( - "socket.gethostname", MagicMock(return_value="ip6-loopback") - ), patch("socket.getfqdn", MagicMock(return_value="pick.me")), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1"]), - ): - self.assertEqual(network.generate_minion_id(), "pick.me") - - def test_generate_minion_id_platform_localhost_addrinfo(self): - """ - Test if addinfo is picked up. - - :return: - """ - with patch("platform.node", MagicMock(return_value="localhost")), patch( - "socket.gethostname", MagicMock(return_value="ip6-loopback") - ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "pick.me", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1"]), - ): - self.assertEqual(network.generate_minion_id(), "pick.me") - - def test_generate_minion_id_platform_ip_addr_only(self): - """ - Test if IP address is the only what is used as a Minion ID in case no DNS name. 
- - :return: - """ - with patch("platform.node", MagicMock(return_value="localhost")), patch( - "socket.gethostname", MagicMock(return_value="ip6-loopback") - ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock( - return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1", "1.2.3.4"] - ), - ): - self.assertEqual(network.generate_minion_id(), "1.2.3.4") - - def test_gen_mac(self): - with patch("random.randint", return_value=1) as random_mock: - self.assertEqual(random_mock.return_value, 1) - ret = network.gen_mac("00:16:3E") - expected_mac = "00:16:3E:01:01:01" - self.assertEqual(ret, expected_mac) - - def test_mac_str_to_bytes(self): - self.assertRaises(ValueError, network.mac_str_to_bytes, "31337") - self.assertRaises(ValueError, network.mac_str_to_bytes, "0001020304056") - self.assertRaises(ValueError, network.mac_str_to_bytes, "00:01:02:03:04:056") - self.assertRaises(ValueError, network.mac_str_to_bytes, "a0:b0:c0:d0:e0:fg") - self.assertEqual( - b"\x10\x08\x06\x04\x02\x00", network.mac_str_to_bytes("100806040200") - ) - self.assertEqual( - b"\xf8\xe7\xd6\xc5\xb4\xa3", network.mac_str_to_bytes("f8e7d6c5b4a3") - ) - - @pytest.mark.slow_test - def test_generate_minion_id_with_long_hostname(self): - """ - Validate the fix for: - - https://github.com/saltstack/salt/issues/51160 - """ - long_name = "localhost-abcdefghijklmnopqrstuvwxyz-abcdefghijklmnopqrstuvwxyz" - with patch("socket.gethostname", MagicMock(return_value=long_name)): - # An exception is raised if unicode is passed to socket.getfqdn - minion_id = network.generate_minion_id() - assert minion_id != "", minion_id - - def test_filter_by_networks_with_no_filter(self): - ips = ["10.0.123.200", "10.10.10.10"] - with pytest.raises(TypeError): - network.filter_by_networks(ips) # pylint: 
disable=no-value-for-parameter - - def test_filter_by_networks_empty_filter(self): - ips = ["10.0.123.200", "10.10.10.10"] - assert network.filter_by_networks(ips, []) == [] - - def test_filter_by_networks_ips_list(self): - ips = [ - "10.0.123.200", - "10.10.10.10", - "193.124.233.5", - "fe80::d210:cf3f:64e7:5423", - ] - networks = ["10.0.0.0/8", "fe80::/64"] - assert network.filter_by_networks(ips, networks) == [ - "10.0.123.200", - "10.10.10.10", - "fe80::d210:cf3f:64e7:5423", - ] - - def test_filter_by_networks_interfaces_dict(self): - interfaces = { - "wlan0": ["192.168.1.100", "217.5.140.67", "2001:db8::ff00:42:8329"], - "eth0": [ - "2001:0DB8:0:CD30:123:4567:89AB:CDEF", - "192.168.1.101", - "10.0.123.201", - ], - } - assert network.filter_by_networks( - interfaces, ["192.168.1.0/24", "2001:db8::/48"] - ) == { - "wlan0": ["192.168.1.100", "2001:db8::ff00:42:8329"], - "eth0": ["2001:0DB8:0:CD30:123:4567:89AB:CDEF", "192.168.1.101"], - } - - def test_filter_by_networks_catch_all(self): - ips = [ - "10.0.123.200", - "10.10.10.10", - "193.124.233.5", - "fe80::d210:cf3f:64e7:5423", - ] - assert ips == network.filter_by_networks(ips, ["0.0.0.0/0", "::/0"]) - - def test_ip_networks(self): - # We don't need to test with each platform's ifconfig/iproute2 output, - # since this test isn't testing getting the interfaces. We already have - # tests for that. 
- interface_data = network._interfaces_ifconfig(LINUX) - - # Without loopback - ret = network.ip_networks(interface_data=interface_data) - assert ret == ["10.10.8.0/22"], ret - # Without loopback, specific interface - ret = network.ip_networks(interface="eth0", interface_data=interface_data) - assert ret == ["10.10.8.0/22"], ret - # Without loopback, multiple specific interfaces - ret = network.ip_networks(interface="eth0,lo", interface_data=interface_data) - assert ret == ["10.10.8.0/22"], ret - # Without loopback, specific interface (not present) - ret = network.ip_networks(interface="eth1", interface_data=interface_data) - assert ret == [], ret - # With loopback - ret = network.ip_networks(include_loopback=True, interface_data=interface_data) - assert ret == ["10.10.8.0/22", "127.0.0.0/8"], ret - # With loopback, specific interface - ret = network.ip_networks( - interface="eth0", include_loopback=True, interface_data=interface_data - ) - assert ret == ["10.10.8.0/22"], ret - # With loopback, multiple specific interfaces - ret = network.ip_networks( - interface="eth0,lo", include_loopback=True, interface_data=interface_data - ) - assert ret == ["10.10.8.0/22", "127.0.0.0/8"], ret - # With loopback, specific interface (not present) - ret = network.ip_networks( - interface="eth1", include_loopback=True, interface_data=interface_data - ) - assert ret == [], ret - - # Verbose, without loopback - ret = network.ip_networks(verbose=True, interface_data=interface_data) - assert ret == { - "10.10.8.0/22": { - "prefixlen": 22, - "netmask": "255.255.252.0", - "num_addresses": 1024, - "address": "10.10.8.0", - }, - }, ret - # Verbose, without loopback, specific interface - ret = network.ip_networks( - interface="eth0", verbose=True, interface_data=interface_data - ) - assert ret == { - "10.10.8.0/22": { - "prefixlen": 22, - "netmask": "255.255.252.0", - "num_addresses": 1024, - "address": "10.10.8.0", - }, - }, ret - # Verbose, without loopback, multiple specific interfaces 
- ret = network.ip_networks( - interface="eth0,lo", verbose=True, interface_data=interface_data - ) - assert ret == { - "10.10.8.0/22": { - "prefixlen": 22, - "netmask": "255.255.252.0", - "num_addresses": 1024, - "address": "10.10.8.0", - }, - }, ret - # Verbose, without loopback, specific interface (not present) - ret = network.ip_networks( - interface="eth1", verbose=True, interface_data=interface_data - ) - assert ret == {}, ret - # Verbose, with loopback - ret = network.ip_networks( - include_loopback=True, verbose=True, interface_data=interface_data - ) - assert ret == { - "10.10.8.0/22": { - "prefixlen": 22, - "netmask": "255.255.252.0", - "num_addresses": 1024, - "address": "10.10.8.0", - }, - "127.0.0.0/8": { - "prefixlen": 8, - "netmask": "255.0.0.0", - "num_addresses": 16777216, - "address": "127.0.0.0", - }, - }, ret - # Verbose, with loopback, specific interface - ret = network.ip_networks( - interface="eth0", - include_loopback=True, - verbose=True, - interface_data=interface_data, - ) - assert ret == { - "10.10.8.0/22": { - "prefixlen": 22, - "netmask": "255.255.252.0", - "num_addresses": 1024, - "address": "10.10.8.0", - }, - }, ret - # Verbose, with loopback, multiple specific interfaces - ret = network.ip_networks( - interface="eth0,lo", - include_loopback=True, - verbose=True, - interface_data=interface_data, - ) - assert ret == { - "10.10.8.0/22": { - "prefixlen": 22, - "netmask": "255.255.252.0", - "num_addresses": 1024, - "address": "10.10.8.0", - }, - "127.0.0.0/8": { - "prefixlen": 8, - "netmask": "255.0.0.0", - "num_addresses": 16777216, - "address": "127.0.0.0", - }, - }, ret - # Verbose, with loopback, specific interface (not present) - ret = network.ip_networks( - interface="eth1", - include_loopback=True, - verbose=True, - interface_data=interface_data, - ) - assert ret == {}, ret - - def test_ip_networks6(self): - # We don't need to test with each platform's ifconfig/iproute2 output, - # since this test isn't testing getting the 
interfaces. We already have - # tests for that. - interface_data = network._interfaces_ifconfig(LINUX) - - # Without loopback - ret = network.ip_networks6(interface_data=interface_data) - assert ret == ["fe80::/64"], ret - # Without loopback, specific interface - ret = network.ip_networks6(interface="eth0", interface_data=interface_data) - assert ret == ["fe80::/64"], ret - # Without loopback, multiple specific interfaces - ret = network.ip_networks6(interface="eth0,lo", interface_data=interface_data) - assert ret == ["fe80::/64"], ret - # Without loopback, specific interface (not present) - ret = network.ip_networks6(interface="eth1", interface_data=interface_data) - assert ret == [], ret - # With loopback - ret = network.ip_networks6(include_loopback=True, interface_data=interface_data) - assert ret == ["::1/128", "fe80::/64"], ret - # With loopback, specific interface - ret = network.ip_networks6( - interface="eth0", include_loopback=True, interface_data=interface_data - ) - assert ret == ["fe80::/64"], ret - # With loopback, multiple specific interfaces - ret = network.ip_networks6( - interface="eth0,lo", include_loopback=True, interface_data=interface_data - ) - assert ret == ["::1/128", "fe80::/64"], ret - # With loopback, specific interface (not present) - ret = network.ip_networks6( - interface="eth1", include_loopback=True, interface_data=interface_data - ) - assert ret == [], ret - - # Verbose, without loopback - ret = network.ip_networks6(verbose=True, interface_data=interface_data) - assert ret == { - "fe80::/64": { - "prefixlen": 64, - "netmask": "ffff:ffff:ffff:ffff::", - "num_addresses": 18446744073709551616, - "address": "fe80::", - }, - }, ret - # Verbose, without loopback, specific interface - ret = network.ip_networks6( - interface="eth0", verbose=True, interface_data=interface_data - ) - assert ret == { - "fe80::/64": { - "prefixlen": 64, - "netmask": "ffff:ffff:ffff:ffff::", - "num_addresses": 18446744073709551616, - "address": "fe80::", - }, - 
}, ret - # Verbose, without loopback, multiple specific interfaces - ret = network.ip_networks6( - interface="eth0,lo", verbose=True, interface_data=interface_data - ) - assert ret == { - "fe80::/64": { - "prefixlen": 64, - "netmask": "ffff:ffff:ffff:ffff::", - "num_addresses": 18446744073709551616, - "address": "fe80::", - }, - }, ret - # Verbose, without loopback, specific interface (not present) - ret = network.ip_networks6( - interface="eth1", verbose=True, interface_data=interface_data - ) - assert ret == {}, ret - # Verbose, with loopback - ret = network.ip_networks6( - include_loopback=True, verbose=True, interface_data=interface_data - ) - assert ret == { - "fe80::/64": { - "prefixlen": 64, - "netmask": "ffff:ffff:ffff:ffff::", - "num_addresses": 18446744073709551616, - "address": "fe80::", - }, - "::1/128": { - "prefixlen": 128, - "netmask": "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", - "num_addresses": 1, - "address": "::1", - }, - }, ret - # Verbose, with loopback, specific interface - ret = network.ip_networks6( - interface="eth0", - include_loopback=True, - verbose=True, - interface_data=interface_data, - ) - assert ret == { - "fe80::/64": { - "prefixlen": 64, - "netmask": "ffff:ffff:ffff:ffff::", - "num_addresses": 18446744073709551616, - "address": "fe80::", - }, - }, ret - # Verbose, with loopback, multiple specific interfaces - ret = network.ip_networks6( - interface="eth0,lo", - include_loopback=True, - verbose=True, - interface_data=interface_data, - ) - assert ret == { - "fe80::/64": { - "prefixlen": 64, - "netmask": "ffff:ffff:ffff:ffff::", - "num_addresses": 18446744073709551616, - "address": "fe80::", - }, - "::1/128": { - "prefixlen": 128, - "netmask": "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", - "num_addresses": 1, - "address": "::1", - }, - }, ret - # Verbose, with loopback, specific interface (not present) - ret = network.ip_networks6( - interface="eth1", - include_loopback=True, - verbose=True, - interface_data=interface_data, - ) - 
assert ret == {}, ret - - def test_get_fqhostname_return(self): - """ - Test if proper hostname is used when RevDNS differ from hostname - - :return: - """ - with patch("socket.gethostname", MagicMock(return_value="hostname")), patch( - "socket.getfqdn", - MagicMock(return_value="very.long.and.complex.domain.name"), - ), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]), - ): - self.assertEqual(network.get_fqhostname(), "hostname") - - def test_get_fqhostname_return_empty_hostname(self): - """ - Test if proper hostname is used when hostname returns empty string - """ - host = "hostname" - with patch("socket.gethostname", MagicMock(return_value=host)), patch( - "socket.getfqdn", - MagicMock(return_value="very.long.and.complex.domain.name"), - ), patch( - "socket.getaddrinfo", - MagicMock( - return_value=[ - (2, 3, 0, host, ("127.0.1.1", 0)), - (2, 3, 0, "", ("127.0.1.1", 0)), - ] - ), - ): - self.assertEqual(network.get_fqhostname(), host) - - def test_ip_bracket(self): - test_ipv4 = "127.0.0.1" - test_ipv6 = "::1" - test_ipv6_uri = "[::1]" - self.assertEqual(test_ipv4, network.ip_bracket(test_ipv4)) - self.assertEqual(test_ipv6, network.ip_bracket(test_ipv6_uri, strip=True)) - self.assertEqual("[{}]".format(test_ipv6), network.ip_bracket(test_ipv6)) - self.assertEqual("[{}]".format(test_ipv6), network.ip_bracket(test_ipv6_uri)) - - ip_addr_obj = ipaddress.ip_address(test_ipv4) - self.assertEqual(test_ipv4, network.ip_bracket(ip_addr_obj)) diff --git a/tests/unit/utils/test_parsers.py b/tests/unit/utils/test_parsers.py deleted file mode 100644 index 06e75d5d7a7..00000000000 --- a/tests/unit/utils/test_parsers.py +++ /dev/null @@ -1,1283 +0,0 @@ -""" - :codeauthor: Denys Havrysh -""" - -import logging -import os -import pprint -import shutil -import tempfile - -import salt._logging -import salt.config -import salt.syspaths -import salt.utils.jid -import salt.utils.parsers -import salt.utils.platform -from 
tests.support.helpers import TstSuiteLoggingHandler -from tests.support.mock import ANY, MagicMock, patch -from tests.support.runtests import RUNTIME_VARS -from tests.support.unit import TestCase - -log = logging.getLogger(__name__) - - -class ErrorMock: # pylint: disable=too-few-public-methods - """ - Error handling - """ - - def __init__(self): - """ - init - """ - self.msg = None - - def error(self, msg): - """ - Capture error message - """ - self.msg = msg - - -class LogImplMock: - """ - Logger setup - """ - - def __init__(self): - """ - init - """ - self.log_level_console = None - self.log_file = None - self.log_level_logfile = None - self.config = self.original_config = None - logging_options = salt._logging.get_logging_options_dict() - if logging_options: - self.config = logging_options.copy() - self.original_config = self.config.copy() - self.temp_log_level = None - self._console_handler_configured = False - self._extended_logging_configured = False - self._logfile_handler_configured = False - self._real_set_logging_options_dict = salt._logging.set_logging_options_dict - self._real_get_logging_options_dict = salt._logging.get_logging_options_dict - self._real_setup_logfile_handler = salt._logging.setup_logfile_handler - - def _destroy(self): - salt._logging.set_logging_options_dict.__options_dict__ = self.original_config - salt._logging.shutdown_logfile_handler() - - def setup_temp_handler(self, log_level=None): - """ - Set temp handler loglevel - """ - log.debug("Setting temp handler log level to: %s", log_level) - self.temp_log_level = log_level - - def is_console_handler_configured(self): - log.debug("Calling is_console_handler_configured") - return self._console_handler_configured - - def setup_console_handler( - self, log_level="error", **kwargs - ): # pylint: disable=unused-argument - """ - Set console loglevel - """ - log.debug("Setting console handler log level to: %s", log_level) - self.log_level_console = log_level - 
self._console_handler_configured = True - - def shutdown_console_handler(self): - log.debug("Calling shutdown_console_handler") - self._console_handler_configured = False - - def is_extended_logging_configured(self): - log.debug("Calling is_extended_logging_configured") - return self._extended_logging_configured - - def setup_extended_logging(self, opts): - """ - Set opts - """ - log.debug("Calling setup_extended_logging") - self._extended_logging_configured = True - - def shutdown_extended_logging(self): - log.debug("Calling shutdown_extended_logging") - self._extended_logging_configured = False - - def is_logfile_handler_configured(self): - log.debug("Calling is_logfile_handler_configured") - return self._logfile_handler_configured - - def setup_logfile_handler( - self, log_path, log_level=None, **kwargs - ): # pylint: disable=unused-argument - """ - Set logfile and loglevel - """ - log.debug("Setting log file handler path to: %s", log_path) - log.debug("Setting log file handler log level to: %s", log_level) - self.log_file = log_path - self.log_level_logfile = log_level - self._real_setup_logfile_handler(log_path, log_level=log_level, **kwargs) - self._logfile_handler_configured = True - - def shutdown_logfile_handler(self): - log.debug("Calling shutdown_logfile_handler") - self._logfile_handler_configured = False - - def get_logging_options_dict(self): - log.debug("Calling get_logging_options_dict") - return self.config - - def set_logging_options_dict(self, opts): - log.debug("Calling set_logging_options_dict") - self._real_set_logging_options_dict(opts) - self.config = self._real_get_logging_options_dict() - log.debug("Logging options dict:\n%s", pprint.pformat(self.config)) - - def setup_log_granular_levels(self, opts): - log.debug("Calling setup_log_granular_levels") - - def setup_logging(self): - log.debug("Mocked setup_logging called") - # Wether daemonizing or not, either on the main process or on a separate process - # The log file is going to be 
configured. - # The console is the only handler not configured if daemonizing - - # These routines are what happens on salt._logging.setup_logging - opts = self.get_logging_options_dict() - - if ( - opts.get("configure_console_logger", True) - and not self.is_console_handler_configured() - ): - self.setup_console_handler( - log_level=opts["log_level"], - log_format=opts["log_fmt_console"], - date_format=opts["log_datefmt"], - ) - if ( - opts.get("configure_file_logger", True) - and not self.is_logfile_handler_configured() - ): - log_file_level = opts["log_level_logfile"] or opts["log_level"] - if log_file_level != "quiet": - self.setup_logfile_handler( - log_path=opts[opts["log_file_key"]], - log_level=log_file_level, - log_format=opts["log_fmt_logfile"], - date_format=opts["log_datefmt_logfile"], - max_bytes=opts["log_rotate_max_bytes"], - backup_count=opts["log_rotate_backup_count"], - user=opts["user"], - ) - if not self.is_extended_logging_configured(): - self.setup_extended_logging(opts) - self.setup_log_granular_levels(opts["log_granular_levels"]) - - -class ObjectView: # pylint: disable=too-few-public-methods - """ - Dict object view - """ - - def __init__(self, d): - self.__dict__ = d - - -class ParserBase: - """ - Unit Tests for Log Level Mixin with Salt parsers - """ - - args = [] - - log_impl = None - - # Set config option names - loglevel_config_setting_name = "log_level" - logfile_config_setting_name = "log_file" - logfile_loglevel_config_setting_name = ( - "log_level_logfile" # pylint: disable=invalid-name - ) - - @classmethod - def setUpClass(cls): - cls.root_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP) - - @classmethod - def tearDownClass(cls): - shutil.rmtree(cls.root_dir, ignore_errors=True) - - def setup_log(self): - """ - Mock logger functions - """ - testing_config = self.default_config.copy() - testing_config["root_dir"] = self.root_dir - for name in ("pki_dir", "cachedir"): - testing_config[name] = name - 
testing_config[self.logfile_config_setting_name] = getattr( - self, self.logfile_config_setting_name, self.log_file - ) - self.testing_config = testing_config - self.addCleanup(setattr, self, "testing_config", None) - - self.log_impl = LogImplMock() - self.addCleanup(self.log_impl._destroy) - self.addCleanup(setattr, self, "log_impl", None) - - mocked_functions = {} - for name in dir(self.log_impl): - if name.startswith("_"): - continue - func = getattr(self.log_impl, name) - if not callable(func): - continue - mocked_functions[name] = func - patcher = patch.multiple(salt._logging, **mocked_functions) - patcher.start() - self.addCleanup(patcher.stop) - - # log level configuration tests - - def test_get_log_level_cli(self): - """ - Tests that log level match command-line specified value - """ - # Set defaults - default_log_level = self.testing_config[self.loglevel_config_setting_name] - - # Set log level in CLI - log_level = "critical" - args = ["--log-level", log_level] + self.args - - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=self.testing_config)): - parser.parse_args(args) - - console_log_level = getattr(parser.options, self.loglevel_config_setting_name) - - # Check console log level setting - self.assertEqual(console_log_level, log_level) - # Check console logger log level - self.assertEqual(self.log_impl.log_level_console, log_level) - self.assertEqual( - self.log_impl.config[self.loglevel_config_setting_name], log_level - ) - self.assertEqual(self.log_impl.temp_log_level, log_level) - # Check log file logger log level - self.assertEqual(self.log_impl.log_level_logfile, default_log_level) - - def test_get_log_level_config(self): - """ - Tests that log level match the configured value - """ - args = self.args - - # Set log level in config - log_level = "info" - opts = self.testing_config.copy() - opts.update({self.loglevel_config_setting_name: log_level}) - - parser = self.parser() - with patch(self.config_func, 
MagicMock(return_value=opts)): - parser.parse_args(args) - - console_log_level = getattr(parser.options, self.loglevel_config_setting_name) - - # Check console log level setting - self.assertEqual(console_log_level, log_level) - # Check console logger log level - self.assertEqual(self.log_impl.log_level_console, log_level) - self.assertEqual( - self.log_impl.config[self.loglevel_config_setting_name], log_level - ) - self.assertEqual(self.log_impl.temp_log_level, "error") - # Check log file logger log level - self.assertEqual(self.log_impl.log_level_logfile, log_level) - - def test_get_log_level_default(self): - """ - Tests that log level match the default value - """ - # Set defaults - log_level = default_log_level = self.testing_config[ - self.loglevel_config_setting_name - ] - - args = self.args - - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=self.testing_config)): - parser.parse_args(args) - - console_log_level = getattr(parser.options, self.loglevel_config_setting_name) - - # Check log level setting - self.assertEqual(console_log_level, log_level) - # Check console logger log level - self.assertEqual(self.log_impl.log_level_console, log_level) - # Check extended logger - self.assertEqual( - self.log_impl.config[self.loglevel_config_setting_name], log_level - ) - self.assertEqual(self.log_impl.temp_log_level, "error") - # Check log file logger - self.assertEqual(self.log_impl.log_level_logfile, default_log_level) - # Check help message - self.assertIn( - "Default: '{}'.".format(default_log_level), - parser.get_option("--log-level").help, - ) - - # log file configuration tests - - def test_get_log_file_cli(self): - """ - Tests that log file match command-line specified value - """ - # Set defaults - log_level = self.testing_config[self.loglevel_config_setting_name] - - # Set log file in CLI - log_file = "{}_cli.log".format(self.log_file) - args = ["--log-file", log_file] + self.args - - parser = self.parser() - with 
patch(self.config_func, MagicMock(return_value=self.testing_config)): - parser.parse_args(args) - - log_file_option = getattr(parser.options, self.logfile_config_setting_name) - - # Check console logger - self.assertEqual(self.log_impl.log_level_console, log_level) - # Check extended logger - self.assertEqual( - self.log_impl.config[self.loglevel_config_setting_name], log_level - ) - self.assertEqual( - self.log_impl.config[self.logfile_config_setting_name], log_file - ) - # Check temp logger - self.assertEqual(self.log_impl.temp_log_level, "error") - # Check log file setting - self.assertEqual(log_file_option, log_file) - # Check log file logger - self.assertEqual(self.log_impl.log_file, log_file) - - def test_get_log_file_config(self): - """ - Tests that log file match the configured value - """ - # Set defaults - log_level = self.testing_config[self.loglevel_config_setting_name] - - args = self.args - - # Set log file in config - log_file = "{}_config.log".format(self.log_file) - opts = self.testing_config.copy() - opts.update({self.logfile_config_setting_name: log_file}) - - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=opts)): - parser.parse_args(args) - - log_file_option = getattr(parser.options, self.logfile_config_setting_name) - - # Check console logger - self.assertEqual(self.log_impl.log_level_console, log_level) - # Check extended logger - self.assertEqual( - self.log_impl.config[self.loglevel_config_setting_name], log_level - ) - self.assertEqual( - self.log_impl.config[self.logfile_config_setting_name], log_file - ) - # Check temp logger - self.assertEqual(self.log_impl.temp_log_level, "error") - # Check log file setting - self.assertEqual(log_file_option, log_file) - # Check log file logger - self.assertEqual(self.log_impl.log_file, log_file) - - def test_get_log_file_default(self): - """ - Tests that log file match the default value - """ - # Set defaults - log_level = 
self.testing_config[self.loglevel_config_setting_name] - log_file = self.testing_config[self.logfile_config_setting_name] - default_log_file = self.default_config[self.logfile_config_setting_name] - - args = self.args - - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=self.testing_config)): - parser.parse_args(args) - - log_file_option = getattr(parser.options, self.logfile_config_setting_name) - - # Check console logger - self.assertEqual(self.log_impl.log_level_console, log_level) - # Check extended logger - self.assertEqual( - self.log_impl.config[self.loglevel_config_setting_name], log_level - ) - self.assertEqual( - self.log_impl.config[self.logfile_config_setting_name], log_file - ) - # Check temp logger - self.assertEqual(self.log_impl.temp_log_level, "error") - # Check log file setting - self.assertEqual(log_file_option, log_file) - # Check log file logger - self.assertEqual(self.log_impl.log_file, log_file) - # Check help message - self.assertIn( - "Default: '{}'.".format(default_log_file), - parser.get_option("--log-file").help, - ) - - # log file log level configuration tests - - def test_get_log_file_level_cli(self): - """ - Tests that file log level match command-line specified value - """ - # Set defaults - default_log_level = self.testing_config[self.loglevel_config_setting_name] - - # Set log file level in CLI - log_level_logfile = "error" - args = ["--log-file-level", log_level_logfile] + self.args - - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=self.testing_config)): - parser.parse_args(args) - - log_level_logfile_option = getattr( - parser.options, self.logfile_loglevel_config_setting_name - ) - - # Check console logger - self.assertEqual(self.log_impl.log_level_console, default_log_level) - # Check extended logger - self.assertEqual( - self.log_impl.config[self.loglevel_config_setting_name], - default_log_level, - ) - self.assertEqual( - 
self.log_impl.config[self.logfile_loglevel_config_setting_name], - log_level_logfile, - ) - # Check temp logger - self.assertEqual(self.log_impl.temp_log_level, "error") - # Check log file level setting - self.assertEqual(log_level_logfile_option, log_level_logfile) - # Check log file logger - self.assertEqual(self.log_impl.log_level_logfile, log_level_logfile) - - def test_get_log_file_level_config(self): - """ - Tests that log file level match the configured value - """ - # Set defaults - log_level = self.testing_config[self.loglevel_config_setting_name] - - args = self.args - - # Set log file level in config - log_level_logfile = "info" - opts = self.testing_config.copy() - opts.update({self.logfile_loglevel_config_setting_name: log_level_logfile}) - - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=opts)): - parser.parse_args(args) - - log_level_logfile_option = getattr( - parser.options, self.logfile_loglevel_config_setting_name - ) - - # Check console logger - self.assertEqual(self.log_impl.log_level_console, log_level) - # Check extended logger - self.assertEqual( - self.log_impl.config[self.loglevel_config_setting_name], log_level - ) - self.assertEqual( - self.log_impl.config[self.logfile_loglevel_config_setting_name], - log_level_logfile, - ) - # Check temp logger - self.assertEqual(self.log_impl.temp_log_level, "error") - # Check log file level setting - self.assertEqual(log_level_logfile_option, log_level_logfile) - # Check log file logger - self.assertEqual(self.log_impl.log_level_logfile, log_level_logfile) - - def test_get_log_file_level_default(self): - """ - Tests that log file level match the default value - """ - # Set defaults - default_log_level = self.testing_config[self.loglevel_config_setting_name] - - log_level = default_log_level - log_level_logfile = default_log_level - - args = self.args - - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=self.testing_config)): - 
parser.parse_args(args) - - log_level_logfile_option = getattr( - parser.options, self.logfile_loglevel_config_setting_name - ) - - # Check console logger - self.assertEqual(self.log_impl.log_level_console, log_level) - # Check extended logger - self.assertEqual( - self.log_impl.config[self.loglevel_config_setting_name], log_level - ) - self.assertEqual( - self.log_impl.config[self.logfile_loglevel_config_setting_name], - log_level_logfile, - ) - # Check temp logger - self.assertEqual(self.log_impl.temp_log_level, "error") - # Check log file level setting - self.assertEqual(log_level_logfile_option, log_level_logfile) - # Check log file logger - self.assertEqual(self.log_impl.log_level_logfile, log_level_logfile) - # Check help message - self.assertIn( - "Default: '{}'.".format(default_log_level), - parser.get_option("--log-file-level").help, - ) - - def test_get_console_log_level_with_file_log_level( - self, - ): # pylint: disable=invalid-name - """ - Tests that both console log level and log file level setting are working together - """ - log_level = "critical" - log_level_logfile = "debug" - - args = ["--log-file-level", log_level_logfile] + self.args - - opts = self.testing_config.copy() - opts.update({self.loglevel_config_setting_name: log_level}) - - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=opts)): - parser.parse_args(args) - - log_level_logfile_option = getattr( - parser.options, self.logfile_loglevel_config_setting_name - ) - - # Check console logger - self.assertEqual(self.log_impl.log_level_console, log_level) - # Check extended logger - self.assertEqual( - self.log_impl.config[self.loglevel_config_setting_name], log_level - ) - self.assertEqual( - self.log_impl.config[self.logfile_loglevel_config_setting_name], - log_level_logfile, - ) - # Check temp logger - self.assertEqual(self.log_impl.temp_log_level, "error") - # Check log file level setting - self.assertEqual(log_level_logfile_option, log_level_logfile) - # Check 
log file logger - self.assertEqual(self.log_impl.log_level_logfile, log_level_logfile) - - def test_log_created(self): - """ - Tests that log file is created - """ - args = self.args - log_file = self.log_file - log_file_name = self.logfile_config_setting_name - opts = self.testing_config.copy() - opts.update({"log_file": log_file}) - if log_file_name != "log_file": - opts.update({log_file_name: getattr(self, log_file_name)}) - - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=opts)): - parser.parse_args(args) - - if log_file_name == "log_file": - self.assertGreaterEqual(os.path.getsize(log_file), 0) - else: - self.assertGreaterEqual(os.path.getsize(getattr(self, log_file_name)), 0) - - def test_callbacks_uniqueness(self): - """ - Test that the callbacks are only added once, no matter - how many instances of the parser we create - """ - mixin_container_names = ( - "_mixin_setup_funcs", - "_mixin_process_funcs", - "_mixin_after_parsed_funcs", - "_mixin_before_exit_funcs", - ) - parser = self.parser() - nums_1 = {} - for cb_container in mixin_container_names: - obj = getattr(parser, cb_container) - nums_1[cb_container] = len(obj) - - # The next time we instantiate the parser, the counts should be equal - parser = self.parser() - nums_2 = {} - for cb_container in mixin_container_names: - obj = getattr(parser, cb_container) - nums_2[cb_container] = len(obj) - self.assertDictEqual(nums_1, nums_2) - - def test_verify_log_warning_logged(self): - args = ["--log-level", "debug"] + self.args - with TstSuiteLoggingHandler(level=logging.DEBUG) as handler: - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=self.testing_config)): - parser.parse_args(args) - self.assertIn( - "WARNING:Insecure logging configuration detected! 
Sensitive data may be logged.", - handler.messages, - ) - - -class MasterOptionParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Master options - """ - - def setUp(self): - """ - Setting up - """ - # Set defaults - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_master_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - # Function to patch - self.config_func = "salt.config.master_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.MasterOptionParser - self.addCleanup(delattr, self, "parser") - - -class MinionOptionParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Minion options - """ - - def setUp(self): - """ - Setting up - """ - # Set defaults - self.default_config = salt.config.DEFAULT_MINION_OPTS.copy() - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_minion_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - # Function to patch - self.config_func = "salt.config.minion_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.MinionOptionParser - self.addCleanup(delattr, self, "parser") - - -class ProxyMinionOptionParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Proxy Minion options - """ - - def setUp(self): - """ - Setting up - """ - # Set defaults - self.default_config = 
salt.config.DEFAULT_MINION_OPTS.copy() - self.default_config.update(salt.config.DEFAULT_PROXY_MINION_OPTS) - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_proxy_minion_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - # Function to patch - self.config_func = "salt.config.proxy_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.ProxyMinionOptionParser - self.addCleanup(delattr, self, "parser") - - -class SyndicOptionParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Syndic options - """ - - def setUp(self): - """ - Setting up - """ - # Set config option names - self.logfile_config_setting_name = "syndic_log_file" - - # Set defaults - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_syndic_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - syndic_log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_syndic_log", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.syndic_log_file = syndic_log_file.name - syndic_log_file.close() - # Function to patch - self.config_func = "salt.config.syndic_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.SyndicOptionParser - self.addCleanup(delattr, self, "parser") - - -class SaltCMDOptionParserTestCase(ParserBase, TestCase): - """ - Tests 
parsing Salt CLI options - """ - - def setUp(self): - """ - Setting up - """ - # Set mandatory CLI options - self.args = ["foo", "bar.baz"] - - # Set defaults - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_cmd_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - # Function to patch - self.config_func = "salt.config.client_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.SaltCMDOptionParser - self.addCleanup(delattr, self, "parser") - - -class SaltCPOptionParserTestCase(ParserBase, TestCase): - """ - Tests parsing salt-cp options - """ - - def setUp(self): - """ - Setting up - """ - # Set mandatory CLI options - self.args = ["foo", "bar", "baz"] - - # Set defaults - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_cp_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - # Function to patch - self.config_func = "salt.config.master_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.SaltCPOptionParser - self.addCleanup(delattr, self, "parser") - - -class SaltKeyOptionParserTestCase(ParserBase, TestCase): - """ - Tests parsing salt-key options - """ - - def setUp(self): - """ - Setting up - """ - # Set config option names - self.logfile_config_setting_name = 
"key_logfile" - - # Set defaults - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_key_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - key_logfile = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_key_logfile", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.key_logfile = key_logfile.name - key_logfile.close() - # Function to patch - self.config_func = "salt.config.client_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.SaltKeyOptionParser - self.addCleanup(delattr, self, "parser") - - -class SaltCallOptionParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Minion options - """ - - def setUp(self): - """ - Setting up - """ - # Set mandatory CLI options - self.args = ["foo.bar"] - - # Set defaults - self.default_config = salt.config.DEFAULT_MINION_OPTS.copy() - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_call_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - # Function to patch - self.config_func = "salt.config.minion_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.SaltCallOptionParser - self.addCleanup(delattr, self, "parser") - - -class SaltRunOptionParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Master options - """ - - def setUp(self): - """ - Setting 
up - """ - # Set mandatory CLI options - self.args = ["foo.bar"] - - # Set defaults - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_run_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - # Function to patch - self.config_func = "salt.config.master_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.SaltRunOptionParser - self.addCleanup(delattr, self, "parser") - - -class SaltSSHOptionParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Master options - """ - - def setUp(self): - """ - Setting up - """ - # Set mandatory CLI options - self.args = ["foo", "bar.baz"] - - # Set config option names - self.logfile_config_setting_name = "ssh_log_file" - - # Set defaults - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_ssh_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - ssh_log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_ssh_logfile", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.ssh_log_file = ssh_log_file.name - ssh_log_file.close() - # Function to patch - self.config_func = "salt.config.master_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.SaltSSHOptionParser - self.addCleanup(delattr, self, "parser") - - 
-class SaltCloudParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Cloud options - """ - - def setUp(self): - """ - Setting up - """ - # Set mandatory CLI options - self.args = ["-p", "foo", "bar"] - - # Set default configs - # Cloud configs are merged with master configs in - # config/__init__.py, so we'll do that here as well - # As we need the 'user' key later on. - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.default_config.update(salt.config.DEFAULT_CLOUD_OPTS) - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_cloud_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - # Function to patch - self.config_func = "salt.config.cloud_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.SaltCloudParser - self.addCleanup(delattr, self, "parser") - - -class SPMParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Cloud options - """ - - def setUp(self): - """ - Setting up - """ - # Set mandatory CLI options - self.args = ["foo", "bar"] - - # Set config option names - self.logfile_config_setting_name = "spm_logfile" - - # Set defaults - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.default_config.update(salt.config.DEFAULT_SPM_OPTS) - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_spm_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - spm_logfile = 
tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_spm_logfile", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.spm_logfile = spm_logfile.name - spm_logfile.close() - # Function to patch - self.config_func = "salt.config.spm_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.SPMParser - self.addCleanup(delattr, self, "parser") - - -class SaltAPIParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Cloud options - """ - - def setUp(self): - """ - Setting up - """ - # Set mandatory CLI options - self.args = [] - - # Set config option names - self.logfile_config_setting_name = "api_logfile" - - # Set defaults - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.default_config.update(salt.config.DEFAULT_API_OPTS) - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_api_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - api_logfile = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_api_logfile", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.api_logfile = api_logfile.name - api_logfile.close() - # Function to patch - self.config_func = "salt.config.api_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.SaltAPIParser - self.addCleanup(delattr, self, "parser") - - -class DaemonMixInTestCase(TestCase): - """ - Tests the PIDfile deletion in the DaemonMixIn. 
- """ - - def setUp(self): - """ - Setting up - """ - # Setup mixin - self.daemon_mixin = salt.utils.parsers.DaemonMixIn() - self.daemon_mixin.config = {} - self.daemon_mixin.config["pidfile"] = "/some/fake.pid" - - def tearDown(self): - """ - Tear down test - :return: - """ - del self.daemon_mixin - - @patch("os.unlink", MagicMock()) - @patch("os.path.isfile", MagicMock(return_value=True)) - @patch("salt.utils.parsers.log", MagicMock()) - def test_pid_file_deletion(self): - """ - PIDfile deletion without exception. - """ - self.daemon_mixin._mixin_before_exit() - assert salt.utils.parsers.os.unlink.call_count == 1 - salt.utils.parsers.log.info.assert_not_called() - salt.utils.parsers.log.debug.assert_not_called() - - @patch("os.unlink", MagicMock(side_effect=OSError())) - @patch("os.path.isfile", MagicMock(return_value=True)) - @patch("salt.utils.parsers.log", MagicMock()) - def test_pid_deleted_oserror_as_root(self): - """ - PIDfile deletion with exception, running as root. - """ - if salt.utils.platform.is_windows(): - patch_args = ( - "salt.utils.win_functions.is_admin", - MagicMock(return_value=True), - ) - else: - patch_args = ("os.getuid", MagicMock(return_value=0)) - - with patch(*patch_args): - self.daemon_mixin._mixin_before_exit() - assert salt.utils.parsers.os.unlink.call_count == 1 - salt.utils.parsers.log.info.assert_called_with( - "PIDfile(%s) could not be deleted: %s", - format(self.daemon_mixin.config["pidfile"], ""), - ANY, - exc_info_on_loglevel=logging.DEBUG, - ) - - @patch("os.unlink", MagicMock(side_effect=OSError())) - @patch("os.path.isfile", MagicMock(return_value=True)) - @patch("salt.utils.parsers.log", MagicMock()) - def test_pid_deleted_oserror_as_non_root(self): - """ - PIDfile deletion with exception, running as non-root. 
- """ - if salt.utils.platform.is_windows(): - patch_args = ( - "salt.utils.win_functions.is_admin", - MagicMock(return_value=False), - ) - else: - patch_args = ("os.getuid", MagicMock(return_value=1000)) - - with patch(*patch_args): - self.daemon_mixin._mixin_before_exit() - assert salt.utils.parsers.os.unlink.call_count == 1 - salt.utils.parsers.log.info.assert_not_called() - salt.utils.parsers.log.debug.assert_not_called() diff --git a/tools/__init__.py b/tools/__init__.py index 01f3e188441..8b08111dc8a 100644 --- a/tools/__init__.py +++ b/tools/__init__.py @@ -1,7 +1,36 @@ import logging +import pathlib +import sys import ptscripts +from ptscripts.parser import DefaultRequirementsConfig +from ptscripts.virtualenv import VirtualEnvConfig +REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent +REQUIREMENTS_FILES_PATH = REPO_ROOT / "requirements" +STATIC_REQUIREMENTS_PATH = REQUIREMENTS_FILES_PATH / "static" +CI_REQUIREMENTS_FILES_PATH = ( + STATIC_REQUIREMENTS_PATH / "ci" / "py{}.{}".format(*sys.version_info) +) +PKG_REQUIREMENTS_FILES_PATH = ( + STATIC_REQUIREMENTS_PATH / "pkg" / "py{}.{}".format(*sys.version_info) +) +DEFAULT_REQS_CONFIG = DefaultRequirementsConfig( + pip_args=[ + f"--constraint={PKG_REQUIREMENTS_FILES_PATH / 'linux.txt'}", + ], + requirements_files=[ + REQUIREMENTS_FILES_PATH / "base.txt", + CI_REQUIREMENTS_FILES_PATH / "tools.txt", + ], +) +RELEASE_VENV_CONFIG = VirtualEnvConfig( + requirements_files=[ + CI_REQUIREMENTS_FILES_PATH / "tools-virustotal.txt", + ], + add_as_extra_site_packages=True, +) +ptscripts.set_default_requirements_config(DEFAULT_REQS_CONFIG) ptscripts.register_tools_module("tools.changelog") ptscripts.register_tools_module("tools.ci") ptscripts.register_tools_module("tools.docs") @@ -10,8 +39,14 @@ ptscripts.register_tools_module("tools.pkg.repo") ptscripts.register_tools_module("tools.pkg.build") ptscripts.register_tools_module("tools.pkg.repo.create") ptscripts.register_tools_module("tools.pkg.repo.publish") 
-ptscripts.register_tools_module("tools.pre_commit") -ptscripts.register_tools_module("tools.release") +ptscripts.register_tools_module("tools.precommit") +ptscripts.register_tools_module("tools.precommit.changelog") +ptscripts.register_tools_module("tools.precommit.workflows") +ptscripts.register_tools_module("tools.precommit.docs") +ptscripts.register_tools_module("tools.precommit.docstrings") +ptscripts.register_tools_module("tools.precommit.filemap") +ptscripts.register_tools_module("tools.precommit.loader") +ptscripts.register_tools_module("tools.release", venv_config=RELEASE_VENV_CONFIG) ptscripts.register_tools_module("tools.testsuite") ptscripts.register_tools_module("tools.testsuite.download") ptscripts.register_tools_module("tools.vm") diff --git a/tools/changelog.py b/tools/changelog.py index d4d8b662829..12bbba22d3c 100644 --- a/tools/changelog.py +++ b/tools/changelog.py @@ -8,7 +8,6 @@ import datetime import logging import os import pathlib -import re import sys import textwrap @@ -17,19 +16,6 @@ from ptscripts import Context, command_group from tools.utils import REPO_ROOT, Version -CHANGELOG_LIKE_RE = re.compile(r"([\d]+)\.([a-z]+)$") -CHANGELOG_TYPES = ( - "removed", - "deprecated", - "changed", - "fixed", - "added", - "security", -) -CHANGELOG_ENTRY_RE = re.compile( - r"([\d]+|(CVE|cve)-[\d]{{4}}-[\d]+)\.({})(\.md)?$".format("|".join(CHANGELOG_TYPES)) -) - log = logging.getLogger(__name__) # Define the command group @@ -50,103 +36,6 @@ changelog = command_group( ) -@changelog.command( - name="pre-commit-checks", - arguments={ - "files": { - "nargs": "*", - } - }, -) -def check_changelog_entries(ctx: Context, files: list[pathlib.Path]): - """ - Run pre-commit checks on changelog snippets. 
- """ - docs_path = REPO_ROOT / "doc" - tests_integration_files_path = REPO_ROOT / "tests" / "integration" / "files" - changelog_entries_path = REPO_ROOT / "changelog" - exitcode = 0 - for entry in files: - path = pathlib.Path(entry).resolve() - # Is it under changelog/ - try: - path.relative_to(changelog_entries_path) - if path.name in (".keep", ".template.jinja"): - # This is the file we use so git doesn't delete the changelog/ directory - continue - # Is it named properly - if not CHANGELOG_ENTRY_RE.match(path.name): - ctx.error( - "The changelog entry '{}' should have one of the following extensions: {}.".format( - path.relative_to(REPO_ROOT), - ", ".join(f"{ext}.md" for ext in CHANGELOG_TYPES), - ), - ) - exitcode = 1 - continue - if path.suffix != ".md": - ctx.error( - f"Please rename '{path.relative_to(REPO_ROOT)}' to " - f"'{path.relative_to(REPO_ROOT)}.md'" - ) - exitcode = 1 - continue - except ValueError: - # No, carry on - pass - # Does it look like a changelog entry - if CHANGELOG_LIKE_RE.match(path.name) and not CHANGELOG_ENTRY_RE.match( - path.name - ): - try: - # Is this under doc/ - path.relative_to(docs_path) - # Yes, carry on - continue - except ValueError: - # No, resume the check - pass - try: - # Is this under tests/integration/files - path.relative_to(tests_integration_files_path) - # Yes, carry on - continue - except ValueError: - # No, resume the check - pass - ctx.error( - "The changelog entry '{}' should have one of the following extensions: {}.".format( - path.relative_to(REPO_ROOT), - ", ".join(f"{ext}.md" for ext in CHANGELOG_TYPES), - ) - ) - exitcode = 1 - continue - # Is it a changelog entry - if not CHANGELOG_ENTRY_RE.match(path.name): - # No? Carry on - continue - # Is the changelog entry in the right path? 
- try: - path.relative_to(changelog_entries_path) - except ValueError: - exitcode = 1 - ctx.error( - "The changelog entry '{}' should be placed under '{}/', not '{}'".format( - path.name, - changelog_entries_path.relative_to(REPO_ROOT), - path.relative_to(REPO_ROOT).parent, - ) - ) - if path.suffix != ".md": - ctx.error( - f"Please rename '{path.relative_to(REPO_ROOT)}' to " - f"'{path.relative_to(REPO_ROOT)}.md'" - ) - exitcode = 1 - ctx.exit(exitcode) - - def _get_changelog_contents(ctx: Context, version: Version): """ Return the full changelog generated by towncrier. diff --git a/tools/ci.py b/tools/ci.py index 136b6cee459..9c69ebb7a44 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -25,6 +25,17 @@ if sys.version_info < (3, 11): else: from typing import NotRequired, TypedDict # pylint: disable=no-name-in-module +try: + import boto3 +except ImportError: + print( + "\nPlease run 'python -m pip install -r " + "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info), + file=sys.stderr, + flush=True, + ) + raise + log = logging.getLogger(__name__) # Define the command group @@ -49,7 +60,7 @@ def print_gh_event(ctx: Context): try: gh_event = json.loads(open(gh_event_path).read()) except Exception as exc: - ctx.error(f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc) + ctx.error(f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc) # type: ignore[arg-type] ctx.exit(1) ctx.info("GH Event Payload:") @@ -171,7 +182,7 @@ def runner_types(ctx: Context, event_name: str): try: gh_event = json.loads(open(gh_event_path).read()) except Exception as exc: - ctx.error(f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc) + ctx.error(f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc) # type: ignore[arg-type] ctx.exit(1) ctx.info("GH Event Payload:") @@ -319,7 +330,7 @@ def define_jobs( gh_event = json.loads(open(gh_event_path).read()) except Exception as exc: ctx.error( - f"Could not load the GH 
Event payload from {gh_event_path!r}:\n", exc + f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc # type: ignore[arg-type] ) ctx.exit(1) @@ -466,7 +477,7 @@ def define_testrun(ctx: Context, event_name: str, changed_files: pathlib.Path): gh_event = json.loads(open(gh_event_path).read()) except Exception as exc: ctx.error( - f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc + f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc # type: ignore[arg-type] ) ctx.exit(1) @@ -626,9 +637,18 @@ def define_testrun(ctx: Context, event_name: str, changed_files: pathlib.Path): "workflow": { "help": "Which workflow is running", }, + "fips": { + "help": "Include FIPS entries in the matrix", + }, }, ) -def matrix(ctx: Context, distro_slug: str, full: bool = False, workflow: str = "ci"): +def matrix( + ctx: Context, + distro_slug: str, + full: bool = False, + workflow: str = "ci", + fips: bool = False, +): """ Generate the test matrix. """ @@ -652,6 +672,9 @@ def matrix(ctx: Context, distro_slug: str, full: bool = False, workflow: str = " if transport == "tcp": if distro_slug not in ( "centosstream-9", + "centosstream-9-arm64", + "photonos-5", + "photonos-5-arm64", "ubuntu-22.04", "ubuntu-22.04-arm64", ): @@ -675,8 +698,18 @@ def matrix(ctx: Context, distro_slug: str, full: bool = False, workflow: str = " "test-group-count": splits, } ) + if fips is True and distro_slug.startswith( + ("photonos-4", "photonos-5") + ): + # Repeat the last one, but with fips + _matrix.append({"fips": "fips", **_matrix[-1]}) else: _matrix.append({"transport": transport, "tests-chunk": chunk}) + if fips is True and distro_slug.startswith( + ("photonos-4", "photonos-5") + ): + # Repeat the last one, but with fips + _matrix.append({"fips": "fips", **_matrix[-1]}) ctx.info("Generated matrix:") ctx.print(_matrix, soft_wrap=True) @@ -702,6 +735,9 @@ def matrix(ctx: Context, distro_slug: str, full: bool = False, workflow: str = " "nargs": "+", 
"required": True, }, + "fips": { + "help": "Include FIPS entries in the matrix", + }, }, ) def pkg_matrix( @@ -709,6 +745,7 @@ def pkg_matrix( distro_slug: str, pkg_type: str, testing_releases: list[tools.utils.Version] = None, + fips: bool = False, ): """ Generate the test matrix. @@ -718,35 +755,6 @@ def pkg_matrix( ctx.warn("The 'GITHUB_OUTPUT' variable is not set.") if TYPE_CHECKING: assert testing_releases - _matrix = [] - sessions = [ - "install", - ] - if ( - distro_slug - not in [ - "debian-11-arm64", - # TODO: remove debian 12 once debian 12 pkgs are released - "debian-12-arm64", - "debian-12", - "ubuntu-20.04-arm64", - "ubuntu-22.04-arm64", - "photonos-3", - "photonos-3-arm64", - "photonos-4", - "photonos-4-arm64", - "photonos-5", - "photonos-5-arm64", - ] - and pkg_type != "MSI" - ): - # These OS's never had arm64 packages built for them - # with the tiamat onedir packages. - # we will need to ensure when we release 3006.0 - # we allow for 3006.0 jobs to run, because then - # we will have arm64 onedir packages to upgrade from - sessions.append("upgrade") - sessions.append("downgrade") still_testing_3005 = False for release_version in testing_releases: @@ -758,57 +766,111 @@ def pkg_matrix( if still_testing_3005 is False: ctx.error( f"No longer testing 3005.x releases please update {__file__} " - "and remove this error and the logic above the error" + "and remove this error and the logic above the error. There may " + "be other places that need code removed as well." 
) ctx.exit(1) - # TODO: Remove this block when we reach version 3009.0, we will no longer be testing upgrades from classic packages - if ( - distro_slug - not in [ - "centosstream-9", - "debian-11-arm64", - "debian-12-arm64", - "debian-12", - "ubuntu-22.04", - "ubuntu-22.04-arm64", - "photonos-3", - "photonos-3-arm64", - "photonos-4", - "photonos-4-arm64", - "photonos-5", - "photonos-5-arm64", - ] - and pkg_type != "MSI" - ): - # Packages for these OSs where never built for classic previously - sessions.append("upgrade-classic") - sessions.append("downgrade-classic") + adjusted_versions = [] + for ver in testing_releases: + if ver < tools.utils.Version("3006.0"): + adjusted_versions.append((ver, "classic")) + adjusted_versions.append((ver, "tiamat")) + else: + adjusted_versions.append((ver, "relenv")) + ctx.info(f"Will look for the following versions: {adjusted_versions}") - for session in sessions: - versions: list[str | None] = [None] - if session in ("upgrade", "downgrade"): - versions = [str(version) for version in testing_releases] - elif session in ("upgrade-classic", "downgrade-classic"): - versions = [ - str(version) - for version in testing_releases - if version < tools.utils.Version("3006.0") - ] - for version in versions: - if ( - version - and distro_slug.startswith("photonos-5") - and version < tools.utils.Version("3007.0") - ): - # We never build packages for Photon OS 5 prior to 3007.0 - continue - _matrix.append( - { - "test-chunk": session, - "version": version, - } + # Filter out the prefixes to look under + if "macos-" in distro_slug: + # We don't have golden images for macos, handle these separately + prefixes = { + "classic": "osx/", + "tiamat": "salt/py3/macos/minor/", + "relenv": "salt/py3/macos/minor/", + } + else: + parts = distro_slug.split("-") + name = parts[0] + version = parts[1] + if name in ("debian", "ubuntu"): + arch = "amd64" + elif name in ("centos", "centosstream", "amazonlinux", "photonos"): + arch = "x86_64" + if len(parts) > 
2: + arch = parts[2] + if name == "amazonlinux": + name = "amazon" + if "centos" in name: + name = "redhat" + if "photon" in name: + name = "photon" + if name == "windows": + prefixes = { + "classic": "windows/", + "tiamat": "salt/py3/windows/minor", + "relenv": "salt/py3/windows/minor", + } + else: + prefixes = { + "classic": f"py3/{name}/{version}/{arch}/", + "tiamat": f"salt/py3/{name}/{version}/{arch}/minor/", + "relenv": f"salt/py3/{name}/{version}/{arch}/minor/", + } + + s3 = boto3.client("s3") + paginator = s3.get_paginator("list_objects_v2") + _matrix = [ + { + "test-chunk": "install", + "version": None, + } + ] + + for version, backend in adjusted_versions: + prefix = prefixes[backend] + # TODO: Remove this after 3009.0 + if backend == "relenv" and version >= tools.utils.Version("3006.5"): + prefix.replace("/arm64/", "/aarch64/") + # Using a paginator allows us to list recursively and avoid the item limit + page_iterator = paginator.paginate( + Bucket=f"salt-project-{tools.utils.SPB_ENVIRONMENT}-salt-artifacts-release", + Prefix=prefix, + ) + # Uses a jmespath expression to test if the wanted version is in any of the filenames + key_filter = f"Contents[?contains(Key, '{version}')][]" + if pkg_type == "MSI": + # TODO: Add this back when we add MSI upgrade and downgrade tests + # key_filter = f"Contents[?contains(Key, '{version}')] | [?ends_with(Key, '.msi')]" + continue + elif pkg_type == "NSIS": + key_filter = ( + f"Contents[?contains(Key, '{version}')] | [?ends_with(Key, '.exe')]" ) + objects = list(page_iterator.search(key_filter)) + # Testing using `any` because sometimes the paginator returns `[None]` + if any(objects): + ctx.info( + f"Found {version} ({backend}) for {distro_slug}: {objects[0]['Key']}" + ) + for session in ("upgrade", "downgrade"): + if backend == "classic": + session += "-classic" + _matrix.append( + { + "test-chunk": session, + "version": str(version), + } + ) + if ( + backend == "relenv" + and fips is True + and 
distro_slug.startswith(("photonos-4", "photonos-5")) + ): + # Repeat the last one, but with fips + _matrix.append({"fips": "fips", **_matrix[-1]}) + else: + ctx.info(f"No {version} ({backend}) for {distro_slug} at {prefix}") + ctx.info("Generated matrix:") ctx.print(_matrix, soft_wrap=True) @@ -868,7 +930,7 @@ def get_release_changelog_target(ctx: Context, event_name: str): try: gh_event = json.loads(open(gh_event_path).read()) except Exception as exc: - ctx.error(f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc) + ctx.error(f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc) # type: ignore[arg-type] ctx.exit(1) github_output = os.environ.get("GITHUB_OUTPUT") @@ -930,12 +992,12 @@ def get_pr_test_labels( gh_event = json.loads(open(gh_event_path).read()) except Exception as exc: ctx.error( - f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc + f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc # type: ignore[arg-type] ) ctx.exit(1) if "pull_request" not in gh_event: - ctx.warning("The 'pull_request' key was not found on the event payload.") + ctx.warn("The 'pull_request' key was not found on the event payload.") ctx.exit(1) pr = gh_event["pull_request"]["number"] @@ -1105,7 +1167,7 @@ def define_cache_seed(ctx: Context, static_cache_seed: str, randomize: bool = Fa gh_event = json.loads(open(gh_event_path).read()) except Exception as exc: ctx.error( - f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc + f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc # type: ignore[arg-type] ) ctx.exit(1) @@ -1156,7 +1218,11 @@ def upload_coverage(ctx: Context, reports_path: pathlib.Path, commit_sha: str = ctx.error("Could not find the path to the 'codecov' binary") ctx.exit(1) - codecov_args = [ + if TYPE_CHECKING: + assert codecov is not None + assert commit_sha is not None + + codecov_args: list[str] = [ codecov, "--nonZero", "--sha", @@ -1172,7 +1238,7 @@ def 
upload_coverage(ctx: Context, reports_path: pathlib.Path, commit_sha: str = codecov_args.extend(["--parent", pr_event_data["base"]["sha"]]) except Exception as exc: ctx.error( - f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc + f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc # type: ignore[arg-type] ) sleep_time = 15 diff --git a/tools/pkg/__init__.py b/tools/pkg/__init__.py index 5b97eaa6778..05612996655 100644 --- a/tools/pkg/__init__.py +++ b/tools/pkg/__init__.py @@ -154,7 +154,7 @@ def set_salt_version( ret = venv.run_code(code, capture=True, check=False) if ret.returncode: ctx.error(ret.stderr.decode()) - ctx.exit(ctx.returncode) + ctx.exit(ret.returncode) salt_version = ret.stdout.strip().decode() if not tools.utils.REPO_ROOT.joinpath("salt").is_dir(): diff --git a/tools/pkg/repo/__init__.py b/tools/pkg/repo/__init__.py index 5599bfd5722..e48671051f2 100644 --- a/tools/pkg/repo/__init__.py +++ b/tools/pkg/repo/__init__.py @@ -8,27 +8,16 @@ import logging import os import pathlib import shutil -import sys from typing import TYPE_CHECKING +import boto3 +from botocore.exceptions import ClientError from ptscripts import Context, command_group import tools.pkg import tools.utils from tools.utils import Version, get_salt_releases -try: - import boto3 - from botocore.exceptions import ClientError -except ImportError: - print( - "\nPlease run 'python -m pip install -r " - "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info), - file=sys.stderr, - flush=True, - ) - raise - log = logging.getLogger(__name__) # Define the command group diff --git a/tools/pkg/repo/create.py b/tools/pkg/repo/create.py index a131c5fa32b..a665340098c 100644 --- a/tools/pkg/repo/create.py +++ b/tools/pkg/repo/create.py @@ -10,11 +10,11 @@ import logging import os import pathlib import shutil -import sys import textwrap from datetime import datetime from typing import TYPE_CHECKING +import boto3 from ptscripts import Context, 
command_group import tools.pkg @@ -26,17 +26,6 @@ from tools.utils.repo import ( get_repo_json_file_contents, ) -try: - import boto3 -except ImportError: - print( - "\nPlease run 'python -m pip install -r " - "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info), - file=sys.stderr, - flush=True, - ) - raise - log = logging.getLogger(__name__) create = command_group( @@ -157,7 +146,7 @@ def debian( distro_details = _deb_distro_info[distro][distro_version] ctx.info("Distribution Details:") - ctx.info(distro_details) + ctx.print(distro_details, soft_wrap=True) if TYPE_CHECKING: assert isinstance(distro_details["label"], str) assert isinstance(distro_details["codename"], str) @@ -320,7 +309,7 @@ def debian( _rpm_distro_info = { - "amazon": ["2"], + "amazon": ["2", "2023"], "redhat": ["7", "8", "9"], "fedora": ["36", "37", "38"], "photon": ["3", "4", "5"], @@ -396,10 +385,6 @@ def rpm( ctx.error(f"Support for {display_name} is missing.") ctx.exit(1) - if distro_arch == "aarch64": - ctx.info(f"The {distro_arch} arch is an alias for 'arm64'. 
Adjusting.") - distro_arch = "arm64" - ctx.info("Creating repository directory structure ...") create_repo_path = create_top_level_repo_path( ctx, diff --git a/tools/pkg/repo/publish.py b/tools/pkg/repo/publish.py index 3ad0ec9e428..2a743ac046b 100644 --- a/tools/pkg/repo/publish.py +++ b/tools/pkg/repo/publish.py @@ -10,12 +10,13 @@ import logging import os import pathlib import re -import sys import tempfile import textwrap from typing import TYPE_CHECKING, Any +import boto3 import packaging.version +from botocore.exceptions import ClientError from ptscripts import Context, command_group import tools.pkg @@ -24,18 +25,6 @@ import tools.utils.repo from tools.utils import Version, get_salt_releases, parse_versions from tools.utils.repo import create_full_repo_path, get_repo_json_file_contents -try: - import boto3 - from botocore.exceptions import ClientError -except ImportError: - print( - "\nPlease run 'python -m pip install -r " - "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info), - file=sys.stderr, - flush=True, - ) - raise - log = logging.getLogger(__name__) publish = command_group( diff --git a/tools/precommit/__init__.py b/tools/precommit/__init__.py new file mode 100644 index 00000000000..c10eadeb479 --- /dev/null +++ b/tools/precommit/__init__.py @@ -0,0 +1,49 @@ +""" +These commands, and sub-commands, are used by pre-commit. 
+""" +from ptscripts import command_group + +import tools.utils + +# Define the command group +cgroup = command_group( + name="pre-commit", help="Pre-Commit Related Commands", description=__doc__ +) + +SALT_BASE_PATH = tools.utils.REPO_ROOT / "salt" + +SALT_INTERNAL_LOADERS_PATHS = ( + # This is a 1:1 copy of SALT_INTERNAL_LOADERS_PATHS found in salt/loader/__init__.py + str(SALT_BASE_PATH / "auth"), + str(SALT_BASE_PATH / "beacons"), + str(SALT_BASE_PATH / "cache"), + str(SALT_BASE_PATH / "client" / "ssh" / "wrapper"), + str(SALT_BASE_PATH / "cloud" / "clouds"), + str(SALT_BASE_PATH / "engines"), + str(SALT_BASE_PATH / "executors"), + str(SALT_BASE_PATH / "fileserver"), + str(SALT_BASE_PATH / "grains"), + str(SALT_BASE_PATH / "log_handlers"), + str(SALT_BASE_PATH / "matchers"), + str(SALT_BASE_PATH / "metaproxy"), + str(SALT_BASE_PATH / "modules"), + str(SALT_BASE_PATH / "netapi"), + str(SALT_BASE_PATH / "output"), + str(SALT_BASE_PATH / "pillar"), + str(SALT_BASE_PATH / "proxy"), + str(SALT_BASE_PATH / "queues"), + str(SALT_BASE_PATH / "renderers"), + str(SALT_BASE_PATH / "returners"), + str(SALT_BASE_PATH / "roster"), + str(SALT_BASE_PATH / "runners"), + str(SALT_BASE_PATH / "sdb"), + str(SALT_BASE_PATH / "serializers"), + str(SALT_BASE_PATH / "spm" / "pkgdb"), + str(SALT_BASE_PATH / "spm" / "pkgfiles"), + str(SALT_BASE_PATH / "states"), + str(SALT_BASE_PATH / "thorium"), + str(SALT_BASE_PATH / "tokens"), + str(SALT_BASE_PATH / "tops"), + str(SALT_BASE_PATH / "utils"), + str(SALT_BASE_PATH / "wheel"), +) diff --git a/tools/precommit/changelog.py b/tools/precommit/changelog.py new file mode 100644 index 00000000000..5e108af5f11 --- /dev/null +++ b/tools/precommit/changelog.py @@ -0,0 +1,146 @@ +""" +These commands are used to validate changelog entries +""" +# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated +from __future__ import annotations + +import logging +import pathlib +import re +import sys + +from ptscripts import Context, 
command_group + +import tools.utils + +log = logging.getLogger(__name__) + +CHANGELOG_LIKE_RE = re.compile(r"([\d]+)\.([a-z]+)$") +CHANGELOG_TYPES = ( + "removed", + "deprecated", + "changed", + "fixed", + "added", + "security", +) +CHANGELOG_ENTRY_RE = re.compile( + r"([\d]+|(CVE|cve)-[\d]{{4}}-[\d]+)\.({})(\.md)?$".format("|".join(CHANGELOG_TYPES)) +) + +# Define the command group +changelog = command_group( + name="changelog", + help="Changelog tools", + description=__doc__, + venv_config={ + "requirements_files": [ + tools.utils.REPO_ROOT + / "requirements" + / "static" + / "ci" + / "py{}.{}".format(*sys.version_info) + / "changelog.txt" + ], + }, + parent="pre-commit", +) + + +@changelog.command( + name="pre-commit-checks", + arguments={ + "files": { + "nargs": "*", + } + }, +) +def check_changelog_entries(ctx: Context, files: list[pathlib.Path]): + """ + Run pre-commit checks on changelog snippets. + """ + docs_path = tools.utils.REPO_ROOT / "doc" + tests_integration_files_path = ( + tools.utils.REPO_ROOT / "tests" / "integration" / "files" + ) + changelog_entries_path = tools.utils.REPO_ROOT / "changelog" + exitcode = 0 + for entry in files: + path = pathlib.Path(entry).resolve() + # Is it under changelog/ + try: + path.relative_to(changelog_entries_path) + if path.name in (".keep", ".template.jinja"): + # This is the file we use so git doesn't delete the changelog/ directory + continue + # Is it named properly + if not CHANGELOG_ENTRY_RE.match(path.name): + ctx.error( + "The changelog entry '{}' should have one of the following extensions: {}.".format( + path.relative_to(tools.utils.REPO_ROOT), + ", ".join(f"{ext}.md" for ext in CHANGELOG_TYPES), + ), + ) + exitcode = 1 + continue + if path.suffix != ".md": + ctx.error( + f"Please rename '{path.relative_to(tools.utils.REPO_ROOT)}' to " + f"'{path.relative_to(tools.utils.REPO_ROOT)}.md'" + ) + exitcode = 1 + continue + except ValueError: + # No, carry on + pass + # Does it look like a changelog entry + if 
CHANGELOG_LIKE_RE.match(path.name) and not CHANGELOG_ENTRY_RE.match( + path.name + ): + try: + # Is this under doc/ + path.relative_to(docs_path) + # Yes, carry on + continue + except ValueError: + # No, resume the check + pass + try: + # Is this under tests/integration/files + path.relative_to(tests_integration_files_path) + # Yes, carry on + continue + except ValueError: + # No, resume the check + pass + ctx.error( + "The changelog entry '{}' should have one of the following extensions: {}.".format( + path.relative_to(tools.utils.REPO_ROOT), + ", ".join(f"{ext}.md" for ext in CHANGELOG_TYPES), + ) + ) + exitcode = 1 + continue + # Is it a changelog entry + if not CHANGELOG_ENTRY_RE.match(path.name): + # No? Carry on + continue + # Is the changelog entry in the right path? + try: + path.relative_to(changelog_entries_path) + except ValueError: + exitcode = 1 + ctx.error( + "The changelog entry '{}' should be placed under '{}/', not '{}'".format( + path.name, + changelog_entries_path.relative_to(tools.utils.REPO_ROOT), + path.relative_to(tools.utils.REPO_ROOT).parent, + ) + ) + if path.suffix != ".md": + ctx.error( + f"Please rename '{path.relative_to(tools.utils.REPO_ROOT)}' to " + f"'{path.relative_to(tools.utils.REPO_ROOT)}.md'" + ) + exitcode = 1 + ctx.exit(exitcode) diff --git a/tasks/docs.py b/tools/precommit/docs.py similarity index 71% rename from tasks/docs.py rename to tools/precommit/docs.py index 323d14a0a1f..a549a6cecf3 100644 --- a/tasks/docs.py +++ b/tools/precommit/docs.py @@ -1,9 +1,8 @@ """ - tasks.docstrings - ~~~~~~~~~~~~~~~~ - - Check salt code base for for missing or wrong docstrings +Check salt code base for for missing or wrong docs """ +# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated +from __future__ import annotations import ast import collections @@ -11,21 +10,18 @@ import os import pathlib import re -from invoke import task # pylint: disable=3rd-party-module-not-gated +from ptscripts import Context, 
command_group -from tasks import utils +import tools.utils -CODE_DIR = pathlib.Path(__file__).resolve().parent.parent -DOCS_DIR = CODE_DIR / "doc" -SALT_CODE_DIR = CODE_DIR / "salt" +DOCS_DIR = tools.utils.REPO_ROOT / "doc" +SALT_CODE_DIR = tools.utils.REPO_ROOT / "salt" -os.chdir(str(CODE_DIR)) - -python_module_to_doc_path = {} -doc_path_to_python_module = {} +PYTHON_MODULE_TO_DOC_PATH = {} +DOC_PATH_TO_PYTHON_MODULE = {} -check_paths = ( +CHECK_PATHS = ( "salt/auth", "salt/beacons", "salt/cache", @@ -52,12 +48,14 @@ check_paths = ( "salt/tops", "salt/wheel", ) -exclude_paths = ( +EXCLUDE_PATHS = ( "salt/cloud/cli.py", "salt/cloud/exceptions.py", "salt/cloud/libcloudfuncs.py", ) +cgroup = command_group(name="docs", help=__doc__, parent="pre-commit") + def build_path_cache(): """ @@ -65,13 +63,13 @@ def build_path_cache(): """ for path in SALT_CODE_DIR.rglob("*.py"): - path = path.resolve().relative_to(CODE_DIR) + path = path.resolve().relative_to(tools.utils.REPO_ROOT) strpath = str(path) if strpath.endswith("__init__.py"): continue - if not strpath.startswith(check_paths): + if not strpath.startswith(CHECK_PATHS): continue - if strpath.startswith(exclude_paths): + if strpath.startswith(EXCLUDE_PATHS): continue parts = list(path.parts) @@ -113,32 +111,21 @@ def build_path_cache(): / "all" / str(path).replace(".py", ".rst").replace(os.sep, ".") ) - stub_path = stub_path.relative_to(CODE_DIR) - python_module_to_doc_path[path] = stub_path + stub_path = stub_path.relative_to(tools.utils.REPO_ROOT) + PYTHON_MODULE_TO_DOC_PATH[path] = stub_path if path.exists(): - doc_path_to_python_module[stub_path] = path + DOC_PATH_TO_PYTHON_MODULE[stub_path] = path build_path_cache() def build_file_list(files, extension): - # Unfortunately invoke does not support nargs. 
- # We migth have been passed --files="foo.py bar.py" - # Turn that into a list of paths - _files = [] - for path in files: - if not path: - continue - for spath in path.split(): - if not spath.endswith(extension): - continue - _files.append(spath) - if not _files: - _files = CODE_DIR.rglob("*{}".format(extension)) + if not files: + _files = tools.utils.REPO_ROOT.rglob("*{}".format(extension)) else: - _files = [pathlib.Path(fname).resolve() for fname in _files] - _files = [path.relative_to(CODE_DIR) for path in _files] + _files = [fpath.resolve() for fpath in files if fpath.suffix == extension] + _files = [path.relative_to(tools.utils.REPO_ROOT) for path in _files] return _files @@ -148,9 +135,9 @@ def build_python_module_paths(files): strpath = str(path) if strpath.endswith("__init__.py"): continue - if not strpath.startswith(check_paths): + if not strpath.startswith(CHECK_PATHS): continue - if strpath.startswith(exclude_paths): + if strpath.startswith(EXCLUDE_PATHS): continue _files.append(path) return _files @@ -160,8 +147,7 @@ def build_docs_paths(files): return build_file_list(files, ".rst") -@task(iterable=["files"], positional=["files"]) -def check_inline_markup(ctx, files): +def check_inline_markup(ctx: Context, files: list[pathlib.Path]) -> int: """ Check docstring for :doc: usage @@ -174,9 +160,6 @@ def check_inline_markup(ctx, files): https://github.com/saltstack/salt/issues/12788 """ - # CD into Salt's repo root directory - ctx.cd(CODE_DIR) - files = build_python_module_paths(files) exitcode = 0 @@ -188,18 +171,14 @@ def check_inline_markup(ctx, files): if not docstring: continue if ":doc:" in docstring: - utils.error( - "The {} function in {} contains ':doc:' usage", funcdef.name, path + ctx.error( + f"The {funcdef.name} function in {path} contains ':doc:' usage" ) exitcode += 1 return exitcode -@task(iterable=["files"]) -def check_stubs(ctx, files): - # CD into Salt's repo root directory - ctx.cd(CODE_DIR) - +def check_stubs(ctx: Context, files: 
list[pathlib.Path]) -> int: files = build_python_module_paths(files) exitcode = 0 @@ -207,21 +186,20 @@ def check_stubs(ctx, files): strpath = str(path) if strpath.endswith("__init__.py"): continue - if not strpath.startswith(check_paths): + if not strpath.startswith(CHECK_PATHS): continue - if strpath.startswith(exclude_paths): + if strpath.startswith(EXCLUDE_PATHS): continue - stub_path = python_module_to_doc_path[path] + stub_path = PYTHON_MODULE_TO_DOC_PATH[path] if not stub_path.exists(): exitcode += 1 - utils.error( - "The module at {} does not have a sphinx stub at {}", path, stub_path + ctx.error( + f"The module at {path} does not have a sphinx stub at {stub_path}" ) return exitcode -@task(iterable=["files"]) -def check_virtual(ctx, files): +def check_virtual(ctx: Context, files: list[pathlib.Path]) -> int: """ Check if .rst files for each module contains the text ".. _virtual" indicating it is a virtual doc page, and, in case a module exists by @@ -235,22 +213,16 @@ def check_virtual(ctx, files): try: contents = path.read_text() except Exception as exc: # pylint: disable=broad-except - utils.error( - "Error while processing '{}': {}".format( - path, - exc, - ) - ) + ctx.error(f"Error while processing '{path}': {exc}") exitcode += 1 continue if ".. 
_virtual-" in contents: try: - python_module = doc_path_to_python_module[path] - utils.error( - "The doc file at {} indicates that it's virtual, yet, there's a" - " python module at {} that will shaddow it.", - path, - python_module, + python_module = DOC_PATH_TO_PYTHON_MODULE[path] + ctx.error( + f"The doc file at {path} indicates that it's virtual, yet, " + f"there's a python module at {python_module} that will " + "shaddow it.", ) exitcode += 1 except KeyError: @@ -259,8 +231,7 @@ def check_virtual(ctx, files): return exitcode -@task(iterable=["files"]) -def check_module_indexes(ctx, files): +def check_module_indexes(ctx: Context, files: list[pathlib.Path]) -> int: exitcode = 0 files = build_docs_paths(files) for path in files: @@ -288,9 +259,8 @@ def check_module_indexes(ctx, files): ) if module_index != sorted(module_index): exitcode += 1 - utils.error( - "The autosummary mods in {} are not properly sorted. Please sort them.", - path, + ctx.error( + f"The autosummary mods in {path} are not properly sorted. 
Please sort them.", ) module_index_duplicates = [ @@ -298,8 +268,8 @@ def check_module_indexes(ctx, files): ] if module_index_duplicates: exitcode += 1 - utils.error( - "Module index {} contains duplicates: {}", path, module_index_duplicates + ctx.error( + f"Module index {path} contains duplicates: {module_index_duplicates}" ) # Let's check if all python modules are included in the index path_parts = list(path.parts) @@ -320,7 +290,7 @@ def check_module_indexes(ctx, files): package = "log_handlers" path_parts = [] python_package = SALT_CODE_DIR.joinpath(package, *path_parts).relative_to( - CODE_DIR + tools.utils.REPO_ROOT ) modules = set() for module in python_package.rglob("*.py"): @@ -358,26 +328,26 @@ def check_module_indexes(ctx, files): missing_modules_in_index = set(modules) - set(module_index) if missing_modules_in_index: exitcode += 1 - utils.error( - "The module index at {} is missing the following modules: {}", - path, - ", ".join(missing_modules_in_index), + ctx.error( + f"The module index at {path} is missing the following modules: " + f"{', '.join(missing_modules_in_index)}" ) extra_modules_in_index = set(module_index) - set(modules) if extra_modules_in_index: exitcode += 1 - utils.error( - "The module index at {} has extra modules(non existing): {}", - path, - ", ".join(extra_modules_in_index), + ctx.error( + f"The module index at {path} has extra modules(non existing): " + f"{', '.join(extra_modules_in_index)}" ) return exitcode -@task(iterable=["files"]) -def check_stray(ctx, files): +def check_stray(ctx: Context, files: list[pathlib.Path]) -> int: exitcode = 0 - exclude_paths = ( + exclude_pathlib_paths: tuple[pathlib.Path, ...] + exclude_paths: tuple[str, ...] 
+ + exclude_pathlib_paths = ( DOCS_DIR / "_inc", DOCS_DIR / "ref" / "cli" / "_includes", DOCS_DIR / "ref" / "cli", @@ -412,41 +382,50 @@ def check_stray(ctx, files): DOCS_DIR / "ref" / "states" / "writing.rst", DOCS_DIR / "topics", ) - exclude_paths = tuple(str(p.relative_to(CODE_DIR)) for p in exclude_paths) + exclude_paths = tuple( + str(p.relative_to(tools.utils.REPO_ROOT)) for p in exclude_pathlib_paths + ) files = build_docs_paths(files) for path in files: - if not str(path).startswith(str((DOCS_DIR / "ref").relative_to(CODE_DIR))): + if not str(path).startswith( + str((DOCS_DIR / "ref").relative_to(tools.utils.REPO_ROOT)) + ): continue if str(path).startswith(exclude_paths): continue if path.name in ("index.rst", "glossary.rst", "faq.rst", "README.rst"): continue - try: - python_module = doc_path_to_python_module[path] - except KeyError: + if path not in DOC_PATH_TO_PYTHON_MODULE: contents = path.read_text() if ".. _virtual-" in contents: continue exitcode += 1 - utils.error( - "The doc at {} doesn't have a corresponding python module and is" - " considered a stray doc. Please remove it.", - path, + ctx.error( + f"The doc at {path} doesn't have a corresponding python module " + "and is considered a stray doc. Please remove it." 
) return exitcode -@task(iterable=["files"]) -def check(ctx, files): +@cgroup.command( + name="check", + arguments={ + "files": { + "help": "List of files to check", + "nargs": "*", + } + }, +) +def check(ctx: Context, files: list[pathlib.Path]) -> None: exitcode = 0 - utils.info("Checking inline :doc: markup") + ctx.info("Checking inline :doc: markup") exitcode += check_inline_markup(ctx, files) - utils.info("Checking python module stubs") + ctx.info("Checking python module stubs") exitcode += check_stubs(ctx, files) - utils.info("Checking virtual modules") + ctx.info("Checking virtual modules") exitcode += check_virtual(ctx, files) - utils.info("Checking stray docs") + ctx.info("Checking stray docs") exitcode += check_stray(ctx, files) - utils.info("Checking doc module indexes") + ctx.info("Checking doc module indexes") exitcode += check_module_indexes(ctx, files) - utils.exit_invoke(exitcode) + ctx.exit(exitcode) diff --git a/tasks/docstrings.py b/tools/precommit/docstrings.py similarity index 87% rename from tasks/docstrings.py rename to tools/precommit/docstrings.py index 3aed5c7fa87..9cbc5a848d0 100644 --- a/tasks/docstrings.py +++ b/tools/precommit/docstrings.py @@ -1,10 +1,10 @@ """ - tasks.docstrings - ~~~~~~~~~~~~~~~~ - - Docstrings related tasks +Check salt code base for for missing or wrong docstrings. 
""" -# pylint: disable=resource-leakage +# Skip mypy checks since it will follow into Salt which doesn't yet have proper types defined +# mypy: ignore-errors +# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated +from __future__ import annotations import ast import os @@ -13,16 +13,15 @@ import re import sys from typing import TYPE_CHECKING -from invoke import task # pylint: disable=3rd-party-module-not-gated +from ptscripts import Context, command_group -from salt.loader import SALT_INTERNAL_LOADERS_PATHS +import tools.utils from salt.version import SaltStackVersion -from tasks import utils +from tools.precommit import SALT_INTERNAL_LOADERS_PATHS -CODE_DIR = pathlib.Path(__file__).resolve().parent.parent -SALT_CODE_DIR = CODE_DIR / "salt" +SALT_CODE_DIR = tools.utils.REPO_ROOT / "salt" SALT_MODULES_PATH = SALT_CODE_DIR / "modules" -THIS_FILE = pathlib.Path(__file__).relative_to(CODE_DIR) +THIS_FILE = pathlib.Path(__file__).relative_to(tools.utils.REPO_ROOT) MISSING_DOCSTRINGS = { "salt/auth/django.py": ["is_connection_usable"], @@ -141,7 +140,6 @@ MISSING_DOCSTRINGS = { "salt/pillar/gpg.py": ["ext_pillar"], "salt/pillar/makostack.py": ["ext_pillar"], "salt/pillar/nacl.py": ["ext_pillar"], - "salt/pillar/stack.py": ["ext_pillar"], "salt/proxy/cisconso.py": ["init"], "salt/proxy/esxi.py": ["is_connected_via_vcenter"], "salt/proxy/fx2.py": ["host"], @@ -297,7 +295,6 @@ MISSING_DOCSTRINGS = { "iter_entry_points", ], "salt/utils/error.py": ["pack_exception"], - "salt/utils/etcd_util.py": ["get_conn", "tree"], "salt/utils/find.py": ["path_depth"], "salt/utils/gzip_util.py": ["open_fileobj", "uncompress", "open"], "salt/utils/icinga2.py": ["get_certs_path"], @@ -308,7 +305,6 @@ MISSING_DOCSTRINGS = { "regex_escape", ], "salt/utils/listdiffer.py": ["list_diff"], - "salt/utils/master.py": ["get_master_key", "ping_all_connected_minions"], "salt/utils/namecheap.py": [ "atts_to_dict", "get_opts", @@ -332,7 +328,6 @@ MISSING_DOCSTRINGS = { ], 
"salt/utils/openstack/swift.py": ["mkdirs", "check_swift"], "salt/utils/pkg/__init__.py": ["split_comparison"], - "salt/utils/process.py": ["systemd_notify_call", "default_signals"], "salt/utils/profile.py": ["activate_profile", "output_profile"], "salt/utils/pyobjects.py": ["need_salt"], "salt/utils/reclass.py": [ @@ -360,13 +355,6 @@ MISSING_DOCSTRINGS = { "salt/utils/ssh.py": ["key_is_encrypted"], "salt/utils/stringio.py": ["is_writable", "is_stringio", "is_readable"], "salt/utils/stringutils.py": ["random"], - "salt/utils/templates.py": [ - "wrap_tmpl_func", - "render_mako_tmpl", - "render_jinja_tmpl", - "render_wempy_tmpl", - ], - "salt/utils/verify.py": ["verify_logs_filter"], "salt/utils/virtualbox.py": [ "machine_get_machinestate_str", "machine_get_machinestate_tuple", @@ -380,13 +368,10 @@ MISSING_DOCSTRINGS = { ], "salt/utils/yamlloader.py": ["load"], "salt/utils/yamlloader_old.py": ["load"], - "salt/utils/zeromq.py": ["check_ipc_path_max_len"], } MISSING_EXAMPLES = { "salt/modules/acme.py": ["has", "renew_by", "needs_renewal"], - "salt/modules/ansiblegate.py": ["help", "list_"], "salt/modules/apkpkg.py": ["purge"], - "salt/modules/aptpkg.py": ["expand_repo_def"], "salt/modules/arista_pyeapi.py": ["get_connection"], "salt/modules/artifactory.py": [ "get_latest_release", @@ -475,7 +460,6 @@ MISSING_EXAMPLES = { "salt/modules/boto_ssm.py": ["get_parameter", "delete_parameter", "put_parameter"], "salt/modules/capirca_acl.py": ["get_filter_pillar", "get_term_pillar"], "salt/modules/ceph.py": ["zap"], - "salt/modules/chroot.py": ["exist"], "salt/modules/ciscoconfparse_mod.py": [ "find_objects", "find_objects_wo_child", @@ -489,7 +473,6 @@ MISSING_EXAMPLES = { "set_data_value", "apply_rollback", ], - "salt/modules/cp.py": ["envs", "recv", "recv_chunked"], "salt/modules/cryptdev.py": ["active"], "salt/modules/datadog_api.py": ["post_event"], "salt/modules/defaults.py": ["deepcopy", "update"], @@ -608,7 +591,6 @@ MISSING_EXAMPLES = { 
"salt/modules/napalm_probes.py": ["delete_probes", "schedule_probes", "set_probes"], "salt/modules/netbox.py": ["get_", "filter_", "slugify"], "salt/modules/netmiko_mod.py": ["call", "multi_call", "get_connection"], - "salt/modules/network.py": ["fqdns"], "salt/modules/neutronng.py": [ "get_openstack_cloud", "compare_changes", @@ -763,21 +745,13 @@ MISSING_EXAMPLES = { "register_vm", "get_vm_config", "get_vm_config_file", - "list_licenses", "compare_vm_configs", "get_advanced_configs", "delete_advanced_configs", - "create_vmfs_datastore", "get_vm", ], "salt/modules/win_pkg.py": ["get_package_info"], "salt/modules/win_timezone.py": ["zone_compare"], - "salt/modules/zabbix.py": [ - "substitute_params", - "get_zabbix_id_mapper", - "get_object_id_by_params", - "compare_params", - ], "salt/modules/zk_concurrency.py": [ "lock", "party_members", @@ -827,8 +801,17 @@ you've made already. Whatever approach you decide to take, just drop a comment in the PR letting us know! """ +cgroup = command_group(name="docstrings", help=__doc__, parent="pre-commit") -def annotate(kind: str, fpath: str, start_lineno: int, end_lineno: int, message: str): + +def annotate( + ctx: Context, + kind: str, + fpath: pathlib.Path, + start_lineno: int, + end_lineno: int, + message: str, +) -> None: if kind not in ("warning", "error"): raise RuntimeError("The annotation kind can only be one of 'warning', 'error'.") if os.environ.get("GH_ACTIONS_ANNOTATE") is None: @@ -836,7 +819,7 @@ def annotate(kind: str, fpath: str, start_lineno: int, end_lineno: int, message: github_output = os.environ.get("GITHUB_OUTPUT") if github_output is None: - utils.warn("The 'GITHUB_OUTPUT' variable is not set. Not adding annotations.") + ctx.warn("The 'GITHUB_OUTPUT' variable is not set. 
Not adding annotations.") return if TYPE_CHECKING: @@ -846,40 +829,52 @@ def annotate(kind: str, fpath: str, start_lineno: int, end_lineno: int, message: message.rstrip().replace("%", "%25").replace("\r", "%0D").replace("\n", "%0A") ) # Print it to stdout so that the GitHub runner pick's it up and adds the annotation - print( + ctx.print( f"::{kind} file={fpath},line={start_lineno},endLine={end_lineno}::{message}", file=sys.stdout, flush=True, ) -@task(iterable=["files"], positional=["files"]) -def check(ctx, files, check_proper_formatting=False, error_on_known_failures=False): +@cgroup.command( + name="check", + arguments={ + "files": { + "help": "List of files to check", + "nargs": "*", + }, + "suppress_warnings": { + "help": "Supress warning messages on known issues", + }, + "check_proper_formatting": { + "help": "Run formatting checks on docstrings", + }, + "error_on_known_failures": { + "help": "Raise an error on known failures", + }, + }, +) +def check_docstrings( + ctx: Context, + files: list[pathlib.Path], + suppress_warnings: bool = False, + check_proper_formatting: bool = False, + error_on_known_failures: bool = False, +) -> None: """ Check salt's docstrings """ - # CD into Salt's repo root directory - ctx.cd(CODE_DIR) - - # Unfortunately invoke does not support nargs. 
- # We migth have been passed --files="foo.py bar.py" - # Turn that into a list of paths - _files = [] - for path in files: - if not path: - continue - _files.extend(path.split()) - if not _files: - _files = SALT_CODE_DIR.rglob("*.py") + if not files: + _files = list(SALT_CODE_DIR.rglob("*.py")) else: - _files = [pathlib.Path(fname) for fname in _files] - - _files = [path.resolve() for path in _files] + _files = [fpath.resolve() for fpath in files if fpath.suffix == ".py"] errors = 0 exitcode = 0 warnings = 0 for path in _files: + if str(path).startswith(str(tools.utils.REPO_ROOT / "salt" / "ext")): + continue contents = path.read_text() try: module = ast.parse(path.read_text(), filename=str(path)) @@ -889,10 +884,11 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal if error: errors += 1 exitcode = 1 - utils.error( - "The module '{}' does not provide a proper `{}` version: {!r} is not valid.", - path.relative_to(CODE_DIR), - *error, + ctx.error( + "The module '{}' does not provide a proper `{}` version: {!r} is not valid.".format( + path.relative_to(tools.utils.REPO_ROOT), + *error, + ) ) for funcdef in [ @@ -904,17 +900,19 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal if error: errors += 1 exitcode = 1 - utils.error( - "The module '{}' does not provide a proper `{}` version: {!r} is not valid.", - path.relative_to(CODE_DIR), - *error, + ctx.error( + "The module '{}' does not provide a proper `{}` version: {!r} is not valid.".format( + path, + *error, + ) ) annotate( + ctx, "error", - path.relative_to(CODE_DIR), + path, funcdef.lineno, funcdef.body[0].lineno, - "Version {1:r!} is not valid for {0!r}".format(*error), + "Version {1!r} is not valid for {0!r}".format(*error), ) if not str(path).startswith(SALT_INTERNAL_LOADERS_PATHS): @@ -922,7 +920,7 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal continue funcname = funcdef.name - relpath = 
str(path.relative_to(CODE_DIR)) + relpath = str(path.relative_to(tools.utils.REPO_ROOT)) # We're dealing with a salt loader module if funcname.startswith("_"): @@ -935,14 +933,14 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal and error_on_known_failures is False ): warnings += 1 - utils.warn( - "The function '{}' on '{}' does not have a docstring", - funcname, - relpath, - ) + if suppress_warnings is False: + ctx.warn( + f"The function '{funcname}' on '{relpath}' does not have a docstring" + ) annotate( + ctx, "warning", - path.relative_to(CODE_DIR), + path.relative_to(tools.utils.REPO_ROOT), funcdef.lineno, funcdef.body[0].lineno, "Missing docstring", @@ -950,14 +948,13 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal continue errors += 1 exitcode = 1 - utils.error( - "The function '{}' on '{}' does not have a docstring", - funcname, - relpath, + ctx.error( + f"The function '{funcname}' on '{relpath}' does not have a docstring" ) annotate( + ctx, "error", - path.relative_to(CODE_DIR), + path.relative_to(tools.utils.REPO_ROOT), funcdef.lineno, funcdef.body[0].lineno, "Missing docstring", @@ -966,14 +963,12 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal elif funcname in MISSING_DOCSTRINGS.get(relpath, ()): # This was previously a know function with a missing docstring. # Warn about it so that it get's removed from this list - warnings += 1 - utils.warn( - "The function '{}' on '{}' was previously known to not have a docstring, " - "which is no longer the case. Please remove it from 'MISSING_DOCSTRINGS' ." - "in '{}'", - funcname, - relpath, - THIS_FILE, + errors += 1 + exitcode = 1 + ctx.error( + f"The function '{funcname}' on '{relpath}' was previously known to not " + "have a docstring, which is no longer the case. 
Please remove it from " + f"'MISSING_DOCSTRINGS' in '{THIS_FILE}'" ) try: @@ -993,14 +988,15 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal and error_on_known_failures is False ): warnings += 1 - utils.warn( - "The function '{}' on '{}' does not have a 'CLI Example:' in its docstring", - funcname, - relpath, - ) + if suppress_warnings is False: + ctx.warn( + f"The function '{funcname}' on '{relpath}' does not have a " + "'CLI Example:' in its docstring" + ) annotate( + ctx, "warning", - path.relative_to(CODE_DIR), + path.relative_to(tools.utils.REPO_ROOT), funcdef.lineno, funcdef.body[0].lineno, "Missing 'CLI Example:' in docstring", @@ -1008,14 +1004,13 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal continue errors += 1 exitcode = 1 - utils.error( - "The function '{}' on '{}' does not have a 'CLI Example:' in its docstring", - funcname, - relpath, + ctx.error( + f"The function '{funcname}' on '{relpath}' does not have a 'CLI Example:' in its docstring" ) annotate( + ctx, "error", - path.relative_to(CODE_DIR), + path.relative_to(tools.utils.REPO_ROOT), funcdef.lineno, funcdef.body[0].lineno, "Missing 'CLI Example:' in docstring", @@ -1024,14 +1019,12 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal elif funcname in MISSING_EXAMPLES.get(relpath, ()): # This was previously a know function with a missing CLI example # Warn about it so that it get's removed from this list - warnings += 1 - utils.warn( - "The function '{}' on '{}' was previously known to not have a CLI Example, " - "which is no longer the case. Please remove it from 'MISSING_EXAMPLES'. " - "in '{}'", - funcname, - relpath, - THIS_FILE, + errors += 1 + exitcode = 1 + ctx.error( + f"The function '{funcname}' on '{relpath}' was previously known to not " + "have a CLI Example, which is no longer the case. 
Please remove it from " + f"'MISSING_EXAMPLES' in '{THIS_FILE}'" ) if check_proper_formatting is False: @@ -1042,20 +1035,22 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal if _check_cli_example_proper_formatting(docstring) is False: errors += 1 exitcode = 1 - utils.error( + ctx.error( "The function {!r} on '{}' does not have a proper 'CLI Example:' section in " "its docstring. The proper format is:\n" "CLI Example:\n" "\n" ".. code-block:: bash\n" "\n" - " salt '*' \n", - funcdef.name, - path.relative_to(CODE_DIR), + " salt '*' \n".format( + funcdef.name, + path.relative_to(tools.utils.REPO_ROOT), + ) ) annotate( + ctx, "warning", - path.relative_to(CODE_DIR), + path.relative_to(tools.utils.REPO_ROOT), funcdef.lineno, funcdef.body[0].lineno, "Wrong format in 'CLI Example:' in docstring.\n" @@ -1072,15 +1067,15 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal path.write_text(contents) if warnings: - utils.warn("Found {} warnings", warnings) + ctx.warn(f"Found {warnings} warnings") if exitcode: - utils.error("Found {} errors", errors) + ctx.error(f"Found {errors} errors") if os.environ.get("GH_ACTIONS_ANNOTATE") and (warnings or errors): github_step_summary = os.environ.get("GITHUB_STEP_SUMMARY") if github_step_summary: with open(github_step_summary, "w", encoding="utf-8") as wfh: wfh.write(SUMMARY) - utils.exit_invoke(exitcode) + ctx.exit(exitcode) CHECK_VALID_VERSION_RE = re.compile( diff --git a/tools/precommit/filemap.py b/tools/precommit/filemap.py new file mode 100644 index 00000000000..96a662fa7e7 --- /dev/null +++ b/tools/precommit/filemap.py @@ -0,0 +1,91 @@ +""" +`tests/filename_map.yml` validity checks +""" +import pathlib +import re + +import yaml +from ptscripts import Context, command_group + +import tools.utils + +FILENAME_MAP_PATH = tools.utils.REPO_ROOT / "tests" / "filename_map.yml" + +cgroup = command_group(name="filemap", help=__doc__, parent="pre-commit") + + +def 
_match_to_test_file(match: str) -> pathlib.Path: + tests_path = tools.utils.REPO_ROOT / "tests" + parts = match.split(".") + parts[-1] += ".py" + return tests_path.joinpath(*parts).relative_to(tools.utils.REPO_ROOT) + + +def _check_matches(ctx: Context, rule: str, matches: list[str]) -> int: + errors = 0 + for match in matches: + filematch = _match_to_test_file(match) + if not filematch.exists(): + ctx.error( + f"The match '{match}' for rule '{rule}' points to a non " + f"existing test module path: {filematch}" + ) + errors += 1 + return errors + + +@cgroup.command( + name="check", +) +def check(ctx: Context) -> None: + exitcode = 0 + excludes = ("tools/", "templates/", ".nox/") + full_filelist = [ + path.relative_to(tools.utils.REPO_ROOT) + for path in tools.utils.REPO_ROOT.rglob("*.py") + ] + filelist = [ + str(path) for path in full_filelist if not str(path).startswith(excludes) + ] + filename_map = yaml.safe_load(FILENAME_MAP_PATH.read_text()) + for rule, matches in filename_map.items(): + if rule == "*": + exitcode += _check_matches(ctx, rule, matches) + elif "|" in rule: + # This is regex + for filepath in filelist: + if re.match(rule, filepath): + # Found at least one match, stop looking + break + else: + ctx.error( + f"Could not find a matching file in the salt repo for the rule '{rule}'" + ) + exitcode += 1 + continue + exitcode += _check_matches(ctx, rule, matches) + elif "*" in rule or "\\" in rule: + # Glob matching + process_matches = True + for filerule in tools.utils.REPO_ROOT.glob(rule): + if not filerule.exists(): + ctx.error( + f"The rule '{rule}' points to a non existing path: {filerule}" + ) + exitcode += 1 + process_matches = False + if process_matches: + exitcode += _check_matches(ctx, rule, matches) + else: + # Direct file paths as rules + filerule = pathlib.Path(rule) + if not filerule.exists(): + ctx.error( + f"The rule '{rule}' points to a non existing path: {filerule}" + ) + exitcode += 1 + continue + exitcode += _check_matches(ctx, rule, 
matches) + if exitcode: + ctx.error(f"Found {exitcode} errors") + ctx.exit(exitcode) diff --git a/tasks/loader.py b/tools/precommit/loader.py similarity index 58% rename from tasks/loader.py rename to tools/precommit/loader.py index d65e5e28591..bbec9c00f92 100644 --- a/tasks/loader.py +++ b/tools/precommit/loader.py @@ -1,24 +1,35 @@ """ - tasks.loader - ~~~~~~~~~~~~ - - Salt loader checks +Salt loader checks """ import ast import pathlib -from invoke import task # pylint: disable=3rd-party-module-not-gated +from ptscripts import Context, command_group -from salt.loader import SALT_INTERNAL_LOADERS_PATHS -from tasks import utils +import tools.utils +from tools.precommit import SALT_INTERNAL_LOADERS_PATHS -CODE_DIR = pathlib.Path(__file__).resolve().parent.parent -SALT_CODE_DIR = CODE_DIR / "salt" +SALT_CODE_DIR = tools.utils.REPO_ROOT / "salt" + +cgroup = command_group(name="salt-loaders", help=__doc__, parent="pre-commit") -@task(iterable=["files"], positional=["files"]) -def check_virtual(ctx, files, enforce_virtualname=False): +@cgroup.command( + name="check-virtual", + arguments={ + "files": { + "help": "List of files to check", + "nargs": "*", + }, + "enforce_virtualname": { + "help": "Enforce the usage of `__virtualname__`", + }, + }, +) +def check_virtual( + ctx: Context, files: list[pathlib.Path], enforce_virtualname: bool = False +) -> None: """ Check Salt loader modules for a defined `__virtualname__` attribute and `__virtual__` function. @@ -26,23 +37,10 @@ def check_virtual(ctx, files, enforce_virtualname=False): https://github.com/saltstack/salt/blob/27ae8260983b11fe6e32a18e777d550be9fe1dc2/tests/unit/test_virtualname.py """ - # CD into Salt's repo root directory - ctx.cd(CODE_DIR) - - # Unfortunately invoke does not support nargs. 
- # We migth have been passed --files="foo.py bar.py" - # Turn that into a list of paths - _files = [] - for path in files: - if not path: - continue - _files.extend(path.split()) - if not _files: - _files = SALT_CODE_DIR.rglob("*.py") + if not files: + _files = list(SALT_CODE_DIR.rglob("*.py")) else: - _files = [pathlib.Path(fname) for fname in _files] - - _files = [path.resolve() for path in _files] + _files = [fpath.resolve() for fpath in files if fpath.suffix == ".py"] errors = 0 exitcode = 0 @@ -78,14 +76,15 @@ def check_virtual(ctx, files, enforce_virtualname=False): continue if target.id == "__virtualname__": found_virtualname_attr = True - if node.value.s not in path.name: + if node.value.s not in path.name: # type: ignore[attr-defined] errors += 1 exitcode = 1 - utils.error( + ctx.error( 'The value of the __virtualname__ attribute, "{}"' - " is not part of {}", - node.value.s, - path.name, + " is not part of {}".format( + node.value.s, # type: ignore[attr-defined] + path.name, + ) ) if found_virtualname_attr: break @@ -93,11 +92,10 @@ def check_virtual(ctx, files, enforce_virtualname=False): if not found_virtualname_attr and enforce_virtualname: errors += 1 exitcode = 1 - utils.error( - "The salt loader module {} defines a __virtual__() function but does" - " not define a __virtualname__ attribute", - path.relative_to(CODE_DIR), + ctx.error( + f"The salt loader module {path.relative_to(tools.utils.REPO_ROOT)} defines " + "a __virtual__() function but does not define a __virtualname__ attribute" ) if exitcode: - utils.error("Found {} errors", errors) - utils.exit_invoke(exitcode) + ctx.error(f"Found {errors} errors") + ctx.exit(exitcode) diff --git a/tools/pre_commit.py b/tools/precommit/workflows.py similarity index 71% rename from tools/pre_commit.py rename to tools/precommit/workflows.py index 0cd219f139a..3a65b368b93 100644 --- a/tools/pre_commit.py +++ b/tools/precommit/workflows.py @@ -1,5 +1,5 @@ """ -These commands are used by pre-commit. 
+These commands are used for our GitHub Actions workflows. """ # pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated from __future__ import annotations @@ -19,9 +19,13 @@ log = logging.getLogger(__name__) WORKFLOWS = tools.utils.REPO_ROOT / ".github" / "workflows" TEMPLATES = WORKFLOWS / "templates" + # Define the command group cgroup = command_group( - name="pre-commit", help="Pre-Commit Related Commands", description=__doc__ + name="workflows", + help="Pre-Commit GH Actions Workflows Related Commands", + description=__doc__, + parent="pre-commit", ) @@ -86,31 +90,34 @@ def generate_workflows(ctx: Context): } test_salt_listing = { "linux": [ - ("almalinux-8", "Alma Linux 8", "x86_64"), - ("almalinux-9", "Alma Linux 9", "x86_64"), - ("amazonlinux-2", "Amazon Linux 2", "x86_64"), - ("archlinux-lts", "Arch Linux LTS", "x86_64"), - ("centos-7", "CentOS 7", "x86_64"), - ("centosstream-8", "CentOS Stream 8", "x86_64"), - ("centosstream-9", "CentOS Stream 9", "x86_64"), - ("debian-10", "Debian 10", "x86_64"), - ("debian-11", "Debian 11", "x86_64"), - ("debian-11-arm64", "Debian 11 Arm64", "aarch64"), - ("debian-12", "Debian 12", "x86_64"), - ("debian-12-arm64", "Debian 12 Arm64", "aarch64"), - ("fedora-37", "Fedora 37", "x86_64"), - ("fedora-38", "Fedora 38", "x86_64"), - ("opensuse-15", "Opensuse 15", "x86_64"), - ("photonos-3", "Photon OS 3", "x86_64"), - ("photonos-3-arm64", "Photon OS 3 Arm64", "aarch64"), - ("photonos-4", "Photon OS 4", "x86_64"), - ("photonos-4-arm64", "Photon OS 4 Arm64", "aarch64"), - ("photonos-5", "Photon OS 5", "x86_64"), - ("photonos-5-arm64", "Photon OS 5 Arm64", "aarch64"), - ("ubuntu-20.04", "Ubuntu 20.04", "x86_64"), - ("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64"), - ("ubuntu-22.04", "Ubuntu 22.04", "x86_64"), - ("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64"), + ("almalinux-8", "Alma Linux 8", "x86_64", "no-fips"), + ("almalinux-9", "Alma Linux 9", "x86_64", "no-fips"), + ("amazonlinux-2", "Amazon 
Linux 2", "x86_64", "no-fips"), + ("amazonlinux-2-arm64", "Amazon Linux 2 Arm64", "aarch64", "no-fips"), + ("amazonlinux-2023", "Amazon Linux 2023", "x86_64", "no-fips"), + ("amazonlinux-2023-arm64", "Amazon Linux 2023 Arm64", "aarch64", "no-fips"), + ("archlinux-lts", "Arch Linux LTS", "x86_64", "no-fips"), + ("centos-7", "CentOS 7", "x86_64", "no-fips"), + ("centosstream-8", "CentOS Stream 8", "x86_64", "no-fips"), + ("centosstream-9", "CentOS Stream 9", "x86_64", "no-fips"), + ("debian-10", "Debian 10", "x86_64", "no-fips"), + ("debian-11", "Debian 11", "x86_64", "no-fips"), + ("debian-11-arm64", "Debian 11 Arm64", "aarch64", "no-fips"), + ("debian-12", "Debian 12", "x86_64", "no-fips"), + ("debian-12-arm64", "Debian 12 Arm64", "aarch64", "no-fips"), + ("fedora-37", "Fedora 37", "x86_64", "no-fips"), + ("fedora-38", "Fedora 38", "x86_64", "no-fips"), + ("opensuse-15", "Opensuse 15", "x86_64", "no-fips"), + ("photonos-3", "Photon OS 3", "x86_64", "no-fips"), + ("photonos-3-arm64", "Photon OS 3 Arm64", "aarch64", "no-fips"), + ("photonos-4", "Photon OS 4", "x86_64", "fips"), + ("photonos-4-arm64", "Photon OS 4 Arm64", "aarch64", "fips"), + ("photonos-5", "Photon OS 5", "x86_64", "fips"), + ("photonos-5-arm64", "Photon OS 5 Arm64", "aarch64", "fips"), + ("ubuntu-20.04", "Ubuntu 20.04", "x86_64", "no-fips"), + ("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64", "no-fips"), + ("ubuntu-22.04", "Ubuntu 22.04", "x86_64", "no-fips"), + ("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64", "no-fips"), ], "macos": [ ("macos-12", "macOS 12", "x86_64"), @@ -122,27 +129,50 @@ def generate_workflows(ctx: Context): ("windows-2022", "Windows 2022", "amd64"), ], } + test_salt_pkg_listing = { "linux": [ - ("amazonlinux-2", "Amazon Linux 2", "x86_64", "rpm"), - ("centos-7", "CentOS 7", "x86_64", "rpm"), - ("centosstream-8", "CentOS Stream 8", "x86_64", "rpm"), - ("centosstream-9", "CentOS Stream 9", "x86_64", "rpm"), - ("debian-10", "Debian 10", "x86_64", "deb"), - 
("debian-11", "Debian 11", "x86_64", "deb"), - ("debian-11-arm64", "Debian 11 Arm64", "aarch64", "deb"), - ("debian-12", "Debian 12", "x86_64", "deb"), - ("debian-12-arm64", "Debian 12 Arm64", "aarch64", "deb"), - ("photonos-3", "Photon OS 3", "x86_64", "rpm"), - ("photonos-3-arm64", "Photon OS 3 Arm64", "aarch64", "rpm"), - ("photonos-4", "Photon OS 4", "x86_64", "rpm"), - ("photonos-4-arm64", "Photon OS 4 Arm64", "aarch64", "rpm"), - ("photonos-5", "Photon OS 5", "x86_64", "rpm"), - ("photonos-5-arm64", "Photon OS 5 Arm64", "aarch64", "rpm"), - ("ubuntu-20.04", "Ubuntu 20.04", "x86_64", "deb"), - ("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64", "deb"), - ("ubuntu-22.04", "Ubuntu 22.04", "x86_64", "deb"), - ("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64", "deb"), + ("amazonlinux-2", "Amazon Linux 2", "x86_64", "rpm", "no-fips"), + ( + "amazonlinux-2-arm64", + "Amazon Linux 2 Arm64", + "aarch64", + "rpm", + "no-fips", + ), + ("amazonlinux-2023", "Amazon Linux 2023", "x86_64", "rpm", "no-fips"), + ( + "amazonlinux-2023-arm64", + "Amazon Linux 2023 Arm64", + "aarch64", + "rpm", + "no-fips", + ), + ("centos-7", "CentOS 7", "x86_64", "rpm", "no-fips"), + ("centosstream-8", "CentOS Stream 8", "x86_64", "rpm", "no-fips"), + ("centosstream-9", "CentOS Stream 9", "x86_64", "rpm", "no-fips"), + ( + "centosstream-9-arm64", + "CentOS Stream 9 Arm64", + "aarch64", + "rpm", + "no-fips", + ), + ("debian-10", "Debian 10", "x86_64", "deb", "no-fips"), + ("debian-11", "Debian 11", "x86_64", "deb", "no-fips"), + ("debian-11-arm64", "Debian 11 Arm64", "aarch64", "deb", "no-fips"), + ("debian-12", "Debian 12", "x86_64", "deb", "no-fips"), + ("debian-12-arm64", "Debian 12 Arm64", "aarch64", "deb", "no-fips"), + ("photonos-3", "Photon OS 3", "x86_64", "rpm", "no-fips"), + ("photonos-3-arm64", "Photon OS 3 Arm64", "aarch64", "rpm", "no-fips"), + ("photonos-4", "Photon OS 4", "x86_64", "rpm", "fips"), + ("photonos-4-arm64", "Photon OS 4 Arm64", "aarch64", "rpm", "fips"), + 
("photonos-5", "Photon OS 5", "x86_64", "rpm", "fips"), + ("photonos-5-arm64", "Photon OS 5 Arm64", "aarch64", "rpm", "fips"), + ("ubuntu-20.04", "Ubuntu 20.04", "x86_64", "deb", "no-fips"), + ("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64", "deb", "no-fips"), + ("ubuntu-22.04", "Ubuntu 22.04", "x86_64", "deb", "no-fips"), + ("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64", "deb", "no-fips"), ], "macos": [ ("macos-12", "macOS 12", "x86_64"), @@ -154,6 +184,7 @@ def generate_workflows(ctx: Context): ("windows-2022", "Windows 2022", "amd64"), ], } + build_ci_deps_listing = { "linux": [ ("almalinux-8", "Alma Linux 8", "x86_64"), @@ -162,6 +193,8 @@ def generate_workflows(ctx: Context): ("almalinux-9-arm64", "Alma Linux 9 Arm64", "aarch64"), ("amazonlinux-2", "Amazon Linux 2", "x86_64"), ("amazonlinux-2-arm64", "Amazon Linux 2 Arm64", "aarch64"), + ("amazonlinux-2023", "Amazon Linux 2023", "x86_64"), + ("amazonlinux-2023-arm64", "Amazon Linux 2023 Arm64", "aarch64"), ("archlinux-lts", "Arch Linux LTS", "x86_64"), ("centos-7", "CentOS 7", "x86_64"), ("centos-7-arm64", "CentOS 7 Arm64", "aarch64"), @@ -205,10 +238,26 @@ def generate_workflows(ctx: Context): "macos": [], "windows": [], } + rpm_slugs = [ + "almalinux", + "amazonlinux", + "centos", + "centosstream", + "fedora", + "photon", + ] for slug, display_name, arch in build_ci_deps_listing["linux"]: if slug in ("archlinux-lts", "opensuse-15"): continue test_salt_pkg_downloads_listing["linux"].append((slug, arch, "package")) + # Account for old arm64 repo paths + if arch == "aarch64": + for test_slug in rpm_slugs: + if slug.startswith(test_slug): + test_salt_pkg_downloads_listing["linux"].append( + (slug, "arm64", "package") + ) + break for slug, display_name, arch in build_ci_deps_listing["linux"][-2:]: if slug in ("archlinux-lts", "opensuse-15"): continue diff --git a/tools/release.py b/tools/release.py index d8a305b829d..9bc5b07eff5 100644 --- a/tools/release.py +++ b/tools/release.py @@ -8,28 +8,17 @@ 
import json import logging import os import pathlib -import sys import tempfile import time +import boto3 import virustotal3.core +from botocore.exceptions import ClientError from ptscripts import Context, command_group import tools.utils import tools.utils.repo -try: - import boto3 - from botocore.exceptions import ClientError -except ImportError: - print( - "\nPlease run 'python -m pip install -r " - "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info), - file=sys.stderr, - flush=True, - ) - raise - log = logging.getLogger(__name__) # Define the command group diff --git a/tools/testsuite/download.py b/tools/testsuite/download.py index cd6d51aa5fc..f5bba856aed 100644 --- a/tools/testsuite/download.py +++ b/tools/testsuite/download.py @@ -190,7 +190,7 @@ def download_artifact( repository: str = "saltstack/salt", ): """ - Download CI built packages artifacts. + Download CI artifacts. """ if TYPE_CHECKING: assert artifact_name is not None @@ -214,6 +214,8 @@ def download_artifact( repository=repository, artifact_name=str(artifact_name), ) + if TYPE_CHECKING: + assert succeeded is not None ctx.info(succeeded) if succeeded: ctx.info(f"Downloaded {artifact_name} to {dest}") diff --git a/tools/utils/__init__.py b/tools/utils/__init__.py index d4331c178ca..1dbf85113ba 100644 --- a/tools/utils/__init__.py +++ b/tools/utils/__init__.py @@ -14,7 +14,9 @@ from datetime import datetime from enum import IntEnum from typing import Any +import boto3 import packaging.version +from botocore.exceptions import ClientError from ptscripts import Context from rich.progress import ( BarColumn, @@ -220,7 +222,7 @@ def download_file( ctx: Context, url: str, dest: pathlib.Path, - auth: str | None = None, + auth: tuple[str, str] | None = None, headers: dict[str, str] | None = None, ) -> pathlib.Path: ctx.info(f"Downloading {dest.name!r} @ {url} ...") @@ -238,7 +240,7 @@ def download_file( return dest wget = shutil.which("wget") if wget is not None: - with 
ctx.cwd(dest.parent): + with ctx.chdir(dest.parent): command = [wget, "--no-verbose"] if headers: for key, value in headers.items(): @@ -251,7 +253,8 @@ def download_file( return dest # NOTE the stream=True parameter below with ctx.web as web: - web.headers.update(headers) + if headers: + web.headers.update(headers) with web.get(url, stream=True, auth=auth) as r: r.raise_for_status() with dest.open("wb") as f: diff --git a/tools/vm.py b/tools/vm.py index d4aefd9837b..a8fa51ea748 100644 --- a/tools/vm.py +++ b/tools/vm.py @@ -21,33 +21,22 @@ from datetime import datetime from functools import lru_cache from typing import TYPE_CHECKING, cast +import attr +import boto3 +from botocore.exceptions import ClientError from ptscripts import Context, command_group from requests.exceptions import ConnectTimeout +from rich.progress import ( + BarColumn, + Column, + Progress, + TaskProgressColumn, + TextColumn, + TimeRemainingColumn, +) import tools.utils -try: - import attr - import boto3 - from botocore.exceptions import ClientError - from rich.progress import ( - BarColumn, - Column, - Progress, - TaskProgressColumn, - TextColumn, - TimeRemainingColumn, - ) -except ImportError: - print( - "\nPlease run 'python -m pip install -r " - "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info), - file=sys.stderr, - flush=True, - ) - raise - - if TYPE_CHECKING: # pylint: disable=no-name-in-module from boto3.resources.factory.ec2 import Instance @@ -222,14 +211,18 @@ def ssh(ctx: Context, name: str, command: list[str], sudo: bool = False): "help": "The VM Name", "metavar": "VM_NAME", }, + "download": { + "help": "Rsync from the remote target to local salt checkout", + "action": "store_true", + }, } ) -def rsync(ctx: Context, name: str): +def rsync(ctx: Context, name: str, download: bool = False): """ Sync local checkout to VM. 
""" vm = VM(ctx=ctx, name=name, region_name=ctx.parser.options.region) - vm.upload_checkout() + vm.upload_checkout(download=download) @vm.command( @@ -302,6 +295,7 @@ def test( print_system_info: bool = False, skip_code_coverage: bool = False, envvars: list[str] = None, + fips: bool = False, ): """ Run test in the VM. @@ -337,6 +331,9 @@ def test( if "photonos" in name: skip_known_failures = os.environ.get("SKIP_INITIAL_PHOTONOS_FAILURES", "1") env["SKIP_INITIAL_PHOTONOS_FAILURES"] = skip_known_failures + if fips: + env["FIPS_TESTRUN"] = "1" + vm.run(["tdnf", "install", "-y", "openssl-fips-provider"], sudo=True) if envvars: for key in envvars: if key not in os.environ: @@ -849,6 +846,9 @@ class VM: forward_agent = "no" else: forward_agent = "yes" + ciphers = "" + if "photonos" in self.name: + ciphers = "Ciphers=aes256-gcm@openssh.com,aes256-cbc,aes256-ctr,chacha20-poly1305@openssh.com,aes128-ctr,aes192-ctr,aes128-gcm@openssh.com" ssh_config = textwrap.dedent( f"""\ Host {self.name} @@ -860,7 +860,8 @@ class VM: StrictHostKeyChecking=no UserKnownHostsFile=/dev/null ForwardAgent={forward_agent} - PasswordAuthentication no + PasswordAuthentication=no + {ciphers} """ ) self.ssh_config_file.write_text(ssh_config) @@ -1293,7 +1294,7 @@ class VM: shutil.rmtree(self.state_dir, ignore_errors=True) self.instance = None - def upload_checkout(self, verbose=True): + def upload_checkout(self, verbose=True, download=False): rsync_flags = [ "--delete", "--no-group", @@ -1301,6 +1302,8 @@ class VM: "--exclude", ".nox/", "--exclude", + ".tools-venvs/", + "--exclude", ".pytest_cache/", "--exclude", f"{STATE_DIR.relative_to(tools.utils.REPO_ROOT)}{os.path.sep}", @@ -1326,14 +1329,19 @@ class VM: # Remote repo path remote_path = self.upload_path.as_posix() rsync_remote_path = remote_path - if self.is_windows: + if sys.platform == "win32": for drive in ("c:", "C:"): source = source.replace(drive, "/cygdrive/c") - rsync_remote_path = rsync_remote_path.replace(drive, "/cygdrive/c") source 
= source.replace("\\", "/") + if self.is_windows: + for drive in ("c:", "C:"): + rsync_remote_path = rsync_remote_path.replace(drive, "/cygdrive/c") destination = f"{self.name}:{rsync_remote_path}" description = "Rsync local checkout to VM..." - self.rsync(source, destination, description, rsync_flags) + if download: + self.rsync(f"{destination}/*", source, description, rsync_flags) + else: + self.rsync(source, destination, description, rsync_flags) if self.is_windows: # rsync sets very strict file permissions and disables inheritance # we only need to reset permissions so they inherit from the parent @@ -1520,16 +1528,17 @@ class VM: self.ctx.exit(1, "Could find the 'rsync' binary") if TYPE_CHECKING: assert rsync + ssh_cmd = " ".join( + self.ssh_command_args( + include_vm_target=False, log_command_level=logging.NOTSET + ) + ) cmd: list[str] = [ - rsync, + f'"{rsync}"' if sys.platform == "win32" else rsync, "-az", "--info=none,progress2", "-e", - " ".join( - self.ssh_command_args( - include_vm_target=False, log_command_level=logging.NOTSET - ) - ), + f'"{ssh_cmd}"' if sys.platform == "win32" else ssh_cmd, ] if rsync_flags: cmd.extend(rsync_flags) @@ -1542,6 +1551,8 @@ class VM: log.info(f"Running {' '.join(cmd)!r}") # type: ignore[arg-type] progress = create_progress_bar(transient=True) task = progress.add_task(description, total=100) + if sys.platform == "win32": + cmd = [" ".join(cmd)] with progress: proc = subprocess.Popen(cmd, bufsize=1, stdout=subprocess.PIPE, text=True) completed = 0