From c2a6550bc1d5d8756b22ebf0d155c6f28bece6d0 Mon Sep 17 00:00:00 2001 From: hurzhurz Date: Fri, 26 Apr 2024 09:48:47 +0200 Subject: [PATCH 001/160] fix wintask.info: ExecutionTimeLimit and unknown values ExecutionTimeLimit: can be nothing or "PT0S" if not used _reverse_lookup: if not found, return "invalid value" instead of the last dict element --- salt/modules/win_task.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/salt/modules/win_task.py b/salt/modules/win_task.py index 2b23e381bc0..78a5201e9f8 100644 --- a/salt/modules/win_task.py +++ b/salt/modules/win_task.py @@ -253,6 +253,8 @@ def _reverse_lookup(dictionary, value): value_index = idx break + if value_index < 0: + return "invalid value" return list(dictionary)[value_index] @@ -1494,7 +1496,7 @@ def info(name, location="\\"): duration, def_set.DeleteExpiredTaskAfter ) - if def_set.ExecutionTimeLimit == "": + if def_set.ExecutionTimeLimit == "" or def_set.ExecutionTimeLimit == "PT0S": settings["execution_time_limit"] = False else: settings["execution_time_limit"] = _reverse_lookup( From cf63bb34454498c2fde82de4220299dc342ad044 Mon Sep 17 00:00:00 2001 From: hurzhurz Date: Fri, 26 Apr 2024 09:50:33 +0200 Subject: [PATCH 002/160] fix win_task result and error code interpretation correct high result/error code numbers that are returned as negative numbers by win32com. handle unknown LastTaskResult result codes. 
--- salt/modules/win_task.py | 36 ++++++++++++++++++++++++++---------- 1 file changed, 26 insertions(+), 10 deletions(-) diff --git a/salt/modules/win_task.py b/salt/modules/win_task.py index 78a5201e9f8..e763e420fcb 100644 --- a/salt/modules/win_task.py +++ b/salt/modules/win_task.py @@ -182,6 +182,15 @@ def __virtual__(): return False, "Module win_task: module only works on Windows systems" +def _signed_to_unsigned_int32(code): + """ + Convert negative result and error codes from win32com + """ + if code < 0: + code = code + 2**32 + return code + + def _get_date_time_format(dt_string): """ Copied from win_system.py (_get_date_time_format) @@ -313,19 +322,20 @@ def _save_task_definition( except pythoncom.com_error as error: hr, msg, exc, arg = error.args # pylint: disable=W0633 + error_code = _signed_to_unsigned_int32(exc[5]) fc = { - -2147024773: ( + 0x8007007B: ( "The filename, directory name, or volume label syntax is incorrect" ), - -2147024894: "The system cannot find the file specified", - -2147216615: "Required element or attribute missing", - -2147216616: "Value incorrectly formatted or out of range", - -2147352571: "Access denied", + 0x80070002: "The system cannot find the file specified", + 0x80041319: "Required element or attribute missing", + 0x80041318: "Value incorrectly formatted or out of range", + 0x80020005: "Access denied", } try: - failure_code = fc[exc[5]] + failure_code = fc[error_code] except KeyError: - failure_code = f"Unknown Failure: {error}" + failure_code = f"Unknown Failure: {hex(error_code)}" log.debug("Failed to modify task: %s", failure_code) @@ -685,7 +695,7 @@ def create_task_from_xml( except pythoncom.com_error as error: hr, msg, exc, arg = error.args # pylint: disable=W0633 - error_code = hex(exc[5] + 2**32) + error_code = _signed_to_unsigned_int32(exc[5]) fc = { 0x80041319: "Required element or attribute missing", 0x80041318: "Value incorrectly formatted or out of range", @@ -733,7 +743,7 @@ def create_task_from_xml( try: 
failure_code = fc[error_code] except KeyError: - failure_code = f"Unknown Failure: {error_code}" + failure_code = f"Unknown Failure: {hex(error_code)}" finally: log.debug("Failed to create task: %s", failure_code) raise CommandExecutionError(failure_code) @@ -1471,10 +1481,16 @@ def info(name, location="\\"): task_folder = task_service.GetFolder(location) task = task_folder.GetTask(name) + last_task_result_code = _signed_to_unsigned_int32(task.LastTaskResult) + try: + last_task_result = results[last_task_result_code] + except KeyError: + last_task_result = f"Unknown Task Result: {hex(last_task_result_code)}" + properties = { "enabled": task.Enabled, "last_run": _get_date_value(task.LastRunTime), - "last_run_result": results[task.LastTaskResult], + "last_run_result": last_task_result, "missed_runs": task.NumberOfMissedRuns, "next_run": _get_date_value(task.NextRunTime), "status": states[task.State], From de651d6701e669e8c4274271bf58b3ef24991350 Mon Sep 17 00:00:00 2001 From: hurzhurz Date: Thu, 2 May 2024 15:55:41 +0200 Subject: [PATCH 003/160] add tests --- tests/pytests/unit/modules/test_win_task.py | 91 +++++++++++++++++++++ 1 file changed, 91 insertions(+) diff --git a/tests/pytests/unit/modules/test_win_task.py b/tests/pytests/unit/modules/test_win_task.py index ee61d739b99..fa405125d11 100644 --- a/tests/pytests/unit/modules/test_win_task.py +++ b/tests/pytests/unit/modules/test_win_task.py @@ -7,6 +7,7 @@ from datetime import datetime import pytest import salt.modules.win_task as win_task +from salt.exceptions import CommandExecutionError pytestmark = [ pytest.mark.skip_unless_on_windows, @@ -85,3 +86,93 @@ def test_edit_task_delete_after(base_task): result = win_task.info(base_task) assert result["settings"]["delete_after"] is False + + +def test_execution_time_limit(base_task): + result = win_task.add_trigger( + base_task, + trigger_type="Daily", + trigger_enabled=True, + end_date=datetime.today().strftime("%Y-%m-%d"), + end_time="23:59:59", + ) + assert 
result is True + + result = win_task.edit_task(base_task, execution_time_limit="1 hour") + assert result is True + + result = win_task.info(base_task) + assert result["settings"]["execution_time_limit"] == "1 hour" + + result = win_task.edit_task(base_task, execution_time_limit=False) + assert result is True + + result = win_task.info(base_task) + assert result["settings"]["execution_time_limit"] is False + + +@pytest.mark.parametrize( + "exitcode, expect", + [ + (0, "The operation completed successfully"), + (3221225786, "The application terminated as a result of CTRL+C"), + (4289449455, "Unknown Task Result: 0xffabcdef"), + ], +) +def test_run_result_code(exitcode, expect): + task_name = "SaltTest" + try: + result = win_task.create_task( + task_name, + user_name="System", + force=True, + action_type="Execute", + cmd="cmd.exe", + arguments=f"/c exit {exitcode}", + ) + assert result is True + + result = win_task.info(task_name) + assert result["last_run_result"] == "Task has not yet run" + + result = win_task.run_wait(task_name) + assert result is True + + result = win_task.info(task_name) + assert result["last_run_result"] == expect + finally: + result = win_task.delete_task(task_name) + assert result is True + + +def test_create_task_from_xml(): + task_name = "SaltTest" + task_xml = 'cmd.exe/c exit' + try: + result = win_task.create_task_from_xml( + task_name, user_name="System", xml_text=task_xml + ) + assert result is True + + result = win_task.info(task_name) + assert result["actions"][0]["action_type"] == "Execute" + assert result["actions"][0]["cmd"] == "cmd.exe" + assert result["actions"][0]["arguments"] == "/c exit" + + finally: + result = win_task.delete_task(task_name) + assert result is True + + +def test_create_task_from_xml_error(): + task_name = "SaltTest" + try: + with pytest.raises(CommandExecutionError) as excinfo: + result = win_task.create_task_from_xml( + task_name, user_name="System", xml_text="test" + ) + assert result is False + assert "The 
task XML is malformed" in str(excinfo.value) + finally: + result = win_task.delete_task(task_name) + assert result is not True From 3baaf79cecb266882fb63b9daa371e05b36eab5c Mon Sep 17 00:00:00 2001 From: hurzhurz Date: Thu, 23 May 2024 14:15:34 +0200 Subject: [PATCH 004/160] add changelog --- changelog/66347.fixed.md | 1 + changelog/66441.fixed.md | 1 + 2 files changed, 2 insertions(+) create mode 100644 changelog/66347.fixed.md create mode 100644 changelog/66441.fixed.md diff --git a/changelog/66347.fixed.md b/changelog/66347.fixed.md new file mode 100644 index 00000000000..e61e5ce64a9 --- /dev/null +++ b/changelog/66347.fixed.md @@ -0,0 +1 @@ +Fix win_task ExecutionTimeLimit and result/error code interpretation diff --git a/changelog/66441.fixed.md b/changelog/66441.fixed.md new file mode 100644 index 00000000000..e61e5ce64a9 --- /dev/null +++ b/changelog/66441.fixed.md @@ -0,0 +1 @@ +Fix win_task ExecutionTimeLimit and result/error code interpretation From eb817ac64c73d6031ce334c273e518ed00292c0f Mon Sep 17 00:00:00 2001 From: ScriptAutomate Date: Tue, 28 May 2024 16:35:32 -0500 Subject: [PATCH 005/160] Simplify CODEOWNERS file for PR management --- .github/CODEOWNERS | 53 +--------------------------------------------- 1 file changed, 1 insertion(+), 52 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 3c066bd4837..2a89a565d6f 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -9,55 +9,4 @@ # This file uses an fnmatch-style matching pattern. 
# Team Core -* @saltstack/team-core - -# Team Boto -salt/*/*boto* @saltstack/team-core - -# Team Cloud -salt/cloud/* @saltstack/team-core -salt/utils/openstack/* @saltstack/team-core -salt/utils/aws.py @saltstack/team-core -salt/*/*cloud* @saltstack/team-core - -# Team NetAPI -salt/cli/api.py @saltstack/team-core -salt/client/netapi.py @saltstack/team-core -salt/netapi/* @saltstack/team-core - -# Team Network -salt/proxy/* @saltstack/team-core - -# Team SPM -salt/cli/spm.py @saltstack/team-core -salt/spm/* @saltstack/team-core - -# Team SSH -salt/cli/ssh.py @saltstack/team-core -salt/client/ssh/* @saltstack/team-core -salt/roster/* @saltstack/team-core -salt/runners/ssh.py @saltstack/team-core -salt/*/thin.py @saltstack/team-core - -# Team State -salt/state.py @saltstack/team-core - -# Team SUSE -salt/*/*btrfs* @saltstack/team-core -salt/*/*kubernetes* @saltstack/team-core -salt/*/*pkg* @saltstack/team-core -salt/*/*snapper* @saltstack/team-core -salt/*/*xfs* @saltstack/team-core -salt/*/*zypper* @saltstack/team-core - -# Team Transport -salt/transport/* @saltstack/team-core -salt/utils/zeromq.py @saltstack/team-core - -# Team Windows -salt/*/*win* @saltstack/team-core -salt/modules/reg.py @saltstack/team-core -salt/states/reg.py @saltstack/team-core -tests/*/*win* @saltstack/team-core -tests/*/test_reg.py @saltstack/team-core -tests/pytests/* @saltstack/team-core @s0undt3ch +* @saltstack/salt-core-maintainers From 38faa6806bf9d1754d478f6225e55ba802dbd52e Mon Sep 17 00:00:00 2001 From: ScriptAutomate Date: Tue, 14 May 2024 15:22:04 -0500 Subject: [PATCH 006/160] Migrate to new internal pypi proxy --- .github/workflows/build-deps-ci-action.yml | 15 +++++++++++---- .github/workflows/build-deps-onedir.yml | 7 +++++-- .github/workflows/build-docs.yml | 3 +-- .github/workflows/build-packages.yml | 9 ++++++--- .github/workflows/build-salt-onedir.yml | 10 ++++++++-- .github/workflows/ci.yml | 2 ++ .github/workflows/lint-action.yml | 3 +-- .github/workflows/nightly.yml | 2 
++ .github/workflows/release-tag.yml | 5 +++-- .github/workflows/release-upload-virustotal.yml | 5 +++-- .github/workflows/scheduled.yml | 2 ++ .github/workflows/templates/ci.yml.jinja | 2 ++ .github/workflows/test-action-linux.yml | 11 ++++++++--- .github/workflows/test-action-macos.yml | 11 +++++++++-- .github/workflows/test-action-windows.yml | 11 ++++++++--- .../workflows/test-package-downloads-action.yml | 8 ++++++-- .github/workflows/test-packages-action-linux.yml | 5 +++-- .github/workflows/test-packages-action-macos.yml | 9 +++++++-- .../workflows/test-packages-action-windows.yml | 5 +++-- tools/vm.py | 2 +- 20 files changed, 91 insertions(+), 36 deletions(-) diff --git a/.github/workflows/build-deps-ci-action.yml b/.github/workflows/build-deps-ci-action.yml index 73c328a97f7..a7d2d3da5fa 100644 --- a/.github/workflows/build-deps-ci-action.yml +++ b/.github/workflows/build-deps-ci-action.yml @@ -40,8 +40,9 @@ env: COLUMNS: 190 AWS_MAX_ATTEMPTS: "10" AWS_RETRY_MODE: "adaptive" - PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - PIP_EXTRA_INDEX_URL: https://pypi.org/simple + PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }} + PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }} + PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }} PIP_DISABLE_PIP_VERSION_CHECK: "1" RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1" @@ -52,6 +53,8 @@ jobs: runs-on: ubuntu-latest outputs: matrix-include: ${{ steps.generate-matrix.outputs.matrix }} + env: + PIP_INDEX_URL: https://pypi.org/simple steps: - name: "Throttle Builds" @@ -66,6 +69,8 @@ jobs: uses: ./.github/actions/setup-python-tools-scripts with: cache-prefix: ${{ inputs.cache-prefix }} + env: + PIP_INDEX_URL: https://pypi.org/simple - name: Generate Test Matrix id: generate-matrix @@ -123,7 +128,7 @@ jobs: - name: PyPi Proxy if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' run: | - sed -i '7s;^;--index-url=https://pypi-proxy.saltstack.net/root/local/+simple/ --extra-index-url=https://pypi.org/simple\n;' 
requirements/static/ci/*/*.txt + sed -i '7s;^;--index-url=${{ vars.PIP_INDEX_URL }} --trusted-host ${{ vars.PIP_TRUSTED_HOST }} --extra-index-url=${{ vars.PIP_EXTRA_INDEX_URL }}\n;' requirements/static/ci/*/*.txt - name: Setup Python Tools Scripts if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' @@ -195,6 +200,8 @@ jobs: fail-fast: false matrix: include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include)['macos'] }} + env: + PIP_INDEX_URL: https://pypi.org/simple steps: - name: "Throttle Builds" @@ -321,7 +328,7 @@ jobs: - name: PyPi Proxy if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' run: | - sed -i '7s;^;--index-url=https://pypi-proxy.saltstack.net/root/local/+simple/ --extra-index-url=https://pypi.org/simple\n;' requirements/static/ci/*/*.txt + sed -i '7s;^;--index-url=${{ vars.PIP_INDEX_URL }} --trusted-host ${{ vars.PIP_TRUSTED_HOST }} --extra-index-url=${{ vars.PIP_EXTRA_INDEX_URL }}\n;' requirements/static/ci/*/*.txt - name: Setup Python Tools Scripts if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' diff --git a/.github/workflows/build-deps-onedir.yml b/.github/workflows/build-deps-onedir.yml index 26a5812cc94..1502f662d1a 100644 --- a/.github/workflows/build-deps-onedir.yml +++ b/.github/workflows/build-deps-onedir.yml @@ -32,8 +32,9 @@ env: COLUMNS: 190 AWS_MAX_ATTEMPTS: "10" AWS_RETRY_MODE: "adaptive" - PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - PIP_EXTRA_INDEX_URL: https://pypi.org/simple + PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }} + PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }} + PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }} PIP_DISABLE_PIP_VERSION_CHECK: "1" jobs: @@ -97,6 +98,7 @@ jobs: - ${{ matrix.arch == 'arm64' && 'macos-13-xlarge' || 'macos-12' }} env: USE_S3_CACHE: 'false' + PIP_INDEX_URL: https://pypi.org/simple steps: - name: "Throttle Builds" @@ -147,6 +149,7 @@ jobs: runs-on: windows-latest env: USE_S3_CACHE: 'false' + PIP_INDEX_URL: https://pypi.org/simple 
steps: - name: "Throttle Builds" diff --git a/.github/workflows/build-docs.yml b/.github/workflows/build-docs.yml index 64c19ccb8a5..adeeb2fff67 100644 --- a/.github/workflows/build-docs.yml +++ b/.github/workflows/build-docs.yml @@ -17,8 +17,7 @@ env: COLUMNS: 190 AWS_MAX_ATTEMPTS: "10" AWS_RETRY_MODE: "adaptive" - PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - PIP_EXTRA_INDEX_URL: https://pypi.org/simple + PIP_INDEX_URL: https://pypi.org/simple PIP_DISABLE_PIP_VERSION_CHECK: "1" jobs: diff --git a/.github/workflows/build-packages.yml b/.github/workflows/build-packages.yml index 652bfde94fc..443c1b12260 100644 --- a/.github/workflows/build-packages.yml +++ b/.github/workflows/build-packages.yml @@ -39,8 +39,9 @@ on: env: COLUMNS: 190 - PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - PIP_EXTRA_INDEX_URL: https://pypi.org/simple + PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }} + PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }} + PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }} PIP_DISABLE_PIP_VERSION_CHECK: "1" jobs: @@ -54,7 +55,8 @@ jobs: arch: ${{ github.event.repository.fork && fromJSON('["x86_64"]') || fromJSON('["x86_64", "arm64"]') }} source: - ${{ inputs.source }} - + env: + PIP_INDEX_URL: https://pypi.org/simple runs-on: - ${{ matrix.arch == 'arm64' && 'macos-13-xlarge' || 'macos-12' }} @@ -360,6 +362,7 @@ jobs: SM_CLIENT_CERT_PASSWORD: "${{ secrets.WIN_SIGN_CERT_PASSWORD }}" SM_CLIENT_CERT_FILE_B64: "${{ secrets.WIN_SIGN_CERT_FILE_B64 }}" WIN_SIGN_CERT_SHA1_HASH: "${{ secrets.WIN_SIGN_CERT_SHA1_HASH }}" + PIP_INDEX_URL: https://pypi.org/simple steps: - name: Check Package Signing Enabled diff --git a/.github/workflows/build-salt-onedir.yml b/.github/workflows/build-salt-onedir.yml index 73f9533fb51..5913038bbd2 100644 --- a/.github/workflows/build-salt-onedir.yml +++ b/.github/workflows/build-salt-onedir.yml @@ -32,8 +32,9 @@ env: COLUMNS: 190 AWS_MAX_ATTEMPTS: "10" AWS_RETRY_MODE: "adaptive" - PIP_INDEX_URL: 
https://pypi-proxy.saltstack.net/root/local/+simple/ - PIP_EXTRA_INDEX_URL: https://pypi.org/simple + PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }} + PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }} + PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }} PIP_DISABLE_PIP_VERSION_CHECK: "1" jobs: @@ -102,6 +103,8 @@ jobs: arch: ${{ github.event.repository.fork && fromJSON('["x86_64"]') || fromJSON('["x86_64", "arm64"]') }} runs-on: - ${{ matrix.arch == 'arm64' && 'macos-13-xlarge' || 'macos-12' }} + env: + PIP_INDEX_URL: https://pypi.org/simple steps: - name: "Throttle Builds" @@ -156,6 +159,9 @@ jobs: - x86 - amd64 runs-on: windows-latest + env: + PIP_INDEX_URL: https://pypi.org/simple + steps: - name: "Throttle Builds" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 19308bca406..717682ee55d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -2007,6 +2007,8 @@ jobs: name: Combine Code Coverage if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }} runs-on: ubuntu-latest + env: + PIP_INDEX_URL: https://pypi.org/simple needs: - prepare-workflow - build-ci-deps diff --git a/.github/workflows/lint-action.yml b/.github/workflows/lint-action.yml index 9dc4be360e3..3e1ce1928c7 100644 --- a/.github/workflows/lint-action.yml +++ b/.github/workflows/lint-action.yml @@ -11,8 +11,7 @@ on: env: - PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - PIP_EXTRA_INDEX_URL: https://pypi.org/simple + PIP_INDEX_URL: https://pypi.org/simple PIP_DISABLE_PIP_VERSION_CHECK: "1" diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index df328362644..23b276fbfc3 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -2069,6 +2069,8 @@ jobs: name: Combine Code Coverage if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }} runs-on: ubuntu-latest + env: + PIP_INDEX_URL: https://pypi.org/simple needs: - prepare-workflow - 
build-ci-deps diff --git a/.github/workflows/release-tag.yml b/.github/workflows/release-tag.yml index 66c16da7f97..bc4624ef086 100644 --- a/.github/workflows/release-tag.yml +++ b/.github/workflows/release-tag.yml @@ -19,8 +19,9 @@ on: env: - PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - PIP_EXTRA_INDEX_URL: https://pypi.org/simple + PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }} + PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }} + PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }} permissions: diff --git a/.github/workflows/release-upload-virustotal.yml b/.github/workflows/release-upload-virustotal.yml index 50e71594d50..431ea00039a 100644 --- a/.github/workflows/release-upload-virustotal.yml +++ b/.github/workflows/release-upload-virustotal.yml @@ -20,8 +20,9 @@ env: COLUMNS: 190 AWS_MAX_ATTEMPTS: "10" AWS_RETRY_MODE: "adaptive" - PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - PIP_EXTRA_INDEX_URL: https://pypi.org/simple + PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }} + PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }} + PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }} jobs: upload-virustotal: diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index 8beee3f1ae8..5c355b5dde5 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -2046,6 +2046,8 @@ jobs: name: Combine Code Coverage if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }} runs-on: ubuntu-latest + env: + PIP_INDEX_URL: https://pypi.org/simple needs: - prepare-workflow - build-ci-deps diff --git a/.github/workflows/templates/ci.yml.jinja b/.github/workflows/templates/ci.yml.jinja index 636c327f19c..eef2e77ba84 100644 --- a/.github/workflows/templates/ci.yml.jinja +++ b/.github/workflows/templates/ci.yml.jinja @@ -306,6 +306,8 @@ name: Combine Code Coverage if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }} runs-on: ubuntu-latest + 
env: + PIP_INDEX_URL: https://pypi.org/simple needs: - prepare-workflow <%- for need in test_salt_needs.iter(consume=False) %> diff --git a/.github/workflows/test-action-linux.yml b/.github/workflows/test-action-linux.yml index 0c5c5776327..38a74394403 100644 --- a/.github/workflows/test-action-linux.yml +++ b/.github/workflows/test-action-linux.yml @@ -71,8 +71,9 @@ env: COLUMNS: 190 AWS_MAX_ATTEMPTS: "10" AWS_RETRY_MODE: "adaptive" - PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - PIP_EXTRA_INDEX_URL: https://pypi.org/simple + PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }} + PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }} + PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }} PIP_DISABLE_PIP_VERSION_CHECK: "1" RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1" @@ -98,6 +99,8 @@ jobs: uses: ./.github/actions/setup-python-tools-scripts with: cache-prefix: ${{ inputs.cache-prefix }} + env: + PIP_INDEX_URL: https://pypi.org/simple - name: Generate Test Matrix id: generate-matrix @@ -162,7 +165,7 @@ jobs: - name: PyPi Proxy run: | - sed -i '7s;^;--index-url=https://pypi-proxy.saltstack.net/root/local/+simple/ --extra-index-url=https://pypi.org/simple\n;' requirements/static/ci/*/*.txt + sed -i '7s;^;--index-url=${{ vars.PIP_INDEX_URL }} --trusted-host ${{ vars.PIP_TRUSTED_HOST }} --extra-index-url=${{ vars.PIP_EXTRA_INDEX_URL }}\n;' requirements/static/ci/*/*.txt - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts @@ -310,6 +313,8 @@ jobs: needs: - test - generate-matrix + env: + PIP_INDEX_URL: https://pypi.org/simple steps: - name: Checkout Source Code diff --git a/.github/workflows/test-action-macos.yml b/.github/workflows/test-action-macos.yml index 95b8e86f43d..1e3a873a13c 100644 --- a/.github/workflows/test-action-macos.yml +++ b/.github/workflows/test-action-macos.yml @@ -68,8 +68,9 @@ on: env: COLUMNS: 190 - PIP_INDEX_URL: "https://pypi-proxy.saltstack.net/root/local/+simple/" - PIP_EXTRA_INDEX_URL: "https://pypi.org/simple" 
+ PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }} + PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }} + PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }} PIP_DISABLE_PIP_VERSION_CHECK: "1" RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1" @@ -95,6 +96,8 @@ jobs: uses: ./.github/actions/setup-python-tools-scripts with: cache-prefix: ${{ inputs.cache-prefix }} + env: + PIP_INDEX_URL: https://pypi.org/simple - name: Generate Test Matrix id: generate-matrix @@ -165,6 +168,8 @@ jobs: - name: Install Nox run: | python3 -m pip install 'nox==${{ inputs.nox-version }}' + env: + PIP_INDEX_URL: https://pypi.org/simple - name: Decompress .nox Directory run: | @@ -338,6 +343,8 @@ jobs: needs: - test - generate-matrix + env: + PIP_INDEX_URL: https://pypi.org/simple steps: - name: Checkout Source Code diff --git a/.github/workflows/test-action-windows.yml b/.github/workflows/test-action-windows.yml index 833b5758524..e40354b9489 100644 --- a/.github/workflows/test-action-windows.yml +++ b/.github/workflows/test-action-windows.yml @@ -71,8 +71,9 @@ env: COLUMNS: 190 AWS_MAX_ATTEMPTS: "10" AWS_RETRY_MODE: "adaptive" - PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - PIP_EXTRA_INDEX_URL: https://pypi.org/simple + PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }} + PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }} + PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }} PIP_DISABLE_PIP_VERSION_CHECK: "1" RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1" @@ -98,6 +99,8 @@ jobs: uses: ./.github/actions/setup-python-tools-scripts with: cache-prefix: ${{ inputs.cache-prefix }} + env: + PIP_INDEX_URL: https://pypi.org/simple - name: Generate Test Matrix id: generate-matrix @@ -162,7 +165,7 @@ jobs: - name: PyPi Proxy run: | - sed -i '7s;^;--index-url=https://pypi-proxy.saltstack.net/root/local/+simple/ --extra-index-url=https://pypi.org/simple\n;' requirements/static/ci/*/*.txt + sed -i '7s;^;--index-url=${{ vars.PIP_INDEX_URL }} --trusted-host ${{ vars.PIP_TRUSTED_HOST }} --extra-index-url=${{ 
vars.PIP_EXTRA_INDEX_URL }}\n;' requirements/static/ci/*/*.txt - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts @@ -311,6 +314,8 @@ jobs: needs: - test - generate-matrix + env: + PIP_INDEX_URL: https://pypi.org/simple steps: - name: Checkout Source Code diff --git a/.github/workflows/test-package-downloads-action.yml b/.github/workflows/test-package-downloads-action.yml index 7532813999a..22e3e58bcfb 100644 --- a/.github/workflows/test-package-downloads-action.yml +++ b/.github/workflows/test-package-downloads-action.yml @@ -48,8 +48,9 @@ env: COLUMNS: 190 AWS_MAX_ATTEMPTS: "10" AWS_RETRY_MODE: "adaptive" - PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - PIP_EXTRA_INDEX_URL: https://pypi.org/simple + PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }} + PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }} + PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }} PIP_DISABLE_PIP_VERSION_CHECK: "1" RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1" @@ -74,6 +75,8 @@ jobs: uses: ./.github/actions/setup-python-tools-scripts with: cache-prefix: ${{ inputs.cache-prefix }} + env: + PIP_INDEX_URL: https://pypi.org/simple - name: Generate Test Matrix id: generate-matrix @@ -296,6 +299,7 @@ jobs: runs-on: ${{ matrix.distro-slug == 'macos-13-arm64' && 'macos-13-xlarge' || matrix.distro-slug }} env: USE_S3_CACHE: 'false' + PIP_INDEX_URL: https://pypi.org/simple environment: ${{ inputs.environment }} timeout-minutes: 120 # 2 Hours - More than this and something is wrong strategy: diff --git a/.github/workflows/test-packages-action-linux.yml b/.github/workflows/test-packages-action-linux.yml index a5c00db88ea..432b8e04bb4 100644 --- a/.github/workflows/test-packages-action-linux.yml +++ b/.github/workflows/test-packages-action-linux.yml @@ -65,8 +65,9 @@ env: COLUMNS: 190 AWS_MAX_ATTEMPTS: "10" AWS_RETRY_MODE: "adaptive" - PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - PIP_EXTRA_INDEX_URL: https://pypi.org/simple + PIP_INDEX_URL: 
${{ vars.PIP_INDEX_URL }} + PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }} + PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }} PIP_DISABLE_PIP_VERSION_CHECK: "1" RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1" USE_S3_CACHE: 'true' diff --git a/.github/workflows/test-packages-action-macos.yml b/.github/workflows/test-packages-action-macos.yml index 2e27f1e9849..5e8c3069178 100644 --- a/.github/workflows/test-packages-action-macos.yml +++ b/.github/workflows/test-packages-action-macos.yml @@ -62,8 +62,9 @@ on: env: COLUMNS: 190 - PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - PIP_EXTRA_INDEX_URL: https://pypi.org/simple + PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }} + PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }} + PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }} PIP_DISABLE_PIP_VERSION_CHECK: "1" RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1" @@ -162,6 +163,8 @@ jobs: - name: Install Nox run: | python3 -m pip install 'nox==${{ inputs.nox-version }}' + env: + PIP_INDEX_URL: https://pypi.org/simple - name: Download nox.macos.${{ inputs.arch }}.tar.* artifact for session ${{ inputs.nox-session }} uses: actions/download-artifact@v4 @@ -263,3 +266,5 @@ jobs: - name: Install Nox run: | python3 -m pip install 'nox==${{ inputs.nox-version }}' + env: + PIP_INDEX_URL: https://pypi.org/simple diff --git a/.github/workflows/test-packages-action-windows.yml b/.github/workflows/test-packages-action-windows.yml index 967482ac204..c21100f4e69 100644 --- a/.github/workflows/test-packages-action-windows.yml +++ b/.github/workflows/test-packages-action-windows.yml @@ -65,8 +65,9 @@ env: COLUMNS: 190 AWS_MAX_ATTEMPTS: "10" AWS_RETRY_MODE: "adaptive" - PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - PIP_EXTRA_INDEX_URL: https://pypi.org/simple + PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }} + PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }} + PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }} PIP_DISABLE_PIP_VERSION_CHECK: "1" 
RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1" diff --git a/tools/vm.py b/tools/vm.py index d9e9c1e6e5b..13103eda912 100644 --- a/tools/vm.py +++ b/tools/vm.py @@ -1469,7 +1469,7 @@ class VM: cmd += ["--"] + session_args if env is None: env = {} - for key in ("CI", "PIP_INDEX_URL", "PIP_EXTRA_INDEX_URL"): + for key in ("CI", "PIP_INDEX_URL", "PIP_TRUSTED_HOST", "PIP_EXTRA_INDEX_URL"): if key in os.environ: env[key] = os.environ[key] env["PYTHONUTF8"] = "1" From 1290f93d06215d91a7b02290d9fb2ae4ebd4e2ac Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Fri, 26 Jan 2024 15:00:33 -0700 Subject: [PATCH 007/160] WIP - Handling GitFS locking issue and resource loss due to SIGTERM not clearing up --- salt/utils/gitfs.py | 76 +++++++++++++++++++++++++++++++++++++----- salt/utils/platform.py | 14 ++++++++ salt/utils/process.py | 11 +++++- 3 files changed, 91 insertions(+), 10 deletions(-) diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index aa9190b6ee3..eae050eaf4d 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -247,6 +247,9 @@ class GitProvider: def _val_cb(x, y): return str(y) + # DGM try getting machine_identifier + self.mach_id = salt.utils.platform.get_machine_identifier() + log.debug(f"DGM getting machine_id for lock file, machine_id '{self.mach_id}'") self.global_saltenv = salt.utils.data.repack_dictlist( self.opts.get(f"{self.role}_saltenv", []), @@ -751,7 +754,12 @@ class GitProvider: except OSError as exc: if exc.errno == errno.ENOENT: # No lock file present - pass + msg = "Attempt to remove lock {} for file ({}) which was not found to exist : {} ".format( + self.url, lock_file, exc + ) + log.debug(msg) + # DGM pass + elif exc.errno == errno.EISDIR: # Somehow this path is a directory. 
Should never happen # unless some wiseguy manually creates a directory at this @@ -903,6 +911,16 @@ class GitProvider: self._get_lock_file(lock_type="update"), self.role, ) + else: + log.warning( + "Update lock file generated an unexpected exception for %s remote '%s', " + "The lock file %s for %s type=update operation, exception: %s .", + self.role, + self.id, + self._get_lock_file(lock_type="update"), + self.role, + str(exc), + ) return False def _lock(self, lock_type="update", failhard=False): @@ -930,6 +948,9 @@ class GitProvider: with os.fdopen(fh_, "wb"): # Write the lock file and close the filehandle os.write(fh_, salt.utils.stringutils.to_bytes(str(os.getpid()))) + os.write(fh_, salt.utils.stringutils.to_bytes("\n")) + os.write(fh_, salt.utils.stringutils.to_bytes(str(self.mach_id))) + except OSError as exc: if exc.errno == errno.EEXIST: with salt.utils.files.fopen(self._get_lock_file(lock_type), "r") as fd_: @@ -937,44 +958,61 @@ class GitProvider: pid = int( salt.utils.stringutils.to_unicode(fd_.readline()).rstrip() ) + mach_id = int( + salt.utils.stringutils.to_unicode(fd_.readline()).rstrip() + ) except ValueError: # Lock file is empty, set pid to 0 so it evaluates as # False. pid = 0 + mach_id = 0 global_lock_key = self.role + "_global_lock" lock_file = self._get_lock_file(lock_type=lock_type) if self.opts[global_lock_key]: msg = ( "{} is enabled and {} lockfile {} is present for " - "{} remote '{}'.".format( + "{} remote '{}' on machine_id {}.".format( global_lock_key, lock_type, lock_file, self.role, self.id, + self.mach_id, ) ) if pid: msg += f" Process {pid} obtained the lock" + if self.mach_id or mach_id: + msg += " Process {} obtained the lock for machine_id {}, current machine_id {}".format( + pid, mach_id, self.mach_id + ) + else: + msg += " Process {} obtained the lock".format(pid) + if not pid_exists(pid): msg += ( " but this process is not running. The " "update may have been interrupted. 
If " "using multi-master with shared gitfs " "cache, the lock may have been obtained " - "by another master." + "by another master" ) + if self.mach_id != mach_id: + msg += ", with machine_id {}".format(mach_id) + else: + msg += "." log.warning(msg) if failhard: raise return elif pid and pid_exists(pid): log.warning( - "Process %d has a %s %s lock (%s)", + "Process %d has a %s %s lock (%s) on machine_id %s", pid, self.role, lock_type, lock_file, + self.mach_id, ) if failhard: raise @@ -982,12 +1020,13 @@ class GitProvider: else: if pid: log.warning( - "Process %d has a %s %s lock (%s), but this " + "Process %d has a %s %s lock (%s) on machine_id %s, but this " "process is not running. Cleaning up lock file.", pid, self.role, lock_type, lock_file, + self.mach_id, ) success, fail = self._clear_lock() if success: @@ -996,12 +1035,17 @@ class GitProvider: raise return else: - msg = "Unable to set {} lock for {} ({}): {} ".format( - lock_type, self.id, self._get_lock_file(lock_type), exc + msg = "Unable to set {} lock for {} ({}) on machine_id {}: {} ".format( + lock_type, + self.id, + self._get_lock_file(lock_type), + self.mach_id, + exc, ) log.error(msg, exc_info=True) raise GitLockError(exc.errno, msg) - msg = f"Set {lock_type} lock for {self.role} remote '{self.id}'" + + msg = f"Set {lock_type} lock for {self.role} remote {self.id} on machine_id '{self.mach_id}'" log.debug(msg) return msg @@ -1018,6 +1062,15 @@ class GitProvider: try: result = self._lock(lock_type="update") except GitLockError as exc: + log.warning( + "Update lock file generated an unexpected exception for %s remote '%s', " + "The lock file %s for %s type=update operation, exception: %s .", + self.role, + self.id, + self._get_lock_file(lock_type="update"), + self.role, + str(exc), + ) failed.append(exc.strerror) else: if result is not None: @@ -1055,10 +1108,15 @@ class GitProvider: try: self._lock(lock_type=lock_type, failhard=True) lock_set = True - yield + # docs state need to yield a single 
value, lock_set will do + yield lock_set + # Break out of his loop once we've yielded the lock, to # avoid continued attempts to iterate and establish lock + # just ensuring lock_set is true (belts and braces) + lock_set = True break + except (OSError, GitLockError) as exc: if not timeout or time.time() - time_start > timeout: raise GitLockError(exc.errno, exc.strerror) diff --git a/salt/utils/platform.py b/salt/utils/platform.py index c6ca7fe8cae..b4f23034ba6 100644 --- a/salt/utils/platform.py +++ b/salt/utils/platform.py @@ -12,6 +12,7 @@ import sys import distro from salt.utils.decorators import memoize as real_memoize +from salt.utils.files import fopen as _fopen def linux_distribution(full_distribution_name=True): @@ -239,3 +240,16 @@ def spawning_platform(): Salt, however, will force macOS to spawning by default on all python versions """ return multiprocessing.get_start_method(allow_none=False) == "spawn" + + +def get_machine_identifier(): + """ + Provide the machine-identifier for machine/virtualization combination + """ + locations = ["/etc/machine-id", "/var/lib/dbus/machine-id"] + existing_locations = [loc for loc in locations if os.path.exists(loc)] + if not existing_locations: + return "" + else: + with _fopen(existing_locations[0]) as machineid: + return machineid.read().strip() diff --git a/salt/utils/process.py b/salt/utils/process.py index 5524f2105f2..964b91e08d2 100644 --- a/salt/utils/process.py +++ b/salt/utils/process.py @@ -1069,9 +1069,14 @@ class SignalHandlingProcess(Process): msg += "SIGTERM" msg += ". 
Exiting" log.debug(msg) + + mach_id = salt.utils.platform.get_machine_identifier() + log.debug(f"DGM exiting for machine identifer '{mach_id}'") + if HAS_PSUTIL: try: - process = psutil.Process(os.getpid()) + cur_pid = os.getpid() + process = psutil.Process(cur_pid) if hasattr(process, "children"): for child in process.children(recursive=True): try: @@ -1084,6 +1089,10 @@ class SignalHandlingProcess(Process): self.pid, os.getpid(), ) + + # DGM need to go through and clean up any resources left around like lock files + # example lockfile /var/cache/salt/master/gitfs/work/NlJQs6Pss_07AugikCrmqfmqEFrfPbCDBqGLBiCd3oU=/_/update.lk + except psutil.NoSuchProcess: log.warning( "Unable to kill children of process %d, it does not exist." From d58a6897e6d0bf6cb541ef45abcc9e5168ca9b2f Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Thu, 1 Feb 2024 13:37:29 -0700 Subject: [PATCH 008/160] Fix for GitFS failure to unlock lock file, and resource cleanup for process SIGTERM, tests to be done --- changelog/65816.fixed.md | 1 + salt/utils/gitfs.py | 22 ++++++------ salt/utils/process.py | 78 ++++++++++++++++++++++++++++++++++++++-- 3 files changed, 89 insertions(+), 12 deletions(-) create mode 100644 changelog/65816.fixed.md diff --git a/changelog/65816.fixed.md b/changelog/65816.fixed.md new file mode 100644 index 00000000000..23aaa1e5e8e --- /dev/null +++ b/changelog/65816.fixed.md @@ -0,0 +1 @@ +Fix for GitFS failure to unlock lock file, and resource cleanup for process SIGTERM diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index eae050eaf4d..6d67f7c241d 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -247,9 +247,11 @@ class GitProvider: def _val_cb(x, y): return str(y) + # DGM try getting machine_identifier + # get machine_identifier self.mach_id = salt.utils.platform.get_machine_identifier() - log.debug(f"DGM getting machine_id for lock file, machine_id '{self.mach_id}'") + log.debug(f"machine_id for lock file, machine_id 
'{self.mach_id}'") self.global_saltenv = salt.utils.data.repack_dictlist( self.opts.get(f"{self.role}_saltenv", []), @@ -754,11 +756,8 @@ class GitProvider: except OSError as exc: if exc.errno == errno.ENOENT: # No lock file present - msg = "Attempt to remove lock {} for file ({}) which was not found to exist : {} ".format( - self.url, lock_file, exc - ) + msg = f"Attempt to remove lock {self.url} for file ({lock_file}) which was not found to exist, exception : {exc} " log.debug(msg) - # DGM pass elif exc.errno == errno.EISDIR: # Somehow this path is a directory. Should never happen @@ -1101,20 +1100,21 @@ class GitProvider: if poll_interval > timeout: poll_interval = timeout - lock_set = False + lock_set1 = False + lock_set2 = False try: time_start = time.time() while True: try: self._lock(lock_type=lock_type, failhard=True) - lock_set = True + lock_set1 = True # docs state need to yield a single value, lock_set will do - yield lock_set + yield lock_set1 # Break out of his loop once we've yielded the lock, to # avoid continued attempts to iterate and establish lock # just ensuring lock_set is true (belts and braces) - lock_set = True + lock_set2 = True break except (OSError, GitLockError) as exc: @@ -1132,7 +1132,9 @@ class GitProvider: time.sleep(poll_interval) continue finally: - if lock_set: + if lock_set1 or lock_set2: + msg = f"Attempting to remove '{lock_type}' lock for '{self.role}' remote '{self.id}' due to lock_set1 '{lock_set1}' or lock_set2 '{lock_set2}'" + log.debug(msg) self.clear_lock(lock_type=lock_type) def init_remote(self): diff --git a/salt/utils/process.py b/salt/utils/process.py index 964b91e08d2..56e75d55ccf 100644 --- a/salt/utils/process.py +++ b/salt/utils/process.py @@ -14,12 +14,14 @@ import multiprocessing import multiprocessing.util import os import queue +import shutil import signal import socket import subprocess import sys import threading import time +from pathlib import Path import salt._logging import salt.defaults.exitcodes @@ 
-1071,7 +1073,9 @@ class SignalHandlingProcess(Process): log.debug(msg) mach_id = salt.utils.platform.get_machine_identifier() - log.debug(f"DGM exiting for machine identifer '{mach_id}'") + log.debug( + f"exiting for process id '{os.getpid()}' and machine identifer '{mach_id}'" + ) if HAS_PSUTIL: try: @@ -1090,8 +1094,78 @@ class SignalHandlingProcess(Process): os.getpid(), ) - # DGM need to go through and clean up any resources left around like lock files + # need to go through and clean up any resources left around like lock files if using gitfs # example lockfile /var/cache/salt/master/gitfs/work/NlJQs6Pss_07AugikCrmqfmqEFrfPbCDBqGLBiCd3oU=/_/update.lk + gitfs_active = self.opts.get("gitfs_remotes", None) + if gitfs_active: + # check for gitfs file locks to ensure no resource leaks + # last chance to clean up any missed unlock droppings + cache_dir = Path("/var/cache/salt/master/gitfs/work") + if cache_dir.exists and cache_dir.is_dir(): + file_list = list(cache_dir.glob("**/*.lk")) + file_del_list = [] + + try: + for file_name in file_list: + with salt.utils.files.fopen(file_name, "r") as fd_: + try: + file_pid = int( + salt.utils.stringutils.to_unicode( + fd_.readline() + ).rstrip() + ) + except ValueError: + # Lock file is empty, set pid to 0 so it evaluates as False. + file_pid = 0 + try: + file_mach_id = int( + salt.utils.stringutils.to_unicode( + fd_.readline() + ).rstrip() + ) + except ValueError: + # Lock file is empty, set mach_id to 0 so it evaluates as False. + file_mach_id = 0 + if cur_pid == file_pid: + if mach_id != file_mach_id: + if not file_mach_id: + msg = f"gitfs lock file for pid '{file_pid}' does not contain a machine id, deleting lock file which may affect if using multi-master with shared gitfs cache, the lock may have been obtained by another master recommend updating Salt version on other masters to a version which insert machine identification in lock a file." 
+ log.debug(msg) + file_del_list.append( + (file_name, file_pid, file_mach_id) + ) + else: + file_del_list.append( + (file_name, file_pid, file_mach_id) + ) + + except FileNotFoundError: + log.debug("gitfs lock file: %s not found", file_name) + + for (file_name, file_pid, file_mach_id) in file_del_list: + try: + os.remove(file_name) + except OSError as exc: + if exc.errno == errno.ENOENT: + # No lock file present + msg = f"SIGTERM clean up of resources attempted to remove lock file {file_name}, pid '{file_pid}', machine identifier '{mach_id}' but it did not exist, exception : {exc} " + log.debug(msg) + + elif exc.errno == errno.EISDIR: + # Somehow this path is a directory. Should never happen + # unless some wiseguy manually creates a directory at this + # path, but just in case, handle it. + try: + shutil.rmtree(file_name) + except OSError as exc: + msg = f"SIGTERM clean up of resources, lock file '{file_name}', pid '{file_pid}', machine identifier '{file_mach_id}' was a directory, removed directory, exception : '{exc}'" + log.debug(msg) + else: + msg = f"SIGTERM clean up of resources, unable to remove lock file '{file_name}', pid '{file_pid}', machine identifier '{file_mach_id}', exception : '{exc}'" + log.debug(msg) + else: + msg = f"SIGTERM clean up of resources, removed lock file '{file_name}', pid '{file_pid}', machine identifier '{file_mach_id}'" + log.debug(msg) except psutil.NoSuchProcess: log.warning( From 563556eade299f8ce01c828bd509a4bc7e2026fc Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Thu, 1 Feb 2024 16:52:44 -0700 Subject: [PATCH 009/160] Updated gitfs tests for machine identifier --- tests/pytests/functional/utils/gitfs/test_gitfs.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/tests/pytests/functional/utils/gitfs/test_gitfs.py b/tests/pytests/functional/utils/gitfs/test_gitfs.py index 30a5f147faa..36c93797978 100644 --- a/tests/pytests/functional/utils/gitfs/test_gitfs.py +++ 
b/tests/pytests/functional/utils/gitfs/test_gitfs.py @@ -2,6 +2,7 @@ import os.path import pytest +import salt.utils.platform from salt.fileserver.gitfs import PER_REMOTE_ONLY, PER_REMOTE_OVERRIDES from salt.utils.gitfs import GitFS, GitPython, Pygit2 from salt.utils.immutabletypes import ImmutableDict, ImmutableList @@ -248,17 +249,18 @@ def _test_lock(opts): g.fetch_remotes() assert len(g.remotes) == 1 repo = g.remotes[0] + mach_id = salt.utils.platform.get_machine_identifier() assert repo.get_salt_working_dir() in repo._get_lock_file() assert repo.lock() == ( [ - "Set update lock for gitfs remote 'https://github.com/saltstack/salt-test-pillar-gitfs.git'" + f"Set update lock for gitfs remote 'https://github.com/saltstack/salt-test-pillar-gitfs.git' on machine_id '{mach_id}'" ], [], ) assert os.path.isfile(repo._get_lock_file()) assert repo.clear_lock() == ( [ - "Removed update lock for gitfs remote 'https://github.com/saltstack/salt-test-pillar-gitfs.git'" + "Removed update lock for gitfs remote 'https://github.com/saltstack/salt-test-pillar-gitfs.git' on machine_id '{mach_id}'" ], [], ) From 0206162c2d16ecb04f9bfe2790f58043620679f2 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Mon, 5 Feb 2024 16:02:40 -0700 Subject: [PATCH 010/160] Moved get_machine_identifier temporiarly till sorted where it will be and what --- salt/utils/files.py | 14 ++++++++++++++ salt/utils/gitfs.py | 2 +- salt/utils/platform.py | 14 -------------- 3 files changed, 15 insertions(+), 15 deletions(-) diff --git a/salt/utils/files.py b/salt/utils/files.py index e5494911c28..5f89e6aba98 100644 --- a/salt/utils/files.py +++ b/salt/utils/files.py @@ -904,3 +904,17 @@ def get_encoding(path): return "ASCII" raise CommandExecutionError("Could not detect file encoding") + + +## TBD DGM just parking here till final machine identifier work is done +def get_machine_identifier(): + """ + Provide the machine-identifier for machine/virtualization combination + """ + locations 
= ["/etc/machine-id", "/var/lib/dbus/machine-id"] + existing_locations = [loc for loc in locations if os.path.exists(loc)] + if not existing_locations: + return "" + else: + with fopen(existing_locations[0]) as machineid: + return machineid.read().strip() diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index 6d67f7c241d..ddc3e499782 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -250,7 +250,7 @@ class GitProvider: # DGM try getting machine_identifier # get machine_identifier - self.mach_id = salt.utils.platform.get_machine_identifier() + self.mach_id = salt.utils.files.get_machine_identifier() log.debug(f"machine_id for lock file, machine_id '{self.mach_id}'") self.global_saltenv = salt.utils.data.repack_dictlist( diff --git a/salt/utils/platform.py b/salt/utils/platform.py index b4f23034ba6..c6ca7fe8cae 100644 --- a/salt/utils/platform.py +++ b/salt/utils/platform.py @@ -12,7 +12,6 @@ import sys import distro from salt.utils.decorators import memoize as real_memoize -from salt.utils.files import fopen as _fopen def linux_distribution(full_distribution_name=True): @@ -240,16 +239,3 @@ def spawning_platform(): Salt, however, will force macOS to spawning by default on all python versions """ return multiprocessing.get_start_method(allow_none=False) == "spawn" - - -def get_machine_identifier(): - """ - Provide the machine-identifier for machine/virtualization combination - """ - locations = ["/etc/machine-id", "/var/lib/dbus/machine-id"] - existing_locations = [loc for loc in locations if os.path.exists(loc)] - if not existing_locations: - return "" - else: - with _fopen(existing_locations[0]) as machineid: - return machineid.read().strip() From 0ba93f829ccd4791463a63171ce9929187e855e6 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Thu, 8 Feb 2024 09:38:09 -0700 Subject: [PATCH 011/160] Fix up use of get_machine_identifer --- salt/utils/process.py | 2 +- tests/pytests/functional/utils/gitfs/test_gitfs.py | 4 ++-- 2 
files changed, 3 insertions(+), 3 deletions(-) diff --git a/salt/utils/process.py b/salt/utils/process.py index 56e75d55ccf..9cb7633ff9e 100644 --- a/salt/utils/process.py +++ b/salt/utils/process.py @@ -1072,7 +1072,7 @@ class SignalHandlingProcess(Process): msg += ". Exiting" log.debug(msg) - mach_id = salt.utils.platform.get_machine_identifier() + mach_id = salt.utils.files.get_machine_identifier() log.debug( f"exiting for process id '{os.getpid()}' and machine identifer '{mach_id}'" ) diff --git a/tests/pytests/functional/utils/gitfs/test_gitfs.py b/tests/pytests/functional/utils/gitfs/test_gitfs.py index 36c93797978..fc2cfb303b5 100644 --- a/tests/pytests/functional/utils/gitfs/test_gitfs.py +++ b/tests/pytests/functional/utils/gitfs/test_gitfs.py @@ -2,7 +2,7 @@ import os.path import pytest -import salt.utils.platform +import salt.utils.files from salt.fileserver.gitfs import PER_REMOTE_ONLY, PER_REMOTE_OVERRIDES from salt.utils.gitfs import GitFS, GitPython, Pygit2 from salt.utils.immutabletypes import ImmutableDict, ImmutableList @@ -249,7 +249,7 @@ def _test_lock(opts): g.fetch_remotes() assert len(g.remotes) == 1 repo = g.remotes[0] - mach_id = salt.utils.platform.get_machine_identifier() + mach_id = salt.utils.files.get_machine_identifier() assert repo.get_salt_working_dir() in repo._get_lock_file() assert repo.lock() == ( [ From 2a532aa2a3f5de458133c341d588c02ee0b07010 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Thu, 8 Feb 2024 15:04:55 -0700 Subject: [PATCH 012/160] Fix missing typo f-string --- tests/pytests/functional/utils/gitfs/test_gitfs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/pytests/functional/utils/gitfs/test_gitfs.py b/tests/pytests/functional/utils/gitfs/test_gitfs.py index fc2cfb303b5..abe050fa811 100644 --- a/tests/pytests/functional/utils/gitfs/test_gitfs.py +++ b/tests/pytests/functional/utils/gitfs/test_gitfs.py @@ -260,7 +260,7 @@ def _test_lock(opts): assert 
os.path.isfile(repo._get_lock_file()) assert repo.clear_lock() == ( [ - "Removed update lock for gitfs remote 'https://github.com/saltstack/salt-test-pillar-gitfs.git' on machine_id '{mach_id}'" + f"Removed update lock for gitfs remote 'https://github.com/saltstack/salt-test-pillar-gitfs.git' on machine_id '{mach_id}'" ], [], ) From 1b6e9642441b6d08a724271b37e4d43c9e0f1248 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Fri, 9 Feb 2024 11:25:05 -0700 Subject: [PATCH 013/160] Updated format to f-string, and adjusted error messages to match tests --- salt/utils/gitfs.py | 86 ++++++++++++++++----------------------------- 1 file changed, 30 insertions(+), 56 deletions(-) diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index ddc3e499782..6ee5ed96f3d 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -770,9 +770,7 @@ class GitProvider: else: _add_error(failed, exc) else: - msg = "Removed {} lock for {} remote '{}'".format( - lock_type, self.role, self.id - ) + msg = f"Removed {lock_type} lock for {self.role} remote '{self.id}' on machine_id '{self.mach_id}'" log.debug(msg) success.append(msg) return success, failed @@ -969,24 +967,15 @@ class GitProvider: lock_file = self._get_lock_file(lock_type=lock_type) if self.opts[global_lock_key]: msg = ( - "{} is enabled and {} lockfile {} is present for " - "{} remote '{}' on machine_id {}.".format( - global_lock_key, - lock_type, - lock_file, - self.role, - self.id, - self.mach_id, - ) + f"{global_lock_key} is enabled and {lock_type} lockfile {lock_file} is present for " + f"{self.role} remote '{self.id}' on machine_id {self.mach_id}." 
) if pid: msg += f" Process {pid} obtained the lock" if self.mach_id or mach_id: - msg += " Process {} obtained the lock for machine_id {}, current machine_id {}".format( - pid, mach_id, self.mach_id - ) + msg += f" Process {pid} obtained the lock for machine_id {mach_id}, current machine_id {self.mach_id}" else: - msg += " Process {} obtained the lock".format(pid) + msg += f" Process {pid} obtained the lock" if not pid_exists(pid): msg += ( @@ -997,7 +986,7 @@ class GitProvider: "by another master" ) if self.mach_id != mach_id: - msg += ", with machine_id {}".format(mach_id) + msg += f", with machine_id {mach_id}" else: msg += "." log.warning(msg) @@ -1034,17 +1023,11 @@ class GitProvider: raise return else: - msg = "Unable to set {} lock for {} ({}) on machine_id {}: {} ".format( - lock_type, - self.id, - self._get_lock_file(lock_type), - self.mach_id, - exc, - ) + msg = f"Unable to set {lock_type} lock for {self.id} ({self._get_lock_file(lock_type)}) on machine_id {self.mach_id}: {exc}" log.error(msg, exc_info=True) raise GitLockError(exc.errno, msg) - msg = f"Set {lock_type} lock for {self.role} remote {self.id} on machine_id '{self.mach_id}'" + msg = f"Set {lock_type} lock for {self.role} remote '{self.id}' on machine_id '{self.mach_id}'" log.debug(msg) return msg @@ -1424,9 +1407,7 @@ class GitPython(GitProvider): # function. raise GitLockError( exc.errno, - "Checkout lock exists for {} remote '{}'".format( - self.role, self.id - ), + f"Checkout lock exists for {self.role} remote '{self.id}'", ) else: log.error( @@ -1775,9 +1756,7 @@ class Pygit2(GitProvider): # function. raise GitLockError( exc.errno, - "Checkout lock exists for {} remote '{}'".format( - self.role, self.id - ), + f"Checkout lock exists for {self.role} remote '{self.id}'", ) else: log.error( @@ -2292,10 +2271,8 @@ class Pygit2(GitProvider): if not self.ssl_verify: warnings.warn( "pygit2 does not support disabling the SSL certificate " - "check in versions prior to 0.23.2 (installed: {}). 
" - "Fetches for self-signed certificates will fail.".format( - PYGIT2_VERSION - ) + f"check in versions prior to 0.23.2 (installed: {PYGIT2_VERSION}). " + "Fetches for self-signed certificates will fail." ) def verify_auth(self): @@ -2548,11 +2525,12 @@ class GitBase: if self.provider in AUTH_PROVIDERS: override_params += AUTH_PARAMS elif global_auth_params: + msg_auth_providers = "{}".format(", ".join(AUTH_PROVIDERS)) msg = ( - "{0} authentication was configured, but the '{1}' " - "{0}_provider does not support authentication. The " - "providers for which authentication is supported in {0} " - "are: {2}.".format(self.role, self.provider, ", ".join(AUTH_PROVIDERS)) + f"{self.role} authentication was configured, but the '{self.provider}' " + f"{self.role}_provider does not support authentication. The " + f"providers for which authentication is supported in {self.role} " + f"are: {msg_auth_providers}." ) if self.role == "gitfs": msg += ( @@ -2935,15 +2913,13 @@ class GitBase: errors = [] if GITPYTHON_VERSION < GITPYTHON_MINVER: errors.append( - "{} is configured, but the GitPython version is earlier than " - "{}. Version {} detected.".format( - self.role, GITPYTHON_MINVER, GITPYTHON_VERSION - ) + f"{self.role} is configured, but the GitPython version is earlier than " + f"{GITPYTHON_MINVER}. Version {GITPYTHON_VERSION} detected." ) if not salt.utils.path.which("git"): errors.append( "The git command line utility is required when using the " - "'gitpython' {}_provider.".format(self.role) + f"'gitpython' {self.role}_provider." ) if errors: @@ -2982,24 +2958,20 @@ class GitBase: errors = [] if PYGIT2_VERSION < PYGIT2_MINVER: errors.append( - "{} is configured, but the pygit2 version is earlier than " - "{}. Version {} detected.".format( - self.role, PYGIT2_MINVER, PYGIT2_VERSION - ) + f"{self.role} is configured, but the pygit2 version is earlier than " + f"{PYGIT2_MINVER}. Version {PYGIT2_VERSION} detected." 
) if LIBGIT2_VERSION < LIBGIT2_MINVER: errors.append( - "{} is configured, but the libgit2 version is earlier than " - "{}. Version {} detected.".format( - self.role, LIBGIT2_MINVER, LIBGIT2_VERSION - ) + f"{self.role} is configured, but the libgit2 version is earlier than " + f"{LIBGIT2_MINVER}. Version {LIBGIT2_VERSION} detected." ) if not getattr(pygit2, "GIT_FETCH_PRUNE", False) and not salt.utils.path.which( "git" ): errors.append( "The git command line utility is required when using the " - "'pygit2' {}_provider.".format(self.role) + f"'pygit2' {self.role}_provider." ) if errors: @@ -3312,10 +3284,11 @@ class GitFS(GitBase): ret = {"hash_type": self.opts["hash_type"]} relpath = fnd["rel"] path = fnd["path"] + lc_hash_type = self.opts["hash_type"] hashdest = salt.utils.path.join( self.hash_cachedir, load["saltenv"], - "{}.hash.{}".format(relpath, self.opts["hash_type"]), + f"{relpath}.hash.{lc_hash_type}", ) try: with salt.utils.files.fopen(hashdest, "rb") as fp_: @@ -3350,13 +3323,14 @@ class GitFS(GitBase): except OSError: log.error("Unable to make cachedir %s", self.file_list_cachedir) return [] + lc_path_adj = load["saltenv"].replace(os.path.sep, "_|-") list_cache = salt.utils.path.join( self.file_list_cachedir, - "{}.p".format(load["saltenv"].replace(os.path.sep, "_|-")), + f"{lc_path_adj}.p", ) w_lock = salt.utils.path.join( self.file_list_cachedir, - ".{}.w".format(load["saltenv"].replace(os.path.sep, "_|-")), + f".{lc_path_adj}.w", ) cache_match, refresh_cache, save_cache = salt.fileserver.check_file_list_cache( self.opts, form, list_cache, w_lock From 40d5a253fe93c1418535d9c03b9ad1abd74130cd Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Tue, 13 Feb 2024 10:38:06 -0700 Subject: [PATCH 014/160] Initial port from unittest to pytest of unit/utils/test_gitfs.py to test_gitfs_locks.py --- tests/pytests/unit/utils/test_gitfs_locks.py | 193 +++++++++++++++++++ tests/unit/utils/test_gitfs.py | 175 ----------------- 2 files changed, 
193 insertions(+), 175 deletions(-) create mode 100644 tests/pytests/unit/utils/test_gitfs_locks.py delete mode 100644 tests/unit/utils/test_gitfs.py diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py new file mode 100644 index 00000000000..c5260da3d9f --- /dev/null +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -0,0 +1,193 @@ +""" +These only test the provider selection and verification logic, they do not init +any remotes. +""" + +import pytest + +import salt.ext.tornado.ioloop +import salt.fileserver.gitfs +import salt.utils.files +import salt.utils.gitfs +import salt.utils.path +import salt.utils.platform +from tests.support.mixins import AdaptedConfigurationTestCaseMixin + + +def _clear_instance_map(): + try: + del salt.utils.gitfs.GitFS.instance_map[ + salt.ext.tornado.ioloop.IOLoop.current() + ] + except KeyError: + pass + + +@pytest.fixture +def get_tmp_dir(tmp_path): + dirpath = tmp_path / "git_test" + dirpath.mkdir(parents=True) + return dirpath + + ## dirpath.cleanup() + + +class TestGitBase(AdaptedConfigurationTestCaseMixin): + """ + mocked GitFS provider leveraging tmp_path + """ + + def __init__( + self, + ): + class MockedProvider( + salt.utils.gitfs.GitProvider + ): # pylint: disable=abstract-method + def __init__( + self, + opts, + remote, + per_remote_defaults, + per_remote_only, + override_params, + cache_root, + role="gitfs", + ): + self.provider = "mocked" + self.fetched = False + super().__init__( + opts, + remote, + per_remote_defaults, + per_remote_only, + override_params, + cache_root, + role, + ) + + def init_remote(self): + self.gitdir = salt.utils.path.join(get_tmp_dir, ".git") + self.repo = True + new = False + return new + + def envs(self): + return ["base"] + + def fetch(self): + self.fetched = True + + # Clear the instance map so that we make sure to create a new instance + # for this test class. 
+ _clear_instance_map() + + git_providers = { + "mocked": MockedProvider, + } + gitfs_remotes = ["file://repo1.git", {"file://repo2.git": [{"name": "repo2"}]}] + + self.opts = self.get_temp_config( + "master", gitfs_remotes=gitfs_remotes, verified_gitfs_provider="mocked" + ) + self.main_class = salt.utils.gitfs.GitFS( + self.opts, + self.opts["gitfs_remotes"], + per_remote_overrides=salt.fileserver.gitfs.PER_REMOTE_OVERRIDES, + per_remote_only=salt.fileserver.gitfs.PER_REMOTE_ONLY, + git_providers=git_providers, + ) + + def tearDown(self): + # Providers are preserved with GitFS's instance_map + for remote in self.main_class.remotes: + remote.fetched = False + del self.main_class + ## self._tmp_dir.cleanup() + + +@pytest.fixture +def main_class(tmp_path): + test_git_base = TestGitBase() + yield test_git_base + + test_git_base.tearDown() + + +def test_update_all(main_class): + main_class.update() + assert len(main_class.remotes) == 2, "Wrong number of remotes" + assert main_class.remotes[0].fetched + assert main_class.remotes[1].fetched + + +def test_update_by_name(main_class): + main_class.update("repo2") + assert len(main_class.remotes) == 2, "Wrong number of remotes" + assert not main_class.remotes[0].fetched + assert main_class.remotes[1].fetched + + +def test_update_by_id_and_name(main_class): + main_class.update([("file://repo1.git", None)]) + assert len(main_class.remotes) == 2, "Wrong number of remotes" + assert main_class.remotes[0].fetched + assert not main_class.remotes[1].fetched + + +def test_get_cachedir_basename(main_class): + assert main_class.remotes[0].get_cache_basename() == "_" + assert main_class.remotes[1].get_cache_basename() == "_" + + +def test_git_provider_mp_lock(): + """ + Check that lock is released after provider.lock() + """ + provider = main_class.remotes[0] + provider.lock() + # check that lock has been released + assert provider._master_lock.acquire(timeout=5) + provider._master_lock.release() + + +def 
test_git_provider_mp_clear_lock(main_class): + """ + Check that lock is released after provider.clear_lock() + """ + provider = main_class.remotes[0] + provider.clear_lock() + # check that lock has been released + assert provider._master_lock.acquire(timeout=5) + provider._master_lock.release() + + +@pytest.mark.slow_test +@pytest.mark.timeout_unless_on_windows(120) +def test_git_provider_mp_lock_timeout(main_class): + """ + Check that lock will time out if master lock is locked. + """ + provider = main_class.remotes[0] + # Hijack the lock so git provider is fooled into thinking another instance is doing somthing. + assert provider._master_lock.acquire(timeout=5) + try: + # git provider should raise timeout error to avoid lock race conditions + pytest.raises(TimeoutError, provider.lock) + finally: + provider._master_lock.release() + + +@pytest.mark.slow_test +@pytest.mark.timeout_unless_on_windows(120) +def test_git_provider_mp_clear_lock_timeout(main_class): + """ + Check that clear lock will time out if master lock is locked. + """ + provider = main_class.remotes[0] + # Hijack the lock so git provider is fooled into thinking another instance is doing somthing. + assert provider._master_lock.acquire(timeout=5) + try: + # git provider should raise timeout error to avoid lock race conditions + pytest.raises(TimeoutError, provider.clear_lock) + finally: + provider._master_lock.release() diff --git a/tests/unit/utils/test_gitfs.py b/tests/unit/utils/test_gitfs.py deleted file mode 100644 index e72f94050c8..00000000000 --- a/tests/unit/utils/test_gitfs.py +++ /dev/null @@ -1,175 +0,0 @@ -""" -These only test the provider selection and verification logic, they do not init -any remotes. 
-""" - -import tempfile - -import pytest - -import salt.ext.tornado.ioloop -import salt.fileserver.gitfs -import salt.utils.files -import salt.utils.gitfs -import salt.utils.path -import salt.utils.platform -from tests.support.mixins import AdaptedConfigurationTestCaseMixin -from tests.support.unit import TestCase - - -def _clear_instance_map(): - try: - del salt.utils.gitfs.GitFS.instance_map[ - salt.ext.tornado.ioloop.IOLoop.current() - ] - except KeyError: - pass - - -class TestGitBase(TestCase, AdaptedConfigurationTestCaseMixin): - def setUp(self): - self._tmp_dir = tempfile.TemporaryDirectory() - tmp_name = self._tmp_dir.name - - class MockedProvider( - salt.utils.gitfs.GitProvider - ): # pylint: disable=abstract-method - def __init__( - self, - opts, - remote, - per_remote_defaults, - per_remote_only, - override_params, - cache_root, - role="gitfs", - ): - self.provider = "mocked" - self.fetched = False - super().__init__( - opts, - remote, - per_remote_defaults, - per_remote_only, - override_params, - cache_root, - role, - ) - - def init_remote(self): - self.gitdir = salt.utils.path.join(tmp_name, ".git") - self.repo = True - new = False - return new - - def envs(self): - return ["base"] - - def fetch(self): - self.fetched = True - - git_providers = { - "mocked": MockedProvider, - } - gitfs_remotes = ["file://repo1.git", {"file://repo2.git": [{"name": "repo2"}]}] - self.opts = self.get_temp_config( - "master", gitfs_remotes=gitfs_remotes, verified_gitfs_provider="mocked" - ) - self.main_class = salt.utils.gitfs.GitFS( - self.opts, - self.opts["gitfs_remotes"], - per_remote_overrides=salt.fileserver.gitfs.PER_REMOTE_OVERRIDES, - per_remote_only=salt.fileserver.gitfs.PER_REMOTE_ONLY, - git_providers=git_providers, - ) - - @classmethod - def setUpClass(cls): - # Clear the instance map so that we make sure to create a new instance - # for this test class. 
- _clear_instance_map() - - def tearDown(self): - # Providers are preserved with GitFS's instance_map - for remote in self.main_class.remotes: - remote.fetched = False - del self.main_class - self._tmp_dir.cleanup() - - def test_update_all(self): - self.main_class.update() - self.assertEqual(len(self.main_class.remotes), 2, "Wrong number of remotes") - self.assertTrue(self.main_class.remotes[0].fetched) - self.assertTrue(self.main_class.remotes[1].fetched) - - def test_update_by_name(self): - self.main_class.update("repo2") - self.assertEqual(len(self.main_class.remotes), 2, "Wrong number of remotes") - self.assertFalse(self.main_class.remotes[0].fetched) - self.assertTrue(self.main_class.remotes[1].fetched) - - def test_update_by_id_and_name(self): - self.main_class.update([("file://repo1.git", None)]) - self.assertEqual(len(self.main_class.remotes), 2, "Wrong number of remotes") - self.assertTrue(self.main_class.remotes[0].fetched) - self.assertFalse(self.main_class.remotes[1].fetched) - - def test_get_cachedir_basename(self): - self.assertEqual( - self.main_class.remotes[0].get_cache_basename(), - "_", - ) - self.assertEqual( - self.main_class.remotes[1].get_cache_basename(), - "_", - ) - - def test_git_provider_mp_lock(self): - """ - Check that lock is released after provider.lock() - """ - provider = self.main_class.remotes[0] - provider.lock() - # check that lock has been released - self.assertTrue(provider._master_lock.acquire(timeout=5)) - provider._master_lock.release() - - def test_git_provider_mp_clear_lock(self): - """ - Check that lock is released after provider.clear_lock() - """ - provider = self.main_class.remotes[0] - provider.clear_lock() - # check that lock has been released - self.assertTrue(provider._master_lock.acquire(timeout=5)) - provider._master_lock.release() - - @pytest.mark.slow_test - @pytest.mark.timeout_unless_on_windows(120) - def test_git_provider_mp_lock_timeout(self): - """ - Check that lock will time out if master lock is 
locked. - """ - provider = self.main_class.remotes[0] - # Hijack the lock so git provider is fooled into thinking another instance is doing somthing. - self.assertTrue(provider._master_lock.acquire(timeout=5)) - try: - # git provider should raise timeout error to avoid lock race conditions - self.assertRaises(TimeoutError, provider.lock) - finally: - provider._master_lock.release() - - @pytest.mark.slow_test - @pytest.mark.timeout_unless_on_windows(120) - def test_git_provider_mp_clear_lock_timeout(self): - """ - Check that clear lock will time out if master lock is locked. - """ - provider = self.main_class.remotes[0] - # Hijack the lock so git provider is fooled into thinking another instance is doing somthing. - self.assertTrue(provider._master_lock.acquire(timeout=5)) - try: - # git provider should raise timeout error to avoid lock race conditions - self.assertRaises(TimeoutError, provider.clear_lock) - finally: - provider._master_lock.release() From 3d2c111f5805ee3515c3b9fd8e935185851b9297 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Fri, 16 Feb 2024 16:15:29 -0700 Subject: [PATCH 015/160] [WIP] Almost working port from unit test to pytest, contains debugging --- tests/pytests/unit/utils/test_gitfs_locks.py | 333 ++++++++++++++++++- 1 file changed, 324 insertions(+), 9 deletions(-) diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index c5260da3d9f..0b9b814af48 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -3,7 +3,14 @@ These only test the provider selection and verification logic, they do not init any remotes. 
""" +import logging +import pathlib + +## import os +import tempfile + import pytest +from saltfactories.utils import random_string import salt.ext.tornado.ioloop import salt.fileserver.gitfs @@ -11,7 +18,70 @@ import salt.utils.files import salt.utils.gitfs import salt.utils.path import salt.utils.platform -from tests.support.mixins import AdaptedConfigurationTestCaseMixin +from salt.utils.immutabletypes import freeze +from salt.utils.verify import verify_env + +## from tests.support.mixins import AdaptedConfigurationTestCaseMixin +from tests.support.runtests import RUNTIME_VARS + +log = logging.getLogger(__name__) + + +@pytest.fixture(scope="session", autouse=True) +def _create_old_tempdir(): + pathlib.Path(RUNTIME_VARS.TMP).mkdir(exist_ok=True, parents=True) + + +@pytest.fixture(scope="session", autouse=True) +def bridge_pytest_and_runtests( + reap_stray_processes, + salt_factories, + salt_syndic_master_factory, + salt_syndic_factory, + salt_master_factory, + salt_minion_factory, + salt_sub_minion_factory, + sshd_config_dir, +): + # Make sure unittest2 uses the pytest generated configuration + RUNTIME_VARS.RUNTIME_CONFIGS["master"] = freeze(salt_master_factory.config) + RUNTIME_VARS.RUNTIME_CONFIGS["minion"] = freeze(salt_minion_factory.config) + RUNTIME_VARS.RUNTIME_CONFIGS["sub_minion"] = freeze(salt_sub_minion_factory.config) + RUNTIME_VARS.RUNTIME_CONFIGS["syndic_master"] = freeze( + salt_syndic_master_factory.config + ) + RUNTIME_VARS.RUNTIME_CONFIGS["syndic"] = freeze(salt_syndic_factory.config) + RUNTIME_VARS.RUNTIME_CONFIGS["client_config"] = freeze( + salt.config.client_config(salt_master_factory.config["conf_file"]) + ) + + # Make sure unittest2 classes know their paths + RUNTIME_VARS.TMP_ROOT_DIR = str(salt_factories.root_dir.resolve()) + RUNTIME_VARS.TMP_CONF_DIR = pathlib.PurePath( + salt_master_factory.config["conf_file"] + ).parent + RUNTIME_VARS.TMP_MINION_CONF_DIR = pathlib.PurePath( + salt_minion_factory.config["conf_file"] + ).parent + 
RUNTIME_VARS.TMP_SUB_MINION_CONF_DIR = pathlib.PurePath( + salt_sub_minion_factory.config["conf_file"] + ).parent + RUNTIME_VARS.TMP_SYNDIC_MASTER_CONF_DIR = pathlib.PurePath( + salt_syndic_master_factory.config["conf_file"] + ).parent + RUNTIME_VARS.TMP_SYNDIC_MINION_CONF_DIR = pathlib.PurePath( + salt_syndic_factory.config["conf_file"] + ).parent + RUNTIME_VARS.TMP_SSH_CONF_DIR = str(sshd_config_dir) + + +## @pytest.fixture(scope="session", autouse=True) +## def get_tmp_dir(tmp_path): +## dirpath = tmp_path / "git_test" +## dirpath.mkdir(parents=True) +## return dirpath +## +## ## dirpath.cleanup() def _clear_instance_map(): @@ -23,13 +93,247 @@ def _clear_instance_map(): pass -@pytest.fixture -def get_tmp_dir(tmp_path): - dirpath = tmp_path / "git_test" - dirpath.mkdir(parents=True) - return dirpath +class AdaptedConfigurationTestCaseMixin: - ## dirpath.cleanup() + ## __slots__ = () + + @staticmethod + def get_temp_config(config_for, **config_overrides): + log.debug( + f"DGM AdaptedConfigurationTestCaseMixin get_temp_config, config_for '{config_for}, config_overrides '{config_overrides}', runtime_vars tmp '{RUNTIME_VARS.TMP}', user '{RUNTIME_VARS.RUNNING_TESTS_USER}'" + ) + + rootdir = config_overrides.get("root_dir", RUNTIME_VARS.TMP) + + if not pathlib.Path(rootdir).exists(): + log.debug( + f"DGM AdaptedConfigurationTestCaseMixin get_temp_config, oddity runtime_vars '{RUNTIME_VARS.TMP}' should already exist" + ) + pathlib.Path(RUNTIME_VARS.TMP).mkdir(exist_ok=True, parents=True) + + rootdir = config_overrides.get("root_dir", RUNTIME_VARS.TMP) + ## if not os.path.exists(rootdir): + ## log.debug(f"DGM AdaptedConfigurationTestCaseMixin get_temp_config, oddity runtime_vars '{RUNTIME_VARS.TMP}' should already exist") + ## os.makedirs(rootdir) + + ## conf_dir = config_overrides.pop("conf_dir", os.path.join(rootdir, "conf")) + conf_dir = config_overrides.pop( + "conf_dir", pathlib.PurePath(rootdir).joinpath("conf") + ) + for key in ("cachedir", "pki_dir", 
"sock_dir"): + if key not in config_overrides: + config_overrides[key] = key + if "log_file" not in config_overrides: + config_overrides["log_file"] = f"logs/{config_for}.log".format() + if "user" not in config_overrides: + config_overrides["user"] = RUNTIME_VARS.RUNNING_TESTS_USER + config_overrides["root_dir"] = rootdir + + cdict = AdaptedConfigurationTestCaseMixin.get_config( + config_for, from_scratch=True + ) + + if config_for in ("master", "client_config"): + rdict = salt.config.apply_master_config(config_overrides, cdict) + if config_for == "minion": + minion_id = ( + config_overrides.get("id") + or config_overrides.get("minion_id") + or cdict.get("id") + or cdict.get("minion_id") + or random_string("temp-minion-") + ) + config_overrides["minion_id"] = config_overrides["id"] = minion_id + rdict = salt.config.apply_minion_config( + config_overrides, cdict, cache_minion_id=False, minion_id=minion_id + ) + + ## verify_env( + ## [ + ## os.path.join(rdict["pki_dir"], "minions"), + ## os.path.join(rdict["pki_dir"], "minions_pre"), + ## os.path.join(rdict["pki_dir"], "minions_rejected"), + ## os.path.join(rdict["pki_dir"], "minions_denied"), + ## os.path.join(rdict["cachedir"], "jobs"), + ## os.path.join(rdict["cachedir"], "tokens"), + ## os.path.join(rdict["root_dir"], "cache", "tokens"), + ## os.path.join(rdict["pki_dir"], "accepted"), + ## os.path.join(rdict["pki_dir"], "rejected"), + ## os.path.join(rdict["pki_dir"], "pending"), + ## os.path.dirname(rdict["log_file"]), + ## rdict["sock_dir"], + ## conf_dir, + ## ], + ## RUNTIME_VARS.RUNNING_TESTS_USER, + ## root_dir=rdict["root_dir"], + ## ) + verify_env( + [ + pathlib.PurePath(rdict["pki_dir"]).joinpath("minions"), + pathlib.PurePath(rdict["pki_dir"]).joinpath("minions_pre"), + pathlib.PurePath(rdict["pki_dir"]).joinpath("minions_rejected"), + pathlib.PurePath(rdict["pki_dir"]).joinpath("minions_denied"), + pathlib.PurePath(rdict["cachedir"]).joinpath("jobs"), + 
pathlib.PurePath(rdict["cachedir"]).joinpath("tokens"), + pathlib.PurePath(rdict["root_dir"]).joinpath("cache", "tokens"), + pathlib.PurePath(rdict["pki_dir"]).joinpath("accepted"), + pathlib.PurePath(rdict["pki_dir"]).joinpath("rejected"), + pathlib.PurePath(rdict["pki_dir"]).joinpath("pending"), + pathlib.PurePath(rdict["log_file"]).parent, + rdict["sock_dir"], + conf_dir, + ], + RUNTIME_VARS.RUNNING_TESTS_USER, + root_dir=rdict["root_dir"], + ) + + ## rdict["conf_file"] = os.path.join(conf_dir, config_for) + rdict["conf_file"] = pathlib.PurePath(conf_dir).joinpath(config_for) + with salt.utils.files.fopen(rdict["conf_file"], "w") as wfh: + salt.utils.yaml.safe_dump(rdict, wfh, default_flow_style=False) + return rdict + + @staticmethod + def get_config(config_for, from_scratch=False): + log.debug( + f"DGM AdaptedConfigurationTestCaseMixin get_config, config_for '{config_for}, from_scratch '{from_scratch}', runtime runtime_configs '{RUNTIME_VARS.RUNTIME_CONFIGS}'" + ) + + if from_scratch: + if config_for in ("master", "syndic_master", "mm_master", "mm_sub_master"): + return salt.config.master_config( + AdaptedConfigurationTestCaseMixin.get_config_file_path(config_for) + ) + elif config_for in ("minion", "sub_minion"): + return salt.config.minion_config( + AdaptedConfigurationTestCaseMixin.get_config_file_path(config_for), + cache_minion_id=False, + ) + elif config_for in ("syndic",): + return salt.config.syndic_config( + AdaptedConfigurationTestCaseMixin.get_config_file_path(config_for), + AdaptedConfigurationTestCaseMixin.get_config_file_path("minion"), + ) + elif config_for == "client_config": + return salt.config.client_config( + AdaptedConfigurationTestCaseMixin.get_config_file_path("master") + ) + + if config_for not in RUNTIME_VARS.RUNTIME_CONFIGS: + if config_for in ("master", "syndic_master", "mm_master", "mm_sub_master"): + RUNTIME_VARS.RUNTIME_CONFIGS[config_for] = freeze( + salt.config.master_config( + 
AdaptedConfigurationTestCaseMixin.get_config_file_path( + config_for + ) + ) + ) + elif config_for in ("minion", "sub_minion"): + RUNTIME_VARS.RUNTIME_CONFIGS[config_for] = freeze( + salt.config.minion_config( + AdaptedConfigurationTestCaseMixin.get_config_file_path( + config_for + ) + ) + ) + elif config_for in ("syndic",): + RUNTIME_VARS.RUNTIME_CONFIGS[config_for] = freeze( + salt.config.syndic_config( + AdaptedConfigurationTestCaseMixin.get_config_file_path( + config_for + ), + AdaptedConfigurationTestCaseMixin.get_config_file_path( + "minion" + ), + ) + ) + elif config_for == "client_config": + RUNTIME_VARS.RUNTIME_CONFIGS[config_for] = freeze( + salt.config.client_config( + AdaptedConfigurationTestCaseMixin.get_config_file_path("master") + ) + ) + return RUNTIME_VARS.RUNTIME_CONFIGS[config_for] + + @property + def config_dir(self): + return RUNTIME_VARS.TMP_CONF_DIR + + def get_config_dir(self): + log.warning("Use the config_dir attribute instead of calling get_config_dir()") + return self.config_dir + + @staticmethod + def get_config_file_path(filename): + ## if filename == "master": + ## return os.path.join(RUNTIME_VARS.TMP_CONF_DIR, filename) + ## if filename == "minion": + ## return os.path.join(RUNTIME_VARS.TMP_MINION_CONF_DIR, filename) + ## if filename == "syndic_master": + ## return os.path.join(RUNTIME_VARS.TMP_SYNDIC_MASTER_CONF_DIR, "master") + ## if filename == "syndic": + ## return os.path.join(RUNTIME_VARS.TMP_SYNDIC_MINION_CONF_DIR, "minion") + ## if filename == "sub_minion": + ## return os.path.join(RUNTIME_VARS.TMP_SUB_MINION_CONF_DIR, "minion") + ## if filename == "mm_master": + ## return os.path.join(RUNTIME_VARS.TMP_MM_CONF_DIR, "master") + ## if filename == "mm_sub_master": + ## return os.path.join(RUNTIME_VARS.TMP_MM_SUB_CONF_DIR, "master") + ## if filename == "mm_minion": + ## return os.path.join(RUNTIME_VARS.TMP_MM_MINION_CONF_DIR, "minion") + ## if filename == "mm_sub_minion": + ## return 
os.path.join(RUNTIME_VARS.TMP_MM_SUB_MINION_CONF_DIR, "minion") + ## return os.path.join(RUNTIME_VARS.TMP_CONF_DIR, filename) + if filename == "master": + return pathlib.PurePath(RUNTIME_VARS.TMP_CONF_DIR).joinpath(filename) + if filename == "minion": + return pathlib.PurePath(RUNTIME_VARS.TMP_MINION_CONF_DIR).joinpath(filename) + if filename == "syndic_master": + return pathlib.PurePath(RUNTIME_VARS.TMP_SYNDIC_MASTER_CONF_DIR).joinpath( + "master" + ) + if filename == "syndic": + return pathlib.PurePath(RUNTIME_VARS.TMP_SYNDIC_MINION_CONF_DIR).joinpath( + "minion" + ) + if filename == "sub_minion": + return pathlib.PurePath(RUNTIME_VARS.TMP_SUB_MINION_CONF_DIR).joinpath( + "minion" + ) + if filename == "mm_master": + return pathlib.PurePath(RUNTIME_VARS.TMP_MM_CONF_DIR).joinpath("master") + if filename == "mm_sub_master": + return pathlib.PurePath(RUNTIME_VARS.TMP_MM_SUB_CONF_DIR).joinpath("master") + if filename == "mm_minion": + return pathlib.PurePath(RUNTIME_VARS.TMP_MM_MINION_CONF_DIR).joinpath( + "minion" + ) + if filename == "mm_sub_minion": + return pathlib.PurePath(RUNTIME_VARS.TMP_MM_SUB_MINION_CONF_DIR).joinpath( + "minion" + ) + return pathlib.PurePath(RUNTIME_VARS.TMP_CONF_DIR).joinpath(filename) + + @property + def master_opts(self): + """ + Return the options used for the master + """ + return self.get_config("master") + + @property + def minion_opts(self): + """ + Return the options used for the minion + """ + return self.get_config("minion") + + @property + def sub_minion_opts(self): + """ + Return the options used for the sub_minion + """ + return self.get_config("sub_minion") class TestGitBase(AdaptedConfigurationTestCaseMixin): @@ -40,6 +344,12 @@ class TestGitBase(AdaptedConfigurationTestCaseMixin): def __init__( self, ): + # TBD DGM need to fixup using tmp_path fixture + ## self._tmp_dir = pathlib.Path(tmp_path / "git_test").mkdir(exist_ok=True, parents=True) + ## tmp_name = str(self._tmp_dir) + self._tmp_dir = tempfile.TemporaryDirectory() + 
tmp_name = self._tmp_dir.name + class MockedProvider( salt.utils.gitfs.GitProvider ): # pylint: disable=abstract-method @@ -66,7 +376,7 @@ class TestGitBase(AdaptedConfigurationTestCaseMixin): ) def init_remote(self): - self.gitdir = salt.utils.path.join(get_tmp_dir, ".git") + self.gitdir = salt.utils.path.join(tmp_name, ".git") self.repo = True new = False return new @@ -108,12 +418,17 @@ class TestGitBase(AdaptedConfigurationTestCaseMixin): @pytest.fixture def main_class(tmp_path): test_git_base = TestGitBase() - yield test_git_base + log.debug(f"DGM main_class, test_git_base '{dir(test_git_base)}'") + log.debug( + f"DGM main_class, test_git_base.main_class '{dir(test_git_base.main_class)}'" + ) + yield test_git_base.main_class test_git_base.tearDown() def test_update_all(main_class): + log.debug(f"DGM test_update_all, main_class '{dir(main_class)}'") main_class.update() assert len(main_class.remotes) == 2, "Wrong number of remotes" assert main_class.remotes[0].fetched From ec549d6a06594b2c692ac9fe96acb1e6c400beac Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Fri, 16 Feb 2024 16:19:54 -0700 Subject: [PATCH 016/160] Working tests ported from unittest to pytest --- tests/pytests/unit/utils/test_gitfs_locks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index 0b9b814af48..026f416160d 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -454,7 +454,7 @@ def test_get_cachedir_basename(main_class): assert main_class.remotes[1].get_cache_basename() == "_" -def test_git_provider_mp_lock(): +def test_git_provider_mp_lock(main_class): """ Check that lock is released after provider.lock() """ From f832181ee34334f8a8a50360e603c226f99d11f2 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Thu, 22 Feb 2024 09:45:09 -0700 Subject: [PATCH 017/160] Testing 
additional test --- tests/pytests/unit/utils/test_gitfs_locks.py | 66 +++++++------------- 1 file changed, 22 insertions(+), 44 deletions(-) diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index 026f416160d..b4eb8693f8a 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -112,11 +112,6 @@ class AdaptedConfigurationTestCaseMixin: pathlib.Path(RUNTIME_VARS.TMP).mkdir(exist_ok=True, parents=True) rootdir = config_overrides.get("root_dir", RUNTIME_VARS.TMP) - ## if not os.path.exists(rootdir): - ## log.debug(f"DGM AdaptedConfigurationTestCaseMixin get_temp_config, oddity runtime_vars '{RUNTIME_VARS.TMP}' should already exist") - ## os.makedirs(rootdir) - - ## conf_dir = config_overrides.pop("conf_dir", os.path.join(rootdir, "conf")) conf_dir = config_overrides.pop( "conf_dir", pathlib.PurePath(rootdir).joinpath("conf") ) @@ -148,25 +143,6 @@ class AdaptedConfigurationTestCaseMixin: config_overrides, cdict, cache_minion_id=False, minion_id=minion_id ) - ## verify_env( - ## [ - ## os.path.join(rdict["pki_dir"], "minions"), - ## os.path.join(rdict["pki_dir"], "minions_pre"), - ## os.path.join(rdict["pki_dir"], "minions_rejected"), - ## os.path.join(rdict["pki_dir"], "minions_denied"), - ## os.path.join(rdict["cachedir"], "jobs"), - ## os.path.join(rdict["cachedir"], "tokens"), - ## os.path.join(rdict["root_dir"], "cache", "tokens"), - ## os.path.join(rdict["pki_dir"], "accepted"), - ## os.path.join(rdict["pki_dir"], "rejected"), - ## os.path.join(rdict["pki_dir"], "pending"), - ## os.path.dirname(rdict["log_file"]), - ## rdict["sock_dir"], - ## conf_dir, - ## ], - ## RUNTIME_VARS.RUNNING_TESTS_USER, - ## root_dir=rdict["root_dir"], - ## ) verify_env( [ pathlib.PurePath(rdict["pki_dir"]).joinpath("minions"), @@ -187,7 +163,6 @@ class AdaptedConfigurationTestCaseMixin: root_dir=rdict["root_dir"], ) - ## rdict["conf_file"] = os.path.join(conf_dir, config_for) 
rdict["conf_file"] = pathlib.PurePath(conf_dir).joinpath(config_for) with salt.utils.files.fopen(rdict["conf_file"], "w") as wfh: salt.utils.yaml.safe_dump(rdict, wfh, default_flow_style=False) @@ -265,25 +240,6 @@ class AdaptedConfigurationTestCaseMixin: @staticmethod def get_config_file_path(filename): - ## if filename == "master": - ## return os.path.join(RUNTIME_VARS.TMP_CONF_DIR, filename) - ## if filename == "minion": - ## return os.path.join(RUNTIME_VARS.TMP_MINION_CONF_DIR, filename) - ## if filename == "syndic_master": - ## return os.path.join(RUNTIME_VARS.TMP_SYNDIC_MASTER_CONF_DIR, "master") - ## if filename == "syndic": - ## return os.path.join(RUNTIME_VARS.TMP_SYNDIC_MINION_CONF_DIR, "minion") - ## if filename == "sub_minion": - ## return os.path.join(RUNTIME_VARS.TMP_SUB_MINION_CONF_DIR, "minion") - ## if filename == "mm_master": - ## return os.path.join(RUNTIME_VARS.TMP_MM_CONF_DIR, "master") - ## if filename == "mm_sub_master": - ## return os.path.join(RUNTIME_VARS.TMP_MM_SUB_CONF_DIR, "master") - ## if filename == "mm_minion": - ## return os.path.join(RUNTIME_VARS.TMP_MM_MINION_CONF_DIR, "minion") - ## if filename == "mm_sub_minion": - ## return os.path.join(RUNTIME_VARS.TMP_MM_SUB_MINION_CONF_DIR, "minion") - ## return os.path.join(RUNTIME_VARS.TMP_CONF_DIR, filename) if filename == "master": return pathlib.PurePath(RUNTIME_VARS.TMP_CONF_DIR).joinpath(filename) if filename == "minion": @@ -506,3 +462,25 @@ def test_git_provider_mp_clear_lock_timeout(main_class): pytest.raises(TimeoutError, provider.clear_lock) finally: provider._master_lock.release() + + +@pytest.mark.slow_test +@pytest.mark.timeout_unless_on_windows(120) +def test_git_provider_mp_gen_lock(main_class): + """ + Check that gen_lock is obtains lock, and then releases, provider.lock() + """ + log.debug(f"DGM test_git_provider_mp_gen_lock, main_class '{dir(main_class)}'") + provider = main_class.remotes[0] + provider.gen_lock() + try: + # check that lock has been acquired + assert 
provider._master_lock.acquire(timeout=5) + # git provider should raise timeout error to avoid lock race conditions + pytest.raises(TimeoutError, provider.lock) + finally: + provider.gen_lock() + + # check that lock has been released + assert provider._master_lock.acquire(timeout=5) + provider._master_lock.release() From 9c80258c0c7fc158ed7c92cb8bb99b8d1ae61057 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Fri, 23 Feb 2024 16:11:16 -0700 Subject: [PATCH 018/160] Tests for gen_lock functionality --- salt/utils/gitfs.py | 6 ++- tests/pytests/unit/utils/test_gitfs_locks.py | 54 +++++++------------- 2 files changed, 23 insertions(+), 37 deletions(-) diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index 6ee5ed96f3d..bf41cadb9cf 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -919,6 +919,8 @@ class GitProvider: str(exc), ) return False + except NotImplementedError as exc: + log.warning(f"fetch got NotImplementedError, exc '{exc}'") def _lock(self, lock_type="update", failhard=False): """ @@ -1062,7 +1064,8 @@ class GitProvider: @contextlib.contextmanager def gen_lock(self, lock_type="update", timeout=0, poll_interval=0.5): """ - Set and automatically clear a lock + Set and automatically clear a lock, + should be called from a context, for example: with self.gen_lock() """ if not isinstance(lock_type, str): raise GitLockError(errno.EINVAL, f"Invalid lock_type '{lock_type}'") @@ -2702,6 +2705,7 @@ class GitBase: success, failed = repo.clear_lock(lock_type=lock_type) cleared.extend(success) errors.extend(failed) + return cleared, errors def fetch_remotes(self, remotes=None): diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index b4eb8693f8a..decc363a5dc 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -5,8 +5,6 @@ any remotes. 
import logging import pathlib - -## import os import tempfile import pytest @@ -20,8 +18,6 @@ import salt.utils.path import salt.utils.platform from salt.utils.immutabletypes import freeze from salt.utils.verify import verify_env - -## from tests.support.mixins import AdaptedConfigurationTestCaseMixin from tests.support.runtests import RUNTIME_VARS log = logging.getLogger(__name__) @@ -75,7 +71,7 @@ def bridge_pytest_and_runtests( RUNTIME_VARS.TMP_SSH_CONF_DIR = str(sshd_config_dir) -## @pytest.fixture(scope="session", autouse=True) +## @pytest.fixture ## def get_tmp_dir(tmp_path): ## dirpath = tmp_path / "git_test" ## dirpath.mkdir(parents=True) @@ -99,16 +95,10 @@ class AdaptedConfigurationTestCaseMixin: @staticmethod def get_temp_config(config_for, **config_overrides): - log.debug( - f"DGM AdaptedConfigurationTestCaseMixin get_temp_config, config_for '{config_for}, config_overrides '{config_overrides}', runtime_vars tmp '{RUNTIME_VARS.TMP}', user '{RUNTIME_VARS.RUNNING_TESTS_USER}'" - ) rootdir = config_overrides.get("root_dir", RUNTIME_VARS.TMP) if not pathlib.Path(rootdir).exists(): - log.debug( - f"DGM AdaptedConfigurationTestCaseMixin get_temp_config, oddity runtime_vars '{RUNTIME_VARS.TMP}' should already exist" - ) pathlib.Path(RUNTIME_VARS.TMP).mkdir(exist_ok=True, parents=True) rootdir = config_overrides.get("root_dir", RUNTIME_VARS.TMP) @@ -170,10 +160,6 @@ class AdaptedConfigurationTestCaseMixin: @staticmethod def get_config(config_for, from_scratch=False): - log.debug( - f"DGM AdaptedConfigurationTestCaseMixin get_config, config_for '{config_for}, from_scratch '{from_scratch}', runtime runtime_configs '{RUNTIME_VARS.RUNTIME_CONFIGS}'" - ) - if from_scratch: if config_for in ("master", "syndic_master", "mm_master", "mm_sub_master"): return salt.config.master_config( @@ -300,7 +286,6 @@ class TestGitBase(AdaptedConfigurationTestCaseMixin): def __init__( self, ): - # TBD DGM need to fixup using tmp_path fixture ## self._tmp_dir = pathlib.Path(tmp_path / 
"git_test").mkdir(exist_ok=True, parents=True) ## tmp_name = str(self._tmp_dir) self._tmp_dir = tempfile.TemporaryDirectory() @@ -332,16 +317,21 @@ class TestGitBase(AdaptedConfigurationTestCaseMixin): ) def init_remote(self): + log.debug(f"DGM MockedProvider init_remote tmp_name '{tmp_name}'") self.gitdir = salt.utils.path.join(tmp_name, ".git") + log.debug(f"DGM MockedProvider init_remote gitdir '{self.gitdir}'") self.repo = True new = False return new def envs(self): + dgm_test_base = ["base"] + log.debug(f"DGM MockedProvider env base '{dgm_test_base}'") return ["base"] - def fetch(self): + def _fetch(self): self.fetched = True + log.debug(f"DGM MockedProvider _fetch self.fetched '{self.fetched}'") # Clear the instance map so that we make sure to create a new instance # for this test class. @@ -374,17 +364,12 @@ class TestGitBase(AdaptedConfigurationTestCaseMixin): @pytest.fixture def main_class(tmp_path): test_git_base = TestGitBase() - log.debug(f"DGM main_class, test_git_base '{dir(test_git_base)}'") - log.debug( - f"DGM main_class, test_git_base.main_class '{dir(test_git_base.main_class)}'" - ) yield test_git_base.main_class test_git_base.tearDown() def test_update_all(main_class): - log.debug(f"DGM test_update_all, main_class '{dir(main_class)}'") main_class.update() assert len(main_class.remotes) == 2, "Wrong number of remotes" assert main_class.remotes[0].fetched @@ -466,21 +451,18 @@ def test_git_provider_mp_clear_lock_timeout(main_class): @pytest.mark.slow_test @pytest.mark.timeout_unless_on_windows(120) -def test_git_provider_mp_gen_lock(main_class): +def test_git_provider_mp_gen_lock(main_class, caplog): """ Check that gen_lock is obtains lock, and then releases, provider.lock() """ - log.debug(f"DGM test_git_provider_mp_gen_lock, main_class '{dir(main_class)}'") - provider = main_class.remotes[0] - provider.gen_lock() - try: - # check that lock has been acquired - assert provider._master_lock.acquire(timeout=5) - # git provider should raise timeout 
error to avoid lock race conditions - pytest.raises(TimeoutError, provider.lock) - finally: - provider.gen_lock() + test_msg1 = "Set update lock for gitfs remote 'file://repo1.git' on machine_id" + test_msg2 = "Attempting to remove 'update' lock for 'gitfs' remote 'file://repo1.git' due to lock_set1 'True' or lock_set2" + test_msg3 = "Removed update lock for gitfs remote 'file://repo1.git' on machine_id" - # check that lock has been released - assert provider._master_lock.acquire(timeout=5) - provider._master_lock.release() + provider = main_class.remotes[0] + with caplog.at_level(logging.DEBUG): + provider.fetch() + + assert test_msg1 in caplog.text + assert test_msg2 in caplog.text + assert test_msg3 in caplog.text From ee55b9abe30f52777e5cae3d355f094e254aaaf5 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Mon, 26 Feb 2024 16:01:57 -0700 Subject: [PATCH 019/160] Initial SIGTERM testing --- salt/utils/process.py | 5 +- tests/pytests/unit/utils/test_gitfs_locks.py | 57 ++++++++++++++++++++ 2 files changed, 60 insertions(+), 2 deletions(-) diff --git a/salt/utils/process.py b/salt/utils/process.py index 9cb7633ff9e..dd592cbacf3 100644 --- a/salt/utils/process.py +++ b/salt/utils/process.py @@ -1096,11 +1096,12 @@ class SignalHandlingProcess(Process): # need to go through and clean up any resources left around like lock files if using gitfs # example lockfile /var/cache/salt/master/gitfs/work/NlJQs6Pss_07AugikCrmqfmqEFrfPbCDBqGLBiCd3oU=/_/update.lk + cache_dir = self.opts.get("cachedir", None) gitfs_active = self.opts.get("gitfs_remotes", None) - if gitfs_active: + if cache_dir and gitfs_active: # check for gitfs file locks to ensure no resource leaks # last chance to clean up any missed unlock droppings - cache_dir = Path("/var/cache/salt/master/gitfs/work") + cache_dir = Path(cache_dir + "/gitfs/work") if cache_dir.exists and cache_dir.is_dir(): file_list = list(cache_dir.glob("**/*.lk")) file_del_list = [] diff --git 
a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index decc363a5dc..044758ec664 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -6,6 +6,8 @@ any remotes. import logging import pathlib import tempfile +import time +from multiprocessing import Process import pytest from saltfactories.utils import random_string @@ -466,3 +468,58 @@ def test_git_provider_mp_gen_lock(main_class, caplog): assert test_msg1 in caplog.text assert test_msg2 in caplog.text assert test_msg3 in caplog.text + + +def process_kill_test(main_class): + """ + Process to obtain a lock and hold it, + which will then be given a SIGTERM to ensure clean up of resources for the lock + + Check that lock is obtained and then it should be released by SIGTERM checks + """ + provider = main_class.remotes[0] + provider.lock() + # check that lock has been released + assert provider._master_lock.acquire(timeout=5) + + time.sleep(20) # give time for kill by sigterm + + +@pytest.mark.slow_test +@pytest.mark.skip_unless_on_linux +def test_git_provider_sigterm_cleanup(caplog): + """ + Start process which will obtain lock, and leave it locked + then kill the process via SIGTERM and ensure locked resources are cleaned up + """ + log.debug("DGM test_git_provider_sigterm_cleanup entry") + ## start process_kill_test and obtain it's PID + ## proc = subprocess.Popen("what go's here to start process_kill_test, etc") + ## + ## child_pid = proc.pid + ## log.debug(f"DGM test_git_provider_sigterm_cleanup child process pid '{child_pid}'") + ## + ## with caplog.at_level(logging.DEBUG): + ## proc.send_signal(signal.SIGTERM) + + proc = Process(target=process_kill_test) + proc.start() + + while not proc.is_alive(): + log.debug( + "DGM test_git_provider_sigterm_cleanup sleeping waiting for child process to become alive" + ) + time.sleep(1) # give some time for it to be started + + # child process should be alive + log.debug( + 
f"DGM test_git_provider_sigterm_cleanup child process is alive with pid '{proc.pid}'" + ) + + with caplog.at_level(logging.DEBUG): + proc.terminate() # sends a SIGTERM + + test_msg1 = "SIGTERM clean up of resources, removed lock file" + assert test_msg1 in caplog.text + + log.debug("DGM test_git_provider_sigterm_cleanup exit") From 443fdd3e3496001c1f0d7432638898215188e173 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Tue, 27 Feb 2024 16:58:55 -0700 Subject: [PATCH 020/160] WIP - Test kill cleanup of resources --- salt/utils/process.py | 41 ++++++++++++++ tests/pytests/unit/utils/test_gitfs_locks.py | 56 ++++++++++++++------ 2 files changed, 81 insertions(+), 16 deletions(-) diff --git a/salt/utils/process.py b/salt/utils/process.py index dd592cbacf3..f19ddf2335b 100644 --- a/salt/utils/process.py +++ b/salt/utils/process.py @@ -577,6 +577,7 @@ class ProcessManager: self._restart_processes = False def send_signal_to_processes(self, signal_): + log.warning(f"DGM process send_signal_to_processes signal '{signal_}'") if salt.utils.platform.is_windows() and signal_ in ( signal.SIGTERM, signal.SIGINT, @@ -595,8 +596,14 @@ class ProcessManager: for pid in self._process_map.copy(): try: + log.warning( + f"DGM process send_signal_to_processes kill pid '{pid}', signal '{signal_}'" + ) os.kill(pid, signal_) except OSError as exc: + log.warning( + f"DGM process send_signal_to_processes OSError exc, '{exc}'" + ) if exc.errno not in (errno.ESRCH, errno.EACCES): # If it's not a "No such process" error, raise it raise @@ -658,6 +665,7 @@ class ProcessManager: """ Kill all of the children """ + log.warning("DGM process kill_children entry") if salt.utils.platform.is_windows(): if multiprocessing.current_process().name != "MainProcess": # Since the main process will kill subprocesses by tree, @@ -678,6 +686,7 @@ class ProcessManager: p_map["Process"].terminate() else: for pid, p_map in self._process_map.copy().items(): + log.warning("DGM Terminating pid 
%s: %s", pid, p_map["Process"]) log.trace("Terminating pid %s: %s", pid, p_map["Process"]) if args: # escalate the signal to the process @@ -774,11 +783,17 @@ class ProcessManager: """ Properly terminate this process manager instance """ + log.warning("DGM process terminate entry") self.stop_restarting() + log.warning("DGM process terminate send signal SIGTERM") self.send_signal_to_processes(signal.SIGTERM) + log.warning("DGM process terminate kill children") self.kill_children() + log.warning("DGM process terminate exit") def _handle_signals(self, *args, **kwargs): + log.warning(f"DGM process _handle_signals args '{args}', kwargs '{kwargs}'") + # first lets reset signal handlers to default one to prevent running this twice signal.signal(signal.SIGTERM, signal.SIG_IGN) signal.signal(signal.SIGINT, signal.SIG_IGN) @@ -1062,6 +1077,10 @@ class SignalHandlingProcess(Process): signal.signal(signal.SIGTERM, self._handle_signals) def _handle_signals(self, signum, sigframe): + log.warning( + f"DGM SignalHandlingProcess _handle_signals, signum '{signum}', sigframe '{sigframe}'" + ) + signal.signal(signal.SIGTERM, signal.SIG_IGN) signal.signal(signal.SIGINT, signal.SIG_IGN) msg = f"{self.__class__.__name__} received a " @@ -1077,6 +1096,10 @@ class SignalHandlingProcess(Process): f"exiting for process id '{os.getpid()}' and machine identifer '{mach_id}'" ) + log.warning( + f"DGM _handle_signals about to check HAS_PSUTIL, for process id '{os.getpid()}' and machine identifer '{mach_id}'" + ) + if HAS_PSUTIL: try: cur_pid = os.getpid() @@ -1098,16 +1121,28 @@ class SignalHandlingProcess(Process): # example lockfile /var/cache/salt/master/gitfs/work/NlJQs6Pss_07AugikCrmqfmqEFrfPbCDBqGLBiCd3oU=/_/update.lk cache_dir = self.opts.get("cachedir", None) gitfs_active = self.opts.get("gitfs_remotes", None) + log.warning( + f"DGM _handle_signals HAS_PSUTIL, cache_dir '{cache_dir}', gitfs_active '{gitfs_active}'" + ) if cache_dir and gitfs_active: # check for gitfs file locks to ensure 
no resource leaks # last chance to clean up any missed unlock droppings cache_dir = Path(cache_dir + "/gitfs/work") + log.warning( + f"DGM _handle_signals HAS_PSUTIL,find for final cache_dir '{cache_dir}'" + ) if cache_dir.exists and cache_dir.is_dir(): file_list = list(cache_dir.glob("**/*.lk")) + log.warning( + f"DGM _handle_signals HAS_PSUTIL,find for final cache_dir '{cache_dir}', produced glob file_list '{file_list}'" + ) file_del_list = [] try: for file_name in file_list: + log.warning( + f"DGM _handle_signals HAS_PSUTIL, checking file name '{file_name}'" + ) with salt.utils.files.fopen(file_name, "r") as fd_: try: file_pid = int( @@ -1127,6 +1162,9 @@ class SignalHandlingProcess(Process): except ValueError: # Lock file is empty, set mach_id to 0 so it evaluates as False. file_mach_id = 0 + log.warning( + f"DGM _handle_signals HAS_PSUTIL, cur_pid '{cur_pid}', mach_id '{mach_id}', file_pid '{file_pid}', file_mach_id '{file_mach_id}'" + ) if cur_pid == file_pid: if mach_id != file_mach_id: if not file_mach_id: @@ -1145,6 +1183,9 @@ class SignalHandlingProcess(Process): for (file_name, file_pid, file_mach_id) in file_del_list: try: + log.warning( + f"DGM _handle_signals file_pid '{file_pid}', file_mach_id '{file_mach_id}', removing file name '{file_name}'" + ) os.remove(file_name) except OSError as exc: if exc.errno == errno.ENOENT: diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index 044758ec664..01405d394ee 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -7,7 +7,6 @@ import logging import pathlib import tempfile import time -from multiprocessing import Process import pytest from saltfactories.utils import random_string @@ -18,6 +17,7 @@ import salt.utils.files import salt.utils.gitfs import salt.utils.path import salt.utils.platform +import salt.utils.process from salt.utils.immutabletypes import freeze from salt.utils.verify import 
verify_env from tests.support.runtests import RUNTIME_VARS @@ -477,33 +477,34 @@ def process_kill_test(main_class): Check that lock is obtained and then it should be released by SIGTERM checks """ + log.debug("DGM process_kill_test entry") provider = main_class.remotes[0] provider.lock() + + log.debug("DGM process_kill_test obtained lock") + # check that lock has been released assert provider._master_lock.acquire(timeout=5) + log.debug("DGM process_kill_test tested assert masterlock acquire") time.sleep(20) # give time for kill by sigterm + log.debug("DGM process_kill_test exit") @pytest.mark.slow_test @pytest.mark.skip_unless_on_linux -def test_git_provider_sigterm_cleanup(caplog): +def test_git_provider_sigterm_cleanup(main_class, caplog): """ Start process which will obtain lock, and leave it locked then kill the process via SIGTERM and ensure locked resources are cleaned up """ log.debug("DGM test_git_provider_sigterm_cleanup entry") - ## start process_kill_test and obtain it's PID - ## proc = subprocess.Popen("what go's here to start process_kill_test, etc") - ## - ## child_pid = proc.pid - ## log.debug(f"DGM test_git_provider_sigterm_cleanup child process pid '{child_pid}'") - ## - ## with caplog.at_level(logging.DEBUG): - ## proc.send_signal(signal.SIGTERM) - proc = Process(target=process_kill_test) - proc.start() + provider = main_class.remotes[0] + + procmgr = salt.utils.process.ProcessManager(wait_for_kill=30) + proc = procmgr.add_process(process_kill_test, args=(main_class,), name="test_kill") + ## proc.start() while not proc.is_alive(): log.debug( @@ -516,10 +517,33 @@ def test_git_provider_sigterm_cleanup(caplog): f"DGM test_git_provider_sigterm_cleanup child process is alive with pid '{proc.pid}'" ) - with caplog.at_level(logging.DEBUG): - proc.terminate() # sends a SIGTERM + file_name = provider._get_lock_file("update") + log.debug( + f"DGM test_git_provider_sigterm_cleanup lock file location, '{file_name}'" + ) - test_msg1 = "SIGTERM clean up of 
resources, removed lock file" - assert test_msg1 in caplog.text + assert pathlib.Path(file_name).exists() + assert pathlib.Path(file_name).is_file() + + log.debug( + f"DGM test_git_provider_sigterm_cleanup lock file location, '{file_name}', exists and is a file, send SIGTERM signal" + ) + + proc.terminate() # sends a SIGTERM + + time.sleep(1) # give some time for it to terminate + log.debug("DGM test_git_provider_sigterm_cleanup lock , post terminate") + + assert not proc.is_alive() + log.debug("DGM test_git_provider_sigterm_cleanup lock , child is not alive") + + test_file_exits = pathlib.Path(file_name).exists() + log.debug( + f"DGM test_git_provider_sigterm_cleanup lock file location, '{file_name}', does it exist anymore '{test_file_exits}'" + ) + assert not pathlib.Path(file_name).exists() + log.debug( + f"DGM test_git_provider_sigterm_cleanup lock file location, '{file_name}', does NOT exist anymore" + ) log.debug("DGM test_git_provider_sigterm_cleanup exit") From 56a11ba9d131e61030c9b97a921ea88dea51c26d Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Thu, 29 Feb 2024 13:59:10 -0700 Subject: [PATCH 021/160] WIP Test for kill SIGTERM --- salt/utils/gitfs.py | 6 +- salt/utils/process.py | 180 +++++++++++++++---- tests/pytests/unit/utils/test_gitfs_locks.py | 89 ++++++--- 3 files changed, 211 insertions(+), 64 deletions(-) diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index bf41cadb9cf..84546c24b46 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -251,7 +251,7 @@ class GitProvider: # DGM try getting machine_identifier # get machine_identifier self.mach_id = salt.utils.files.get_machine_identifier() - log.debug(f"machine_id for lock file, machine_id '{self.mach_id}'") + log.debug("DGM machine_id for lock file, machine_id %s", self.mach_id) self.global_saltenv = salt.utils.data.repack_dictlist( self.opts.get(f"{self.role}_saltenv", []), @@ -920,7 +920,7 @@ class GitProvider: ) return False except NotImplementedError 
as exc: - log.warning(f"fetch got NotImplementedError, exc '{exc}'") + log.warning("fetch got NotImplementedError exception %s", exc) def _lock(self, lock_type="update", failhard=False): """ @@ -1041,6 +1041,8 @@ class GitProvider: contextmanager here because the lock is meant to stay and not be automatically removed. """ + dbg_msg = f"DGM GitProvider lock entry, pid '{os.getpid()}'" + log.warning(dbg_msg) success = [] failed = [] try: diff --git a/salt/utils/process.py b/salt/utils/process.py index f19ddf2335b..aed2633d454 100644 --- a/salt/utils/process.py +++ b/salt/utils/process.py @@ -514,6 +514,8 @@ class ProcessManager: This will deterimine if it is a Process class, otherwise it assumes it is a function """ + dbg_msg = f"DGM process add_process entry, tgt '{tgt}', args '{args}', kwargs '{kwargs}', name '{name}'" + log.warning(dbg_msg) if args is None: args = [] if kwargs is None: @@ -529,9 +531,16 @@ class ProcessManager: if isinstance(process, SignalHandlingProcess): with default_signals(signal.SIGINT, signal.SIGTERM): + log.warning( + "DGM process add_process with default_signals, process start" + ) process.start() else: + log.warning( + "DGM process add_process without default_signals, process start" + ) process.start() + log.debug("Started '%s' with pid %s", process.name, process.pid) self._process_map[process.pid] = { "tgt": tgt, @@ -539,6 +548,7 @@ class ProcessManager: "kwargs": kwargs, "Process": process, } + return process def restart_process(self, pid): @@ -577,7 +587,9 @@ class ProcessManager: self._restart_processes = False def send_signal_to_processes(self, signal_): - log.warning(f"DGM process send_signal_to_processes signal '{signal_}'") + dbg_msg = f"DGM process send_signal_to_processes signal '{signal_}'" + log.warning(dbg_msg) + if salt.utils.platform.is_windows() and signal_ in ( signal.SIGTERM, signal.SIGINT, @@ -596,14 +608,14 @@ class ProcessManager: for pid in self._process_map.copy(): try: - log.warning( - f"DGM process 
send_signal_to_processes kill pid '{pid}', signal '{signal_}'" - ) + dbg_msg = f"DGM process send_signal_to_processes kill pid '{pid}', signal '{signal_}'" + log.warning(dbg_msg) os.kill(pid, signal_) + dbg_msg = f"DGM process sent_signal_to_processes os.kill pid '{pid}', signal '{signal_}'" + log.warning(dbg_msg) except OSError as exc: - log.warning( - f"DGM process send_signal_to_processes OSError exc, '{exc}'" - ) + dbg_msg = f"DGM process send_signal_to_processes OSError exc, '{exc}'" + log.warning(dbg_msg) if exc.errno not in (errno.ESRCH, errno.EACCES): # If it's not a "No such process" error, raise it raise @@ -686,7 +698,9 @@ class ProcessManager: p_map["Process"].terminate() else: for pid, p_map in self._process_map.copy().items(): - log.warning("DGM Terminating pid %s: %s", pid, p_map["Process"]) + dgm_p_map = p_map["Process"] + dgm_msg = f"DGM process kill_children Terminating pid '{pid}': '{dgm_p_map}', args '{args}'" + log.warning(dgm_msg) log.trace("Terminating pid %s: %s", pid, p_map["Process"]) if args: # escalate the signal to the process @@ -696,6 +710,99 @@ class ProcessManager: pass try: p_map["Process"].terminate() + + # need to go through and clean up any resources left around like lock files if using gitfs + # example lockfile /var/cache/salt/master/gitfs/work/NlJQs6Pss_07AugikCrmqfmqEFrfPbCDBqGLBiCd3oU=/_/update.lk + mach_id = salt.utils.files.get_machine_identifier() + ## cache_dir = self.opts.get("cachedir", None) + ## gitfs_active = self.opts.get("gitfs_remotes", None) + cache_dir = "/tmp" + gitfs_active = True + terminate_pid = pid + dbg_msg = f"DGM process kill_children, cache_dir '{cache_dir}', gitfs_active '{gitfs_active}'" + log.warning(dbg_msg) + if cache_dir and gitfs_active: + # check for gitfs file locks to ensure no resource leaks + # last chance to clean up any missed unlock droppings + ## cache_dir = Path(cache_dir + "/gitfs/work") + cache_dir = Path(cache_dir) + dbg_msg = f"DGM process kill_children ,find for final cache_dir 
'{cache_dir}'" + log.warning(dbg_msg) + if cache_dir.exists and cache_dir.is_dir(): + file_list = list(cache_dir.glob("**/*.lk")) + dbg_msg = f"DGM process kill_children ,find for final cache_dir '{cache_dir}', produced glob file_list '{file_list}'" + log.warning(dbg_msg) + file_del_list = [] + + try: + for file_name in file_list: + dbg_msg = f"DGM process kill_children , checking file name '{file_name}'" + log.warning(dbg_msg) + with salt.utils.files.fopen(file_name, "r") as fd_: + try: + file_pid = int( + salt.utils.stringutils.to_unicode( + fd_.readline() + ).rstrip() + ) + except ValueError: + # Lock file is empty, set pid to 0 so it evaluates as False. + file_pid = 0 + try: + file_mach_id = int( + salt.utils.stringutils.to_unicode( + fd_.readline() + ).rstrip() + ) + except ValueError: + # Lock file is empty, set mach_id to 0 so it evaluates as False. + file_mach_id = 0 + + dbg_msg = f"DGM process kill_children , terminate_pid '{terminate_pid}', mach_id '{mach_id}', file_pid '{file_pid}', file_mach_id '{file_mach_id}'" + log.warning(dbg_msg) + if terminate_pid == file_pid: + if mach_id != file_mach_id: + if not file_mach_id: + msg = f"gitfs lock file for pid '{file_pid}' does not contain a machine id, deleting lock file which may affect if using multi-master with shared gitfs cache, the lock may have been obtained by another master recommend updating Salt version on other masters to a version which insert machine identification in lock a file." 
+ log.debug(msg) + file_del_list.append( + (file_name, file_pid, file_mach_id) + ) + else: + file_del_list.append( + (file_name, file_pid, file_mach_id) + ) + + except FileNotFoundError: + log.debug("gitfs lock file: %s not found", file_name) + + for file_name, file_pid, file_mach_id in file_del_list: + try: + dbg_msg = f"DGM process kill_children file_pid '{file_pid}', file_mach_id '{file_mach_id}', removing file name '{file_name}'" + log.warning(dbg_msg) + os.remove(file_name) + except OSError as exc: + if exc.errno == errno.ENOENT: + # No lock file present + msg = f"SIGTERM clean up of resources attempted to remove lock file {file_name}, pid '{file_pid}', machine identifier '{mach_id}' but it did not exist, exception : {exc} " + log.debug(msg) + + elif exc.errno == errno.EISDIR: + # Somehow this path is a directory. Should never happen + # unless some wiseguy manually creates a directory at this + # path, but just in case, handle it. + try: + shutil.rmtree(file_name) + except OSError as exc: + msg = f"SIGTERM clean up of resources, lock file '{file_name}', pid '{file_pid}', machine identifier '{file_mach_id}' was a directory, removed directory, exception : '{exc}'" + log.debug(msg) + else: + msg = f"SIGTERM clean up of resources, unable to remove lock file '{file_name}', pid '{file_pid}', machine identifier '{file_mach_id}', exception : '{exc}'" + log.debug(msg) + else: + msg = f"SIGTERM clean up of resources, removed lock file '{file_name}', pid '{file_pid}', machine identifier '{file_mach_id}'" + log.debug(msg) + except OSError as exc: if exc.errno not in (errno.ESRCH, errno.EACCES): raise @@ -792,7 +899,8 @@ class ProcessManager: log.warning("DGM process terminate exit") def _handle_signals(self, *args, **kwargs): - log.warning(f"DGM process _handle_signals args '{args}', kwargs '{kwargs}'") + dbg_msg = f"DGM process _handle_signals args '{args}', kwargs '{kwargs}'" + log.warning(dbg_msg) # first lets reset signal handlers to default one to prevent running 
this twice signal.signal(signal.SIGTERM, signal.SIG_IGN) @@ -803,6 +911,8 @@ class ProcessManager: # check that this is the correct process, children inherit this # handler, if we are in a child lets just run the original handler + dbg_msg = f"DGM process _handle_signals os.getpid '{os.getpid()}', self pid '{self._pid}'" + log.warning(dbg_msg) if os.getpid() != self._pid: if callable(self._sigterm_handler): return self._sigterm_handler(*args) @@ -811,6 +921,8 @@ class ProcessManager: else: return + dbg_msg = f"DGM process _handle_signals call self.kill_children, args '{args}', kwargs '{kwargs}'" + log.warning(dbg_msg) # Terminate child processes self.kill_children(*args, **kwargs) @@ -1077,9 +1189,8 @@ class SignalHandlingProcess(Process): signal.signal(signal.SIGTERM, self._handle_signals) def _handle_signals(self, signum, sigframe): - log.warning( - f"DGM SignalHandlingProcess _handle_signals, signum '{signum}', sigframe '{sigframe}'" - ) + dbg_msg = f"DGM SignalHandlingProcess _handle_signals, signum '{signum}', sigframe '{sigframe}'" + log.warning(dbg_msg) signal.signal(signal.SIGTERM, signal.SIG_IGN) signal.signal(signal.SIGINT, signal.SIG_IGN) @@ -1092,13 +1203,13 @@ class SignalHandlingProcess(Process): log.debug(msg) mach_id = salt.utils.files.get_machine_identifier() - log.debug( + dbg_msg = ( f"exiting for process id '{os.getpid()}' and machine identifer '{mach_id}'" ) + log.debug(dbg_msg) - log.warning( - f"DGM _handle_signals about to check HAS_PSUTIL, for process id '{os.getpid()}' and machine identifer '{mach_id}'" - ) + dbg_msg = f"DGM _handle_signals about to check HAS_PSUTIL, for process id '{os.getpid()}' and machine identifer '{mach_id}'" + log.warning(dbg_msg) if HAS_PSUTIL: try: @@ -1121,28 +1232,24 @@ class SignalHandlingProcess(Process): # example lockfile /var/cache/salt/master/gitfs/work/NlJQs6Pss_07AugikCrmqfmqEFrfPbCDBqGLBiCd3oU=/_/update.lk cache_dir = self.opts.get("cachedir", None) gitfs_active = self.opts.get("gitfs_remotes", None) - 
log.warning( - f"DGM _handle_signals HAS_PSUTIL, cache_dir '{cache_dir}', gitfs_active '{gitfs_active}'" - ) + dbg_msg = f"DGM _handle_signals HAS_PSUTIL, cache_dir '{cache_dir}', gitfs_active '{gitfs_active}'" + log.warning(dbg_msg) if cache_dir and gitfs_active: # check for gitfs file locks to ensure no resource leaks # last chance to clean up any missed unlock droppings cache_dir = Path(cache_dir + "/gitfs/work") - log.warning( - f"DGM _handle_signals HAS_PSUTIL,find for final cache_dir '{cache_dir}'" - ) + dbg_msg = f"DGM _handle_signals HAS_PSUTIL,find for final cache_dir '{cache_dir}'" + log.warning(dbg_msg) if cache_dir.exists and cache_dir.is_dir(): file_list = list(cache_dir.glob("**/*.lk")) - log.warning( - f"DGM _handle_signals HAS_PSUTIL,find for final cache_dir '{cache_dir}', produced glob file_list '{file_list}'" - ) + dbg_msg = f"DGM _handle_signals HAS_PSUTIL,find for final cache_dir '{cache_dir}', produced glob file_list '{file_list}'" + log.warning(dbg_msg) file_del_list = [] try: for file_name in file_list: - log.warning( - f"DGM _handle_signals HAS_PSUTIL, checking file name '{file_name}'" - ) + dbg_msg = f"DGM _handle_signals HAS_PSUTIL, checking file name '{file_name}'" + log.warning(dbg_msg) with salt.utils.files.fopen(file_name, "r") as fd_: try: file_pid = int( @@ -1162,9 +1269,8 @@ class SignalHandlingProcess(Process): except ValueError: # Lock file is empty, set mach_id to 0 so it evaluates as False. 
file_mach_id = 0 - log.warning( - f"DGM _handle_signals HAS_PSUTIL, cur_pid '{cur_pid}', mach_id '{mach_id}', file_pid '{file_pid}', file_mach_id '{file_mach_id}'" - ) + dbg_msg = f"DGM _handle_signals HAS_PSUTIL, cur_pid '{cur_pid}', mach_id '{mach_id}', file_pid '{file_pid}', file_mach_id '{file_mach_id}'" + log.warning(dbg_msg) if cur_pid == file_pid: if mach_id != file_mach_id: if not file_mach_id: @@ -1181,11 +1287,10 @@ class SignalHandlingProcess(Process): except FileNotFoundError: log.debug("gitfs lock file: %s not found", file_name) - for (file_name, file_pid, file_mach_id) in file_del_list: + for file_name, file_pid, file_mach_id in file_del_list: try: - log.warning( - f"DGM _handle_signals file_pid '{file_pid}', file_mach_id '{file_mach_id}', removing file name '{file_name}'" - ) + dbg_msg = f"DGM _handle_signals file_pid '{file_pid}', file_mach_id '{file_mach_id}', removing file name '{file_name}'" + log.warning(dbg_msg) os.remove(file_name) except OSError as exc: if exc.errno == errno.ENOENT: @@ -1229,6 +1334,8 @@ def default_signals(*signals): """ Temporarily restore signals to their default values. """ + dbg_msg = f"DGM default_signals entry, signals '{signals}'" + log.warning(dbg_msg) old_signals = {} for signum in signals: try: @@ -1238,6 +1345,7 @@ def default_signals(*signals): # This happens when a netapi module attempts to run a function # using wheel_async, because the process trying to register signals # will not be the main PID. 
+ log.warning("DGM Failed to register signal for signum %d: %s", signum, exc) log.trace("Failed to register signal for signum %d: %s", signum, exc) else: old_signals[signum] = saved_signal @@ -1247,6 +1355,8 @@ def default_signals(*signals): yield finally: # Restore signals + dbg_msg = f"DGM default_signals entry, restoring old signals '{old_signals}'" + log.warning(dbg_msg) for signum in old_signals: signal.signal(signum, old_signals[signum]) diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index 01405d394ee..03508908e0e 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -5,6 +5,7 @@ any remotes. import logging import pathlib +import signal import tempfile import time @@ -22,7 +23,14 @@ from salt.utils.immutabletypes import freeze from salt.utils.verify import verify_env from tests.support.runtests import RUNTIME_VARS +# import multiprocessing + + log = logging.getLogger(__name__) +## logger = multiprocessing.log_to_stderr() +## logger.setLevel(logging.INFO) +## ## log = logger.getLogger(__name__) +## log = logger.getLogger() @pytest.fixture(scope="session", autouse=True) @@ -319,21 +327,25 @@ class TestGitBase(AdaptedConfigurationTestCaseMixin): ) def init_remote(self): - log.debug(f"DGM MockedProvider init_remote tmp_name '{tmp_name}'") + dbg_msg = f"DGM MockedProvider init_remote tmp_name '{tmp_name}'" + log.debug(dbg_msg) self.gitdir = salt.utils.path.join(tmp_name, ".git") - log.debug(f"DGM MockedProvider init_remote gitdir '{self.gitdir}'") + dbg_msg = f"DGM MockedProvider init_remote gitdir '{self.gitdir}'" + log.debug(dbg_msg) self.repo = True new = False return new def envs(self): dgm_test_base = ["base"] - log.debug(f"DGM MockedProvider env base '{dgm_test_base}'") + dbg_msg = f"DGM MockedProvider env base '{dgm_test_base}'" + log.debug(dbg_msg) return ["base"] def _fetch(self): self.fetched = True - log.debug(f"DGM MockedProvider _fetch 
self.fetched '{self.fetched}'") + dbg_msg = f"DGM MockedProvider _fetch self.fetched '{self.fetched}'" + log.debug(dbg_msg) # Clear the instance map so that we make sure to create a new instance # for this test class. @@ -477,7 +489,7 @@ def process_kill_test(main_class): Check that lock is obtained and then it should be released by SIGTERM checks """ - log.debug("DGM process_kill_test entry") + log.debug("DGM process_kill_test entry pid, '{os.getpid()}'") provider = main_class.remotes[0] provider.lock() @@ -502,48 +514,71 @@ def test_git_provider_sigterm_cleanup(main_class, caplog): provider = main_class.remotes[0] - procmgr = salt.utils.process.ProcessManager(wait_for_kill=30) - proc = procmgr.add_process(process_kill_test, args=(main_class,), name="test_kill") + ## DGM find lock file location + ## provider.lock() + ## file_name = provider._get_lock_file("update") + ## log.debug(f"DGM test_git_provider_sigterm_cleanup lock file location, '{file_name}'") + + ## proc = multiprocessing.Process(target=process_kill_test) + ## procmgr = salt.utils.process.ProcessManager(wait_for_kill=30) + ## proc = procmgr.add_process(process_kill_test, args=(main_class,), name="test_kill") ## proc.start() - while not proc.is_alive(): - log.debug( - "DGM test_git_provider_sigterm_cleanup sleeping waiting for child process to become alive" + # Reset signals to default ones before adding processes to the process + # manager. 
We don't want the processes being started to inherit those + # signal handlers + with salt.utils.process.default_signals(signal.SIGINT, signal.SIGTERM): + procmgr = salt.utils.process.ProcessManager(wait_for_kill=5) + proc = procmgr.add_process( + process_kill_test, args=(main_class,), name="test_kill" ) + + # Install the SIGINT/SIGTERM handlers if not done so far + if signal.getsignal(signal.SIGINT) is signal.SIG_DFL: + # No custom signal handling was added, install our own + signal.signal(signal.SIGINT, procmgr._handle_signals) + + if signal.getsignal(signal.SIGTERM) is signal.SIG_DFL: + # No custom signal handling was added, install our own + signal.signal(signal.SIGTERM, procmgr._handle_signals) + + while not proc.is_alive(): + dbg_msg = "DGM test_git_provider_sigterm_cleanup sleeping waiting for child process to become alive" + log.debug(dbg_msg) time.sleep(1) # give some time for it to be started # child process should be alive - log.debug( - f"DGM test_git_provider_sigterm_cleanup child process is alive with pid '{proc.pid}'" - ) + dbg_msg = f"DGM test_git_provider_sigterm_cleanup child process is alive with pid '{proc.pid}'" + log.debug(dbg_msg) file_name = provider._get_lock_file("update") - log.debug( - f"DGM test_git_provider_sigterm_cleanup lock file location, '{file_name}'" - ) + dbg_msg = f"DGM test_git_provider_sigterm_cleanup lock file location, '{file_name}'" + log.debug(dbg_msg) assert pathlib.Path(file_name).exists() assert pathlib.Path(file_name).is_file() - log.debug( - f"DGM test_git_provider_sigterm_cleanup lock file location, '{file_name}', exists and is a file, send SIGTERM signal" - ) + dbg_msg = f"DGM test_git_provider_sigterm_cleanup lock file location, '{file_name}', exists and is a file, send SIGTERM signal" + log.debug(dbg_msg) - proc.terminate() # sends a SIGTERM + procmgr.terminate() # sends a SIGTERM time.sleep(1) # give some time for it to terminate log.debug("DGM test_git_provider_sigterm_cleanup lock , post terminate") assert 
not proc.is_alive() - log.debug("DGM test_git_provider_sigterm_cleanup lock , child is not alive") + + dbg_msg = "DGM test_git_provider_sigterm_cleanup lock , child is not alive" + log.debug(dbg_msg) test_file_exits = pathlib.Path(file_name).exists() - log.debug( - f"DGM test_git_provider_sigterm_cleanup lock file location, '{file_name}', does it exist anymore '{test_file_exits}'" - ) + + dbg_msg = f"DGM test_git_provider_sigterm_cleanup lock file location, '{file_name}', does it exist anymore '{test_file_exits}'" + log.debug(dbg_msg) + assert not pathlib.Path(file_name).exists() - log.debug( - f"DGM test_git_provider_sigterm_cleanup lock file location, '{file_name}', does NOT exist anymore" - ) + + dbg_msg = f"DGM test_git_provider_sigterm_cleanup lock file location, '{file_name}', does NOT exist anymore" + log.debug(dbg_msg) log.debug("DGM test_git_provider_sigterm_cleanup exit") From 2265376154d63ea8683af415b371b6cda4d12d5c Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Tue, 5 Mar 2024 11:59:13 -0700 Subject: [PATCH 022/160] Working test for SIGTERM of held lock --- salt/utils/gitfs.py | 52 ++++--- salt/utils/process.py | 155 +------------------ tests/pytests/unit/utils/test_gitfs_locks.py | 100 ++++-------- 3 files changed, 72 insertions(+), 235 deletions(-) diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index 84546c24b46..3f970fe7fc3 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -949,6 +949,7 @@ class GitProvider: os.write(fh_, salt.utils.stringutils.to_bytes(str(os.getpid()))) os.write(fh_, salt.utils.stringutils.to_bytes("\n")) os.write(fh_, salt.utils.stringutils.to_bytes(str(self.mach_id))) + os.write(fh_, salt.utils.stringutils.to_bytes("\n")) except OSError as exc: if exc.errno == errno.EEXIST: @@ -957,40 +958,57 @@ class GitProvider: pid = int( salt.utils.stringutils.to_unicode(fd_.readline()).rstrip() ) - mach_id = int( - salt.utils.stringutils.to_unicode(fd_.readline()).rstrip() - ) except 
ValueError: # Lock file is empty, set pid to 0 so it evaluates as # False. pid = 0 + try: + mach_id = salt.utils.stringutils.to_unicode( + fd_.readline() + ).rstrip() + except ValueError as exc: + # Lock file is empty, set machine id to 0 so it evaluates as + # False. mach_id = 0 + global_lock_key = self.role + "_global_lock" lock_file = self._get_lock_file(lock_type=lock_type) if self.opts[global_lock_key]: msg = ( f"{global_lock_key} is enabled and {lock_type} lockfile {lock_file} is present for " - f"{self.role} remote '{self.id}' on machine_id {self.mach_id}." + f"{self.role} remote '{self.id}' on machine_id {self.mach_id} with pid '{pid}'." ) if pid: msg += f" Process {pid} obtained the lock" if self.mach_id or mach_id: - msg += f" Process {pid} obtained the lock for machine_id {mach_id}, current machine_id {self.mach_id}" - else: - msg += f" Process {pid} obtained the lock" + msg += f" for machine_id {mach_id}, current machine_id {self.mach_id}" if not pid_exists(pid): - msg += ( - " but this process is not running. The " - "update may have been interrupted. If " - "using multi-master with shared gitfs " - "cache, the lock may have been obtained " - "by another master" - ) if self.mach_id != mach_id: - msg += f", with machine_id {mach_id}" + msg += ( + " but this process is not running. The " + "update may have been interrupted. If " + "using multi-master with shared gitfs " + "cache, the lock may have been obtained " + "by another master, with machine_id {mach_id}" + ) else: - msg += "." + msg += ( + " but this process is not running. The " + "update may have been interrupted. 
" + " Given this process is for the same machine " + " the lock will be reallocated to new process " + ) + log.warning(msg) + success, fail = self._clear_lock() + if success: + return self.__lock( + lock_type="update", failhard=failhard + ) + elif failhard: + raise + return + log.warning(msg) if failhard: raise @@ -1041,8 +1059,6 @@ class GitProvider: contextmanager here because the lock is meant to stay and not be automatically removed. """ - dbg_msg = f"DGM GitProvider lock entry, pid '{os.getpid()}'" - log.warning(dbg_msg) success = [] failed = [] try: diff --git a/salt/utils/process.py b/salt/utils/process.py index aed2633d454..b49a907ee93 100644 --- a/salt/utils/process.py +++ b/salt/utils/process.py @@ -514,8 +514,6 @@ class ProcessManager: This will deterimine if it is a Process class, otherwise it assumes it is a function """ - dbg_msg = f"DGM process add_process entry, tgt '{tgt}', args '{args}', kwargs '{kwargs}', name '{name}'" - log.warning(dbg_msg) if args is None: args = [] if kwargs is None: @@ -531,14 +529,8 @@ class ProcessManager: if isinstance(process, SignalHandlingProcess): with default_signals(signal.SIGINT, signal.SIGTERM): - log.warning( - "DGM process add_process with default_signals, process start" - ) process.start() else: - log.warning( - "DGM process add_process without default_signals, process start" - ) process.start() log.debug("Started '%s' with pid %s", process.name, process.pid) @@ -587,9 +579,6 @@ class ProcessManager: self._restart_processes = False def send_signal_to_processes(self, signal_): - dbg_msg = f"DGM process send_signal_to_processes signal '{signal_}'" - log.warning(dbg_msg) - if salt.utils.platform.is_windows() and signal_ in ( signal.SIGTERM, signal.SIGINT, @@ -608,14 +597,8 @@ class ProcessManager: for pid in self._process_map.copy(): try: - dbg_msg = f"DGM process send_signal_to_processes kill pid '{pid}', signal '{signal_}'" - log.warning(dbg_msg) os.kill(pid, signal_) - dbg_msg = f"DGM process 
sent_signal_to_processes os.kill pid '{pid}', signal '{signal_}'" - log.warning(dbg_msg) except OSError as exc: - dbg_msg = f"DGM process send_signal_to_processes OSError exc, '{exc}'" - log.warning(dbg_msg) if exc.errno not in (errno.ESRCH, errno.EACCES): # If it's not a "No such process" error, raise it raise @@ -677,7 +660,6 @@ class ProcessManager: """ Kill all of the children """ - log.warning("DGM process kill_children entry") if salt.utils.platform.is_windows(): if multiprocessing.current_process().name != "MainProcess": # Since the main process will kill subprocesses by tree, @@ -698,9 +680,6 @@ class ProcessManager: p_map["Process"].terminate() else: for pid, p_map in self._process_map.copy().items(): - dgm_p_map = p_map["Process"] - dgm_msg = f"DGM process kill_children Terminating pid '{pid}': '{dgm_p_map}', args '{args}'" - log.warning(dgm_msg) log.trace("Terminating pid %s: %s", pid, p_map["Process"]) if args: # escalate the signal to the process @@ -711,98 +690,6 @@ class ProcessManager: try: p_map["Process"].terminate() - # need to go through and clean up any resources left around like lock files if using gitfs - # example lockfile /var/cache/salt/master/gitfs/work/NlJQs6Pss_07AugikCrmqfmqEFrfPbCDBqGLBiCd3oU=/_/update.lk - mach_id = salt.utils.files.get_machine_identifier() - ## cache_dir = self.opts.get("cachedir", None) - ## gitfs_active = self.opts.get("gitfs_remotes", None) - cache_dir = "/tmp" - gitfs_active = True - terminate_pid = pid - dbg_msg = f"DGM process kill_children, cache_dir '{cache_dir}', gitfs_active '{gitfs_active}'" - log.warning(dbg_msg) - if cache_dir and gitfs_active: - # check for gitfs file locks to ensure no resource leaks - # last chance to clean up any missed unlock droppings - ## cache_dir = Path(cache_dir + "/gitfs/work") - cache_dir = Path(cache_dir) - dbg_msg = f"DGM process kill_children ,find for final cache_dir '{cache_dir}'" - log.warning(dbg_msg) - if cache_dir.exists and cache_dir.is_dir(): - file_list = 
list(cache_dir.glob("**/*.lk")) - dbg_msg = f"DGM process kill_children ,find for final cache_dir '{cache_dir}', produced glob file_list '{file_list}'" - log.warning(dbg_msg) - file_del_list = [] - - try: - for file_name in file_list: - dbg_msg = f"DGM process kill_children , checking file name '{file_name}'" - log.warning(dbg_msg) - with salt.utils.files.fopen(file_name, "r") as fd_: - try: - file_pid = int( - salt.utils.stringutils.to_unicode( - fd_.readline() - ).rstrip() - ) - except ValueError: - # Lock file is empty, set pid to 0 so it evaluates as False. - file_pid = 0 - try: - file_mach_id = int( - salt.utils.stringutils.to_unicode( - fd_.readline() - ).rstrip() - ) - except ValueError: - # Lock file is empty, set mach_id to 0 so it evaluates as False. - file_mach_id = 0 - - dbg_msg = f"DGM process kill_children , terminate_pid '{terminate_pid}', mach_id '{mach_id}', file_pid '{file_pid}', file_mach_id '{file_mach_id}'" - log.warning(dbg_msg) - if terminate_pid == file_pid: - if mach_id != file_mach_id: - if not file_mach_id: - msg = f"gitfs lock file for pid '{file_pid}' does not contain a machine id, deleting lock file which may affect if using multi-master with shared gitfs cache, the lock may have been obtained by another master recommend updating Salt version on other masters to a version which insert machine identification in lock a file." 
- log.debug(msg) - file_del_list.append( - (file_name, file_pid, file_mach_id) - ) - else: - file_del_list.append( - (file_name, file_pid, file_mach_id) - ) - - except FileNotFoundError: - log.debug("gitfs lock file: %s not found", file_name) - - for file_name, file_pid, file_mach_id in file_del_list: - try: - dbg_msg = f"DGM process kill_children file_pid '{file_pid}', file_mach_id '{file_mach_id}', removing file name '{file_name}'" - log.warning(dbg_msg) - os.remove(file_name) - except OSError as exc: - if exc.errno == errno.ENOENT: - # No lock file present - msg = f"SIGTERM clean up of resources attempted to remove lock file {file_name}, pid '{file_pid}', machine identifier '{mach_id}' but it did not exist, exception : {exc} " - log.debug(msg) - - elif exc.errno == errno.EISDIR: - # Somehow this path is a directory. Should never happen - # unless some wiseguy manually creates a directory at this - # path, but just in case, handle it. - try: - shutil.rmtree(file_name) - except OSError as exc: - msg = f"SIGTERM clean up of resources, lock file '{file_name}', pid '{file_pid}', machine identifier '{file_mach_id}' was a directory, removed directory, exception : '{exc}'" - log.debug(msg) - else: - msg = f"SIGTERM clean up of resources, unable to remove lock file '{file_name}', pid '{file_pid}', machine identifier '{file_mach_id}', exception : '{exc}'" - log.debug(msg) - else: - msg = f"SIGTERM clean up of resources, removed lock file '{file_name}', pid '{file_pid}', machine identifier '{file_mach_id}'" - log.debug(msg) - except OSError as exc: if exc.errno not in (errno.ESRCH, errno.EACCES): raise @@ -890,18 +777,11 @@ class ProcessManager: """ Properly terminate this process manager instance """ - log.warning("DGM process terminate entry") self.stop_restarting() - log.warning("DGM process terminate send signal SIGTERM") self.send_signal_to_processes(signal.SIGTERM) - log.warning("DGM process terminate kill children") self.kill_children() - log.warning("DGM process 
terminate exit") def _handle_signals(self, *args, **kwargs): - dbg_msg = f"DGM process _handle_signals args '{args}', kwargs '{kwargs}'" - log.warning(dbg_msg) - # first lets reset signal handlers to default one to prevent running this twice signal.signal(signal.SIGTERM, signal.SIG_IGN) signal.signal(signal.SIGINT, signal.SIG_IGN) @@ -911,8 +791,6 @@ class ProcessManager: # check that this is the correct process, children inherit this # handler, if we are in a child lets just run the original handler - dbg_msg = f"DGM process _handle_signals os.getpid '{os.getpid()}', self pid '{self._pid}'" - log.warning(dbg_msg) if os.getpid() != self._pid: if callable(self._sigterm_handler): return self._sigterm_handler(*args) @@ -921,8 +799,6 @@ class ProcessManager: else: return - dbg_msg = f"DGM process _handle_signals call self.kill_children, args '{args}', kwargs '{kwargs}'" - log.warning(dbg_msg) # Terminate child processes self.kill_children(*args, **kwargs) @@ -1189,9 +1065,6 @@ class SignalHandlingProcess(Process): signal.signal(signal.SIGTERM, self._handle_signals) def _handle_signals(self, signum, sigframe): - dbg_msg = f"DGM SignalHandlingProcess _handle_signals, signum '{signum}', sigframe '{sigframe}'" - log.warning(dbg_msg) - signal.signal(signal.SIGTERM, signal.SIG_IGN) signal.signal(signal.SIGINT, signal.SIG_IGN) msg = f"{self.__class__.__name__} received a " @@ -1208,12 +1081,9 @@ class SignalHandlingProcess(Process): ) log.debug(dbg_msg) - dbg_msg = f"DGM _handle_signals about to check HAS_PSUTIL, for process id '{os.getpid()}' and machine identifer '{mach_id}'" - log.warning(dbg_msg) - + cur_pid = os.getpid() if HAS_PSUTIL: try: - cur_pid = os.getpid() process = psutil.Process(cur_pid) if hasattr(process, "children"): for child in process.children(recursive=True): @@ -1232,24 +1102,17 @@ class SignalHandlingProcess(Process): # example lockfile /var/cache/salt/master/gitfs/work/NlJQs6Pss_07AugikCrmqfmqEFrfPbCDBqGLBiCd3oU=/_/update.lk cache_dir = 
self.opts.get("cachedir", None) gitfs_active = self.opts.get("gitfs_remotes", None) - dbg_msg = f"DGM _handle_signals HAS_PSUTIL, cache_dir '{cache_dir}', gitfs_active '{gitfs_active}'" - log.warning(dbg_msg) if cache_dir and gitfs_active: # check for gitfs file locks to ensure no resource leaks # last chance to clean up any missed unlock droppings cache_dir = Path(cache_dir + "/gitfs/work") - dbg_msg = f"DGM _handle_signals HAS_PSUTIL,find for final cache_dir '{cache_dir}'" - log.warning(dbg_msg) if cache_dir.exists and cache_dir.is_dir(): file_list = list(cache_dir.glob("**/*.lk")) - dbg_msg = f"DGM _handle_signals HAS_PSUTIL,find for final cache_dir '{cache_dir}', produced glob file_list '{file_list}'" - log.warning(dbg_msg) file_del_list = [] - + file_pid = 0 + file_mach_id = 0 try: for file_name in file_list: - dbg_msg = f"DGM _handle_signals HAS_PSUTIL, checking file name '{file_name}'" - log.warning(dbg_msg) with salt.utils.files.fopen(file_name, "r") as fd_: try: file_pid = int( @@ -1261,7 +1124,7 @@ class SignalHandlingProcess(Process): # Lock file is empty, set pid to 0 so it evaluates as False. file_pid = 0 try: - file_mach_id = int( + file_mach_id = ( salt.utils.stringutils.to_unicode( fd_.readline() ).rstrip() @@ -1269,8 +1132,7 @@ class SignalHandlingProcess(Process): except ValueError: # Lock file is empty, set mach_id to 0 so it evaluates as False. 
file_mach_id = 0 - dbg_msg = f"DGM _handle_signals HAS_PSUTIL, cur_pid '{cur_pid}', mach_id '{mach_id}', file_pid '{file_pid}', file_mach_id '{file_mach_id}'" - log.warning(dbg_msg) + if cur_pid == file_pid: if mach_id != file_mach_id: if not file_mach_id: @@ -1289,8 +1151,6 @@ class SignalHandlingProcess(Process): for file_name, file_pid, file_mach_id in file_del_list: try: - dbg_msg = f"DGM _handle_signals file_pid '{file_pid}', file_mach_id '{file_mach_id}', removing file name '{file_name}'" - log.warning(dbg_msg) os.remove(file_name) except OSError as exc: if exc.errno == errno.ENOENT: @@ -1334,8 +1194,6 @@ def default_signals(*signals): """ Temporarily restore signals to their default values. """ - dbg_msg = f"DGM default_signals entry, signals '{signals}'" - log.warning(dbg_msg) old_signals = {} for signum in signals: try: @@ -1345,7 +1203,6 @@ def default_signals(*signals): # This happens when a netapi module attempts to run a function # using wheel_async, because the process trying to register signals # will not be the main PID. - log.warning("DGM Failed to register signal for signum %d: %s", signum, exc) log.trace("Failed to register signal for signum %d: %s", signum, exc) else: old_signals[signum] = saved_signal @@ -1355,8 +1212,6 @@ def default_signals(*signals): yield finally: # Restore signals - dbg_msg = f"DGM default_signals entry, restoring old signals '{old_signals}'" - log.warning(dbg_msg) for signum in old_signals: signal.signal(signum, old_signals[signum]) diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index 03508908e0e..1cf23d84ef7 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -4,6 +4,7 @@ any remotes. 
""" import logging +import os import pathlib import signal import tempfile @@ -23,14 +24,7 @@ from salt.utils.immutabletypes import freeze from salt.utils.verify import verify_env from tests.support.runtests import RUNTIME_VARS -# import multiprocessing - - log = logging.getLogger(__name__) -## logger = multiprocessing.log_to_stderr() -## logger.setLevel(logging.INFO) -## ## log = logger.getLogger(__name__) -## log = logger.getLogger() @pytest.fixture(scope="session", autouse=True) @@ -327,25 +321,16 @@ class TestGitBase(AdaptedConfigurationTestCaseMixin): ) def init_remote(self): - dbg_msg = f"DGM MockedProvider init_remote tmp_name '{tmp_name}'" - log.debug(dbg_msg) self.gitdir = salt.utils.path.join(tmp_name, ".git") - dbg_msg = f"DGM MockedProvider init_remote gitdir '{self.gitdir}'" - log.debug(dbg_msg) self.repo = True new = False return new def envs(self): - dgm_test_base = ["base"] - dbg_msg = f"DGM MockedProvider env base '{dgm_test_base}'" - log.debug(dbg_msg) return ["base"] def _fetch(self): self.fetched = True - dbg_msg = f"DGM MockedProvider _fetch self.fetched '{self.fetched}'" - log.debug(dbg_msg) # Clear the instance map so that we make sure to create a new instance # for this test class. 
@@ -482,25 +467,37 @@ def test_git_provider_mp_gen_lock(main_class, caplog): assert test_msg3 in caplog.text -def process_kill_test(main_class): +class KillProcessTest(salt.utils.process.SignalHandlingProcess): """ - Process to obtain a lock and hold it, - which will then be given a SIGTERM to ensure clean up of resources for the lock - - Check that lock is obtained and then it should be released by SIGTERM checks + Test process for which to kill and check lock resources are cleaned up """ - log.debug("DGM process_kill_test entry pid, '{os.getpid()}'") - provider = main_class.remotes[0] - provider.lock() - log.debug("DGM process_kill_test obtained lock") + def __init__(self, provider, **kwargs): + super().__init__(**kwargs) + self.provider = provider + self.opts = provider.opts + self.threads = {} - # check that lock has been released - assert provider._master_lock.acquire(timeout=5) - log.debug("DGM process_kill_test tested assert masterlock acquire") + def run(self): + """ + Start the test process to kill + """ + log.debug("DGM kill_test_process entry pid %s", os.getpid()) - time.sleep(20) # give time for kill by sigterm - log.debug("DGM process_kill_test exit") + ## provider = main_class.remotes[0] + self.provider.lock() + + log.debug("DGM kill_test_process obtained lock") + + # check that lock has been released + assert self.provider._master_lock.acquire(timeout=5) + log.debug("DGM kill_test_process tested assert masterlock acquire") + + while True: + tsleep = 1 + time.sleep(tsleep) # give time for kill by sigterm + + log.debug("DGM kill_test_process exit") @pytest.mark.slow_test @@ -514,43 +511,18 @@ def test_git_provider_sigterm_cleanup(main_class, caplog): provider = main_class.remotes[0] - ## DGM find lock file location - ## provider.lock() - ## file_name = provider._get_lock_file("update") - ## log.debug(f"DGM test_git_provider_sigterm_cleanup lock file location, '{file_name}'") - - ## proc = multiprocessing.Process(target=process_kill_test) - ## procmgr = 
salt.utils.process.ProcessManager(wait_for_kill=30) - ## proc = procmgr.add_process(process_kill_test, args=(main_class,), name="test_kill") - ## proc.start() - - # Reset signals to default ones before adding processes to the process - # manager. We don't want the processes being started to inherit those - # signal handlers + log.debug("DGM test_git_provider_sigterm_cleanup, get procmgn and add process") with salt.utils.process.default_signals(signal.SIGINT, signal.SIGTERM): - procmgr = salt.utils.process.ProcessManager(wait_for_kill=5) - proc = procmgr.add_process( - process_kill_test, args=(main_class,), name="test_kill" - ) - - # Install the SIGINT/SIGTERM handlers if not done so far - if signal.getsignal(signal.SIGINT) is signal.SIG_DFL: - # No custom signal handling was added, install our own - signal.signal(signal.SIGINT, procmgr._handle_signals) - - if signal.getsignal(signal.SIGTERM) is signal.SIG_DFL: - # No custom signal handling was added, install our own - signal.signal(signal.SIGTERM, procmgr._handle_signals) + procmgr = salt.utils.process.ProcessManager(wait_for_kill=1) + proc = procmgr.add_process(KillProcessTest, args=(provider,), name="test_kill") + log.debug("DGM test_git_provider_sigterm_cleanup, check if process is alive") while not proc.is_alive(): - dbg_msg = "DGM test_git_provider_sigterm_cleanup sleeping waiting for child process to become alive" - log.debug(dbg_msg) time.sleep(1) # give some time for it to be started - # child process should be alive - dbg_msg = f"DGM test_git_provider_sigterm_cleanup child process is alive with pid '{proc.pid}'" - log.debug(dbg_msg) + procmgr.run() + # child process should be alive file_name = provider._get_lock_file("update") dbg_msg = f"DGM test_git_provider_sigterm_cleanup lock file location, '{file_name}'" log.debug(dbg_msg) @@ -572,13 +544,7 @@ def test_git_provider_sigterm_cleanup(main_class, caplog): log.debug(dbg_msg) test_file_exits = pathlib.Path(file_name).exists() - dbg_msg = f"DGM 
test_git_provider_sigterm_cleanup lock file location, '{file_name}', does it exist anymore '{test_file_exits}'" log.debug(dbg_msg) assert not pathlib.Path(file_name).exists() - - dbg_msg = f"DGM test_git_provider_sigterm_cleanup lock file location, '{file_name}', does NOT exist anymore" - log.debug(dbg_msg) - - log.debug("DGM test_git_provider_sigterm_cleanup exit") From c982cbdb3eb73b4723c26d0f1bd01a267c6c6573 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Thu, 7 Mar 2024 15:52:10 -0700 Subject: [PATCH 023/160] Updated SIGTERM test, and additional tests, and cleanup --- salt/utils/gitfs.py | 4 +- tests/pytests/unit/utils/test_gitfs_locks.py | 405 +++++++++++-------- 2 files changed, 241 insertions(+), 168 deletions(-) diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index 3f970fe7fc3..defea96d426 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -990,13 +990,13 @@ class GitProvider: "update may have been interrupted. If " "using multi-master with shared gitfs " "cache, the lock may have been obtained " - "by another master, with machine_id {mach_id}" + f"by another master, with machine_id {mach_id}" ) else: msg += ( " but this process is not running. The " "update may have been interrupted. 
" - " Given this process is for the same machine " + " Given this process is for the same machine" " the lock will be reallocated to new process " ) log.warning(msg) diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index 1cf23d84ef7..b5eb1fe40a7 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -22,57 +22,51 @@ import salt.utils.platform import salt.utils.process from salt.utils.immutabletypes import freeze from salt.utils.verify import verify_env -from tests.support.runtests import RUNTIME_VARS + +try: + import pwd +except ImportError: + import salt.utils.win_functions log = logging.getLogger(__name__) -@pytest.fixture(scope="session", autouse=True) -def _create_old_tempdir(): - pathlib.Path(RUNTIME_VARS.TMP).mkdir(exist_ok=True, parents=True) +def _get_user(): + """ + Get the user associated with the current process. + """ + if salt.utils.platform.is_windows(): + return salt.utils.win_functions.get_current_user(with_domain=False) + return pwd.getpwuid(os.getuid())[0] -@pytest.fixture(scope="session", autouse=True) -def bridge_pytest_and_runtests( - reap_stray_processes, - salt_factories, - salt_syndic_master_factory, - salt_syndic_factory, - salt_master_factory, - salt_minion_factory, - salt_sub_minion_factory, - sshd_config_dir, -): - # Make sure unittest2 uses the pytest generated configuration - RUNTIME_VARS.RUNTIME_CONFIGS["master"] = freeze(salt_master_factory.config) - RUNTIME_VARS.RUNTIME_CONFIGS["minion"] = freeze(salt_minion_factory.config) - RUNTIME_VARS.RUNTIME_CONFIGS["sub_minion"] = freeze(salt_sub_minion_factory.config) - RUNTIME_VARS.RUNTIME_CONFIGS["syndic_master"] = freeze( - salt_syndic_master_factory.config - ) - RUNTIME_VARS.RUNTIME_CONFIGS["syndic"] = freeze(salt_syndic_factory.config) - RUNTIME_VARS.RUNTIME_CONFIGS["client_config"] = freeze( - salt.config.client_config(salt_master_factory.config["conf_file"]) 
+@pytest.fixture(scope="module", autouse=True) +def _factory_root_dir(salt_factories): + return salt_factories.root_dir.resolve() + + +@pytest.fixture(scope="module", autouse=True) +def _salt_master_factory_config_parent(salt_master_factory): + return pathlib.PurePath(salt_master_factory.config["conf_file"]).parent + + +@pytest.fixture(scope="module", autouse=True) +def _salt_master_factory_config_path(salt_master_factory): + return pathlib.PurePath(salt_master_factory.config["conf_file"]).parent.joinpath( + "master" ) - # Make sure unittest2 classes know their paths - RUNTIME_VARS.TMP_ROOT_DIR = str(salt_factories.root_dir.resolve()) - RUNTIME_VARS.TMP_CONF_DIR = pathlib.PurePath( - salt_master_factory.config["conf_file"] - ).parent - RUNTIME_VARS.TMP_MINION_CONF_DIR = pathlib.PurePath( - salt_minion_factory.config["conf_file"] - ).parent - RUNTIME_VARS.TMP_SUB_MINION_CONF_DIR = pathlib.PurePath( - salt_sub_minion_factory.config["conf_file"] - ).parent - RUNTIME_VARS.TMP_SYNDIC_MASTER_CONF_DIR = pathlib.PurePath( - salt_syndic_master_factory.config["conf_file"] - ).parent - RUNTIME_VARS.TMP_SYNDIC_MINION_CONF_DIR = pathlib.PurePath( - salt_syndic_factory.config["conf_file"] - ).parent - RUNTIME_VARS.TMP_SSH_CONF_DIR = str(sshd_config_dir) + +@pytest.fixture(scope="module", autouse=True) +def _salt_minion_factory_config_path(salt_minion_factory): + return pathlib.PurePath(salt_minion_factory.config["conf_file"]).parent.joinpath( + "minion" + ) + + +@pytest.fixture(scope="module", autouse=True) +def _create_old_tempdir(_factory_root_dir): + return pathlib.Path(str(_factory_root_dir)).mkdir(exist_ok=True, parents=True) ## @pytest.fixture @@ -95,31 +89,32 @@ def _clear_instance_map(): class AdaptedConfigurationTestCaseMixin: - ## __slots__ = () - @staticmethod - def get_temp_config(config_for, **config_overrides): + def get_temp_config(config_for, _factory_root_dir, **config_overrides): - rootdir = config_overrides.get("root_dir", RUNTIME_VARS.TMP) + rootdir = 
config_overrides.get("root_dir", str(_factory_root_dir)) if not pathlib.Path(rootdir).exists(): - pathlib.Path(RUNTIME_VARS.TMP).mkdir(exist_ok=True, parents=True) + pathlib.Path(str(_factory_root_dir)).mkdir(exist_ok=True, parents=True) - rootdir = config_overrides.get("root_dir", RUNTIME_VARS.TMP) + rootdir = config_overrides.get("root_dir", str(_factory_root_dir)) conf_dir = config_overrides.pop( - "conf_dir", pathlib.PurePath(rootdir).joinpath("conf") + "conf_dir", str(pathlib.PurePath(rootdir).joinpath("conf")) ) + + curr_user = _get_user() for key in ("cachedir", "pki_dir", "sock_dir"): if key not in config_overrides: config_overrides[key] = key if "log_file" not in config_overrides: config_overrides["log_file"] = f"logs/{config_for}.log".format() if "user" not in config_overrides: - config_overrides["user"] = RUNTIME_VARS.RUNNING_TESTS_USER + config_overrides["user"] = curr_user config_overrides["root_dir"] = rootdir cdict = AdaptedConfigurationTestCaseMixin.get_config( - config_for, from_scratch=True + config_for, + from_scratch=True, ) if config_for in ("master", "client_config"): @@ -153,7 +148,7 @@ class AdaptedConfigurationTestCaseMixin: rdict["sock_dir"], conf_dir, ], - RUNTIME_VARS.RUNNING_TESTS_USER, + curr_user, root_dir=rdict["root_dir"], ) @@ -163,66 +158,42 @@ class AdaptedConfigurationTestCaseMixin: return rdict @staticmethod - def get_config(config_for, from_scratch=False): + def get_config( + config_for, + from_scratch=False, + ): if from_scratch: - if config_for in ("master", "syndic_master", "mm_master", "mm_sub_master"): - return salt.config.master_config( - AdaptedConfigurationTestCaseMixin.get_config_file_path(config_for) + if config_for in ("master"): + return salt.config.master_config(str(_salt_master_factory_config_path)) + elif config_for in ("minion"): + return salt.config.minion_config(str(_salt_minion_factory_config_path)) + elif config_for == "client_config": + return 
salt.config_client_config(str(_salt_master_factory_config_path)) + if config_for not in ("master", "minion", "client_config"): + if config_for in ("master"): + return freeze( + salt.config.master_config(str(_salt_master_factory_config_path)) ) - elif config_for in ("minion", "sub_minion"): - return salt.config.minion_config( - AdaptedConfigurationTestCaseMixin.get_config_file_path(config_for), - cache_minion_id=False, - ) - elif config_for in ("syndic",): - return salt.config.syndic_config( - AdaptedConfigurationTestCaseMixin.get_config_file_path(config_for), - AdaptedConfigurationTestCaseMixin.get_config_file_path("minion"), + elif config_for in ("minion"): + return freeze( + salt.config.minion_config(str(_salt_minion_factory_config_path)) ) elif config_for == "client_config": - return salt.config.client_config( - AdaptedConfigurationTestCaseMixin.get_config_file_path("master") + return freeze( + salt.config.client_config(str(_salt_master_factory_config_path)) ) - if config_for not in RUNTIME_VARS.RUNTIME_CONFIGS: - if config_for in ("master", "syndic_master", "mm_master", "mm_sub_master"): - RUNTIME_VARS.RUNTIME_CONFIGS[config_for] = freeze( - salt.config.master_config( - AdaptedConfigurationTestCaseMixin.get_config_file_path( - config_for - ) - ) - ) - elif config_for in ("minion", "sub_minion"): - RUNTIME_VARS.RUNTIME_CONFIGS[config_for] = freeze( - salt.config.minion_config( - AdaptedConfigurationTestCaseMixin.get_config_file_path( - config_for - ) - ) - ) - elif config_for in ("syndic",): - RUNTIME_VARS.RUNTIME_CONFIGS[config_for] = freeze( - salt.config.syndic_config( - AdaptedConfigurationTestCaseMixin.get_config_file_path( - config_for - ), - AdaptedConfigurationTestCaseMixin.get_config_file_path( - "minion" - ), - ) - ) - elif config_for == "client_config": - RUNTIME_VARS.RUNTIME_CONFIGS[config_for] = freeze( - salt.config.client_config( - AdaptedConfigurationTestCaseMixin.get_config_file_path("master") - ) - ) - return 
RUNTIME_VARS.RUNTIME_CONFIGS[config_for] + log.error( + "Should not reach this section of code for get_config, missing support for input config_for %s", + config_for, + ) + + # at least return master's config + return freeze(salt.config.master_config(str(_salt_master_factory_config_path))) @property def config_dir(self): - return RUNTIME_VARS.TMP_CONF_DIR + return str(_salt_master_factory_config_parent) def get_config_dir(self): log.warning("Use the config_dir attribute instead of calling get_config_dir()") @@ -231,34 +202,12 @@ class AdaptedConfigurationTestCaseMixin: @staticmethod def get_config_file_path(filename): if filename == "master": - return pathlib.PurePath(RUNTIME_VARS.TMP_CONF_DIR).joinpath(filename) + return str(_salt_master_factory_config_path) + if filename == "minion": - return pathlib.PurePath(RUNTIME_VARS.TMP_MINION_CONF_DIR).joinpath(filename) - if filename == "syndic_master": - return pathlib.PurePath(RUNTIME_VARS.TMP_SYNDIC_MASTER_CONF_DIR).joinpath( - "master" - ) - if filename == "syndic": - return pathlib.PurePath(RUNTIME_VARS.TMP_SYNDIC_MINION_CONF_DIR).joinpath( - "minion" - ) - if filename == "sub_minion": - return pathlib.PurePath(RUNTIME_VARS.TMP_SUB_MINION_CONF_DIR).joinpath( - "minion" - ) - if filename == "mm_master": - return pathlib.PurePath(RUNTIME_VARS.TMP_MM_CONF_DIR).joinpath("master") - if filename == "mm_sub_master": - return pathlib.PurePath(RUNTIME_VARS.TMP_MM_SUB_CONF_DIR).joinpath("master") - if filename == "mm_minion": - return pathlib.PurePath(RUNTIME_VARS.TMP_MM_MINION_CONF_DIR).joinpath( - "minion" - ) - if filename == "mm_sub_minion": - return pathlib.PurePath(RUNTIME_VARS.TMP_MM_SUB_MINION_CONF_DIR).joinpath( - "minion" - ) - return pathlib.PurePath(RUNTIME_VARS.TMP_CONF_DIR).joinpath(filename) + return str(_salt_minion_factory_config_path) + + return str(_salt_master_factory_config_path) @property def master_opts(self): @@ -342,7 +291,10 @@ class TestGitBase(AdaptedConfigurationTestCaseMixin): gitfs_remotes = 
["file://repo1.git", {"file://repo2.git": [{"name": "repo2"}]}] self.opts = self.get_temp_config( - "master", gitfs_remotes=gitfs_remotes, verified_gitfs_provider="mocked" + "master", + _factory_root_dir, + gitfs_remotes=gitfs_remotes, + verified_gitfs_provider="mocked", ) self.main_class = salt.utils.gitfs.GitFS( self.opts, @@ -352,6 +304,7 @@ class TestGitBase(AdaptedConfigurationTestCaseMixin): git_providers=git_providers, ) + # DGM TBD do we need this, look at removing def tearDown(self): # Providers are preserved with GitFS's instance_map for remote in self.main_class.remotes: @@ -394,9 +347,10 @@ def test_get_cachedir_basename(main_class): assert main_class.remotes[1].get_cache_basename() == "_" -def test_git_provider_mp_lock(main_class): +def test_git_provider_mp_lock_and_clear_lock(main_class): """ Check that lock is released after provider.lock() + and that lock is released after provider.clear_lock() """ provider = main_class.remotes[0] provider.lock() @@ -404,12 +358,6 @@ def test_git_provider_mp_lock(main_class): assert provider._master_lock.acquire(timeout=5) provider._master_lock.release() - -def test_git_provider_mp_clear_lock(main_class): - """ - Check that lock is released after provider.clear_lock() - """ - provider = main_class.remotes[0] provider.clear_lock() # check that lock has been released assert provider._master_lock.acquire(timeout=5) @@ -454,19 +402,168 @@ def test_git_provider_mp_gen_lock(main_class, caplog): """ Check that gen_lock is obtains lock, and then releases, provider.lock() """ - test_msg1 = "Set update lock for gitfs remote 'file://repo1.git' on machine_id" + # DGM try getting machine_identifier + # get machine_identifier + mach_id = salt.utils.files.get_machine_identifier() + cur_pid = os.getpid() + + test_msg1 = ( + f"Set update lock for gitfs remote 'file://repo1.git' on machine_id '{mach_id}'" + ) test_msg2 = "Attempting to remove 'update' lock for 'gitfs' remote 'file://repo1.git' due to lock_set1 'True' or lock_set2" - 
test_msg3 = "Removed update lock for gitfs remote 'file://repo1.git' on machine_id" + test_msg3 = f"Removed update lock for gitfs remote 'file://repo1.git' on machine_id '{mach_id}'" provider = main_class.remotes[0] + + # loop seeing if the test can be made to mess up a lock/unlock sequence + max_count = 10000 + count = 0 + while count < max_count: + count = count + 1 + caplog.clear() + with caplog.at_level(logging.DEBUG): + provider.fetch() + + assert test_msg1 in caplog.text + assert test_msg2 in caplog.text + assert test_msg3 in caplog.text + + +@pytest.mark.slow_test +@pytest.mark.timeout_unless_on_windows(120) +def test_git_provider_mp_lock_dead_pid(main_class, caplog): + """ + Check that lock obtains lock, if previous pid in lock file doesn't exist for same machine id + """ + # DGM try getting machine_identifier + # get machine_identifier + mach_id = salt.utils.files.get_machine_identifier() + cur_pid = os.getpid() + + test_msg1 = ( + f"Set update lock for gitfs remote 'file://repo1.git' on machine_id '{mach_id}'" + ) + test_msg3 = f"Removed update lock for gitfs remote 'file://repo1.git' on machine_id '{mach_id}'" + + provider = main_class.remotes[0] + provider.lock() + # check that lock has been released + assert provider._master_lock.acquire(timeout=5) + + # get lock file and manipulate it for a dead pid + file_name = provider._get_lock_file("update") + dead_pid = 1234 # give it non-existant pid + test_msg2 = ( + f"gitfs_global_lock is enabled and update lockfile {file_name} " + "is present for gitfs remote 'file://repo1.git' on machine_id " + f"{mach_id} with pid '{dead_pid}'. Process {dead_pid} obtained " + f"the lock for machine_id {mach_id}, current machine_id {mach_id} " + "but this process is not running. The update may have been " + "interrupted. 
Given this process is for the same machine the " + "lock will be reallocated to new process" + ) + + # remove existing lock file and write fake lock file with bad pid + assert pathlib.Path(file_name).is_file() + pathlib.Path(file_name).unlink() + + try: + # write lock file similar to salt/utils/gitfs.py + fh_ = os.open(file_name, os.O_CREAT | os.O_EXCL | os.O_WRONLY) + with os.fdopen(fh_, "wb"): + # Write the lock file and close the filehandle + os.write(fh_, salt.utils.stringutils.to_bytes(str(dead_pid))) + os.write(fh_, salt.utils.stringutils.to_bytes("\n")) + os.write(fh_, salt.utils.stringutils.to_bytes(str(mach_id))) + os.write(fh_, salt.utils.stringutils.to_bytes("\n")) + + except OSError as exc: + log.error( + "Failed to write fake dead pid lock file %s, exception %s", file_name, exc + ) + + provider._master_lock.release() + with caplog.at_level(logging.DEBUG): - provider.fetch() + provider.lock() + # check that lock has been released + assert provider._master_lock.acquire(timeout=5) + provider._master_lock.release() + + provider.clear_lock() + # check that lock has been released + assert provider._master_lock.acquire(timeout=5) + provider._master_lock.release() assert test_msg1 in caplog.text assert test_msg2 in caplog.text assert test_msg3 in caplog.text +@pytest.mark.slow_test +@pytest.mark.timeout_unless_on_windows(120) +def test_git_provider_mp_lock_bad_machine(main_class, caplog): + """ + Check that lock obtains lock, if previous pid in lock file doesn't exist for same machine id + """ + # DGM try getting machine_identifier + # get machine_identifier + mach_id = salt.utils.files.get_machine_identifier() + cur_pid = os.getpid() + + provider = main_class.remotes[0] + provider.lock() + # check that lock has been released + assert provider._master_lock.acquire(timeout=5) + + # get lock file and manipulate it for a dead pid + file_name = provider._get_lock_file("update") + bad_mach_id = "abcedf0123456789" # give it non-existant pid + + test_msg1 = ( + 
f"gitfs_global_lock is enabled and update lockfile {file_name} " + "is present for gitfs remote 'file://repo1.git' on machine_id " + f"{mach_id} with pid '{cur_pid}'. Process {cur_pid} obtained " + f"the lock for machine_id {bad_mach_id}, current machine_id {mach_id}" + ) + test_msg2 = f"Removed update lock for gitfs remote 'file://repo1.git' on machine_id '{mach_id}'" + + # remove existing lock file and write fake lock file with bad pid + assert pathlib.Path(file_name).is_file() + pathlib.Path(file_name).unlink() + + try: + # write lock file similar to salt/utils/gitfs.py + fh_ = os.open(file_name, os.O_CREAT | os.O_EXCL | os.O_WRONLY) + with os.fdopen(fh_, "wb"): + # Write the lock file and close the filehandle + os.write(fh_, salt.utils.stringutils.to_bytes(str(cur_pid))) + os.write(fh_, salt.utils.stringutils.to_bytes("\n")) + os.write(fh_, salt.utils.stringutils.to_bytes(str(bad_mach_id))) + os.write(fh_, salt.utils.stringutils.to_bytes("\n")) + + except OSError as exc: + log.error( + "Failed to write fake dead pid lock file %s, exception %s", file_name, exc + ) + + provider._master_lock.release() + + with caplog.at_level(logging.DEBUG): + provider.lock() + # check that lock has been released + assert provider._master_lock.acquire(timeout=5) + provider._master_lock.release() + + provider.clear_lock() + # check that lock has been released + assert provider._master_lock.acquire(timeout=5) + provider._master_lock.release() + + assert test_msg1 in caplog.text + assert test_msg2 in caplog.text + + class KillProcessTest(salt.utils.process.SignalHandlingProcess): """ Test process for which to kill and check lock resources are cleaned up @@ -482,16 +579,10 @@ class KillProcessTest(salt.utils.process.SignalHandlingProcess): """ Start the test process to kill """ - log.debug("DGM kill_test_process entry pid %s", os.getpid()) - - ## provider = main_class.remotes[0] self.provider.lock() - log.debug("DGM kill_test_process obtained lock") - # check that lock has been 
released assert self.provider._master_lock.acquire(timeout=5) - log.debug("DGM kill_test_process tested assert masterlock acquire") while True: tsleep = 1 @@ -507,16 +598,12 @@ def test_git_provider_sigterm_cleanup(main_class, caplog): Start process which will obtain lock, and leave it locked then kill the process via SIGTERM and ensure locked resources are cleaned up """ - log.debug("DGM test_git_provider_sigterm_cleanup entry") - provider = main_class.remotes[0] - log.debug("DGM test_git_provider_sigterm_cleanup, get procmgn and add process") with salt.utils.process.default_signals(signal.SIGINT, signal.SIGTERM): procmgr = salt.utils.process.ProcessManager(wait_for_kill=1) proc = procmgr.add_process(KillProcessTest, args=(provider,), name="test_kill") - log.debug("DGM test_git_provider_sigterm_cleanup, check if process is alive") while not proc.is_alive(): time.sleep(1) # give some time for it to be started @@ -524,27 +611,13 @@ def test_git_provider_sigterm_cleanup(main_class, caplog): # child process should be alive file_name = provider._get_lock_file("update") - dbg_msg = f"DGM test_git_provider_sigterm_cleanup lock file location, '{file_name}'" - log.debug(dbg_msg) assert pathlib.Path(file_name).exists() assert pathlib.Path(file_name).is_file() - dbg_msg = f"DGM test_git_provider_sigterm_cleanup lock file location, '{file_name}', exists and is a file, send SIGTERM signal" - log.debug(dbg_msg) - procmgr.terminate() # sends a SIGTERM time.sleep(1) # give some time for it to terminate - log.debug("DGM test_git_provider_sigterm_cleanup lock , post terminate") assert not proc.is_alive() - - dbg_msg = "DGM test_git_provider_sigterm_cleanup lock , child is not alive" - log.debug(dbg_msg) - - test_file_exits = pathlib.Path(file_name).exists() - dbg_msg = f"DGM test_git_provider_sigterm_cleanup lock file location, '{file_name}', does it exist anymore '{test_file_exits}'" - log.debug(dbg_msg) - assert not pathlib.Path(file_name).exists() From 
695353ee91989956f9006ae3e24e2094739285aa Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Thu, 7 Mar 2024 16:24:54 -0700 Subject: [PATCH 024/160] Updated to use grains/core.py get_machine_id for machine identifier --- salt/utils/files.py | 22 ++++++++++---------- salt/utils/gitfs.py | 4 +++- salt/utils/process.py | 4 +++- tests/pytests/unit/utils/test_gitfs_locks.py | 10 ++++++--- 4 files changed, 24 insertions(+), 16 deletions(-) diff --git a/salt/utils/files.py b/salt/utils/files.py index 5f89e6aba98..d735013da88 100644 --- a/salt/utils/files.py +++ b/salt/utils/files.py @@ -907,14 +907,14 @@ def get_encoding(path): ## TBD DGM just parking here till final machine identifier work is done -def get_machine_identifier(): - """ - Provide the machine-identifier for machine/virtualization combination - """ - locations = ["/etc/machine-id", "/var/lib/dbus/machine-id"] - existing_locations = [loc for loc in locations if os.path.exists(loc)] - if not existing_locations: - return "" - else: - with fopen(existing_locations[0]) as machineid: - return machineid.read().strip() +## def get_machine_identifier(): +## """ +## Provide the machine-identifier for machine/virtualization combination +## """ +## locations = ["/etc/machine-id", "/var/lib/dbus/machine-id"] +## existing_locations = [loc for loc in locations if os.path.exists(loc)] +## if not existing_locations: +## return "" +## else: +## with fopen(existing_locations[0]) as machineid: +## return machineid.read().strip() diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index defea96d426..4fef2925dfb 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -39,6 +39,7 @@ import salt.utils.versions from salt.config import DEFAULT_HASH_TYPE from salt.config import DEFAULT_MASTER_OPTS as _DEFAULT_MASTER_OPTS from salt.exceptions import FileserverConfigError, GitLockError, get_error_message +from salt.grains.core import get_machine_id from salt.utils.event import tagify from salt.utils.odict 
import OrderedDict from salt.utils.process import os_is_running as pid_exists @@ -250,7 +251,8 @@ class GitProvider: # DGM try getting machine_identifier # get machine_identifier - self.mach_id = salt.utils.files.get_machine_identifier() + ## self.mach_id = salt.utils.files.get_machine_identifier() + self.mach_id = get_machine_id().get("machine_id", "no_machine_id_available") log.debug("DGM machine_id for lock file, machine_id %s", self.mach_id) self.global_saltenv = salt.utils.data.repack_dictlist( diff --git a/salt/utils/process.py b/salt/utils/process.py index b49a907ee93..80c3a65cae9 100644 --- a/salt/utils/process.py +++ b/salt/utils/process.py @@ -30,6 +30,7 @@ import salt.utils.path import salt.utils.platform import salt.utils.versions from salt.ext.tornado import gen +from salt.grains.core import get_machine_id log = logging.getLogger(__name__) @@ -1075,7 +1076,8 @@ class SignalHandlingProcess(Process): msg += ". Exiting" log.debug(msg) - mach_id = salt.utils.files.get_machine_identifier() + ## mach_id = salt.utils.files.get_machine_identifier() + mach_id = get_machine_id().get("machine_id", "no_machine_id_available") dbg_msg = ( f"exiting for process id '{os.getpid()}' and machine identifer '{mach_id}'" ) diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index b5eb1fe40a7..4cb5539e57f 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -20,6 +20,7 @@ import salt.utils.gitfs import salt.utils.path import salt.utils.platform import salt.utils.process +from salt.grains.core import get_machine_id from salt.utils.immutabletypes import freeze from salt.utils.verify import verify_env @@ -404,7 +405,8 @@ def test_git_provider_mp_gen_lock(main_class, caplog): """ # DGM try getting machine_identifier # get machine_identifier - mach_id = salt.utils.files.get_machine_identifier() + ## mach_id = salt.utils.files.get_machine_identifier() + mach_id = 
get_machine_id().get("machine_id", "no_machine_id_available") cur_pid = os.getpid() test_msg1 = ( @@ -437,7 +439,8 @@ def test_git_provider_mp_lock_dead_pid(main_class, caplog): """ # DGM try getting machine_identifier # get machine_identifier - mach_id = salt.utils.files.get_machine_identifier() + ## mach_id = salt.utils.files.get_machine_identifier() + mach_id = get_machine_id().get("machine_id", "no_machine_id_available") cur_pid = os.getpid() test_msg1 = ( @@ -508,7 +511,8 @@ def test_git_provider_mp_lock_bad_machine(main_class, caplog): """ # DGM try getting machine_identifier # get machine_identifier - mach_id = salt.utils.files.get_machine_identifier() + ## mach_id = salt.utils.files.get_machine_identifier() + mach_id = get_machine_id().get("machine_id", "no_machine_id_available") cur_pid = os.getpid() provider = main_class.remotes[0] From 21cf213b107345214c04e55a929c8702ec3c51d1 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Tue, 12 Mar 2024 13:30:44 -0600 Subject: [PATCH 025/160] Cleaned up tests and removal of debug statements --- salt/utils/gitfs.py | 3 - salt/utils/process.py | 6 +- tests/pytests/unit/utils/test_gitfs_locks.py | 166 ++++++++----------- 3 files changed, 71 insertions(+), 104 deletions(-) diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index 4fef2925dfb..8949ada51ef 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -249,11 +249,8 @@ class GitProvider: def _val_cb(x, y): return str(y) - # DGM try getting machine_identifier # get machine_identifier - ## self.mach_id = salt.utils.files.get_machine_identifier() self.mach_id = get_machine_id().get("machine_id", "no_machine_id_available") - log.debug("DGM machine_id for lock file, machine_id %s", self.mach_id) self.global_saltenv = salt.utils.data.repack_dictlist( self.opts.get(f"{self.role}_saltenv", []), diff --git a/salt/utils/process.py b/salt/utils/process.py index 80c3a65cae9..16e6df514e8 100644 --- a/salt/utils/process.py +++ 
b/salt/utils/process.py @@ -1076,12 +1076,10 @@ class SignalHandlingProcess(Process): msg += ". Exiting" log.debug(msg) - ## mach_id = salt.utils.files.get_machine_identifier() mach_id = get_machine_id().get("machine_id", "no_machine_id_available") - dbg_msg = ( - f"exiting for process id '{os.getpid()}' and machine identifer '{mach_id}'" + log.debug( + "exiting for process id %s and machine identifer %s", os.getpid(), mach_id ) - log.debug(dbg_msg) cur_pid = os.getpid() if HAS_PSUTIL: diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index 4cb5539e57f..5040117402f 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -7,7 +7,6 @@ import logging import os import pathlib import signal -import tempfile import time import pytest @@ -41,44 +40,6 @@ def _get_user(): return pwd.getpwuid(os.getuid())[0] -@pytest.fixture(scope="module", autouse=True) -def _factory_root_dir(salt_factories): - return salt_factories.root_dir.resolve() - - -@pytest.fixture(scope="module", autouse=True) -def _salt_master_factory_config_parent(salt_master_factory): - return pathlib.PurePath(salt_master_factory.config["conf_file"]).parent - - -@pytest.fixture(scope="module", autouse=True) -def _salt_master_factory_config_path(salt_master_factory): - return pathlib.PurePath(salt_master_factory.config["conf_file"]).parent.joinpath( - "master" - ) - - -@pytest.fixture(scope="module", autouse=True) -def _salt_minion_factory_config_path(salt_minion_factory): - return pathlib.PurePath(salt_minion_factory.config["conf_file"]).parent.joinpath( - "minion" - ) - - -@pytest.fixture(scope="module", autouse=True) -def _create_old_tempdir(_factory_root_dir): - return pathlib.Path(str(_factory_root_dir)).mkdir(exist_ok=True, parents=True) - - -## @pytest.fixture -## def get_tmp_dir(tmp_path): -## dirpath = tmp_path / "git_test" -## dirpath.mkdir(parents=True) -## return dirpath -## -## ## 
dirpath.cleanup() - - def _clear_instance_map(): try: del salt.utils.gitfs.GitFS.instance_map[ @@ -90,30 +51,41 @@ def _clear_instance_map(): class AdaptedConfigurationTestCaseMixin: - @staticmethod - def get_temp_config(config_for, _factory_root_dir, **config_overrides): + def __init__( + self, + salt_factories, + salt_master_factory, + salt_minion_factory, + tmp_path, + ): + self._tmp_name = str(tmp_path) - rootdir = config_overrides.get("root_dir", str(_factory_root_dir)) + self._master_cfg = str(salt_master_factory.config["conf_file"]) + self._minion_cfg = str(salt_minion_factory.config["conf_file"]) + self._root_dir = str(salt_factories.root_dir.resolve()) + self._user = _get_user() + + def get_temp_config(self, config_for, **config_overrides): + + rootdir = config_overrides.get("root_dir", self._root_dir) if not pathlib.Path(rootdir).exists(): - pathlib.Path(str(_factory_root_dir)).mkdir(exist_ok=True, parents=True) + pathlib.Path(rootdir).mkdir(exist_ok=True, parents=True) - rootdir = config_overrides.get("root_dir", str(_factory_root_dir)) conf_dir = config_overrides.pop( "conf_dir", str(pathlib.PurePath(rootdir).joinpath("conf")) ) - curr_user = _get_user() for key in ("cachedir", "pki_dir", "sock_dir"): if key not in config_overrides: config_overrides[key] = key if "log_file" not in config_overrides: config_overrides["log_file"] = f"logs/{config_for}.log".format() if "user" not in config_overrides: - config_overrides["user"] = curr_user + config_overrides["user"] = self._user config_overrides["root_dir"] = rootdir - cdict = AdaptedConfigurationTestCaseMixin.get_config( + cdict = self.get_config( config_for, from_scratch=True, ) @@ -149,7 +121,7 @@ class AdaptedConfigurationTestCaseMixin: rdict["sock_dir"], conf_dir, ], - curr_user, + self._user, root_dir=rdict["root_dir"], ) @@ -158,31 +130,25 @@ class AdaptedConfigurationTestCaseMixin: salt.utils.yaml.safe_dump(rdict, wfh, default_flow_style=False) return rdict - @staticmethod def get_config( + self, 
config_for, from_scratch=False, ): if from_scratch: if config_for in ("master"): - return salt.config.master_config(str(_salt_master_factory_config_path)) + return salt.config.master_config(self._master_cfg) elif config_for in ("minion"): - return salt.config.minion_config(str(_salt_minion_factory_config_path)) + return salt.config.minion_config(self._minion_cfg) elif config_for == "client_config": - return salt.config_client_config(str(_salt_master_factory_config_path)) + return salt.config_client_config(self._master_cfg) if config_for not in ("master", "minion", "client_config"): if config_for in ("master"): - return freeze( - salt.config.master_config(str(_salt_master_factory_config_path)) - ) + return freeze(salt.config.master_config(self._master_cfg)) elif config_for in ("minion"): - return freeze( - salt.config.minion_config(str(_salt_minion_factory_config_path)) - ) + return freeze(salt.config.minion_config(self._minion_cfg)) elif config_for == "client_config": - return freeze( - salt.config.client_config(str(_salt_master_factory_config_path)) - ) + return freeze(salt.config.client_config(self._master_cfg)) log.error( "Should not reach this section of code for get_config, missing support for input config_for %s", @@ -190,25 +156,24 @@ class AdaptedConfigurationTestCaseMixin: ) # at least return master's config - return freeze(salt.config.master_config(str(_salt_master_factory_config_path))) + return freeze(salt.config.master_config(self._master_cfg)) @property def config_dir(self): - return str(_salt_master_factory_config_parent) + return str(pathlib.PurePath(self._master_cfg).parent) def get_config_dir(self): log.warning("Use the config_dir attribute instead of calling get_config_dir()") return self.config_dir - @staticmethod - def get_config_file_path(filename): + def get_config_file_path(self, filename): if filename == "master": - return str(_salt_master_factory_config_path) + return str(self._master_cfg) if filename == "minion": - return 
str(_salt_minion_factory_config_path) + return str(self._minion_cfg) - return str(_salt_master_factory_config_path) + return str(self._master_cfg) @property def master_opts(self): @@ -224,26 +189,28 @@ class AdaptedConfigurationTestCaseMixin: """ return self.get_config("minion") - @property - def sub_minion_opts(self): - """ - Return the options used for the sub_minion - """ - return self.get_config("sub_minion") - -class TestGitBase(AdaptedConfigurationTestCaseMixin): +class MyGitBase(AdaptedConfigurationTestCaseMixin): """ mocked GitFS provider leveraging tmp_path """ def __init__( self, + salt_factories, + salt_master_factory, + salt_minion_factory, + tmp_path, ): - ## self._tmp_dir = pathlib.Path(tmp_path / "git_test").mkdir(exist_ok=True, parents=True) - ## tmp_name = str(self._tmp_dir) - self._tmp_dir = tempfile.TemporaryDirectory() - tmp_name = self._tmp_dir.name + super().__init__( + salt_factories, + salt_master_factory, + salt_minion_factory, + tmp_path, + ) + + tmp_name = self._tmp_name.join("/git_test") + pathlib.Path(tmp_name).mkdir(exist_ok=True, parents=True) class MockedProvider( salt.utils.gitfs.GitProvider @@ -293,7 +260,6 @@ class TestGitBase(AdaptedConfigurationTestCaseMixin): self.opts = self.get_temp_config( "master", - _factory_root_dir, gitfs_remotes=gitfs_remotes, verified_gitfs_provider="mocked", ) @@ -305,21 +271,29 @@ class TestGitBase(AdaptedConfigurationTestCaseMixin): git_providers=git_providers, ) - # DGM TBD do we need this, look at removing - def tearDown(self): + def cleanup(self): # Providers are preserved with GitFS's instance_map for remote in self.main_class.remotes: remote.fetched = False del self.main_class - ## self._tmp_dir.cleanup() @pytest.fixture -def main_class(tmp_path): - test_git_base = TestGitBase() - yield test_git_base.main_class +def main_class( + salt_factories, + salt_master_factory, + salt_minion_factory, + tmp_path, +): + my_git_base = MyGitBase( + salt_factories, + salt_master_factory, + 
salt_minion_factory, + tmp_path, + ) + yield my_git_base.main_class - test_git_base.tearDown() + my_git_base.cleanup() def test_update_all(main_class): @@ -403,9 +377,7 @@ def test_git_provider_mp_gen_lock(main_class, caplog): """ Check that gen_lock is obtains lock, and then releases, provider.lock() """ - # DGM try getting machine_identifier # get machine_identifier - ## mach_id = salt.utils.files.get_machine_identifier() mach_id = get_machine_id().get("machine_id", "no_machine_id_available") cur_pid = os.getpid() @@ -430,6 +402,8 @@ def test_git_provider_mp_gen_lock(main_class, caplog): assert test_msg2 in caplog.text assert test_msg3 in caplog.text + caplog.clear() + @pytest.mark.slow_test @pytest.mark.timeout_unless_on_windows(120) @@ -437,9 +411,7 @@ def test_git_provider_mp_lock_dead_pid(main_class, caplog): """ Check that lock obtains lock, if previous pid in lock file doesn't exist for same machine id """ - # DGM try getting machine_identifier # get machine_identifier - ## mach_id = salt.utils.files.get_machine_identifier() mach_id = get_machine_id().get("machine_id", "no_machine_id_available") cur_pid = os.getpid() @@ -487,6 +459,7 @@ def test_git_provider_mp_lock_dead_pid(main_class, caplog): provider._master_lock.release() + caplog.clear() with caplog.at_level(logging.DEBUG): provider.lock() # check that lock has been released @@ -501,6 +474,7 @@ def test_git_provider_mp_lock_dead_pid(main_class, caplog): assert test_msg1 in caplog.text assert test_msg2 in caplog.text assert test_msg3 in caplog.text + caplog.clear() @pytest.mark.slow_test @@ -509,9 +483,7 @@ def test_git_provider_mp_lock_bad_machine(main_class, caplog): """ Check that lock obtains lock, if previous pid in lock file doesn't exist for same machine id """ - # DGM try getting machine_identifier # get machine_identifier - ## mach_id = salt.utils.files.get_machine_identifier() mach_id = get_machine_id().get("machine_id", "no_machine_id_available") cur_pid = os.getpid() @@ -553,6 +525,7 @@ def 
test_git_provider_mp_lock_bad_machine(main_class, caplog): provider._master_lock.release() + caplog.clear() with caplog.at_level(logging.DEBUG): provider.lock() # check that lock has been released @@ -566,6 +539,7 @@ def test_git_provider_mp_lock_bad_machine(main_class, caplog): assert test_msg1 in caplog.text assert test_msg2 in caplog.text + caplog.clear() class KillProcessTest(salt.utils.process.SignalHandlingProcess): @@ -592,12 +566,10 @@ class KillProcessTest(salt.utils.process.SignalHandlingProcess): tsleep = 1 time.sleep(tsleep) # give time for kill by sigterm - log.debug("DGM kill_test_process exit") - @pytest.mark.slow_test @pytest.mark.skip_unless_on_linux -def test_git_provider_sigterm_cleanup(main_class, caplog): +def test_git_provider_sigterm_cleanup(main_class): """ Start process which will obtain lock, and leave it locked then kill the process via SIGTERM and ensure locked resources are cleaned up From 37164221e5910144b2fe827b9644401e4f05096f Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Tue, 12 Mar 2024 16:32:36 -0600 Subject: [PATCH 026/160] Removed commented out debug code --- salt/utils/files.py | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/salt/utils/files.py b/salt/utils/files.py index d735013da88..e5494911c28 100644 --- a/salt/utils/files.py +++ b/salt/utils/files.py @@ -904,17 +904,3 @@ def get_encoding(path): return "ASCII" raise CommandExecutionError("Could not detect file encoding") - - -## TBD DGM just parking here till final machine identifier work is done -## def get_machine_identifier(): -## """ -## Provide the machine-identifier for machine/virtualization combination -## """ -## locations = ["/etc/machine-id", "/var/lib/dbus/machine-id"] -## existing_locations = [loc for loc in locations if os.path.exists(loc)] -## if not existing_locations: -## return "" -## else: -## with fopen(existing_locations[0]) as machineid: -## return machineid.read().strip() From 
f98b7073cd73f9ee5203734815ad4a2584c8a235 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Tue, 19 Mar 2024 18:07:22 -0600 Subject: [PATCH 027/160] Moved location for finding machine_id to salt utils files, and had grains import from there --- salt/grains/core.py | 8 +------- salt/utils/files.py | 15 +++++++++++++++ salt/utils/process.py | 5 +++-- tests/pytests/unit/utils/test_gitfs_locks.py | 13 +++++++++---- 4 files changed, 28 insertions(+), 13 deletions(-) diff --git a/salt/grains/core.py b/salt/grains/core.py index 17a1d683eef..9a9963c0ba9 100644 --- a/salt/grains/core.py +++ b/salt/grains/core.py @@ -3048,13 +3048,7 @@ def get_machine_id(): if platform.system() == "AIX": return _aix_get_machine_id() - locations = ["/etc/machine-id", "/var/lib/dbus/machine-id"] - existing_locations = [loc for loc in locations if os.path.exists(loc)] - if not existing_locations: - return {} - else: - with salt.utils.files.fopen(existing_locations[0]) as machineid: - return {"machine_id": machineid.read().strip()} + return salt.utils.files.local_get_machine_id() def cwd(): diff --git a/salt/utils/files.py b/salt/utils/files.py index e5494911c28..742b4ab065e 100644 --- a/salt/utils/files.py +++ b/salt/utils/files.py @@ -904,3 +904,18 @@ def get_encoding(path): return "ASCII" raise CommandExecutionError("Could not detect file encoding") + + +def local_get_machine_id(): + """ + Provide the machine-id for machine/virtualization combination + """ + # Provides: + # machine-id + locations = ["/etc/machine-id", "/var/lib/dbus/machine-id"] + existing_locations = [loc for loc in locations if os.path.exists(loc)] + if not existing_locations: + return {} + else: + with salt.utils.files.fopen(existing_locations[0]) as machineid: + return {"machine_id": machineid.read().strip()} diff --git a/salt/utils/process.py b/salt/utils/process.py index 16e6df514e8..0f4ec7b2468 100644 --- a/salt/utils/process.py +++ b/salt/utils/process.py @@ -30,7 +30,6 @@ import 
salt.utils.path import salt.utils.platform import salt.utils.versions from salt.ext.tornado import gen -from salt.grains.core import get_machine_id log = logging.getLogger(__name__) @@ -1076,7 +1075,9 @@ class SignalHandlingProcess(Process): msg += ". Exiting" log.debug(msg) - mach_id = get_machine_id().get("machine_id", "no_machine_id_available") + mach_id = salt.utils.files.local_get_machine_id().get( + "machine_id", "no_machine_id_available" + ) log.debug( "exiting for process id %s and machine identifer %s", os.getpid(), mach_id ) diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index 5040117402f..642d0dafc2d 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -19,7 +19,6 @@ import salt.utils.gitfs import salt.utils.path import salt.utils.platform import salt.utils.process -from salt.grains.core import get_machine_id from salt.utils.immutabletypes import freeze from salt.utils.verify import verify_env @@ -378,7 +377,9 @@ def test_git_provider_mp_gen_lock(main_class, caplog): Check that gen_lock is obtains lock, and then releases, provider.lock() """ # get machine_identifier - mach_id = get_machine_id().get("machine_id", "no_machine_id_available") + mach_id = salt.utils.files.local_get_machine_id().get( + "machine_id", "no_machine_id_available" + ) cur_pid = os.getpid() test_msg1 = ( @@ -412,7 +413,9 @@ def test_git_provider_mp_lock_dead_pid(main_class, caplog): Check that lock obtains lock, if previous pid in lock file doesn't exist for same machine id """ # get machine_identifier - mach_id = get_machine_id().get("machine_id", "no_machine_id_available") + mach_id = salt.utils.files.local_get_machine_id().get( + "machine_id", "no_machine_id_available" + ) cur_pid = os.getpid() test_msg1 = ( @@ -484,7 +487,9 @@ def test_git_provider_mp_lock_bad_machine(main_class, caplog): Check that lock obtains lock, if previous pid in lock file doesn't exist for 
same machine id """ # get machine_identifier - mach_id = get_machine_id().get("machine_id", "no_machine_id_available") + mach_id = salt.utils.files.local_get_machine_id().get( + "machine_id", "no_machine_id_available" + ) cur_pid = os.getpid() provider = main_class.remotes[0] From 0de790cd28d106a14a2a8d2e0566f9cf04018ed7 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Wed, 20 Mar 2024 11:39:14 -0600 Subject: [PATCH 028/160] Ensured use of get_machine_identifier from salt.utils.files for identifying a machine id --- salt/grains/core.py | 2 +- salt/utils/files.py | 2 +- salt/utils/process.py | 2 +- tests/pytests/unit/utils/test_gitfs_locks.py | 6 +++--- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/salt/grains/core.py b/salt/grains/core.py index 9a9963c0ba9..f51c36d9d2d 100644 --- a/salt/grains/core.py +++ b/salt/grains/core.py @@ -3048,7 +3048,7 @@ def get_machine_id(): if platform.system() == "AIX": return _aix_get_machine_id() - return salt.utils.files.local_get_machine_id() + return salt.utils.files.get_machine_identifier() def cwd(): diff --git a/salt/utils/files.py b/salt/utils/files.py index 742b4ab065e..3111dd2aec2 100644 --- a/salt/utils/files.py +++ b/salt/utils/files.py @@ -906,7 +906,7 @@ def get_encoding(path): raise CommandExecutionError("Could not detect file encoding") -def local_get_machine_id(): +def get_machine_identifier(): """ Provide the machine-id for machine/virtualization combination """ diff --git a/salt/utils/process.py b/salt/utils/process.py index 0f4ec7b2468..fbf6b4f9a59 100644 --- a/salt/utils/process.py +++ b/salt/utils/process.py @@ -1075,7 +1075,7 @@ class SignalHandlingProcess(Process): msg += ". 
Exiting" log.debug(msg) - mach_id = salt.utils.files.local_get_machine_id().get( + mach_id = salt.utils.files.get_machine_identifier().get( "machine_id", "no_machine_id_available" ) log.debug( diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index 642d0dafc2d..ceefd7da2e9 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -377,7 +377,7 @@ def test_git_provider_mp_gen_lock(main_class, caplog): Check that gen_lock is obtains lock, and then releases, provider.lock() """ # get machine_identifier - mach_id = salt.utils.files.local_get_machine_id().get( + mach_id = salt.utils.files.get_machine_identifier().get( "machine_id", "no_machine_id_available" ) cur_pid = os.getpid() @@ -413,7 +413,7 @@ def test_git_provider_mp_lock_dead_pid(main_class, caplog): Check that lock obtains lock, if previous pid in lock file doesn't exist for same machine id """ # get machine_identifier - mach_id = salt.utils.files.local_get_machine_id().get( + mach_id = salt.utils.files.get_machine_identifier().get( "machine_id", "no_machine_id_available" ) cur_pid = os.getpid() @@ -487,7 +487,7 @@ def test_git_provider_mp_lock_bad_machine(main_class, caplog): Check that lock obtains lock, if previous pid in lock file doesn't exist for same machine id """ # get machine_identifier - mach_id = salt.utils.files.local_get_machine_id().get( + mach_id = salt.utils.files.get_machine_identifier().get( "machine_id", "no_machine_id_available" ) cur_pid = os.getpid() From 8fb0127f995ba3600a57cf073dfa15aeec3e1284 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Wed, 20 Mar 2024 17:24:46 -0600 Subject: [PATCH 029/160] Allow for machine_id key when getting the machine_identifier --- tests/pytests/functional/utils/gitfs/test_gitfs.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/pytests/functional/utils/gitfs/test_gitfs.py 
b/tests/pytests/functional/utils/gitfs/test_gitfs.py index abe050fa811..a1bc7574822 100644 --- a/tests/pytests/functional/utils/gitfs/test_gitfs.py +++ b/tests/pytests/functional/utils/gitfs/test_gitfs.py @@ -249,7 +249,9 @@ def _test_lock(opts): g.fetch_remotes() assert len(g.remotes) == 1 repo = g.remotes[0] - mach_id = salt.utils.files.get_machine_identifier() + mach_id = salt.utils.files.get_machine_identifier().get( + "machine_id", "no_machine_id_available" + ) assert repo.get_salt_working_dir() in repo._get_lock_file() assert repo.lock() == ( [ From 6a2fb70d59ee969cbe3e133d7bbd3967cba5ea65 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Thu, 21 Mar 2024 16:27:22 -0600 Subject: [PATCH 030/160] Updated tests to release lock with try/except/finally --- tests/pytests/unit/utils/test_gitfs_locks.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index ceefd7da2e9..58e61998aa6 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -460,7 +460,8 @@ def test_git_provider_mp_lock_dead_pid(main_class, caplog): "Failed to write fake dead pid lock file %s, exception %s", file_name, exc ) - provider._master_lock.release() + finally: + provider._master_lock.release() caplog.clear() with caplog.at_level(logging.DEBUG): @@ -528,7 +529,8 @@ def test_git_provider_mp_lock_bad_machine(main_class, caplog): "Failed to write fake dead pid lock file %s, exception %s", file_name, exc ) - provider._master_lock.release() + finally: + provider._master_lock.release() caplog.clear() with caplog.at_level(logging.DEBUG): @@ -563,6 +565,8 @@ class KillProcessTest(salt.utils.process.SignalHandlingProcess): Start the test process to kill """ self.provider.lock() + lockfile = self.provider._get_lock_file() + log.debug("KillProcessTest acquried lock file %s", lockfile) # check that lock has been 
released assert self.provider._master_lock.acquire(timeout=5) @@ -574,6 +578,7 @@ class KillProcessTest(salt.utils.process.SignalHandlingProcess): @pytest.mark.slow_test @pytest.mark.skip_unless_on_linux +@pytest.mark.timeout_unless_on_windows(120) def test_git_provider_sigterm_cleanup(main_class): """ Start process which will obtain lock, and leave it locked From 152811fcc62decd3207391fc8209b9efe9a0afcb Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Thu, 21 Mar 2024 16:46:49 -0600 Subject: [PATCH 031/160] Altered procmgr.run to run asynchronously --- tests/pytests/unit/utils/test_gitfs_locks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index 58e61998aa6..1dcc9d8dea6 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -593,7 +593,7 @@ def test_git_provider_sigterm_cleanup(main_class): while not proc.is_alive(): time.sleep(1) # give some time for it to be started - procmgr.run() + procmgr.run(asynchronous=True) # child process should be alive file_name = provider._get_lock_file("update") From b2c2c5a9cd4a1345ae5ab398bff58f096dacf7e5 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Mon, 25 Mar 2024 10:51:29 -0600 Subject: [PATCH 032/160] Moved code under with caplog set to DEBUG to ensure all tests correct --- tests/pytests/unit/utils/test_gitfs_locks.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index 1dcc9d8dea6..266c406f0bd 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -470,10 +470,10 @@ def test_git_provider_mp_lock_dead_pid(main_class, caplog): assert provider._master_lock.acquire(timeout=5) provider._master_lock.release() - 
provider.clear_lock() - # check that lock has been released - assert provider._master_lock.acquire(timeout=5) - provider._master_lock.release() + provider.clear_lock() + # check that lock has been released + assert provider._master_lock.acquire(timeout=5) + provider._master_lock.release() assert test_msg1 in caplog.text assert test_msg2 in caplog.text @@ -539,10 +539,10 @@ def test_git_provider_mp_lock_bad_machine(main_class, caplog): assert provider._master_lock.acquire(timeout=5) provider._master_lock.release() - provider.clear_lock() - # check that lock has been released - assert provider._master_lock.acquire(timeout=5) - provider._master_lock.release() + provider.clear_lock() + # check that lock has been released + assert provider._master_lock.acquire(timeout=5) + provider._master_lock.release() assert test_msg1 in caplog.text assert test_msg2 in caplog.text From 20854d63661117a99fd9b296cc34fddfbd49a696 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Mon, 25 Mar 2024 14:21:37 -0600 Subject: [PATCH 033/160] Allow some time for processes to get started after processmanager.run() command --- tests/pytests/unit/utils/test_gitfs_locks.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index 266c406f0bd..0e2f7241a60 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -595,6 +595,8 @@ def test_git_provider_sigterm_cleanup(main_class): procmgr.run(asynchronous=True) + time.sleep(2) # give some time for it to terminate + # child process should be alive file_name = provider._get_lock_file("update") From 9d5975f64e309ac60c7f17cbd42732eed1956b6f Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Wed, 27 Mar 2024 13:16:01 -0600 Subject: [PATCH 034/160] Ensure SIGTERM tests etc only run on Linux --- tests/pytests/unit/utils/test_gitfs_locks.py | 2 ++ 1 file changed, 2 
insertions(+) diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index 0e2f7241a60..68f4d039c82 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -27,6 +27,8 @@ try: except ImportError: import salt.utils.win_functions +pytestmark = [pytest.mark.skip_unless_on_linux] + log = logging.getLogger(__name__) From 17fb9f6c312e278d38d92e13bcaf12958b693b80 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Thu, 28 Mar 2024 09:24:27 -0600 Subject: [PATCH 035/160] Reworded error message for better English --- salt/utils/gitfs.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index 8949ada51ef..6385b87d3a1 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -755,7 +755,7 @@ class GitProvider: except OSError as exc: if exc.errno == errno.ENOENT: # No lock file present - msg = f"Attempt to remove lock {self.url} for file ({lock_file}) which was not found to exist, exception : {exc} " + msg = f"Attempt to remove lock {self.url} for file ({lock_file}) which does not exist, exception : {exc} " log.debug(msg) elif exc.errno == errno.EISDIR: From 4a838b3adea2150e11d7c956227a81939d06e810 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Thu, 28 Mar 2024 10:19:44 -0600 Subject: [PATCH 036/160] Changes due to reviewer comments --- salt/utils/gitfs.py | 19 ++++++++++++------- salt/utils/process.py | 9 ++++++++- 2 files changed, 20 insertions(+), 8 deletions(-) diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index 6385b87d3a1..66e2496e6cf 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -39,7 +39,6 @@ import salt.utils.versions from salt.config import DEFAULT_HASH_TYPE from salt.config import DEFAULT_MASTER_OPTS as _DEFAULT_MASTER_OPTS from salt.exceptions import FileserverConfigError, GitLockError, get_error_message -from 
salt.grains.core import get_machine_id from salt.utils.event import tagify from salt.utils.odict import OrderedDict from salt.utils.process import os_is_running as pid_exists @@ -250,7 +249,9 @@ class GitProvider: return str(y) # get machine_identifier - self.mach_id = get_machine_id().get("machine_id", "no_machine_id_available") + self.mach_id = salt.utils.files.get_machine_identifier().get( + "machine_id", "no_machine_id_available" + ) self.global_saltenv = salt.utils.data.repack_dictlist( self.opts.get(f"{self.role}_saltenv", []), @@ -945,10 +946,10 @@ class GitProvider: ) with os.fdopen(fh_, "wb"): # Write the lock file and close the filehandle - os.write(fh_, salt.utils.stringutils.to_bytes(str(os.getpid()))) - os.write(fh_, salt.utils.stringutils.to_bytes("\n")) - os.write(fh_, salt.utils.stringutils.to_bytes(str(self.mach_id))) - os.write(fh_, salt.utils.stringutils.to_bytes("\n")) + os.write( + fh_, + salt.utils.stringutils.to_bytes(f"{os.getpid()}\n{self.mach_id}\n"), + ) except OSError as exc: if exc.errno == errno.EEXIST: @@ -1136,7 +1137,11 @@ class GitProvider: continue finally: if lock_set1 or lock_set2: - msg = f"Attempting to remove '{lock_type}' lock for '{self.role}' remote '{self.id}' due to lock_set1 '{lock_set1}' or lock_set2 '{lock_set2}'" + msg = ( + f"Attempting to remove '{lock_type}' lock for " + f"'{self.role}' remote '{self.id}' due to lock_set1 " + f"'{lock_set1}' or lock_set2 '{lock_set2}'" + ) log.debug(msg) self.clear_lock(lock_type=lock_type) diff --git a/salt/utils/process.py b/salt/utils/process.py index fbf6b4f9a59..ec97cd5fc6f 100644 --- a/salt/utils/process.py +++ b/salt/utils/process.py @@ -1137,7 +1137,14 @@ class SignalHandlingProcess(Process): if cur_pid == file_pid: if mach_id != file_mach_id: if not file_mach_id: - msg = f"gitfs lock file for pid '{file_pid}' does not contain a machine id, deleting lock file which may affect if using multi-master with shared gitfs cache, the lock may have been obtained by another master 
recommend updating Salt version on other masters to a version which insert machine identification in lock a file." + msg = ( + f"gitfs lock file for pid '{file_pid}' does not " + "contain a machine id, deleting lock file which may " + "affect if using multi-master with shared gitfs cache, " + "the lock may have been obtained by another master " + "recommend updating Salt version on other masters to a " + "version which insert machine identification in lock a file." + ) log.debug(msg) file_del_list.append( (file_name, file_pid, file_mach_id) From fcbb59cf17b3714f735805cf5c0aebadb2189d3a Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Thu, 28 Mar 2024 13:59:01 -0600 Subject: [PATCH 037/160] Updates due to reviewer comments --- salt/utils/files.py | 3 +- salt/utils/gitfs.py | 20 +++++++++--- salt/utils/process.py | 31 ++++++++++++++----- .../functional/utils/gitfs/test_gitfs.py | 10 ++++-- tests/pytests/unit/utils/test_gitfs_locks.py | 5 ++- 5 files changed, 53 insertions(+), 16 deletions(-) diff --git a/salt/utils/files.py b/salt/utils/files.py index 3111dd2aec2..5ff20f3db4a 100644 --- a/salt/utils/files.py +++ b/salt/utils/files.py @@ -381,7 +381,8 @@ def fopen(*args, **kwargs): # Workaround callers with bad buffering setting for binary files if kwargs.get("buffering") == 1 and "b" in kwargs.get("mode", ""): log.debug( - "Line buffering (buffering=1) isn't supported in binary mode, the default buffer size will be used" + "Line buffering (buffering=1) isn't supported in binary mode, " + "the default buffer size will be used" ) kwargs["buffering"] = io.DEFAULT_BUFFER_SIZE diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index 66e2496e6cf..d2e2f12505b 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -756,7 +756,10 @@ class GitProvider: except OSError as exc: if exc.errno == errno.ENOENT: # No lock file present - msg = f"Attempt to remove lock {self.url} for file ({lock_file}) which does not exist, exception : {exc} " + msg 
= ( + f"Attempt to remove lock {self.url} for file ({lock_file}) " + f"which does not exist, exception : {exc} " + ) log.debug(msg) elif exc.errno == errno.EISDIR: @@ -770,7 +773,10 @@ class GitProvider: else: _add_error(failed, exc) else: - msg = f"Removed {lock_type} lock for {self.role} remote '{self.id}' on machine_id '{self.mach_id}'" + msg = ( + f"Removed {lock_type} lock for {self.role} remote '{self.id}' " + f"on machine_id '{self.mach_id}'" + ) log.debug(msg) success.append(msg) return success, failed @@ -975,8 +981,9 @@ class GitProvider: lock_file = self._get_lock_file(lock_type=lock_type) if self.opts[global_lock_key]: msg = ( - f"{global_lock_key} is enabled and {lock_type} lockfile {lock_file} is present for " - f"{self.role} remote '{self.id}' on machine_id {self.mach_id} with pid '{pid}'." + f"{global_lock_key} is enabled and {lock_type} lockfile {lock_file} " + f"is present for {self.role} remote '{self.id}' on machine_id " + f"{self.mach_id} with pid '{pid}'." ) if pid: msg += f" Process {pid} obtained the lock" @@ -1043,7 +1050,10 @@ class GitProvider: raise return else: - msg = f"Unable to set {lock_type} lock for {self.id} ({self._get_lock_file(lock_type)}) on machine_id {self.mach_id}: {exc}" + msg = ( + f"Unable to set {lock_type} lock for {self.id} " + f"({self._get_lock_file(lock_type)}) on machine_id {self.mach_id}: {exc}" + ) log.error(msg, exc_info=True) raise GitLockError(exc.errno, msg) diff --git a/salt/utils/process.py b/salt/utils/process.py index ec97cd5fc6f..6c1f755699d 100644 --- a/salt/utils/process.py +++ b/salt/utils/process.py @@ -1099,8 +1099,9 @@ class SignalHandlingProcess(Process): os.getpid(), ) - # need to go through and clean up any resources left around like lock files if using gitfs - # example lockfile /var/cache/salt/master/gitfs/work/NlJQs6Pss_07AugikCrmqfmqEFrfPbCDBqGLBiCd3oU=/_/update.lk + # need to clean up any resources left around like lock files if using gitfs + # example: lockfile i + # 
/var/cache/salt/master/gitfs/work/NlJQs6Pss_07AugikCrmqfmqEFrfPbCDBqGLBiCd3oU=/_/update.lk cache_dir = self.opts.get("cachedir", None) gitfs_active = self.opts.get("gitfs_remotes", None) if cache_dir and gitfs_active: @@ -1131,7 +1132,7 @@ class SignalHandlingProcess(Process): ).rstrip() ) except ValueError: - # Lock file is empty, set mach_id to 0 so it evaluates as False. + # Lock file is empty, set mach_id to 0 so it evaluates False. file_mach_id = 0 if cur_pid == file_pid: @@ -1163,7 +1164,11 @@ class SignalHandlingProcess(Process): except OSError as exc: if exc.errno == errno.ENOENT: # No lock file present - msg = f"SIGTERM clean up of resources attempted to remove lock file {file_name}, pid '{file_pid}', machine identifier '{mach_id}' but it did not exist, exception : {exc} " + msg = ( + "SIGTERM clean up of resources attempted to remove lock " + f"file {file_name}, pid '{file_pid}', machine identifier " + f"'{mach_id}' but it did not exist, exception : {exc} " + ) log.debug(msg) elif exc.errno == errno.EISDIR: @@ -1173,13 +1178,25 @@ class SignalHandlingProcess(Process): try: shutil.rmtree(file_name) except OSError as exc: - msg = f"SIGTERM clean up of resources, lock file '{file_name}', pid '{file_pid}', machine identifier '{file_mach_id}' was a directory, removed directory, exception : '{exc}'" + msg = ( + f"SIGTERM clean up of resources, lock file '{file_name}'" + f", pid '{file_pid}', machine identifier '{file_mach_id}'" + f"was a directory, removed directory, exception : '{exc}'" + ) log.debug(msg) else: - msg = f"SIGTERM clean up of resources, unable to remove lock file '{file_name}', pid '{file_pid}', machine identifier '{file_mach_id}', exception : '{exc}'" + msg = ( + "SIGTERM clean up of resources, unable to remove lock file " + f"'{file_name}', pid '{file_pid}', machine identifier " + f"'{file_mach_id}', exception : '{exc}'" + ) log.debug(msg) else: - msg = f"SIGTERM clean up of resources, removed lock file '{file_name}', pid '{file_pid}', machine 
identifier '{file_mach_id}'" + msg = ( + "SIGTERM clean up of resources, removed lock file " + f"'{file_name}', pid '{file_pid}', machine identifier " + f"'{file_mach_id}'" + ) log.debug(msg) except psutil.NoSuchProcess: diff --git a/tests/pytests/functional/utils/gitfs/test_gitfs.py b/tests/pytests/functional/utils/gitfs/test_gitfs.py index a1bc7574822..5b9263f57e8 100644 --- a/tests/pytests/functional/utils/gitfs/test_gitfs.py +++ b/tests/pytests/functional/utils/gitfs/test_gitfs.py @@ -255,14 +255,20 @@ def _test_lock(opts): assert repo.get_salt_working_dir() in repo._get_lock_file() assert repo.lock() == ( [ - f"Set update lock for gitfs remote 'https://github.com/saltstack/salt-test-pillar-gitfs.git' on machine_id '{mach_id}'" + ( + f"Set update lock for gitfs remote " + f"'https://github.com/saltstack/salt-test-pillar-gitfs.git' on machine_id '{mach_id}'" + ) ], [], ) assert os.path.isfile(repo._get_lock_file()) assert repo.clear_lock() == ( [ - f"Removed update lock for gitfs remote 'https://github.com/saltstack/salt-test-pillar-gitfs.git' on machine_id '{mach_id}'" + ( + f"Removed update lock for gitfs remote " + f"'https://github.com/saltstack/salt-test-pillar-gitfs.git' on machine_id '{mach_id}'" + ) ], [], ) diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index 68f4d039c82..9cf58a67dbb 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -387,7 +387,10 @@ def test_git_provider_mp_gen_lock(main_class, caplog): test_msg1 = ( f"Set update lock for gitfs remote 'file://repo1.git' on machine_id '{mach_id}'" ) - test_msg2 = "Attempting to remove 'update' lock for 'gitfs' remote 'file://repo1.git' due to lock_set1 'True' or lock_set2" + test_msg2 = ( + "Attempting to remove 'update' lock for 'gitfs' remote 'file://repo1.git' " + "due to lock_set1 'True' or lock_set2" + ) test_msg3 = f"Removed update lock for gitfs remote 'file://repo1.git' on 
machine_id '{mach_id}'" provider = main_class.remotes[0] From 848c7ad5bf13e6767add2372ea73c796e917eb4c Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Thu, 28 Mar 2024 14:13:38 -0600 Subject: [PATCH 038/160] Moved get_machine_identifier to salt utils platfrom as per reviewers request --- salt/grains/core.py | 3 ++- salt/utils/files.py | 15 --------------- salt/utils/gitfs.py | 3 ++- salt/utils/platform.py | 16 ++++++++++++++++ salt/utils/process.py | 5 ++--- .../pytests/functional/utils/gitfs/test_gitfs.py | 6 ++---- tests/pytests/unit/utils/test_gitfs_locks.py | 13 ++++--------- 7 files changed, 28 insertions(+), 33 deletions(-) diff --git a/salt/grains/core.py b/salt/grains/core.py index f51c36d9d2d..cc781a2ced9 100644 --- a/salt/grains/core.py +++ b/salt/grains/core.py @@ -39,6 +39,7 @@ import salt.utils.pkg.rpm import salt.utils.platform import salt.utils.stringutils from salt.utils.network import _clear_interfaces, _get_interfaces +from salt.utils.platform import get_machine_identifier as _get_machine_identifier from salt.utils.platform import linux_distribution as _linux_distribution try: @@ -3048,7 +3049,7 @@ def get_machine_id(): if platform.system() == "AIX": return _aix_get_machine_id() - return salt.utils.files.get_machine_identifier() + return _get_machine_identifier() def cwd(): diff --git a/salt/utils/files.py b/salt/utils/files.py index 5ff20f3db4a..acfe70f41a5 100644 --- a/salt/utils/files.py +++ b/salt/utils/files.py @@ -905,18 +905,3 @@ def get_encoding(path): return "ASCII" raise CommandExecutionError("Could not detect file encoding") - - -def get_machine_identifier(): - """ - Provide the machine-id for machine/virtualization combination - """ - # Provides: - # machine-id - locations = ["/etc/machine-id", "/var/lib/dbus/machine-id"] - existing_locations = [loc for loc in locations if os.path.exists(loc)] - if not existing_locations: - return {} - else: - with salt.utils.files.fopen(existing_locations[0]) as machineid: - 
return {"machine_id": machineid.read().strip()} diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index d2e2f12505b..ffaecfc97ff 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -41,6 +41,7 @@ from salt.config import DEFAULT_MASTER_OPTS as _DEFAULT_MASTER_OPTS from salt.exceptions import FileserverConfigError, GitLockError, get_error_message from salt.utils.event import tagify from salt.utils.odict import OrderedDict +from salt.utils.platform import get_machine_identifier as _get_machine_identifier from salt.utils.process import os_is_running as pid_exists from salt.utils.versions import Version @@ -249,7 +250,7 @@ class GitProvider: return str(y) # get machine_identifier - self.mach_id = salt.utils.files.get_machine_identifier().get( + self.mach_id = _get_machine_identifier().get( "machine_id", "no_machine_id_available" ) diff --git a/salt/utils/platform.py b/salt/utils/platform.py index c6ca7fe8cae..667414b3f71 100644 --- a/salt/utils/platform.py +++ b/salt/utils/platform.py @@ -12,6 +12,7 @@ import sys import distro from salt.utils.decorators import memoize as real_memoize +from salt.utils.files import fopen as _fopen def linux_distribution(full_distribution_name=True): @@ -239,3 +240,18 @@ def spawning_platform(): Salt, however, will force macOS to spawning by default on all python versions """ return multiprocessing.get_start_method(allow_none=False) == "spawn" + + +def get_machine_identifier(): + """ + Provide the machine-id for machine/virtualization combination + """ + # Provides: + # machine-id + locations = ["/etc/machine-id", "/var/lib/dbus/machine-id"] + existing_locations = [loc for loc in locations if os.path.exists(loc)] + if not existing_locations: + return {} + else: + with _fopen(existing_locations[0]) as machineid: + return {"machine_id": machineid.read().strip()} diff --git a/salt/utils/process.py b/salt/utils/process.py index 6c1f755699d..1db75647a53 100644 --- a/salt/utils/process.py +++ b/salt/utils/process.py @@ -30,6 +30,7 
@@ import salt.utils.path import salt.utils.platform import salt.utils.versions from salt.ext.tornado import gen +from salt.utils.platform import get_machine_identifier as _get_machine_identifier log = logging.getLogger(__name__) @@ -1075,9 +1076,7 @@ class SignalHandlingProcess(Process): msg += ". Exiting" log.debug(msg) - mach_id = salt.utils.files.get_machine_identifier().get( - "machine_id", "no_machine_id_available" - ) + mach_id = _get_machine_identifier().get("machine_id", "no_machine_id_available") log.debug( "exiting for process id %s and machine identifer %s", os.getpid(), mach_id ) diff --git a/tests/pytests/functional/utils/gitfs/test_gitfs.py b/tests/pytests/functional/utils/gitfs/test_gitfs.py index 5b9263f57e8..e31e3afac4d 100644 --- a/tests/pytests/functional/utils/gitfs/test_gitfs.py +++ b/tests/pytests/functional/utils/gitfs/test_gitfs.py @@ -2,10 +2,10 @@ import os.path import pytest -import salt.utils.files from salt.fileserver.gitfs import PER_REMOTE_ONLY, PER_REMOTE_OVERRIDES from salt.utils.gitfs import GitFS, GitPython, Pygit2 from salt.utils.immutabletypes import ImmutableDict, ImmutableList +from salt.utils.platform import get_machine_identifier as _get_machine_identifier pytestmark = [ pytest.mark.slow_test, @@ -249,9 +249,7 @@ def _test_lock(opts): g.fetch_remotes() assert len(g.remotes) == 1 repo = g.remotes[0] - mach_id = salt.utils.files.get_machine_identifier().get( - "machine_id", "no_machine_id_available" - ) + mach_id = _get_machine_identifier().get("machine_id", "no_machine_id_available") assert repo.get_salt_working_dir() in repo._get_lock_file() assert repo.lock() == ( [ diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index 9cf58a67dbb..430dd464fe4 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -20,6 +20,7 @@ import salt.utils.path import salt.utils.platform import salt.utils.process from 
salt.utils.immutabletypes import freeze +from salt.utils.platform import get_machine_identifier as _get_machine_identifier from salt.utils.verify import verify_env try: @@ -379,9 +380,7 @@ def test_git_provider_mp_gen_lock(main_class, caplog): Check that gen_lock is obtains lock, and then releases, provider.lock() """ # get machine_identifier - mach_id = salt.utils.files.get_machine_identifier().get( - "machine_id", "no_machine_id_available" - ) + mach_id = _get_machine_identifier().get("machine_id", "no_machine_id_available") cur_pid = os.getpid() test_msg1 = ( @@ -418,9 +417,7 @@ def test_git_provider_mp_lock_dead_pid(main_class, caplog): Check that lock obtains lock, if previous pid in lock file doesn't exist for same machine id """ # get machine_identifier - mach_id = salt.utils.files.get_machine_identifier().get( - "machine_id", "no_machine_id_available" - ) + mach_id = _get_machine_identifier().get("machine_id", "no_machine_id_available") cur_pid = os.getpid() test_msg1 = ( @@ -493,9 +490,7 @@ def test_git_provider_mp_lock_bad_machine(main_class, caplog): Check that lock obtains lock, if previous pid in lock file doesn't exist for same machine id """ # get machine_identifier - mach_id = salt.utils.files.get_machine_identifier().get( - "machine_id", "no_machine_id_available" - ) + mach_id = _get_machine_identifier().get("machine_id", "no_machine_id_available") cur_pid = os.getpid() provider = main_class.remotes[0] From f7366267abf09b7ad34c23b81d2790d14b53fc13 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Tue, 7 May 2024 14:11:15 -0600 Subject: [PATCH 039/160] WIP Initial testing refactor --- tests/pytests/unit/utils/test_gitfs_locks.py | 426 +++++++++++++++---- 1 file changed, 335 insertions(+), 91 deletions(-) diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index 430dd464fe4..d0500e76a21 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ 
b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -51,22 +51,338 @@ def _clear_instance_map(): pass -class AdaptedConfigurationTestCaseMixin: +## class AdaptedConfigurationTestCaseMixin: +## +## def __init__( +## self, +## salt_factories, +## salt_master_factory, +## salt_minion_factory, +## tmp_path, +## ): +## self._tmp_name = str(tmp_path) +## +## self._master_cfg = str(salt_master_factory.config["conf_file"]) +## self._minion_cfg = str(salt_minion_factory.config["conf_file"]) +## self._root_dir = str(salt_factories.root_dir.resolve()) +## self._user = _get_user() +## +## def get_temp_config(self, config_for, **config_overrides): +## +## rootdir = config_overrides.get("root_dir", self._root_dir) +## +## if not pathlib.Path(rootdir).exists(): +## pathlib.Path(rootdir).mkdir(exist_ok=True, parents=True) +## +## conf_dir = config_overrides.pop( +## "conf_dir", str(pathlib.PurePath(rootdir).joinpath("conf")) +## ) +## +## for key in ("cachedir", "pki_dir", "sock_dir"): +## if key not in config_overrides: +## config_overrides[key] = key +## if "log_file" not in config_overrides: +## config_overrides["log_file"] = f"logs/{config_for}.log".format() +## if "user" not in config_overrides: +## config_overrides["user"] = self._user +## config_overrides["root_dir"] = rootdir +## +## cdict = self.get_config( +## config_for, +## from_scratch=True, +## ) +## +## if config_for in ("master", "client_config"): +## rdict = salt.config.apply_master_config(config_overrides, cdict) +## if config_for == "minion": +## minion_id = ( +## config_overrides.get("id") +## or config_overrides.get("minion_id") +## or cdict.get("id") +## or cdict.get("minion_id") +## or random_string("temp-minion-") +## ) +## config_overrides["minion_id"] = config_overrides["id"] = minion_id +## rdict = salt.config.apply_minion_config( +## config_overrides, cdict, cache_minion_id=False, minion_id=minion_id +## ) +## +## verify_env( +## [ +## pathlib.PurePath(rdict["pki_dir"]).joinpath("minions"), +## 
pathlib.PurePath(rdict["pki_dir"]).joinpath("minions_pre"), +## pathlib.PurePath(rdict["pki_dir"]).joinpath("minions_rejected"), +## pathlib.PurePath(rdict["pki_dir"]).joinpath("minions_denied"), +## pathlib.PurePath(rdict["cachedir"]).joinpath("jobs"), +## pathlib.PurePath(rdict["cachedir"]).joinpath("tokens"), +## pathlib.PurePath(rdict["root_dir"]).joinpath("cache", "tokens"), +## pathlib.PurePath(rdict["pki_dir"]).joinpath("accepted"), +## pathlib.PurePath(rdict["pki_dir"]).joinpath("rejected"), +## pathlib.PurePath(rdict["pki_dir"]).joinpath("pending"), +## pathlib.PurePath(rdict["log_file"]).parent, +## rdict["sock_dir"], +## conf_dir, +## ], +## self._user, +## root_dir=rdict["root_dir"], +## ) +## +## rdict["conf_file"] = pathlib.PurePath(conf_dir).joinpath(config_for) +## with salt.utils.files.fopen(rdict["conf_file"], "w") as wfh: +## salt.utils.yaml.safe_dump(rdict, wfh, default_flow_style=False) +## return rdict +## +## def get_config( +## self, +## config_for, +## from_scratch=False, +## ): +## if from_scratch: +## if config_for in ("master"): +## return salt.config.master_config(self._master_cfg) +## elif config_for in ("minion"): +## return salt.config.minion_config(self._minion_cfg) +## elif config_for == "client_config": +## return salt.config_client_config(self._master_cfg) +## if config_for not in ("master", "minion", "client_config"): +## if config_for in ("master"): +## return freeze(salt.config.master_config(self._master_cfg)) +## elif config_for in ("minion"): +## return freeze(salt.config.minion_config(self._minion_cfg)) +## elif config_for == "client_config": +## return freeze(salt.config.client_config(self._master_cfg)) +## +## log.error( +## "Should not reach this section of code for get_config, missing support for input config_for %s", +## config_for, +## ) +## +## # at least return master's config +## return freeze(salt.config.master_config(self._master_cfg)) +## +## @property +## def config_dir(self): +## return 
str(pathlib.PurePath(self._master_cfg).parent) +## +## def get_config_dir(self): +## log.warning("Use the config_dir attribute instead of calling get_config_dir()") +## return self.config_dir +## +## def get_config_file_path(self, filename): +## if filename == "master": +## return str(self._master_cfg) +## +## if filename == "minion": +## return str(self._minion_cfg) +## +## return str(self._master_cfg) +## +## @property +## def master_opts(self): +## """ +## Return the options used for the master +## """ +## return self.get_config("master") +## +## @property +## def minion_opts(self): +## """ +## Return the options used for the minion +## """ +## return self.get_config("minion") +## +## +## class MyGitBase(AdaptedConfigurationTestCaseMixin): +## """ +## mocked GitFS provider leveraging tmp_path +## """ +## +## def __init__( +## self, +## salt_factories, +## salt_master_factory, +## salt_minion_factory, +## tmp_path, +## ): +## super().__init__( +## salt_factories, +## salt_master_factory, +## salt_minion_factory, +## tmp_path, +## ) +## +## tmp_name = self._tmp_name.join("/git_test") +## pathlib.Path(tmp_name).mkdir(exist_ok=True, parents=True) +## +## class MockedProvider( +## salt.utils.gitfs.GitProvider +## ): # pylint: disable=abstract-method +## def __init__( +## self, +## opts, +## remote, +## per_remote_defaults, +## per_remote_only, +## override_params, +## cache_root, +## role="gitfs", +## ): +## self.provider = "mocked" +## self.fetched = False +## super().__init__( +## opts, +## remote, +## per_remote_defaults, +## per_remote_only, +## override_params, +## cache_root, +## role, +## ) +## +## def init_remote(self): +## self.gitdir = salt.utils.path.join(tmp_name, ".git") +## self.repo = True +## new = False +## return new +## +## def envs(self): +## return ["base"] +## +## def _fetch(self): +## self.fetched = True +## +## # Clear the instance map so that we make sure to create a new instance +## # for this test class. 
+## _clear_instance_map() +## +## git_providers = { +## "mocked": MockedProvider, +## } +## gitfs_remotes = ["file://repo1.git", {"file://repo2.git": [{"name": "repo2"}]}] +## +## self.opts = self.get_temp_config( +## "master", +## gitfs_remotes=gitfs_remotes, +## verified_gitfs_provider="mocked", +## ) +## self.main_class = salt.utils.gitfs.GitFS( +## self.opts, +## self.opts["gitfs_remotes"], +## per_remote_overrides=salt.fileserver.gitfs.PER_REMOTE_OVERRIDES, +## per_remote_only=salt.fileserver.gitfs.PER_REMOTE_ONLY, +## git_providers=git_providers, +## ) +## +## def cleanup(self): +## # Providers are preserved with GitFS's instance_map +## for remote in self.main_class.remotes: +## remote.fetched = False +## del self.main_class + + +class MyMockedGitProvider: + """ + mocked GitFS provider leveraging tmp_path + """ def __init__( self, salt_factories, salt_master_factory, salt_minion_factory, + salt_factories_root_dir, + temp_salt_master, + temp_salt_minion, tmp_path, ): self._tmp_name = str(tmp_path) + # TBD DGM + # pylint: disable=W1203 + self._root_dir = str(salt_factories.root_dir.resolve()) self._master_cfg = str(salt_master_factory.config["conf_file"]) self._minion_cfg = str(salt_minion_factory.config["conf_file"]) - self._root_dir = str(salt_factories.root_dir.resolve()) + log.warning( + f"DGM MyMockedGitProvider dunder init old, root_dir '{self._root_dir}', master_cfg '{self._master_cfg}', minion_cfg '{self._minion_cfg}'" + ) + + self._root_dir = str(salt_factories_root_dir) + self._master_cfg = str(temp_salt_master.config["conf_file"]) + self._minion_cfg = str(temp_salt_minion.config["conf_file"]) self._user = _get_user() + log.warning( + f"DGM MyMockedGitProvider dunder init new, root_dir '{self._root_dir}', master_cfg '{self._master_cfg}', minion_cfg '{self._minion_cfg}'" + ) + + tmp_name = self._tmp_name.join("/git_test") + pathlib.Path(tmp_name).mkdir(exist_ok=True, parents=True) + + class MockedProvider( + salt.utils.gitfs.GitProvider + ): # pylint: 
disable=abstract-method + def __init__( + self, + opts, + remote, + per_remote_defaults, + per_remote_only, + override_params, + cache_root, + role="gitfs", + ): + self.provider = "mocked" + self.fetched = False + super().__init__( + opts, + remote, + per_remote_defaults, + per_remote_only, + override_params, + cache_root, + role, + ) + + def init_remote(self): + self.gitdir = salt.utils.path.join(tmp_name, ".git") + self.repo = True + new = False + return new + + def envs(self): + return ["base"] + + def _fetch(self): + self.fetched = True + + # Clear the instance map so that we make sure to create a new instance + # for this test class. + _clear_instance_map() + + git_providers = { + "mocked": MockedProvider, + } + gitfs_remotes = ["file://repo1.git", {"file://repo2.git": [{"name": "repo2"}]}] + + self.opts = self.get_temp_config( + "master", + gitfs_remotes=gitfs_remotes, + verified_gitfs_provider="mocked", + ) + self.main_class = salt.utils.gitfs.GitFS( + self.opts, + self.opts["gitfs_remotes"], + per_remote_overrides=salt.fileserver.gitfs.PER_REMOTE_OVERRIDES, + per_remote_only=salt.fileserver.gitfs.PER_REMOTE_ONLY, + git_providers=git_providers, + ) + + def cleanup(self): + # Providers are preserved with GitFS's instance_map + for remote in self.main_class.remotes: + remote.fetched = False + del self.main_class + def get_temp_config(self, config_for, **config_overrides): rootdir = config_overrides.get("root_dir", self._root_dir) @@ -192,105 +508,33 @@ class AdaptedConfigurationTestCaseMixin: return self.get_config("minion") -class MyGitBase(AdaptedConfigurationTestCaseMixin): - """ - mocked GitFS provider leveraging tmp_path - """ - - def __init__( - self, - salt_factories, - salt_master_factory, - salt_minion_factory, - tmp_path, - ): - super().__init__( - salt_factories, - salt_master_factory, - salt_minion_factory, - tmp_path, - ) - - tmp_name = self._tmp_name.join("/git_test") - pathlib.Path(tmp_name).mkdir(exist_ok=True, parents=True) - - class 
MockedProvider( - salt.utils.gitfs.GitProvider - ): # pylint: disable=abstract-method - def __init__( - self, - opts, - remote, - per_remote_defaults, - per_remote_only, - override_params, - cache_root, - role="gitfs", - ): - self.provider = "mocked" - self.fetched = False - super().__init__( - opts, - remote, - per_remote_defaults, - per_remote_only, - override_params, - cache_root, - role, - ) - - def init_remote(self): - self.gitdir = salt.utils.path.join(tmp_name, ".git") - self.repo = True - new = False - return new - - def envs(self): - return ["base"] - - def _fetch(self): - self.fetched = True - - # Clear the instance map so that we make sure to create a new instance - # for this test class. - _clear_instance_map() - - git_providers = { - "mocked": MockedProvider, - } - gitfs_remotes = ["file://repo1.git", {"file://repo2.git": [{"name": "repo2"}]}] - - self.opts = self.get_temp_config( - "master", - gitfs_remotes=gitfs_remotes, - verified_gitfs_provider="mocked", - ) - self.main_class = salt.utils.gitfs.GitFS( - self.opts, - self.opts["gitfs_remotes"], - per_remote_overrides=salt.fileserver.gitfs.PER_REMOTE_OVERRIDES, - per_remote_only=salt.fileserver.gitfs.PER_REMOTE_ONLY, - git_providers=git_providers, - ) - - def cleanup(self): - # Providers are preserved with GitFS's instance_map - for remote in self.main_class.remotes: - remote.fetched = False - del self.main_class - - @pytest.fixture def main_class( salt_factories, salt_master_factory, salt_minion_factory, + salt_factories_root_dir, + temp_salt_master, + temp_salt_minion, tmp_path, ): - my_git_base = MyGitBase( + ## my_git_base = MyGitBase( + ## salt_factories, + ## salt_master_factory, + ## salt_minion_factory, + ## tmp_path, + ## ) + ## yield my_git_base.main_class + + ## my_git_base.cleanup() + + my_git_base = MyMockedGitProvider( salt_factories, salt_master_factory, salt_minion_factory, + salt_factories_root_dir, + temp_salt_master, + temp_salt_minion, tmp_path, ) yield my_git_base.main_class From 
f4d1e2ee494d3b7d77fbf33b8ac5397b760f29f1 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Wed, 8 May 2024 15:24:26 -0600 Subject: [PATCH 040/160] Fix typo --- tests/pytests/unit/utils/test_gitfs_locks.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index d0500e76a21..fa9310dbde9 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -290,7 +290,7 @@ class MyMockedGitProvider: salt_factories, salt_master_factory, salt_minion_factory, - salt_factories_root_dir, + salt_factories_default_root_dir, temp_salt_master, temp_salt_minion, tmp_path, @@ -306,7 +306,7 @@ class MyMockedGitProvider: f"DGM MyMockedGitProvider dunder init old, root_dir '{self._root_dir}', master_cfg '{self._master_cfg}', minion_cfg '{self._minion_cfg}'" ) - self._root_dir = str(salt_factories_root_dir) + self._root_dir = str(salt_factories_default_root_dir) self._master_cfg = str(temp_salt_master.config["conf_file"]) self._minion_cfg = str(temp_salt_minion.config["conf_file"]) self._user = _get_user() @@ -513,7 +513,7 @@ def main_class( salt_factories, salt_master_factory, salt_minion_factory, - salt_factories_root_dir, + salt_factories_default_root_dir, temp_salt_master, temp_salt_minion, tmp_path, @@ -532,7 +532,7 @@ def main_class( salt_factories, salt_master_factory, salt_minion_factory, - salt_factories_root_dir, + salt_factories_default_root_dir, temp_salt_master, temp_salt_minion, tmp_path, From 5910c162d720271e010e7534723fbcd2854bab7d Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Wed, 8 May 2024 15:38:34 -0600 Subject: [PATCH 041/160] Some clean up of old code --- tests/pytests/unit/utils/test_gitfs_locks.py | 254 ------------------- 1 file changed, 254 deletions(-) diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py 
b/tests/pytests/unit/utils/test_gitfs_locks.py index fa9310dbde9..28f451499bd 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -51,235 +51,6 @@ def _clear_instance_map(): pass -## class AdaptedConfigurationTestCaseMixin: -## -## def __init__( -## self, -## salt_factories, -## salt_master_factory, -## salt_minion_factory, -## tmp_path, -## ): -## self._tmp_name = str(tmp_path) -## -## self._master_cfg = str(salt_master_factory.config["conf_file"]) -## self._minion_cfg = str(salt_minion_factory.config["conf_file"]) -## self._root_dir = str(salt_factories.root_dir.resolve()) -## self._user = _get_user() -## -## def get_temp_config(self, config_for, **config_overrides): -## -## rootdir = config_overrides.get("root_dir", self._root_dir) -## -## if not pathlib.Path(rootdir).exists(): -## pathlib.Path(rootdir).mkdir(exist_ok=True, parents=True) -## -## conf_dir = config_overrides.pop( -## "conf_dir", str(pathlib.PurePath(rootdir).joinpath("conf")) -## ) -## -## for key in ("cachedir", "pki_dir", "sock_dir"): -## if key not in config_overrides: -## config_overrides[key] = key -## if "log_file" not in config_overrides: -## config_overrides["log_file"] = f"logs/{config_for}.log".format() -## if "user" not in config_overrides: -## config_overrides["user"] = self._user -## config_overrides["root_dir"] = rootdir -## -## cdict = self.get_config( -## config_for, -## from_scratch=True, -## ) -## -## if config_for in ("master", "client_config"): -## rdict = salt.config.apply_master_config(config_overrides, cdict) -## if config_for == "minion": -## minion_id = ( -## config_overrides.get("id") -## or config_overrides.get("minion_id") -## or cdict.get("id") -## or cdict.get("minion_id") -## or random_string("temp-minion-") -## ) -## config_overrides["minion_id"] = config_overrides["id"] = minion_id -## rdict = salt.config.apply_minion_config( -## config_overrides, cdict, cache_minion_id=False, minion_id=minion_id -## ) -## 
-## verify_env( -## [ -## pathlib.PurePath(rdict["pki_dir"]).joinpath("minions"), -## pathlib.PurePath(rdict["pki_dir"]).joinpath("minions_pre"), -## pathlib.PurePath(rdict["pki_dir"]).joinpath("minions_rejected"), -## pathlib.PurePath(rdict["pki_dir"]).joinpath("minions_denied"), -## pathlib.PurePath(rdict["cachedir"]).joinpath("jobs"), -## pathlib.PurePath(rdict["cachedir"]).joinpath("tokens"), -## pathlib.PurePath(rdict["root_dir"]).joinpath("cache", "tokens"), -## pathlib.PurePath(rdict["pki_dir"]).joinpath("accepted"), -## pathlib.PurePath(rdict["pki_dir"]).joinpath("rejected"), -## pathlib.PurePath(rdict["pki_dir"]).joinpath("pending"), -## pathlib.PurePath(rdict["log_file"]).parent, -## rdict["sock_dir"], -## conf_dir, -## ], -## self._user, -## root_dir=rdict["root_dir"], -## ) -## -## rdict["conf_file"] = pathlib.PurePath(conf_dir).joinpath(config_for) -## with salt.utils.files.fopen(rdict["conf_file"], "w") as wfh: -## salt.utils.yaml.safe_dump(rdict, wfh, default_flow_style=False) -## return rdict -## -## def get_config( -## self, -## config_for, -## from_scratch=False, -## ): -## if from_scratch: -## if config_for in ("master"): -## return salt.config.master_config(self._master_cfg) -## elif config_for in ("minion"): -## return salt.config.minion_config(self._minion_cfg) -## elif config_for == "client_config": -## return salt.config_client_config(self._master_cfg) -## if config_for not in ("master", "minion", "client_config"): -## if config_for in ("master"): -## return freeze(salt.config.master_config(self._master_cfg)) -## elif config_for in ("minion"): -## return freeze(salt.config.minion_config(self._minion_cfg)) -## elif config_for == "client_config": -## return freeze(salt.config.client_config(self._master_cfg)) -## -## log.error( -## "Should not reach this section of code for get_config, missing support for input config_for %s", -## config_for, -## ) -## -## # at least return master's config -## return 
freeze(salt.config.master_config(self._master_cfg)) -## -## @property -## def config_dir(self): -## return str(pathlib.PurePath(self._master_cfg).parent) -## -## def get_config_dir(self): -## log.warning("Use the config_dir attribute instead of calling get_config_dir()") -## return self.config_dir -## -## def get_config_file_path(self, filename): -## if filename == "master": -## return str(self._master_cfg) -## -## if filename == "minion": -## return str(self._minion_cfg) -## -## return str(self._master_cfg) -## -## @property -## def master_opts(self): -## """ -## Return the options used for the master -## """ -## return self.get_config("master") -## -## @property -## def minion_opts(self): -## """ -## Return the options used for the minion -## """ -## return self.get_config("minion") -## -## -## class MyGitBase(AdaptedConfigurationTestCaseMixin): -## """ -## mocked GitFS provider leveraging tmp_path -## """ -## -## def __init__( -## self, -## salt_factories, -## salt_master_factory, -## salt_minion_factory, -## tmp_path, -## ): -## super().__init__( -## salt_factories, -## salt_master_factory, -## salt_minion_factory, -## tmp_path, -## ) -## -## tmp_name = self._tmp_name.join("/git_test") -## pathlib.Path(tmp_name).mkdir(exist_ok=True, parents=True) -## -## class MockedProvider( -## salt.utils.gitfs.GitProvider -## ): # pylint: disable=abstract-method -## def __init__( -## self, -## opts, -## remote, -## per_remote_defaults, -## per_remote_only, -## override_params, -## cache_root, -## role="gitfs", -## ): -## self.provider = "mocked" -## self.fetched = False -## super().__init__( -## opts, -## remote, -## per_remote_defaults, -## per_remote_only, -## override_params, -## cache_root, -## role, -## ) -## -## def init_remote(self): -## self.gitdir = salt.utils.path.join(tmp_name, ".git") -## self.repo = True -## new = False -## return new -## -## def envs(self): -## return ["base"] -## -## def _fetch(self): -## self.fetched = True -## -## # Clear the instance map so 
that we make sure to create a new instance -## # for this test class. -## _clear_instance_map() -## -## git_providers = { -## "mocked": MockedProvider, -## } -## gitfs_remotes = ["file://repo1.git", {"file://repo2.git": [{"name": "repo2"}]}] -## -## self.opts = self.get_temp_config( -## "master", -## gitfs_remotes=gitfs_remotes, -## verified_gitfs_provider="mocked", -## ) -## self.main_class = salt.utils.gitfs.GitFS( -## self.opts, -## self.opts["gitfs_remotes"], -## per_remote_overrides=salt.fileserver.gitfs.PER_REMOTE_OVERRIDES, -## per_remote_only=salt.fileserver.gitfs.PER_REMOTE_ONLY, -## git_providers=git_providers, -## ) -## -## def cleanup(self): -## # Providers are preserved with GitFS's instance_map -## for remote in self.main_class.remotes: -## remote.fetched = False -## del self.main_class - - class MyMockedGitProvider: """ mocked GitFS provider leveraging tmp_path @@ -287,9 +58,6 @@ class MyMockedGitProvider: def __init__( self, - salt_factories, - salt_master_factory, - salt_minion_factory, salt_factories_default_root_dir, temp_salt_master, temp_salt_minion, @@ -299,12 +67,6 @@ class MyMockedGitProvider: # TBD DGM # pylint: disable=W1203 - self._root_dir = str(salt_factories.root_dir.resolve()) - self._master_cfg = str(salt_master_factory.config["conf_file"]) - self._minion_cfg = str(salt_minion_factory.config["conf_file"]) - log.warning( - f"DGM MyMockedGitProvider dunder init old, root_dir '{self._root_dir}', master_cfg '{self._master_cfg}', minion_cfg '{self._minion_cfg}'" - ) self._root_dir = str(salt_factories_default_root_dir) self._master_cfg = str(temp_salt_master.config["conf_file"]) @@ -510,28 +272,12 @@ class MyMockedGitProvider: @pytest.fixture def main_class( - salt_factories, - salt_master_factory, - salt_minion_factory, salt_factories_default_root_dir, temp_salt_master, temp_salt_minion, tmp_path, ): - ## my_git_base = MyGitBase( - ## salt_factories, - ## salt_master_factory, - ## salt_minion_factory, - ## tmp_path, - ## ) - ## yield 
my_git_base.main_class - - ## my_git_base.cleanup() - my_git_base = MyMockedGitProvider( - salt_factories, - salt_master_factory, - salt_minion_factory, salt_factories_default_root_dir, temp_salt_master, temp_salt_minion, From 56f3cbeff8543f536c9a8e2140ead86da19c1d46 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Thu, 9 May 2024 16:36:58 -0600 Subject: [PATCH 042/160] Removed circular dependency on salt.utils.files.fopen --- salt/utils/platform.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/salt/utils/platform.py b/salt/utils/platform.py index 667414b3f71..b64352656cd 100644 --- a/salt/utils/platform.py +++ b/salt/utils/platform.py @@ -12,7 +12,8 @@ import sys import distro from salt.utils.decorators import memoize as real_memoize -from salt.utils.files import fopen as _fopen + +## from salt.utils.files import fopen as _fopen def linux_distribution(full_distribution_name=True): @@ -246,6 +247,7 @@ def get_machine_identifier(): """ Provide the machine-id for machine/virtualization combination """ + # pylint: disable=resource-leakage # Provides: # machine-id locations = ["/etc/machine-id", "/var/lib/dbus/machine-id"] @@ -253,5 +255,9 @@ def get_machine_identifier(): if not existing_locations: return {} else: - with _fopen(existing_locations[0]) as machineid: + ## with _fopen(existing_locations[0]) as machineid: + # cannot use salt.utils.files.fopen due to circular dependency + with open( + existing_locations[0], encoding=__salt_system_encoding__ + ) as machineid: return {"machine_id": machineid.read().strip()} From b9203f80db839b5a8e0f4dda522b7b9af0f243ed Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Fri, 10 May 2024 09:46:27 -0600 Subject: [PATCH 043/160] Add allowance for machine identifier for recent test --- tests/pytests/functional/utils/gitfs/test_pillar.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git 
a/tests/pytests/functional/utils/gitfs/test_pillar.py b/tests/pytests/functional/utils/gitfs/test_pillar.py index 5d6729dc5f7..982d0558184 100644 --- a/tests/pytests/functional/utils/gitfs/test_pillar.py +++ b/tests/pytests/functional/utils/gitfs/test_pillar.py @@ -5,6 +5,7 @@ import pytest from salt.pillar.git_pillar import GLOBAL_ONLY, PER_REMOTE_ONLY, PER_REMOTE_OVERRIDES from salt.utils.gitfs import GitPillar, GitPython, Pygit2 from salt.utils.immutabletypes import ImmutableDict, ImmutableList +from salt.utils.platform import get_machine_identifier as _get_machine_identifier pytestmark = [ pytest.mark.windows_whitelisted, @@ -339,17 +340,24 @@ def _test_lock(opts): p.fetch_remotes() assert len(p.remotes) == 1 repo = p.remotes[0] + mach_id = _get_machine_identifier().get("machine_id", "no_machine_id_available") assert repo.get_salt_working_dir() in repo._get_lock_file() assert repo.lock() == ( [ - "Set update lock for git_pillar remote 'https://github.com/saltstack/salt-test-pillar-gitfs.git'" + ( + f"Set update lock for gitfs remote " + f"'https://github.com/saltstack/salt-test-pillar-gitfs.git' on machine_id '{mach_id}'" + ) ], [], ) assert os.path.isfile(repo._get_lock_file()) assert repo.clear_lock() == ( [ - "Removed update lock for git_pillar remote 'https://github.com/saltstack/salt-test-pillar-gitfs.git'" + ( + f"Removed update lock for gitfs remote " + f"'https://github.com/saltstack/salt-test-pillar-gitfs.git' on machine_id '{mach_id}'" + ) ], [], ) From 7b40bdd4f991d08b6321227c0bacb7d4feea7aca Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Fri, 10 May 2024 15:00:58 -0600 Subject: [PATCH 044/160] Fix typo in assert check --- tests/pytests/functional/utils/gitfs/test_pillar.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/pytests/functional/utils/gitfs/test_pillar.py b/tests/pytests/functional/utils/gitfs/test_pillar.py index 982d0558184..8e5a1aa52ca 100644 --- 
a/tests/pytests/functional/utils/gitfs/test_pillar.py +++ b/tests/pytests/functional/utils/gitfs/test_pillar.py @@ -345,7 +345,7 @@ def _test_lock(opts): assert repo.lock() == ( [ ( - f"Set update lock for gitfs remote " + f"Set update lock for git_pillar remote " f"'https://github.com/saltstack/salt-test-pillar-gitfs.git' on machine_id '{mach_id}'" ) ], @@ -355,7 +355,7 @@ def _test_lock(opts): assert repo.clear_lock() == ( [ ( - f"Removed update lock for gitfs remote " + f"Removed update lock for git_pillar remote " f"'https://github.com/saltstack/salt-test-pillar-gitfs.git' on machine_id '{mach_id}'" ) ], From 55fa8cee80031add61049b8ba91a5c828d8ffb25 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Mon, 13 May 2024 09:18:56 -0600 Subject: [PATCH 045/160] Cleanup debug logging --- tests/pytests/unit/utils/test_gitfs_locks.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index 28f451499bd..e460ef6b63e 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -65,18 +65,11 @@ class MyMockedGitProvider: ): self._tmp_name = str(tmp_path) - # TBD DGM - # pylint: disable=W1203 - self._root_dir = str(salt_factories_default_root_dir) self._master_cfg = str(temp_salt_master.config["conf_file"]) self._minion_cfg = str(temp_salt_minion.config["conf_file"]) self._user = _get_user() - log.warning( - f"DGM MyMockedGitProvider dunder init new, root_dir '{self._root_dir}', master_cfg '{self._master_cfg}', minion_cfg '{self._minion_cfg}'" - ) - tmp_name = self._tmp_name.join("/git_test") pathlib.Path(tmp_name).mkdir(exist_ok=True, parents=True) From af5a48579390b923b61cd9e76ebe0b1183d1b898 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Mon, 13 May 2024 09:46:50 -0600 Subject: [PATCH 046/160] Updates after reviewer comments --- salt/utils/platform.py | 3 --- 
salt/utils/process.py | 4 ++-- tests/pytests/unit/utils/test_gitfs_locks.py | 2 +- 3 files changed, 3 insertions(+), 6 deletions(-) diff --git a/salt/utils/platform.py b/salt/utils/platform.py index b64352656cd..100918b72d5 100644 --- a/salt/utils/platform.py +++ b/salt/utils/platform.py @@ -13,8 +13,6 @@ import distro from salt.utils.decorators import memoize as real_memoize -## from salt.utils.files import fopen as _fopen - def linux_distribution(full_distribution_name=True): """ @@ -255,7 +253,6 @@ def get_machine_identifier(): if not existing_locations: return {} else: - ## with _fopen(existing_locations[0]) as machineid: # cannot use salt.utils.files.fopen due to circular dependency with open( existing_locations[0], encoding=__salt_system_encoding__ diff --git a/salt/utils/process.py b/salt/utils/process.py index 1db75647a53..a76aec7dfc8 100644 --- a/salt/utils/process.py +++ b/salt/utils/process.py @@ -13,6 +13,7 @@ import logging import multiprocessing import multiprocessing.util import os +import pathlib import queue import shutil import signal @@ -21,7 +22,6 @@ import subprocess import sys import threading import time -from pathlib import Path import salt._logging import salt.defaults.exitcodes @@ -1106,7 +1106,7 @@ class SignalHandlingProcess(Process): if cache_dir and gitfs_active: # check for gitfs file locks to ensure no resource leaks # last chance to clean up any missed unlock droppings - cache_dir = Path(cache_dir + "/gitfs/work") + cache_dir = pathlib.Path(cache_dir + "/gitfs/work") if cache_dir.exists and cache_dir.is_dir(): file_list = list(cache_dir.glob("**/*.lk")) file_del_list = [] diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index e460ef6b63e..00a2faa7bcb 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -28,7 +28,7 @@ try: except ImportError: import salt.utils.win_functions -pytestmark = 
[pytest.mark.skip_unless_on_linux] +pytestmark = [pytest.mark.skip_on_windows] log = logging.getLogger(__name__) From 81e39a30421ff0a12c1d41f2cf17dabc0470a5d4 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Thu, 16 May 2024 16:35:38 -0600 Subject: [PATCH 047/160] Refractor after review --- salt/utils/gitfs.py | 122 ++++++++++++++++++++++++ salt/utils/process.py | 211 +++++++++++++++++++++--------------------- 2 files changed, 229 insertions(+), 104 deletions(-) diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index ffaecfc97ff..4b1569a9fee 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -13,6 +13,7 @@ import io import logging import multiprocessing import os +import pathlib import shlex import shutil import stat @@ -82,6 +83,14 @@ _INVALID_REPO = ( log = logging.getLogger(__name__) +HAS_PSUTIL = False +try: + import psutil + + HAS_PSUTIL = True +except ImportError: + pass + # pylint: disable=import-error try: if ( @@ -516,6 +525,18 @@ class GitProvider: os.makedirs(self._salt_working_dir) self.fetch_request_check() + if HAS_PSUTIL: + cur_pid = os.getpid() + process = psutil.Process(cur_pid) + if isinstance(process, salt.utils.process.Process): + cache_dir = self.opts.get("cachedir", None) + gitfs_active = self.opts.get("gitfs_remotes", None) + if cache_dir and gitfs_active: + log.warning( + "DGM class GitProvider registering gitfs_zombie_cleanup" + ) + process.register_finalize_method(gitfs_zombie_cleanup, cache_dir) + def get_cache_basehash(self): return self._cache_basehash @@ -3631,3 +3652,104 @@ class WinRepo(GitBase): cachedir = self.do_checkout(repo, fetch_on_fail=fetch_on_fail) if cachedir is not None: self.winrepo_dirs[repo.id] = cachedir + + +## DGM wip code +def gitfs_zombie_cleanup(cache_dir): + """ + Clean up zombie processes that used gitfs + Initial wip + """ + log.warning("DGM class GitProvider gitfs_zombie_cleanup entry") + cur_pid = os.getpid() + mach_id = _get_machine_identifier().get("machine_id", 
"no_machine_id_available") + log.debug("exiting for process id %s and machine identifer %s", cur_pid, mach_id) + + # need to clean up any resources left around like lock files if using gitfs + # example: lockfile + # /var/cache/salt/master/gitfs/work/NlJQs6Pss_07AugikCrmqfmqEFrfPbCDBqGLBiCd3oU=/_/update.lk + # check for gitfs file locks to ensure no resource leaks + # last chance to clean up any missed unlock droppings + cache_dir = pathlib.Path(cache_dir + "/gitfs/work") + if cache_dir.exists and cache_dir.is_dir(): + file_list = list(cache_dir.glob("**/*.lk")) + file_del_list = [] + file_pid = 0 + file_mach_id = 0 + try: + for file_name in file_list: + with salt.utils.files.fopen(file_name, "r") as fd_: + try: + file_pid = int( + salt.utils.stringutils.to_unicode(fd_.readline()).rstrip() + ) + except ValueError: + # Lock file is empty, set pid to 0 so it evaluates as False. + file_pid = 0 + try: + file_mach_id = salt.utils.stringutils.to_unicode( + fd_.readline() + ).rstrip() + except ValueError: + # Lock file is empty, set mach_id to 0 so it evaluates False. + file_mach_id = 0 + + if cur_pid == file_pid: + if mach_id != file_mach_id: + if not file_mach_id: + msg = ( + f"gitfs lock file for pid '{file_pid}' does not " + "contain a machine id, deleting lock file which may " + "affect if using multi-master with shared gitfs cache, " + "the lock may have been obtained by another master " + "recommend updating Salt version on other masters to a " + "version which insert machine identification in lock a file." 
+ ) + log.debug(msg) + file_del_list.append((file_name, file_pid, file_mach_id)) + else: + file_del_list.append((file_name, file_pid, file_mach_id)) + + except FileNotFoundError: + log.debug("gitfs lock file: %s not found", file_name) + + for file_name, file_pid, file_mach_id in file_del_list: + try: + os.remove(file_name) + except OSError as exc: + if exc.errno == errno.ENOENT: + # No lock file present + msg = ( + "SIGTERM clean up of resources attempted to remove lock " + f"file {file_name}, pid '{file_pid}', machine identifier " + f"'{mach_id}' but it did not exist, exception : {exc} " + ) + log.debug(msg) + + elif exc.errno == errno.EISDIR: + # Somehow this path is a directory. Should never happen + # unless some wiseguy manually creates a directory at this + # path, but just in case, handle it. + try: + shutil.rmtree(file_name) + except OSError as exc: + msg = ( + f"SIGTERM clean up of resources, lock file '{file_name}'" + f", pid '{file_pid}', machine identifier '{file_mach_id}'" + f"was a directory, removed directory, exception : '{exc}'" + ) + log.debug(msg) + else: + msg = ( + "SIGTERM clean up of resources, unable to remove lock file " + f"'{file_name}', pid '{file_pid}', machine identifier " + f"'{file_mach_id}', exception : '{exc}'" + ) + log.debug(msg) + else: + msg = ( + "SIGTERM clean up of resources, removed lock file " + f"'{file_name}', pid '{file_pid}', machine identifier " + f"'{file_mach_id}'" + ) + log.debug(msg) diff --git a/salt/utils/process.py b/salt/utils/process.py index a76aec7dfc8..6a8bcad0d4e 100644 --- a/salt/utils/process.py +++ b/salt/utils/process.py @@ -13,9 +13,7 @@ import logging import multiprocessing import multiprocessing.util import os -import pathlib import queue -import shutil import signal import socket import subprocess @@ -32,6 +30,10 @@ import salt.utils.versions from salt.ext.tornado import gen from salt.utils.platform import get_machine_identifier as _get_machine_identifier +## DGM import pathlib +## DGM import 
shutil + + log = logging.getLogger(__name__) HAS_PSUTIL = False @@ -210,7 +212,7 @@ def get_process_info(pid=None): # pid_exists can have false positives # for example Windows reserves PID 5 in a hack way - # another reasons is the the process requires kernel permissions + # another reasons is the process requires kernel permissions try: raw_process_info.status() except psutil.NoSuchProcess: @@ -1076,15 +1078,16 @@ class SignalHandlingProcess(Process): msg += ". Exiting" log.debug(msg) - mach_id = _get_machine_identifier().get("machine_id", "no_machine_id_available") - log.debug( - "exiting for process id %s and machine identifer %s", os.getpid(), mach_id - ) + ## DGM mach_id = _get_machine_identifier().get("machine_id", "no_machine_id_available") + ## DGM log.debug( + ## DGM "exiting for process id %s and machine identifer %s", os.getpid(), mach_id + ## DGM ) + ## DGM + ## DGM cur_pid = os.getpid() - cur_pid = os.getpid() if HAS_PSUTIL: try: - process = psutil.Process(cur_pid) + process = psutil.Process(os.getpid()) if hasattr(process, "children"): for child in process.children(recursive=True): try: @@ -1098,105 +1101,105 @@ class SignalHandlingProcess(Process): os.getpid(), ) - # need to clean up any resources left around like lock files if using gitfs - # example: lockfile i - # /var/cache/salt/master/gitfs/work/NlJQs6Pss_07AugikCrmqfmqEFrfPbCDBqGLBiCd3oU=/_/update.lk - cache_dir = self.opts.get("cachedir", None) - gitfs_active = self.opts.get("gitfs_remotes", None) - if cache_dir and gitfs_active: - # check for gitfs file locks to ensure no resource leaks - # last chance to clean up any missed unlock droppings - cache_dir = pathlib.Path(cache_dir + "/gitfs/work") - if cache_dir.exists and cache_dir.is_dir(): - file_list = list(cache_dir.glob("**/*.lk")) - file_del_list = [] - file_pid = 0 - file_mach_id = 0 - try: - for file_name in file_list: - with salt.utils.files.fopen(file_name, "r") as fd_: - try: - file_pid = int( - salt.utils.stringutils.to_unicode( - 
fd_.readline() - ).rstrip() - ) - except ValueError: - # Lock file is empty, set pid to 0 so it evaluates as False. - file_pid = 0 - try: - file_mach_id = ( - salt.utils.stringutils.to_unicode( - fd_.readline() - ).rstrip() - ) - except ValueError: - # Lock file is empty, set mach_id to 0 so it evaluates False. - file_mach_id = 0 + ## DGM # need to clean up any resources left around like lock files if using gitfs + ## DGM # example: lockfile i + ## DGM # /var/cache/salt/master/gitfs/work/NlJQs6Pss_07AugikCrmqfmqEFrfPbCDBqGLBiCd3oU=/_/update.lk + ## DGM cache_dir = self.opts.get("cachedir", None) + ## DGM gitfs_active = self.opts.get("gitfs_remotes", None) + ## DGM if cache_dir and gitfs_active: + ## DGM # check for gitfs file locks to ensure no resource leaks + ## DGM # last chance to clean up any missed unlock droppings + ## DGM cache_dir = pathlib.Path(cache_dir + "/gitfs/work") + ## DGM if cache_dir.exists and cache_dir.is_dir(): + ## DGM file_list = list(cache_dir.glob("**/*.lk")) + ## DGM file_del_list = [] + ## DGM file_pid = 0 + ## DGM file_mach_id = 0 + ## DGM try: + ## DGM for file_name in file_list: + ## DGM with salt.utils.files.fopen(file_name, "r") as fd_: + ## DGM try: + ## DGM file_pid = int( + ## DGM salt.utils.stringutils.to_unicode( + ## DGM fd_.readline() + ## DGM ).rstrip() + ## DGM ) + ## DGM except ValueError: + ## DGM # Lock file is empty, set pid to 0 so it evaluates as False. + ## DGM file_pid = 0 + ## DGM try: + ## DGM file_mach_id = ( + ## DGM salt.utils.stringutils.to_unicode( + ## DGM fd_.readline() + ## DGM ).rstrip() + ## DGM ) + ## DGM except ValueError: + ## DGM # Lock file is empty, set mach_id to 0 so it evaluates False. 
+ ## DGM file_mach_id = 0 - if cur_pid == file_pid: - if mach_id != file_mach_id: - if not file_mach_id: - msg = ( - f"gitfs lock file for pid '{file_pid}' does not " - "contain a machine id, deleting lock file which may " - "affect if using multi-master with shared gitfs cache, " - "the lock may have been obtained by another master " - "recommend updating Salt version on other masters to a " - "version which insert machine identification in lock a file." - ) - log.debug(msg) - file_del_list.append( - (file_name, file_pid, file_mach_id) - ) - else: - file_del_list.append( - (file_name, file_pid, file_mach_id) - ) + ## DGM if cur_pid == file_pid: + ## DGM if mach_id != file_mach_id: + ## DGM if not file_mach_id: + ## DGM msg = ( + ## DGM f"gitfs lock file for pid '{file_pid}' does not " + ## DGM "contain a machine id, deleting lock file which may " + ## DGM "affect if using multi-master with shared gitfs cache, " + ## DGM "the lock may have been obtained by another master " + ## DGM "recommend updating Salt version on other masters to a " + ## DGM "version which insert machine identification in lock a file." 
+ ## DGM ) + ## DGM log.debug(msg) + ## DGM file_del_list.append( + ## DGM (file_name, file_pid, file_mach_id) + ## DGM ) + ## DGM else: + ## DGM file_del_list.append( + ## DGM (file_name, file_pid, file_mach_id) + ## DGM ) - except FileNotFoundError: - log.debug("gitfs lock file: %s not found", file_name) + ## DGM except FileNotFoundError: + ## DGM log.debug("gitfs lock file: %s not found", file_name) - for file_name, file_pid, file_mach_id in file_del_list: - try: - os.remove(file_name) - except OSError as exc: - if exc.errno == errno.ENOENT: - # No lock file present - msg = ( - "SIGTERM clean up of resources attempted to remove lock " - f"file {file_name}, pid '{file_pid}', machine identifier " - f"'{mach_id}' but it did not exist, exception : {exc} " - ) - log.debug(msg) + ## DGM for file_name, file_pid, file_mach_id in file_del_list: + ## DGM try: + ## DGM os.remove(file_name) + ## DGM except OSError as exc: + ## DGM if exc.errno == errno.ENOENT: + ## DGM # No lock file present + ## DGM msg = ( + ## DGM "SIGTERM clean up of resources attempted to remove lock " + ## DGM f"file {file_name}, pid '{file_pid}', machine identifier " + ## DGM f"'{mach_id}' but it did not exist, exception : {exc} " + ## DGM ) + ## DGM log.debug(msg) - elif exc.errno == errno.EISDIR: - # Somehow this path is a directory. Should never happen - # unless some wiseguy manually creates a directory at this - # path, but just in case, handle it. 
- try: - shutil.rmtree(file_name) - except OSError as exc: - msg = ( - f"SIGTERM clean up of resources, lock file '{file_name}'" - f", pid '{file_pid}', machine identifier '{file_mach_id}'" - f"was a directory, removed directory, exception : '{exc}'" - ) - log.debug(msg) - else: - msg = ( - "SIGTERM clean up of resources, unable to remove lock file " - f"'{file_name}', pid '{file_pid}', machine identifier " - f"'{file_mach_id}', exception : '{exc}'" - ) - log.debug(msg) - else: - msg = ( - "SIGTERM clean up of resources, removed lock file " - f"'{file_name}', pid '{file_pid}', machine identifier " - f"'{file_mach_id}'" - ) - log.debug(msg) + ## DGM elif exc.errno == errno.EISDIR: + ## DGM # Somehow this path is a directory. Should never happen + ## DGM # unless some wiseguy manually creates a directory at this + ## DGM # path, but just in case, handle it. + ## DGM try: + ## DGM shutil.rmtree(file_name) + ## DGM except OSError as exc: + ## DGM msg = ( + ## DGM f"SIGTERM clean up of resources, lock file '{file_name}'" + ## DGM f", pid '{file_pid}', machine identifier '{file_mach_id}'" + ## DGM f"was a directory, removed directory, exception : '{exc}'" + ## DGM ) + ## DGM log.debug(msg) + ## DGM else: + ## DGM msg = ( + ## DGM "SIGTERM clean up of resources, unable to remove lock file " + ## DGM f"'{file_name}', pid '{file_pid}', machine identifier " + ## DGM f"'{file_mach_id}', exception : '{exc}'" + ## DGM ) + ## DGM log.debug(msg) + ## DGM else: + ## DGM msg = ( + ## DGM "SIGTERM clean up of resources, removed lock file " + ## DGM f"'{file_name}', pid '{file_pid}', machine identifier " + ## DGM f"'{file_mach_id}'" + ## DGM ) + ## DGM log.debug(msg) except psutil.NoSuchProcess: log.warning( From eb07f58277f524d33e6c9a8243e325043193a120 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Fri, 17 May 2024 10:10:16 -0600 Subject: [PATCH 048/160] Added debugging --- salt/utils/gitfs.py | 28 +++++++++++++++++--- 
tests/pytests/unit/utils/test_gitfs_locks.py | 17 ++++++++++-- 2 files changed, 39 insertions(+), 6 deletions(-) diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index 4b1569a9fee..3b94e660f87 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -251,7 +251,7 @@ class GitProvider: override_params, cache_root, role="gitfs", - ): + ): # pylint: disable=logging-fstring-interpolation self.opts = opts self.role = role @@ -528,12 +528,27 @@ class GitProvider: if HAS_PSUTIL: cur_pid = os.getpid() process = psutil.Process(cur_pid) + log.warning( + f"DGM class GitProvider dunder init, cur_pid '{cur_pid}', process '{process}'" + ) + print( + f"DGM class GitProvider dunder init, cur_pid '{cur_pid}', process '{process}'" + ) if isinstance(process, salt.utils.process.Process): cache_dir = self.opts.get("cachedir", None) gitfs_active = self.opts.get("gitfs_remotes", None) + log.warning( + f"DGM class GitProvider dunder init, cache_dir '{cache_dir}', gitfs_active '{gitfs_active}'" + ) + print( + f"DGM class GitProvider dunder init, cache_dir '{cache_dir}', gitfs_active '{gitfs_active}'" + ) if cache_dir and gitfs_active: log.warning( - "DGM class GitProvider registering gitfs_zombie_cleanup" + f"DGM class GitProvider registering gitfs_zombie_cleanup with cache_dir '{cache_dir}'" + ) + print( + f"DGM class GitProvider registering gitfs_zombie_cleanup with cache_dir '{cache_dir}'" ) process.register_finalize_method(gitfs_zombie_cleanup, cache_dir) @@ -3654,16 +3669,21 @@ class WinRepo(GitBase): self.winrepo_dirs[repo.id] = cachedir -## DGM wip code +## DGM wip code +# pylint: disable=logging-fstring-interpolation def gitfs_zombie_cleanup(cache_dir): """ Clean up zombie processes that used gitfs Initial wip """ - log.warning("DGM class GitProvider gitfs_zombie_cleanup entry") + log.warning( + f"DGM class GitProvider gitfs_zombie_cleanup entry, cache_dir '{cache_dir}'" + ) + print(f"DGM class GitProvider gitfs_zombie_cleanup entry, cache_dir '{cache_dir}'") cur_pid = 
os.getpid() mach_id = _get_machine_identifier().get("machine_id", "no_machine_id_available") log.debug("exiting for process id %s and machine identifer %s", cur_pid, mach_id) + print(f"exiting for process id '{cur_pid}' and machine identifer '{mach_id}'") # need to clean up any resources left around like lock files if using gitfs # example: lockfile diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index 00a2faa7bcb..3a342ee5abb 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -562,11 +562,16 @@ class KillProcessTest(salt.utils.process.SignalHandlingProcess): @pytest.mark.slow_test @pytest.mark.skip_unless_on_linux @pytest.mark.timeout_unless_on_windows(120) -def test_git_provider_sigterm_cleanup(main_class): +def test_git_provider_sigterm_cleanup( + main_class, +): # pylint: disable=logging-fstring-interpolation """ Start process which will obtain lock, and leave it locked then kill the process via SIGTERM and ensure locked resources are cleaned up """ + log.warning("DGM test_git_provider_sigterm_cleanup entry") + print("DGM test_git_provider_sigterm_cleanup entry") + provider = main_class.remotes[0] with salt.utils.process.default_signals(signal.SIGINT, signal.SIGTERM): @@ -576,6 +581,11 @@ def test_git_provider_sigterm_cleanup(main_class): while not proc.is_alive(): time.sleep(1) # give some time for it to be started + log.warning( + f"DGM test_git_provider_sigterm_cleanup, post add_process, proc '{proc}'" + ) + print(f"DGM test_git_provider_sigterm_cleanup, post add_process, proc '{proc}'") + procmgr.run(asynchronous=True) time.sleep(2) # give some time for it to terminate @@ -583,12 +593,15 @@ def test_git_provider_sigterm_cleanup(main_class): # child process should be alive file_name = provider._get_lock_file("update") + log.warning(f"DGM test_git_provider_sigterm_cleanup, file_name '{file_name}'") + print(f"DGM 
test_git_provider_sigterm_cleanup, file_name '{file_name}'") + assert pathlib.Path(file_name).exists() assert pathlib.Path(file_name).is_file() procmgr.terminate() # sends a SIGTERM - time.sleep(1) # give some time for it to terminate + time.sleep(2) # give some time for it to terminate assert not proc.is_alive() assert not pathlib.Path(file_name).exists() From 01a44b41f8fec8637095cbff410359340b10ca92 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Fri, 17 May 2024 14:49:15 -0600 Subject: [PATCH 049/160] Added debugging info --- salt/utils/process.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/salt/utils/process.py b/salt/utils/process.py index 6a8bcad0d4e..16558c2236f 100644 --- a/salt/utils/process.py +++ b/salt/utils/process.py @@ -1018,6 +1018,13 @@ class Process(multiprocessing.Process): # Run any registered process finalization routines try: for method, args, kwargs in self._finalize_methods: + # pylint: disable=logging-fstring-interpolation + log.warning( + f"DGM class Process wrapped_run_func, method '{method}', args '{args}', kwargs '{kwargs}'" + ) + print( + f"DGM class Process wrapped_run_func, method '{method}', args '{args}', kwargs '{kwargs}'" + ) try: method(*args, **kwargs) except Exception: # pylint: disable=broad-except @@ -1050,8 +1057,21 @@ class Process(multiprocessing.Process): """ Register a function to run as process terminates """ + # pylint: disable=logging-fstring-interpolation + log.warning( + f"DGM class Process register_finalize_method entry, function '{function}', args '{args}', kwargs '{kwargs}'" + ) + print( + f"DGM class Process register_finalize_method entry, function '{function}', args '{args}', kwargs '{kwargs}'" + ) finalize_method_tuple = (function, args, kwargs) if finalize_method_tuple not in self._finalize_methods: + log.warning( + f"DGM register_finalize_method, appending tuple finalize_method_tuple '{finalize_method_tuple}'" + ) + print( + f"DGM 
register_finalize_method, appending tuple finalize_method_tuple '{finalize_method_tuple}'" + ) self._finalize_methods.append(finalize_method_tuple) From 8198500f51ef2bfaaabed9a0c50a10a460d0fdbb Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Mon, 20 May 2024 15:07:24 -0600 Subject: [PATCH 050/160] Saving debug info for proc and psutil processes which shows difference - psutil.process(pid) doesn't give register function --- salt/utils/gitfs.py | 3 +- tests/pytests/unit/utils/test_gitfs_locks.py | 34 ++++++++++++++++++++ 2 files changed, 36 insertions(+), 1 deletion(-) diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index 3b94e660f87..fdd9b05a51c 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -528,11 +528,12 @@ class GitProvider: if HAS_PSUTIL: cur_pid = os.getpid() process = psutil.Process(cur_pid) + dgm_process_dir = dir(process) log.warning( f"DGM class GitProvider dunder init, cur_pid '{cur_pid}', process '{process}'" ) print( - f"DGM class GitProvider dunder init, cur_pid '{cur_pid}', process '{process}'" + f"DGM class GitProvider dunder init, cur_pid '{cur_pid}', process '{process}', process dir '{dgm_process_dir}'" ) if isinstance(process, salt.utils.process.Process): cache_dir = self.opts.get("cachedir", None) diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index 3a342ee5abb..eb840b5564a 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -9,6 +9,7 @@ import pathlib import signal import time +import psutil import pytest from saltfactories.utils import random_string @@ -586,6 +587,39 @@ def test_git_provider_sigterm_cleanup( ) print(f"DGM test_git_provider_sigterm_cleanup, post add_process, proc '{proc}'") + print("DGM test area entry") + + dgm_proc_dir = dir(proc) + print( + f"DGM test_git_provider_sigterm_cleanup, post add_process, proc '{proc}', proc dir '{dgm_proc_dir}'" + ) + + dgm_pid = 
proc.pid + print(f"DGM test_git_provider_sigterm_cleanup, proc pid '{proc.pid}'") + + dgm_process = psutil.Process(dgm_pid) + print(f"DGM test_git_provider_sigterm_cleanup, psutil process '{dgm_process}'") + + if isinstance(proc, salt.utils.process.Process): + print( + "DGM test_git_provider_sigterm_cleanup, proc isinstance salt utils process Process is TRUE" + ) + else: + print( + "DGM test_git_provider_sigterm_cleanup, proc isinstance salt utils process Process is FALSE" + ) + + if isinstance(dgm_process, salt.utils.process.Process): + print( + "DGM test_git_provider_sigterm_cleanup, process isinstance salt utils process Process is TRUE" + ) + else: + print( + "DGM test_git_provider_sigterm_cleanup, process isinstance salt utils process Process is FALSE" + ) + + print("DGM test area exit") + procmgr.run(asynchronous=True) time.sleep(2) # give some time for it to terminate From 9bb92ed23dc8498566bd8fdd4753eace085ee727 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Wed, 22 May 2024 10:59:21 -0600 Subject: [PATCH 051/160] Revised finalization of processes --- salt/utils/gitfs.py | 50 ++++--- salt/utils/process.py | 143 ++++++++++++++++++- tests/pytests/unit/utils/test_gitfs_locks.py | 71 +++++---- 3 files changed, 212 insertions(+), 52 deletions(-) diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index fdd9b05a51c..ee615bae49b 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -33,6 +33,7 @@ import salt.utils.hashutils import salt.utils.itertools import salt.utils.path import salt.utils.platform +import salt.utils.process import salt.utils.stringutils import salt.utils.url import salt.utils.user @@ -43,7 +44,6 @@ from salt.exceptions import FileserverConfigError, GitLockError, get_error_messa from salt.utils.event import tagify from salt.utils.odict import OrderedDict from salt.utils.platform import get_machine_identifier as _get_machine_identifier -from salt.utils.process import os_is_running as pid_exists from 
salt.utils.versions import Version VALID_REF_TYPES = _DEFAULT_MASTER_OPTS["gitfs_ref_types"] @@ -258,6 +258,8 @@ class GitProvider: def _val_cb(x, y): return str(y) + ## DGM print(f"DGM class GitProvider dunder init, opts '{opts}'", flush=True) + # get machine_identifier self.mach_id = _get_machine_identifier().get( "machine_id", "no_machine_id_available" @@ -533,25 +535,29 @@ class GitProvider: f"DGM class GitProvider dunder init, cur_pid '{cur_pid}', process '{process}'" ) print( - f"DGM class GitProvider dunder init, cur_pid '{cur_pid}', process '{process}', process dir '{dgm_process_dir}'" + f"DGM class GitProvider dunder init, cur_pid '{cur_pid}', process '{process}', process dir '{dgm_process_dir}'", + flush=True, ) - if isinstance(process, salt.utils.process.Process): - cache_dir = self.opts.get("cachedir", None) - gitfs_active = self.opts.get("gitfs_remotes", None) + cache_dir = self.opts.get("cachedir", None) + gitfs_active = self.opts.get("gitfs_remotes", None) + log.warning( + f"DGM class GitProvider dunder init, cache_dir '{cache_dir}', gitfs_active '{gitfs_active}'" + ) + print( + f"DGM class GitProvider dunder init, cache_dir '{cache_dir}', gitfs_active '{gitfs_active}'", + flush=True, + ) + if cache_dir and gitfs_active: log.warning( - f"DGM class GitProvider dunder init, cache_dir '{cache_dir}', gitfs_active '{gitfs_active}'" + f"DGM class GitProvider registering gitfs_zombie_cleanup with cache_dir '{cache_dir}'" ) print( - f"DGM class GitProvider dunder init, cache_dir '{cache_dir}', gitfs_active '{gitfs_active}'" + f"DGM class GitProvider registering gitfs_zombie_cleanup with cache_dir '{cache_dir}'", + flush=True, + ) + salt.utils.process.register_cleanup_zombie_function( + gitfs_zombie_cleanup, cache_dir ) - if cache_dir and gitfs_active: - log.warning( - f"DGM class GitProvider registering gitfs_zombie_cleanup with cache_dir '{cache_dir}'" - ) - print( - f"DGM class GitProvider registering gitfs_zombie_cleanup with cache_dir '{cache_dir}'" - ) 
- process.register_finalize_method(gitfs_zombie_cleanup, cache_dir) def get_cache_basehash(self): return self._cache_basehash @@ -1028,7 +1034,7 @@ class GitProvider: if self.mach_id or mach_id: msg += f" for machine_id {mach_id}, current machine_id {self.mach_id}" - if not pid_exists(pid): + if not salt.utils.process.os_is_running(pid): if self.mach_id != mach_id: msg += ( " but this process is not running. The " @@ -1058,7 +1064,7 @@ class GitProvider: if failhard: raise return - elif pid and pid_exists(pid): + elif pid and salt.utils.process.os_is_running(pid): log.warning( "Process %d has a %s %s lock (%s) on machine_id %s", pid, @@ -3680,11 +3686,17 @@ def gitfs_zombie_cleanup(cache_dir): log.warning( f"DGM class GitProvider gitfs_zombie_cleanup entry, cache_dir '{cache_dir}'" ) - print(f"DGM class GitProvider gitfs_zombie_cleanup entry, cache_dir '{cache_dir}'") + print( + f"DGM class GitProvider gitfs_zombie_cleanup entry, cache_dir '{cache_dir}'", + flush=True, + ) cur_pid = os.getpid() mach_id = _get_machine_identifier().get("machine_id", "no_machine_id_available") log.debug("exiting for process id %s and machine identifer %s", cur_pid, mach_id) - print(f"exiting for process id '{cur_pid}' and machine identifer '{mach_id}'") + print( + f"DGM exiting for process id '{cur_pid}' and machine identifer '{mach_id}'", + flush=True, + ) # need to clean up any resources left around like lock files if using gitfs # example: lockfile diff --git a/salt/utils/process.py b/salt/utils/process.py index 16558c2236f..af3a900b322 100644 --- a/salt/utils/process.py +++ b/salt/utils/process.py @@ -51,6 +51,9 @@ try: except ImportError: HAS_SETPROCTITLE = False +## DGM +_INTERNAL_PROCESS_ZOMBIE_LIST = [] + def appendproctitle(name): """ @@ -530,6 +533,9 @@ class ProcessManager: target=tgt, args=args, kwargs=kwargs, name=name or tgt.__qualname__ ) + ## DGM try cleaning up call + process.register_finalize_method(cleanup_zombie_process, args, kwargs) + if isinstance(process, 
SignalHandlingProcess): with default_signals(signal.SIGINT, signal.SIGTERM): process.start() @@ -684,6 +690,7 @@ class ProcessManager: else: for pid, p_map in self._process_map.copy().items(): log.trace("Terminating pid %s: %s", pid, p_map["Process"]) + print("Terminating pid %s: %s", pid, p_map["Process"]) if args: # escalate the signal to the process try: @@ -706,9 +713,11 @@ class ProcessManager: end_time = time.time() + self.wait_for_kill # when to die log.trace("Waiting to kill process manager children") + print("Waiting to kill process manager children") while self._process_map and time.time() < end_time: for pid, p_map in self._process_map.copy().items(): log.trace("Joining pid %s: %s", pid, p_map["Process"]) + print("Joining pid %s: %s", pid, p_map["Process"]) p_map["Process"].join(0) if not p_map["Process"].is_alive(): @@ -757,7 +766,15 @@ class ProcessManager: for (k, v) in self._process_map.items() ), ) + print( + "Some processes failed to respect the KILL signal: %s", + "; ".join( + "Process: {} (Pid: {})".format(v["Process"], k) + for (k, v) in self._process_map.items() + ), + ) log.info("kill_children retries left: %s", available_retries) + print("kill_children retries left: %s", available_retries) kwargs["retry"] = available_retries - 1 return self.kill_children(*args, **kwargs) else: @@ -771,18 +788,34 @@ class ProcessManager: ) in self._process_map.items() ), ) + print( + "Failed to kill the following processes: %s", + "; ".join( + "Process: {} (Pid: {})".format(v["Process"], k) + for ( + k, + v, + ) in self._process_map.items() + ), + ) log.warning( "Salt will either fail to terminate now or leave some " "zombie processes behind" ) + print( + "Salt will either fail to terminate now or leave some " + "zombie processes behind" + ) def terminate(self): """ Properly terminate this process manager instance """ + print("DGM class ProcessManager terminate entry", flush=True) self.stop_restarting() self.send_signal_to_processes(signal.SIGTERM) 
self.kill_children() + print("DGM class ProcessManager terminate exit", flush=True) def _handle_signals(self, *args, **kwargs): # first lets reset signal handlers to default one to prevent running this twice @@ -1023,7 +1056,8 @@ class Process(multiprocessing.Process): f"DGM class Process wrapped_run_func, method '{method}', args '{args}', kwargs '{kwargs}'" ) print( - f"DGM class Process wrapped_run_func, method '{method}', args '{args}', kwargs '{kwargs}'" + f"DGM class Process wrapped_run_func, method '{method}', args '{args}', kwargs '{kwargs}'", + flush=True, ) try: method(*args, **kwargs) @@ -1062,7 +1096,8 @@ class Process(multiprocessing.Process): f"DGM class Process register_finalize_method entry, function '{function}', args '{args}', kwargs '{kwargs}'" ) print( - f"DGM class Process register_finalize_method entry, function '{function}', args '{args}', kwargs '{kwargs}'" + f"DGM class Process register_finalize_method entry, function '{function}', args '{args}', kwargs '{kwargs}'", + flush=True, ) finalize_method_tuple = (function, args, kwargs) if finalize_method_tuple not in self._finalize_methods: @@ -1070,7 +1105,8 @@ class Process(multiprocessing.Process): f"DGM register_finalize_method, appending tuple finalize_method_tuple '{finalize_method_tuple}'" ) print( - f"DGM register_finalize_method, appending tuple finalize_method_tuple '{finalize_method_tuple}'" + f"DGM register_finalize_method, appending tuple finalize_method_tuple '{finalize_method_tuple}'", + flush=True, ) self._finalize_methods.append(finalize_method_tuple) @@ -1098,6 +1134,8 @@ class SignalHandlingProcess(Process): msg += ". 
Exiting" log.debug(msg) + print(f"DGM class SignalHandlingProcess, _handle_signals {msg}", flush=True) + ## DGM mach_id = _get_machine_identifier().get("machine_id", "no_machine_id_available") ## DGM log.debug( ## DGM "exiting for process id %s and machine identifer %s", os.getpid(), mach_id @@ -1105,6 +1143,36 @@ class SignalHandlingProcess(Process): ## DGM ## DGM cur_pid = os.getpid() + # Run any registered process finalization routines + print( + "DGM class SignalHandlingProcess, attempt to print out _finalize_methods", + flush=True, + ) + for method, args, kwargs in self._finalize_methods: + # pylint: disable=logging-fstring-interpolation + log.warning( + f"DGM class SignalHandlingProcess, method '{method}', args '{args}', kwargs '{kwargs}'" + ) + print( + f"DGM class SignalHandlingProcess, method '{method}', args '{args}', kwargs '{kwargs}', flush=True" + ) + try: + method(*args, **kwargs) + except Exception: # pylint: disable=broad-except + log.exception( + "Failed to run finalize callback on %s; method=%r; args=%r; and kwargs=%r", + self, + method, + args, + kwargs, + ) + continue + + print( + "DGM class SignalHandlingProcess, done to print out _finalize_methods", + flush=True, + ) + if HAS_PSUTIL: try: process = psutil.Process(os.getpid()) @@ -1292,3 +1360,72 @@ class SubprocessList: self.processes.remove(proc) self.count -= 1 log.debug("Subprocess %s cleaned up", proc.name) + + +def cleanup_zombie_process(*args, **kwargs): + """ + Generic process to allow for any registered process cleanup routines to execute. + + While class Process has a register_finalize_method, when a process is looked up by pid + using psutil.Process, there is no method available to register a cleanup process. + + Hence, this function is added as part of the add_process to allow usage of other cleanup processes + which cannot be added by the register_finalize_method. 
+ """ + + print("\nDGM cleanup_zombie_process entry\n", flush=True) + + # Run any register process cleanup routines + for method, args, kwargs in _INTERNAL_PROCESS_ZOMBIE_LIST: + # pylint: disable=logging-fstring-interpolation + log.warning( + f"DGM cleanup_zombie_process, method '{method}', args '{args}', kwargs '{kwargs}'" + ) + print( + f"DGM cleanup_zombie_process, method '{method}', args '{args}', kwargs '{kwargs}'", + flush=True, + ) + try: + method(*args, **kwargs) + except Exception: # pylint: disable=broad-except + log.exception( + "Failed to run registered function finalize callback; method=%r; args=%r; and kwargs=%r", + method, + args, + kwargs, + ) + continue + + print("\nDGM cleanup_zombie_process exit\n", flush=True) + + +def register_cleanup_zombie_function(function, *args, **kwargs): + """ + Register a function to run as process terminates + + While class Process has a register_finalize_method, when a process is looked up by pid + using psutil.Process, there is no method available to register a cleanup process. + + Hence, this function can be used to register a function to allow cleanup processes + which cannot be added by the register_finalize_method. 
+ + Note: there is no deletion, since it is assummed that if something is registered, it will continue to be used + """ + # pylint: disable=logging-fstring-interpolation + log.warning( + f"DGM register_cleanup_zombie_function entry, function '{function}', args '{args}', kwargs '{kwargs}'" + ) + print( + f"DGM register_cleanup_zombie_function entry, function '{function}', args '{args}', kwargs '{kwargs}'", + flush=True, + ) + finalize_function_tuple = (function, args, kwargs) + if finalize_function_tuple not in _INTERNAL_PROCESS_ZOMBIE_LIST: + log.warning( + f"DGM register_cleanup_zombie_function, appending tuple finalize_function_tuple '{finalize_function_tuple}'" + ) + print( + f"DGM register_cleanup_zombie_function, appending tuple finalize_function_tuple '{finalize_function_tuple}'", + flush=True, + ) + _INTERNAL_PROCESS_ZOMBIE_LIST.append(finalize_function_tuple) diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index eb840b5564a..075b872b89b 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -9,7 +9,7 @@ import pathlib import signal import time -import psutil +## import psutil import pytest from saltfactories.utils import random_string @@ -552,6 +552,9 @@ class KillProcessTest(salt.utils.process.SignalHandlingProcess): lockfile = self.provider._get_lock_file() log.debug("KillProcessTest acquried lock file %s", lockfile) + killtest_pid = os.getpid() + print(f"KillProcessTest pid '{killtest_pid}', acquried lock file '{lockfile}'") + # check that lock has been released assert self.provider._master_lock.acquire(timeout=5) @@ -571,7 +574,7 @@ def test_git_provider_sigterm_cleanup( then kill the process via SIGTERM and ensure locked resources are cleaned up """ log.warning("DGM test_git_provider_sigterm_cleanup entry") - print("DGM test_git_provider_sigterm_cleanup entry") + print("DGM test_git_provider_sigterm_cleanup entry", flush=True) provider = 
main_class.remotes[0] @@ -585,40 +588,45 @@ def test_git_provider_sigterm_cleanup( log.warning( f"DGM test_git_provider_sigterm_cleanup, post add_process, proc '{proc}'" ) - print(f"DGM test_git_provider_sigterm_cleanup, post add_process, proc '{proc}'") - print("DGM test area entry") + ## DGM print("DGM test area entry\n\n\n\n", flush=True) - dgm_proc_dir = dir(proc) - print( - f"DGM test_git_provider_sigterm_cleanup, post add_process, proc '{proc}', proc dir '{dgm_proc_dir}'" - ) + ## DGM dgm_proc_dir = dir(proc) + ## DGM print(f"DGM test_git_provider_sigterm_cleanup, post add_process, proc '{proc}', proc dir '{dgm_proc_dir}'", flush=True) - dgm_pid = proc.pid - print(f"DGM test_git_provider_sigterm_cleanup, proc pid '{proc.pid}'") + ## DGM dgm_pid = proc.pid + ## DGM print(f"DGM test_git_provider_sigterm_cleanup, proc pid '{proc.pid}'", flush=True) - dgm_process = psutil.Process(dgm_pid) - print(f"DGM test_git_provider_sigterm_cleanup, psutil process '{dgm_process}'") + ## DGM dgm_process = psutil.Process(dgm_pid) + ## DGM print(f"DGM test_git_provider_sigterm_cleanup, psutil process '{dgm_process}'", flush=True) - if isinstance(proc, salt.utils.process.Process): - print( - "DGM test_git_provider_sigterm_cleanup, proc isinstance salt utils process Process is TRUE" - ) - else: - print( - "DGM test_git_provider_sigterm_cleanup, proc isinstance salt utils process Process is FALSE" - ) + ## DGM dgm_process_dir = dir(dgm_process) + ## DGM print(f"DGM test_git_provider_sigterm_cleanup, psutil process '{dgm_process}', process dir '{dgm_process_dir}'", flush=True) + ## DGM ## DGM print(f"DGM test_git_provider_sigterm_cleanup, checking values psutil process '{dgm_process}', pid '{dgm_process.pid}', name '{dgm_process.name()}', username '{dgm_process.username()}', as_dict '{dgm_process.as_dict()}', cmdline '{dgm_process.cmdline()}'", flush=True) + ## DGM print(f"DGM test_git_provider_sigterm_cleanup, checking values psutil process '{dgm_process}', pid '{dgm_process.pid}', 
ppid '{dgm_process.ppid}', name '{dgm_process.name()}', username '{dgm_process.username()}', cmdline '{dgm_process.cmdline()}'", flush=True) - if isinstance(dgm_process, salt.utils.process.Process): - print( - "DGM test_git_provider_sigterm_cleanup, process isinstance salt utils process Process is TRUE" - ) - else: - print( - "DGM test_git_provider_sigterm_cleanup, process isinstance salt utils process Process is FALSE" - ) + ## DGM dgm_dict = dgm_process.as_dict() + ## DGM dgm_process_parent_pid = dgm_dict["ppid"] + ## DGM dgm_process_parent = psutil.Process(dgm_process_parent_pid) + ## DGM dgm_process_parent_dir = dir(dgm_process_parent) + ## DGM print(f"DGM test_git_provider_sigterm_cleanup, parent pid '{dgm_process_parent_pid}' psutil process '{dgm_process_parent}', name '{dgm_process_parent.name()}', cmdline '{dgm_process_parent.cmdline()}', dir '{dgm_process_parent_dir}'", flush=True) - print("DGM test area exit") + ## DGM if isinstance(proc, salt.utils.process.Process): + ## DGM print("DGM test_git_provider_sigterm_cleanup, proc isinstance salt utils process Process is TRUE", flush=True) + ## DGM else: + ## DGM print("DGM test_git_provider_sigterm_cleanup, proc isinstance salt utils process Process is FALSE", flush=True) + + ## DGM if isinstance(dgm_process, salt.utils.process.Process): + ## DGM print("DGM test_git_provider_sigterm_cleanup, process isinstance salt utils process Process is TRUE", flush=True) + ## DGM else: + ## DGM print("DGM test_git_provider_sigterm_cleanup, process isinstance salt utils process Process is FALSE", flush=True) + + ## DGM if isinstance(dgm_process_parent, salt.utils.process.ProcessManager): + ## DGM print("DGM test_git_provider_sigterm_cleanup, process isinstance salt utils process ProcessManager is TRUE", flush=True) + ## DGM else: + ## DGM print("DGM test_git_provider_sigterm_cleanup, process isinstance salt utils process ProcessManager is FALSE", flush=True) + + ## DGM print("DGM test area exit\n\n\n\n", flush=True) 
procmgr.run(asynchronous=True) @@ -628,14 +636,17 @@ def test_git_provider_sigterm_cleanup( file_name = provider._get_lock_file("update") log.warning(f"DGM test_git_provider_sigterm_cleanup, file_name '{file_name}'") - print(f"DGM test_git_provider_sigterm_cleanup, file_name '{file_name}'") + print(f"DGM test_git_provider_sigterm_cleanup, file_name '{file_name}'", flush=True) assert pathlib.Path(file_name).exists() assert pathlib.Path(file_name).is_file() + print("DGM test_git_provider_sigterm_cleanup, terminate procmgr start", flush=True) procmgr.terminate() # sends a SIGTERM time.sleep(2) # give some time for it to terminate + print("DGM test_git_provider_sigterm_cleanup, terminate procmgr exit", flush=True) + assert not proc.is_alive() assert not pathlib.Path(file_name).exists() From 332379d06af6bbccb4e881f572042debad6de5e6 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Wed, 22 May 2024 13:32:54 -0600 Subject: [PATCH 052/160] Removed debugging statements --- salt/utils/gitfs.py | 48 +--- salt/utils/process.py | 235 ++----------------- tests/pytests/unit/utils/test_gitfs_locks.py | 58 +---- 3 files changed, 26 insertions(+), 315 deletions(-) diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index ee615bae49b..8cfdfd0e614 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -251,15 +251,13 @@ class GitProvider: override_params, cache_root, role="gitfs", - ): # pylint: disable=logging-fstring-interpolation + ): self.opts = opts self.role = role def _val_cb(x, y): return str(y) - ## DGM print(f"DGM class GitProvider dunder init, opts '{opts}'", flush=True) - # get machine_identifier self.mach_id = _get_machine_identifier().get( "machine_id", "no_machine_id_available" @@ -531,32 +529,11 @@ class GitProvider: cur_pid = os.getpid() process = psutil.Process(cur_pid) dgm_process_dir = dir(process) - log.warning( - f"DGM class GitProvider dunder init, cur_pid '{cur_pid}', process '{process}'" - ) - print( - f"DGM class GitProvider 
dunder init, cur_pid '{cur_pid}', process '{process}', process dir '{dgm_process_dir}'", - flush=True, - ) cache_dir = self.opts.get("cachedir", None) gitfs_active = self.opts.get("gitfs_remotes", None) - log.warning( - f"DGM class GitProvider dunder init, cache_dir '{cache_dir}', gitfs_active '{gitfs_active}'" - ) - print( - f"DGM class GitProvider dunder init, cache_dir '{cache_dir}', gitfs_active '{gitfs_active}'", - flush=True, - ) if cache_dir and gitfs_active: - log.warning( - f"DGM class GitProvider registering gitfs_zombie_cleanup with cache_dir '{cache_dir}'" - ) - print( - f"DGM class GitProvider registering gitfs_zombie_cleanup with cache_dir '{cache_dir}'", - flush=True, - ) - salt.utils.process.register_cleanup_zombie_function( - gitfs_zombie_cleanup, cache_dir + salt.utils.process.register_cleanup_finalize_function( + gitfs_finalize_cleanup, cache_dir ) def get_cache_basehash(self): @@ -3676,27 +3653,12 @@ class WinRepo(GitBase): self.winrepo_dirs[repo.id] = cachedir -## DGM wip code -# pylint: disable=logging-fstring-interpolation -def gitfs_zombie_cleanup(cache_dir): +def gitfs_finalize_cleanup(cache_dir): """ - Clean up zombie processes that used gitfs - Initial wip + Clean up finalize processes that used gitfs """ - log.warning( - f"DGM class GitProvider gitfs_zombie_cleanup entry, cache_dir '{cache_dir}'" - ) - print( - f"DGM class GitProvider gitfs_zombie_cleanup entry, cache_dir '{cache_dir}'", - flush=True, - ) cur_pid = os.getpid() mach_id = _get_machine_identifier().get("machine_id", "no_machine_id_available") - log.debug("exiting for process id %s and machine identifer %s", cur_pid, mach_id) - print( - f"DGM exiting for process id '{cur_pid}' and machine identifer '{mach_id}'", - flush=True, - ) # need to clean up any resources left around like lock files if using gitfs # example: lockfile diff --git a/salt/utils/process.py b/salt/utils/process.py index af3a900b322..3578005266e 100644 --- a/salt/utils/process.py +++ b/salt/utils/process.py 
@@ -30,10 +30,6 @@ import salt.utils.versions from salt.ext.tornado import gen from salt.utils.platform import get_machine_identifier as _get_machine_identifier -## DGM import pathlib -## DGM import shutil - - log = logging.getLogger(__name__) HAS_PSUTIL = False @@ -51,8 +47,8 @@ try: except ImportError: HAS_SETPROCTITLE = False -## DGM -_INTERNAL_PROCESS_ZOMBIE_LIST = [] +# Process finalization function list +_INTERNAL_PROCESS_FINALIZE_FUNCTION_LIST = [] def appendproctitle(name): @@ -533,8 +529,7 @@ class ProcessManager: target=tgt, args=args, kwargs=kwargs, name=name or tgt.__qualname__ ) - ## DGM try cleaning up call - process.register_finalize_method(cleanup_zombie_process, args, kwargs) + process.register_finalize_method(cleanup_finalize_process, args, kwargs) if isinstance(process, SignalHandlingProcess): with default_signals(signal.SIGINT, signal.SIGTERM): @@ -690,7 +685,6 @@ class ProcessManager: else: for pid, p_map in self._process_map.copy().items(): log.trace("Terminating pid %s: %s", pid, p_map["Process"]) - print("Terminating pid %s: %s", pid, p_map["Process"]) if args: # escalate the signal to the process try: @@ -713,11 +707,9 @@ class ProcessManager: end_time = time.time() + self.wait_for_kill # when to die log.trace("Waiting to kill process manager children") - print("Waiting to kill process manager children") while self._process_map and time.time() < end_time: for pid, p_map in self._process_map.copy().items(): log.trace("Joining pid %s: %s", pid, p_map["Process"]) - print("Joining pid %s: %s", pid, p_map["Process"]) p_map["Process"].join(0) if not p_map["Process"].is_alive(): @@ -766,15 +758,7 @@ class ProcessManager: for (k, v) in self._process_map.items() ), ) - print( - "Some processes failed to respect the KILL signal: %s", - "; ".join( - "Process: {} (Pid: {})".format(v["Process"], k) - for (k, v) in self._process_map.items() - ), - ) log.info("kill_children retries left: %s", available_retries) - print("kill_children retries left: %s", 
available_retries) kwargs["retry"] = available_retries - 1 return self.kill_children(*args, **kwargs) else: @@ -788,34 +772,18 @@ class ProcessManager: ) in self._process_map.items() ), ) - print( - "Failed to kill the following processes: %s", - "; ".join( - "Process: {} (Pid: {})".format(v["Process"], k) - for ( - k, - v, - ) in self._process_map.items() - ), - ) log.warning( "Salt will either fail to terminate now or leave some " "zombie processes behind" ) - print( - "Salt will either fail to terminate now or leave some " - "zombie processes behind" - ) def terminate(self): """ Properly terminate this process manager instance """ - print("DGM class ProcessManager terminate entry", flush=True) self.stop_restarting() self.send_signal_to_processes(signal.SIGTERM) self.kill_children() - print("DGM class ProcessManager terminate exit", flush=True) def _handle_signals(self, *args, **kwargs): # first lets reset signal handlers to default one to prevent running this twice @@ -1051,14 +1019,6 @@ class Process(multiprocessing.Process): # Run any registered process finalization routines try: for method, args, kwargs in self._finalize_methods: - # pylint: disable=logging-fstring-interpolation - log.warning( - f"DGM class Process wrapped_run_func, method '{method}', args '{args}', kwargs '{kwargs}'" - ) - print( - f"DGM class Process wrapped_run_func, method '{method}', args '{args}', kwargs '{kwargs}'", - flush=True, - ) try: method(*args, **kwargs) except Exception: # pylint: disable=broad-except @@ -1091,23 +1051,8 @@ class Process(multiprocessing.Process): """ Register a function to run as process terminates """ - # pylint: disable=logging-fstring-interpolation - log.warning( - f"DGM class Process register_finalize_method entry, function '{function}', args '{args}', kwargs '{kwargs}'" - ) - print( - f"DGM class Process register_finalize_method entry, function '{function}', args '{args}', kwargs '{kwargs}'", - flush=True, - ) finalize_method_tuple = (function, args, 
kwargs) if finalize_method_tuple not in self._finalize_methods: - log.warning( - f"DGM register_finalize_method, appending tuple finalize_method_tuple '{finalize_method_tuple}'" - ) - print( - f"DGM register_finalize_method, appending tuple finalize_method_tuple '{finalize_method_tuple}'", - flush=True, - ) self._finalize_methods.append(finalize_method_tuple) @@ -1134,28 +1079,8 @@ class SignalHandlingProcess(Process): msg += ". Exiting" log.debug(msg) - print(f"DGM class SignalHandlingProcess, _handle_signals {msg}", flush=True) - - ## DGM mach_id = _get_machine_identifier().get("machine_id", "no_machine_id_available") - ## DGM log.debug( - ## DGM "exiting for process id %s and machine identifer %s", os.getpid(), mach_id - ## DGM ) - ## DGM - ## DGM cur_pid = os.getpid() - # Run any registered process finalization routines - print( - "DGM class SignalHandlingProcess, attempt to print out _finalize_methods", - flush=True, - ) for method, args, kwargs in self._finalize_methods: - # pylint: disable=logging-fstring-interpolation - log.warning( - f"DGM class SignalHandlingProcess, method '{method}', args '{args}', kwargs '{kwargs}'" - ) - print( - f"DGM class SignalHandlingProcess, method '{method}', args '{args}', kwargs '{kwargs}', flush=True" - ) try: method(*args, **kwargs) except Exception: # pylint: disable=broad-except @@ -1168,11 +1093,6 @@ class SignalHandlingProcess(Process): ) continue - print( - "DGM class SignalHandlingProcess, done to print out _finalize_methods", - flush=True, - ) - if HAS_PSUTIL: try: process = psutil.Process(os.getpid()) @@ -1189,106 +1109,6 @@ class SignalHandlingProcess(Process): os.getpid(), ) - ## DGM # need to clean up any resources left around like lock files if using gitfs - ## DGM # example: lockfile i - ## DGM # /var/cache/salt/master/gitfs/work/NlJQs6Pss_07AugikCrmqfmqEFrfPbCDBqGLBiCd3oU=/_/update.lk - ## DGM cache_dir = self.opts.get("cachedir", None) - ## DGM gitfs_active = self.opts.get("gitfs_remotes", None) - ## DGM if 
cache_dir and gitfs_active: - ## DGM # check for gitfs file locks to ensure no resource leaks - ## DGM # last chance to clean up any missed unlock droppings - ## DGM cache_dir = pathlib.Path(cache_dir + "/gitfs/work") - ## DGM if cache_dir.exists and cache_dir.is_dir(): - ## DGM file_list = list(cache_dir.glob("**/*.lk")) - ## DGM file_del_list = [] - ## DGM file_pid = 0 - ## DGM file_mach_id = 0 - ## DGM try: - ## DGM for file_name in file_list: - ## DGM with salt.utils.files.fopen(file_name, "r") as fd_: - ## DGM try: - ## DGM file_pid = int( - ## DGM salt.utils.stringutils.to_unicode( - ## DGM fd_.readline() - ## DGM ).rstrip() - ## DGM ) - ## DGM except ValueError: - ## DGM # Lock file is empty, set pid to 0 so it evaluates as False. - ## DGM file_pid = 0 - ## DGM try: - ## DGM file_mach_id = ( - ## DGM salt.utils.stringutils.to_unicode( - ## DGM fd_.readline() - ## DGM ).rstrip() - ## DGM ) - ## DGM except ValueError: - ## DGM # Lock file is empty, set mach_id to 0 so it evaluates False. - ## DGM file_mach_id = 0 - - ## DGM if cur_pid == file_pid: - ## DGM if mach_id != file_mach_id: - ## DGM if not file_mach_id: - ## DGM msg = ( - ## DGM f"gitfs lock file for pid '{file_pid}' does not " - ## DGM "contain a machine id, deleting lock file which may " - ## DGM "affect if using multi-master with shared gitfs cache, " - ## DGM "the lock may have been obtained by another master " - ## DGM "recommend updating Salt version on other masters to a " - ## DGM "version which insert machine identification in lock a file." 
- ## DGM ) - ## DGM log.debug(msg) - ## DGM file_del_list.append( - ## DGM (file_name, file_pid, file_mach_id) - ## DGM ) - ## DGM else: - ## DGM file_del_list.append( - ## DGM (file_name, file_pid, file_mach_id) - ## DGM ) - - ## DGM except FileNotFoundError: - ## DGM log.debug("gitfs lock file: %s not found", file_name) - - ## DGM for file_name, file_pid, file_mach_id in file_del_list: - ## DGM try: - ## DGM os.remove(file_name) - ## DGM except OSError as exc: - ## DGM if exc.errno == errno.ENOENT: - ## DGM # No lock file present - ## DGM msg = ( - ## DGM "SIGTERM clean up of resources attempted to remove lock " - ## DGM f"file {file_name}, pid '{file_pid}', machine identifier " - ## DGM f"'{mach_id}' but it did not exist, exception : {exc} " - ## DGM ) - ## DGM log.debug(msg) - - ## DGM elif exc.errno == errno.EISDIR: - ## DGM # Somehow this path is a directory. Should never happen - ## DGM # unless some wiseguy manually creates a directory at this - ## DGM # path, but just in case, handle it. - ## DGM try: - ## DGM shutil.rmtree(file_name) - ## DGM except OSError as exc: - ## DGM msg = ( - ## DGM f"SIGTERM clean up of resources, lock file '{file_name}'" - ## DGM f", pid '{file_pid}', machine identifier '{file_mach_id}'" - ## DGM f"was a directory, removed directory, exception : '{exc}'" - ## DGM ) - ## DGM log.debug(msg) - ## DGM else: - ## DGM msg = ( - ## DGM "SIGTERM clean up of resources, unable to remove lock file " - ## DGM f"'{file_name}', pid '{file_pid}', machine identifier " - ## DGM f"'{file_mach_id}', exception : '{exc}'" - ## DGM ) - ## DGM log.debug(msg) - ## DGM else: - ## DGM msg = ( - ## DGM "SIGTERM clean up of resources, removed lock file " - ## DGM f"'{file_name}', pid '{file_pid}', machine identifier " - ## DGM f"'{file_mach_id}'" - ## DGM ) - ## DGM log.debug(msg) - except psutil.NoSuchProcess: log.warning( "Unable to kill children of process %d, it does not exist." 
@@ -1362,7 +1182,7 @@ class SubprocessList: log.debug("Subprocess %s cleaned up", proc.name) -def cleanup_zombie_process(*args, **kwargs): +def cleanup_finalize_process(*args, **kwargs): """ Generic process to allow for any registered process cleanup routines to execute. @@ -1373,17 +1193,13 @@ def cleanup_zombie_process(*args, **kwargs): which cannot be added by the register_finalize_method. """ - print("\nDGM cleanup_zombie_process entry\n", flush=True) - - # Run any register process cleanup routines - for method, args, kwargs in _INTERNAL_PROCESS_ZOMBIE_LIST: - # pylint: disable=logging-fstring-interpolation - log.warning( - f"DGM cleanup_zombie_process, method '{method}', args '{args}', kwargs '{kwargs}'" - ) - print( - f"DGM cleanup_zombie_process, method '{method}', args '{args}', kwargs '{kwargs}'", - flush=True, + # Run any registered process cleanup routines + for method, args, kwargs in _INTERNAL_PROCESS_FINALIZE_FUNCTION_LIST: + log.debug( + "cleanup_finalize_process, method=%r, args=%r, kwargs=%r", + method, + args, + kwargs, ) try: method(*args, **kwargs) @@ -1396,10 +1212,8 @@ def cleanup_zombie_process(*args, **kwargs): ) continue - print("\nDGM cleanup_zombie_process exit\n", flush=True) - -def register_cleanup_zombie_function(function, *args, **kwargs): +def register_cleanup_finalize_function(function, *args, **kwargs): """ Register a function to run as process terminates @@ -1407,25 +1221,16 @@ def register_cleanup_zombie_function(function, *args, **kwargs): using psutil.Process, there is no method available to register a cleanup process. Hence, this function can be used to register a function to allow cleanup processes - which cannot be added by the register_finalize_method. + which cannot be added by class Process register_finalize_method. 
Note: there is no deletion, since it is assummed that if something is registered, it will continue to be used """ - # pylint: disable=logging-fstring-interpolation - log.warning( - f"DGM register_cleanup_zombie_function entry, function '{function}', args '{args}', kwargs '{kwargs}'" - ) - print( - f"DGM register_cleanup_zombie_function entry, function '{function}', args '{args}', kwargs '{kwargs}'", - flush=True, + log.debug( + "register_cleanup_finalize_function entry, function=%r, args=%r, kwargs=%r", + function, + args, + kwargs, ) finalize_function_tuple = (function, args, kwargs) - if finalize_function_tuple not in _INTERNAL_PROCESS_ZOMBIE_LIST: - log.warning( - f"DGM register_cleanup_zombie_function, appending tuple finalize_function_tuple '{finalize_function_tuple}'" - ) - print( - f"DGM register_cleanup_zombie_function, appending tuple finalize_function_tuple '{finalize_function_tuple}'", - flush=True, - ) - _INTERNAL_PROCESS_ZOMBIE_LIST.append(finalize_function_tuple) + if finalize_function_tuple not in _INTERNAL_PROCESS_FINALIZE_FUNCTION_LIST: + _INTERNAL_PROCESS_FINALIZE_FUNCTION_LIST.append(finalize_function_tuple) diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index 075b872b89b..241f61cefbc 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -9,7 +9,6 @@ import pathlib import signal import time -## import psutil import pytest from saltfactories.utils import random_string @@ -553,7 +552,6 @@ class KillProcessTest(salt.utils.process.SignalHandlingProcess): log.debug("KillProcessTest acquried lock file %s", lockfile) killtest_pid = os.getpid() - print(f"KillProcessTest pid '{killtest_pid}', acquried lock file '{lockfile}'") # check that lock has been released assert self.provider._master_lock.acquire(timeout=5) @@ -566,16 +564,11 @@ class KillProcessTest(salt.utils.process.SignalHandlingProcess): @pytest.mark.slow_test 
@pytest.mark.skip_unless_on_linux @pytest.mark.timeout_unless_on_windows(120) -def test_git_provider_sigterm_cleanup( - main_class, -): # pylint: disable=logging-fstring-interpolation +def test_git_provider_sigterm_cleanup(main_class): """ Start process which will obtain lock, and leave it locked then kill the process via SIGTERM and ensure locked resources are cleaned up """ - log.warning("DGM test_git_provider_sigterm_cleanup entry") - print("DGM test_git_provider_sigterm_cleanup entry", flush=True) - provider = main_class.remotes[0] with salt.utils.process.default_signals(signal.SIGINT, signal.SIGTERM): @@ -585,49 +578,6 @@ def test_git_provider_sigterm_cleanup( while not proc.is_alive(): time.sleep(1) # give some time for it to be started - log.warning( - f"DGM test_git_provider_sigterm_cleanup, post add_process, proc '{proc}'" - ) - - ## DGM print("DGM test area entry\n\n\n\n", flush=True) - - ## DGM dgm_proc_dir = dir(proc) - ## DGM print(f"DGM test_git_provider_sigterm_cleanup, post add_process, proc '{proc}', proc dir '{dgm_proc_dir}'", flush=True) - - ## DGM dgm_pid = proc.pid - ## DGM print(f"DGM test_git_provider_sigterm_cleanup, proc pid '{proc.pid}'", flush=True) - - ## DGM dgm_process = psutil.Process(dgm_pid) - ## DGM print(f"DGM test_git_provider_sigterm_cleanup, psutil process '{dgm_process}'", flush=True) - - ## DGM dgm_process_dir = dir(dgm_process) - ## DGM print(f"DGM test_git_provider_sigterm_cleanup, psutil process '{dgm_process}', process dir '{dgm_process_dir}'", flush=True) - ## DGM ## DGM print(f"DGM test_git_provider_sigterm_cleanup, checking values psutil process '{dgm_process}', pid '{dgm_process.pid}', name '{dgm_process.name()}', username '{dgm_process.username()}', as_dict '{dgm_process.as_dict()}', cmdline '{dgm_process.cmdline()}'", flush=True) - ## DGM print(f"DGM test_git_provider_sigterm_cleanup, checking values psutil process '{dgm_process}', pid '{dgm_process.pid}', ppid '{dgm_process.ppid}', name '{dgm_process.name()}', 
username '{dgm_process.username()}', cmdline '{dgm_process.cmdline()}'", flush=True) - - ## DGM dgm_dict = dgm_process.as_dict() - ## DGM dgm_process_parent_pid = dgm_dict["ppid"] - ## DGM dgm_process_parent = psutil.Process(dgm_process_parent_pid) - ## DGM dgm_process_parent_dir = dir(dgm_process_parent) - ## DGM print(f"DGM test_git_provider_sigterm_cleanup, parent pid '{dgm_process_parent_pid}' psutil process '{dgm_process_parent}', name '{dgm_process_parent.name()}', cmdline '{dgm_process_parent.cmdline()}', dir '{dgm_process_parent_dir}'", flush=True) - - ## DGM if isinstance(proc, salt.utils.process.Process): - ## DGM print("DGM test_git_provider_sigterm_cleanup, proc isinstance salt utils process Process is TRUE", flush=True) - ## DGM else: - ## DGM print("DGM test_git_provider_sigterm_cleanup, proc isinstance salt utils process Process is FALSE", flush=True) - - ## DGM if isinstance(dgm_process, salt.utils.process.Process): - ## DGM print("DGM test_git_provider_sigterm_cleanup, process isinstance salt utils process Process is TRUE", flush=True) - ## DGM else: - ## DGM print("DGM test_git_provider_sigterm_cleanup, process isinstance salt utils process Process is FALSE", flush=True) - - ## DGM if isinstance(dgm_process_parent, salt.utils.process.ProcessManager): - ## DGM print("DGM test_git_provider_sigterm_cleanup, process isinstance salt utils process ProcessManager is TRUE", flush=True) - ## DGM else: - ## DGM print("DGM test_git_provider_sigterm_cleanup, process isinstance salt utils process ProcessManager is FALSE", flush=True) - - ## DGM print("DGM test area exit\n\n\n\n", flush=True) - procmgr.run(asynchronous=True) time.sleep(2) # give some time for it to terminate @@ -635,18 +585,12 @@ def test_git_provider_sigterm_cleanup( # child process should be alive file_name = provider._get_lock_file("update") - log.warning(f"DGM test_git_provider_sigterm_cleanup, file_name '{file_name}'") - print(f"DGM test_git_provider_sigterm_cleanup, file_name 
'{file_name}'", flush=True) - assert pathlib.Path(file_name).exists() assert pathlib.Path(file_name).is_file() - print("DGM test_git_provider_sigterm_cleanup, terminate procmgr start", flush=True) procmgr.terminate() # sends a SIGTERM time.sleep(2) # give some time for it to terminate - print("DGM test_git_provider_sigterm_cleanup, terminate procmgr exit", flush=True) - assert not proc.is_alive() assert not pathlib.Path(file_name).exists() From e74ba6f80227d80d3a1e91f7ed589c4093b1add1 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Wed, 5 Jun 2024 01:21:26 -0700 Subject: [PATCH 053/160] Make windows pkg builds more reliable --- pkg/windows/install_vs_buildtools.ps1 | 25 ++++++++++++------------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/pkg/windows/install_vs_buildtools.ps1 b/pkg/windows/install_vs_buildtools.ps1 index 1d51058d2f1..b92751550ce 100644 --- a/pkg/windows/install_vs_buildtools.ps1 +++ b/pkg/windows/install_vs_buildtools.ps1 @@ -119,12 +119,12 @@ if ( $install_build_tools ) { # Hash: 3b1efd3a66ea28b16697394703a72ca340a05bd5 if (! (Test-Path -Path Cert:\LocalMachine\Root\3b1efd3a66ea28b16697394703a72ca340a05bd5) ) { Write-Host "Installing Certificate Sign Root Certificate: " -NoNewLine - Start-Process -FilePath "certutil" ` + $proc = Start-Process -FilePath "certutil" ` -ArgumentList "-addstore", ` "Root", ` "$($env:TEMP)\build_tools\certificates\manifestCounterSignRootCertificate.cer" ` - -Wait -WindowStyle Hidden - if ( Test-Path -Path Cert:\LocalMachine\Root\3b1efd3a66ea28b16697394703a72ca340a05bd5 ) { + -PassThru -Wait -WindowStyle Hidden + if ( $proc.ExitCode -eq 0 ) { Write-Result "Success" -ForegroundColor Green } else { Write-Result "Failed" -ForegroundColor Yellow @@ -135,12 +135,12 @@ if ( $install_build_tools ) { # Hash: 8f43288ad272f3103b6fb1428485ea3014c0bcfe if (! 
(Test-Path -Path Cert:\LocalMachine\Root\8f43288ad272f3103b6fb1428485ea3014c0bcfe) ) { Write-Host "Installing Certificate Root Certificate: " -NoNewLine - Start-Process -FilePath "certutil" ` + $proc = Start-Process -FilePath "certutil" ` -ArgumentList "-addstore", ` "Root", ` "$($env:TEMP)\build_tools\certificates\manifestRootCertificate.cer" ` - -Wait -WindowStyle Hidden - if ( Test-Path -Path Cert:\LocalMachine\Root\8f43288ad272f3103b6fb1428485ea3014c0bcfe ) { + -PassThru -Wait -WindowStyle Hidden + if ( $proc.ExitCode -eq 0 ) { Write-Result "Success" -ForegroundColor Green } else { Write-Result "Failed" -ForegroundColor Yellow @@ -148,14 +148,13 @@ if ( $install_build_tools ) { } Write-Host "Installing Visual Studio 2017 build tools: " -NoNewline - Start-Process -FilePath "$env:TEMP\build_tools\vs_setup.exe" ` + $proc = Start-Process -FilePath "$env:TEMP\build_tools\vs_setup.exe" ` -ArgumentList "--wait", "--noweb", "--quiet" ` - -Wait - @($VS_CL_BIN, $MSBUILD_BIN, $WIN10_SDK_RC) | ForEach-Object { - if ( ! 
(Test-Path -Path $_) ) { - Write-Result "Failed" -ForegroundColor Red - exit 1 - } + -Wait -PassThru + if ( $proc.ExitCode -eq 0 ) { + Write-Result "Success" -ForegroundColor Green + } else { + Write-Result "Failed" -ForegroundColor Yellow } Write-Result "Success" -ForegroundColor Green } else { From 00c45646d345ecb8af369bc13a3b34d9a050a00a Mon Sep 17 00:00:00 2001 From: David Murphy Date: Tue, 4 Jun 2024 09:10:28 -0600 Subject: [PATCH 054/160] Fix nightly build test missed in PR 65937 --- tests/pytests/functional/utils/test_winrepo.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/tests/pytests/functional/utils/test_winrepo.py b/tests/pytests/functional/utils/test_winrepo.py index 117d995bba6..35269f36f5e 100644 --- a/tests/pytests/functional/utils/test_winrepo.py +++ b/tests/pytests/functional/utils/test_winrepo.py @@ -5,6 +5,7 @@ import pytest from salt.runners.winrepo import GLOBAL_ONLY, PER_REMOTE_ONLY, PER_REMOTE_OVERRIDES from salt.utils.gitfs import GitPython, Pygit2, WinRepo from salt.utils.immutabletypes import ImmutableDict, ImmutableList +from salt.utils.platform import get_machine_identifier as _get_machine_identifier pytestmark = [ pytest.mark.slow_test, @@ -130,6 +131,7 @@ def test_pygit2_remote_map(pygit2_winrepo_opts): def _test_lock(opts): + mach_id = _get_machine_identifier().get("machine_id", "no_machine_id_available") w = _get_winrepo( opts, "https://github.com/saltstack/salt-test-pillar-gitfs.git", @@ -140,14 +142,18 @@ def _test_lock(opts): assert repo.get_salt_working_dir() in repo._get_lock_file() assert repo.lock() == ( [ - "Set update lock for winrepo remote 'https://github.com/saltstack/salt-test-pillar-gitfs.git'" + ( + f"Set update lock for winrepo remote 'https://github.com/saltstack/salt-test-pillar-gitfs.git' on machine_id '{mach_id}'" + ) ], [], ) assert os.path.isfile(repo._get_lock_file()) assert repo.clear_lock() == ( [ - "Removed update lock for winrepo remote 
'https://github.com/saltstack/salt-test-pillar-gitfs.git'" + ( + f"Removed update lock for winrepo remote 'https://github.com/saltstack/salt-test-pillar-gitfs.git' on machine_id '{mach_id}'" + ) ], [], ) From c7e18526f6780d69abe8ff5225d28b1734f65b46 Mon Sep 17 00:00:00 2001 From: David Murphy Date: Tue, 4 Jun 2024 16:08:17 -0600 Subject: [PATCH 055/160] Trying Windows fix, removing $() on env:TEMP --- pkg/windows/install_vs_buildtools.ps1 | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkg/windows/install_vs_buildtools.ps1 b/pkg/windows/install_vs_buildtools.ps1 index 1d51058d2f1..49eb5125568 100644 --- a/pkg/windows/install_vs_buildtools.ps1 +++ b/pkg/windows/install_vs_buildtools.ps1 @@ -122,7 +122,7 @@ if ( $install_build_tools ) { Start-Process -FilePath "certutil" ` -ArgumentList "-addstore", ` "Root", ` - "$($env:TEMP)\build_tools\certificates\manifestCounterSignRootCertificate.cer" ` + "$env:TEMP\build_tools\certificates\manifestCounterSignRootCertificate.cer" ` -Wait -WindowStyle Hidden if ( Test-Path -Path Cert:\LocalMachine\Root\3b1efd3a66ea28b16697394703a72ca340a05bd5 ) { Write-Result "Success" -ForegroundColor Green @@ -138,7 +138,7 @@ if ( $install_build_tools ) { Start-Process -FilePath "certutil" ` -ArgumentList "-addstore", ` "Root", ` - "$($env:TEMP)\build_tools\certificates\manifestRootCertificate.cer" ` + "$env:TEMP\build_tools\certificates\manifestRootCertificate.cer" ` -Wait -WindowStyle Hidden if ( Test-Path -Path Cert:\LocalMachine\Root\8f43288ad272f3103b6fb1428485ea3014c0bcfe ) { Write-Result "Success" -ForegroundColor Green From c1b8d81282dbe6bedfc0d24a80c7b9f5c0ebf321 Mon Sep 17 00:00:00 2001 From: Tyler Levy Conde Date: Tue, 21 May 2024 14:31:51 -0600 Subject: [PATCH 056/160] Add tests for state compiler aggregation --- salt/state.py | 2 +- .../pytests/unit/state/test_state_compiler.py | 62 +++++++++++++++++++ 2 files changed, 63 insertions(+), 1 deletion(-) diff --git a/salt/state.py b/salt/state.py index 
250b46da4f4..15f82c6b810 100644 --- a/salt/state.py +++ b/salt/state.py @@ -972,8 +972,8 @@ class State: self.state_con["loader_cache"][agg_fun] = True try: low["__agg__"] = True - low = self._aggregate_requisites(low, chunks) low = self.states[agg_fun](low, chunks, running) + low = self._aggregate_requisites(low, chunks) except TypeError: log.error("Failed to execute aggregate for state %s", low["state"]) else: diff --git a/tests/pytests/unit/state/test_state_compiler.py b/tests/pytests/unit/state/test_state_compiler.py index db241797326..322d4858b51 100644 --- a/tests/pytests/unit/state/test_state_compiler.py +++ b/tests/pytests/unit/state/test_state_compiler.py @@ -1070,6 +1070,68 @@ def test_mod_aggregate(minion_opts): # Ensure pkgs were aggregated assert low_ret["pkgs"] == ["figlet", "sl"] +def test_mod_aggregate_order(minion_opts): + """ + Test to ensure that the state_aggregate setting correctly aggregates package installations + while respecting the 'require' requisite to enforce execution order. 
+ """ + # Setup the chunks based on the provided scenario + chunks = [ + { + "state": "pkg", + "name": "first packages", + "__id__": "first packages", + "pkgs": ["drpm"], + "fun": "installed", + "order": 1, + "__env__": "base", + "__sls__": "base", + }, + { + "state": "test", + "name": "requirement", + "__id__": "requirement", + "fun": "nop", + "order": 2, + "__env__": "base", + "__sls__": "base", + }, + { + "state": "pkg", + "name": "second packages", + "__id__": "second packages", + "pkgs": ["gc"], + "fun": "installed", + "order": 3, + "require": [{"test": "requirement"}], + "__env__": "base", + "__sls__": "base", + }, + ] + + + # Setup the State object + with patch("salt.state.State._gather_pillar"): + state_obj = salt.state.State(minion_opts) + state_obj.opts["state_aggregate"] = True # Ensure state aggregation is enabled + + # Process each chunk with _mod_aggregate to simulate state execution + running = state_obj.call_chunks(chunks) + + first_key = 'pkg_|-first packages_|-first packages_|-installed' + requirement_key = 'test_|-requirement_|-requirement_|-nop' + second_key = 'pkg_|-second packages_|-second packages_|-installed' + + # Check if the "second packages" have been executed after "requirement" + # by checking their run numbers + assert running[first_key]["__run_num__"] < running[requirement_key]["__run_num__"], "Requirement should execute before second packages" + assert running[requirement_key]["__run_num__"] < running[second_key]["__run_num__"], "Second packages should execute after requirement" + + # Further, we should verify that the "second packages" have "gc" only after "requirement" is complete + assert 'gc' in running[second_key].get('pkgs', []), "GC should be in second packages" + assert 'drpm' in running[first_key].get('pkgs', []), "DRPM should be in first packages" + + def test_verify_onlyif_cmd_opts_exclude(minion_opts): """ From bd98f0db68ed326da166399aeacc9663b732226d Mon Sep 17 00:00:00 2001 From: Tyler Levy Conde Date: Tue, 4 Jun 2024 
15:11:51 -0600 Subject: [PATCH 057/160] Test successfully replicates the issue --- salt/state.py | 2 +- .../pytests/unit/state/test_state_compiler.py | 149 +++--------------- 2 files changed, 19 insertions(+), 132 deletions(-) diff --git a/salt/state.py b/salt/state.py index 15f82c6b810..250b46da4f4 100644 --- a/salt/state.py +++ b/salt/state.py @@ -972,8 +972,8 @@ class State: self.state_con["loader_cache"][agg_fun] = True try: low["__agg__"] = True - low = self.states[agg_fun](low, chunks, running) low = self._aggregate_requisites(low, chunks) + low = self.states[agg_fun](low, chunks, running) except TypeError: log.error("Failed to execute aggregate for state %s", low["state"]) else: diff --git a/tests/pytests/unit/state/test_state_compiler.py b/tests/pytests/unit/state/test_state_compiler.py index 322d4858b51..b1bfacece1c 100644 --- a/tests/pytests/unit/state/test_state_compiler.py +++ b/tests/pytests/unit/state/test_state_compiler.py @@ -855,128 +855,6 @@ def test_call_chunk_sub_state_run(minion_opts): assert sub_state["__sls__"] == "external" -def test_aggregate_requisites(minion_opts): - """ - Test to ensure that the requisites are included in the aggregated low state. 
- """ - # The low that is returned from _mod_aggregrate - low = { - "state": "pkg", - "name": "other_pkgs", - "__sls__": "47628", - "__env__": "base", - "__id__": "other_pkgs", - "pkgs": ["byobu", "vim", "tmux", "google-cloud-sdk"], - "aggregate": True, - "order": 10002, - "fun": "installed", - "__agg__": True, - } - - # Chunks that have been processed through the pkg mod_aggregate function - chunks = [ - { - "state": "file", - "name": "/tmp/install-vim", - "__sls__": "47628", - "__env__": "base", - "__id__": "/tmp/install-vim", - "order": 10000, - "fun": "managed", - }, - { - "state": "file", - "name": "/tmp/install-tmux", - "__sls__": "47628", - "__env__": "base", - "__id__": "/tmp/install-tmux", - "order": 10001, - "fun": "managed", - }, - { - "state": "pkg", - "name": "other_pkgs", - "__sls__": "47628", - "__env __": "base", - "__id__": "other_pkgs", - "pkgs": ["byobu"], - "aggregate": True, - "order": 10002, - "fun": "installed", - }, - { - "state": "pkg", - "name": "bc", - "__sls__": "47628", - "__env__": "base", - "__id__": "bc", - "hold": True, - "__agg__": True, - "order": 10003, - "fun": "installed", - }, - { - "state": "pkg", - "name": "vim", - "__sls__": "47628", - "__env__": "base", - "__agg__": True, - "__id__": "vim", - "require": ["/tmp/install-vim"], - "order": 10004, - "fun": "installed", - }, - { - "state": "pkg", - "name": "tmux", - "__sls__": "47628", - "__env__": "base", - "__agg__": True, - "__id__": "tmux", - "require": ["/tmp/install-tmux"], - "order": 10005, - "fun": "installed", - }, - { - "state": "pkgrepo", - "name": "deb https://packages.cloud.google.com/apt cloud-sdk main", - "__sls__": "47628", - "__env__": "base", - "__id__": "google-cloud-repo", - "humanname": "Google Cloud SDK", - "file": "/etc/apt/sources.list.d/google-cloud-sdk.list", - "key_url": "https://packages.cloud.google.com/apt/doc/apt-key.gpg", - "order": 10006, - "fun": "managed", - }, - { - "state": "pkg", - "name": "google-cloud-sdk", - "__sls__": "47628", - 
"__env__": "base", - "__agg__": True, - "__id__": "google-cloud-sdk", - "require": ["google-cloud-repo"], - "order": 10007, - "fun": "installed", - }, - ] - - with patch("salt.state.State._gather_pillar"): - state_obj = salt.state.State(minion_opts) - low_ret = state_obj._aggregate_requisites(low, chunks) - - # Ensure the low returned contains require - assert "require" in low_ret - - # Ensure all the requires from pkg states are in low - assert low_ret["require"] == [ - "/tmp/install-vim", - "/tmp/install-tmux", - "google-cloud-repo", - ] - - def test_mod_aggregate(minion_opts): """ Test to ensure that the requisites are included in the aggregated low state. @@ -1070,6 +948,7 @@ def test_mod_aggregate(minion_opts): # Ensure pkgs were aggregated assert low_ret["pkgs"] == ["figlet", "sl"] + def test_mod_aggregate_order(minion_opts): """ Test to ensure that the state_aggregate setting correctly aggregates package installations @@ -1104,33 +983,41 @@ def test_mod_aggregate_order(minion_opts): "fun": "installed", "order": 3, "require": [{"test": "requirement"}], + "provider": "yumpkg", "__env__": "base", "__sls__": "base", }, ] - # Setup the State object with patch("salt.state.State._gather_pillar"): state_obj = salt.state.State(minion_opts) + state_obj.load_modules(chunks[-1]) state_obj.opts["state_aggregate"] = True # Ensure state aggregation is enabled # Process each chunk with _mod_aggregate to simulate state execution running = state_obj.call_chunks(chunks) - first_key = 'pkg_|-first packages_|-first packages_|-installed' - requirement_key = 'test_|-requirement_|-requirement_|-nop' - second_key = 'pkg_|-second packages_|-second packages_|-installed' + first_key = "pkg_|-first packages_|-first packages_|-installed" + requirement_key = "test_|-requirement_|-requirement_|-nop" + second_key = "pkg_|-second packages_|-second packages_|-installed" # Check if the "second packages" have been executed after "requirement" # by checking their run numbers - assert 
running[first_key]["__run_num__"] < running[requirement_key]["__run_num__"], "Requirement should execute before second packages" - assert running[requirement_key]["__run_num__"] < running[second_key]["__run_num__"], "Second packages should execute after requirement" + assert ( + running[first_key]["__run_num__"] < running[requirement_key]["__run_num__"] + ), "Requirement should execute before second packages" + assert ( + running[requirement_key]["__run_num__"] < running[second_key]["__run_num__"] + ), "Second packages should execute after requirement" # Further, we should verify that the "second packages" have "gc" only after "requirement" is complete - assert 'gc' in running[second_key].get('pkgs', []), "GC should be in second packages" - assert 'drpm' in running[first_key].get('pkgs', []), "DRPM should be in first packages" - + assert "gc" in running[second_key].get( + "pkgs", [] + ), "GC should be in second packages" + assert "drpm" in running[first_key].get( + "pkgs", [] + ), "DRPM should be in first packages" def test_verify_onlyif_cmd_opts_exclude(minion_opts): From 9ea88c5bef28a63761201df4cd2995fc0cc8e5b4 Mon Sep 17 00:00:00 2001 From: Tyler Levy Conde Date: Tue, 4 Jun 2024 15:15:34 -0600 Subject: [PATCH 058/160] removed clutter --- .../pytests/unit/state/test_state_compiler.py | 122 ++++++++++++++++++ 1 file changed, 122 insertions(+) diff --git a/tests/pytests/unit/state/test_state_compiler.py b/tests/pytests/unit/state/test_state_compiler.py index b1bfacece1c..5cbebbf77c7 100644 --- a/tests/pytests/unit/state/test_state_compiler.py +++ b/tests/pytests/unit/state/test_state_compiler.py @@ -855,6 +855,128 @@ def test_call_chunk_sub_state_run(minion_opts): assert sub_state["__sls__"] == "external" +def test_aggregate_requisites(minion_opts): + """ + Test to ensure that the requisites are included in the aggregated low state. 
+ """ + # The low that is returned from _mod_aggregrate + low = { + "state": "pkg", + "name": "other_pkgs", + "__sls__": "47628", + "__env__": "base", + "__id__": "other_pkgs", + "pkgs": ["byobu", "vim", "tmux", "google-cloud-sdk"], + "aggregate": True, + "order": 10002, + "fun": "installed", + "__agg__": True, + } + + # Chunks that have been processed through the pkg mod_aggregate function + chunks = [ + { + "state": "file", + "name": "/tmp/install-vim", + "__sls__": "47628", + "__env__": "base", + "__id__": "/tmp/install-vim", + "order": 10000, + "fun": "managed", + }, + { + "state": "file", + "name": "/tmp/install-tmux", + "__sls__": "47628", + "__env__": "base", + "__id__": "/tmp/install-tmux", + "order": 10001, + "fun": "managed", + }, + { + "state": "pkg", + "name": "other_pkgs", + "__sls__": "47628", + "__env __": "base", + "__id__": "other_pkgs", + "pkgs": ["byobu"], + "aggregate": True, + "order": 10002, + "fun": "installed", + }, + { + "state": "pkg", + "name": "bc", + "__sls__": "47628", + "__env__": "base", + "__id__": "bc", + "hold": True, + "__agg__": True, + "order": 10003, + "fun": "installed", + }, + { + "state": "pkg", + "name": "vim", + "__sls__": "47628", + "__env__": "base", + "__agg__": True, + "__id__": "vim", + "require": ["/tmp/install-vim"], + "order": 10004, + "fun": "installed", + }, + { + "state": "pkg", + "name": "tmux", + "__sls__": "47628", + "__env__": "base", + "__agg__": True, + "__id__": "tmux", + "require": ["/tmp/install-tmux"], + "order": 10005, + "fun": "installed", + }, + { + "state": "pkgrepo", + "name": "deb https://packages.cloud.google.com/apt cloud-sdk main", + "__sls__": "47628", + "__env__": "base", + "__id__": "google-cloud-repo", + "humanname": "Google Cloud SDK", + "file": "/etc/apt/sources.list.d/google-cloud-sdk.list", + "key_url": "https://packages.cloud.google.com/apt/doc/apt-key.gpg", + "order": 10006, + "fun": "managed", + }, + { + "state": "pkg", + "name": "google-cloud-sdk", + "__sls__": "47628", + 
"__env__": "base", + "__agg__": True, + "__id__": "google-cloud-sdk", + "require": ["google-cloud-repo"], + "order": 10007, + "fun": "installed", + }, + ] + + with patch("salt.state.State._gather_pillar"): + state_obj = salt.state.State(minion_opts) + low_ret = state_obj._aggregate_requisites(low, chunks) + + # Ensure the low returned contains require + assert "require" in low_ret + + # Ensure all the requires from pkg states are in low + assert low_ret["require"] == [ + "/tmp/install-vim", + "/tmp/install-tmux", + "google-cloud-repo", + ] + + def test_mod_aggregate(minion_opts): """ Test to ensure that the requisites are included in the aggregated low state. From 0b599713e98420c4d869ec9ec1a0972526443406 Mon Sep 17 00:00:00 2001 From: Tyler Levy Conde Date: Tue, 4 Jun 2024 15:53:36 -0600 Subject: [PATCH 059/160] Aggregate requisites AFTER aggregating states --- salt/state.py | 3 ++- .../pytests/unit/state/test_state_compiler.py | 26 ++++--------------- 2 files changed, 7 insertions(+), 22 deletions(-) diff --git a/salt/state.py b/salt/state.py index 250b46da4f4..899736c84fb 100644 --- a/salt/state.py +++ b/salt/state.py @@ -972,8 +972,9 @@ class State: self.state_con["loader_cache"][agg_fun] = True try: low["__agg__"] = True - low = self._aggregate_requisites(low, chunks) + # Aggregate the states *before* aggregating requisites otherwise there will never be requisites to aggregate low = self.states[agg_fun](low, chunks, running) + low = self._aggregate_requisites(low, chunks) except TypeError: log.error("Failed to execute aggregate for state %s", low["state"]) else: diff --git a/tests/pytests/unit/state/test_state_compiler.py b/tests/pytests/unit/state/test_state_compiler.py index 5cbebbf77c7..bfba17cd4fa 100644 --- a/tests/pytests/unit/state/test_state_compiler.py +++ b/tests/pytests/unit/state/test_state_compiler.py @@ -1118,28 +1118,12 @@ def test_mod_aggregate_order(minion_opts): state_obj.opts["state_aggregate"] = True # Ensure state aggregation is enabled # 
Process each chunk with _mod_aggregate to simulate state execution - running = state_obj.call_chunks(chunks) + state_obj.call_chunks(chunks) - first_key = "pkg_|-first packages_|-first packages_|-installed" - requirement_key = "test_|-requirement_|-requirement_|-nop" - second_key = "pkg_|-second packages_|-second packages_|-installed" - - # Check if the "second packages" have been executed after "requirement" - # by checking their run numbers - assert ( - running[first_key]["__run_num__"] < running[requirement_key]["__run_num__"] - ), "Requirement should execute before second packages" - assert ( - running[requirement_key]["__run_num__"] < running[second_key]["__run_num__"] - ), "Second packages should execute after requirement" - - # Further, we should verify that the "second packages" have "gc" only after "requirement" is complete - assert "gc" in running[second_key].get( - "pkgs", [] - ), "GC should be in second packages" - assert "drpm" in running[first_key].get( - "pkgs", [] - ), "DRPM should be in first packages" + first_state_low = chunks[0] + last_state_low = chunks[-1] + # Verify that the requisites got aggregated as well + assert first_state_low["require"] == last_state_low["require"] def test_verify_onlyif_cmd_opts_exclude(minion_opts): From 5c7124a0ee4dc1389c279f972c24e692c197df55 Mon Sep 17 00:00:00 2001 From: Tyler Levy Conde Date: Tue, 4 Jun 2024 15:54:51 -0600 Subject: [PATCH 060/160] Add changelog entry --- changelog/65304.fixed.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog/65304.fixed.md diff --git a/changelog/65304.fixed.md b/changelog/65304.fixed.md new file mode 100644 index 00000000000..dd162cee714 --- /dev/null +++ b/changelog/65304.fixed.md @@ -0,0 +1 @@ +pkg.installed state aggregate does not honors requires requisite From e2cf0e561e4a2627e148406250ddee5f56df2e71 Mon Sep 17 00:00:00 2001 From: Akmod Date: Fri, 7 Jun 2024 14:59:17 -0600 Subject: [PATCH 061/160] requisites *should* be aggregated --- 
tests/pytests/unit/state/test_state_compiler.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/pytests/unit/state/test_state_compiler.py b/tests/pytests/unit/state/test_state_compiler.py index bfba17cd4fa..4a546ce1dfe 100644 --- a/tests/pytests/unit/state/test_state_compiler.py +++ b/tests/pytests/unit/state/test_state_compiler.py @@ -1064,8 +1064,8 @@ def test_mod_aggregate(minion_opts): ] # Ensure that the require requisite from the - # figlet state doesn't find its way into this state - assert "require" not in low_ret + # figlet state finds its way into this state + assert "require" in low_ret # Ensure pkgs were aggregated assert low_ret["pkgs"] == ["figlet", "sl"] From 79b4ffa116e1caff9b52986200160420fabbc9d1 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Mon, 3 Jun 2024 13:25:44 -0700 Subject: [PATCH 062/160] Revert change to spec file --- pkg/rpm/salt.spec | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkg/rpm/salt.spec b/pkg/rpm/salt.spec index b9f9577d826..f1c62cc0480 100644 --- a/pkg/rpm/salt.spec +++ b/pkg/rpm/salt.spec @@ -10,8 +10,8 @@ %define __brp_python_hardlink /usr/bin/true # Disable private libraries from showing in provides -%global __provides_exclude_from ^lib/.*\\.so.*$ -%global __requires_exclude_from ^lib/.*\\.so.*$ +%global __provides_exclude_from ^.*\\.so.*$ +%global __requires_exclude_from ^.*\\.so.*$ %define _source_payload w2.gzdio %define _binary_payload w2.gzdio %define _SALT_GROUP salt From 0958595262d520d1057090769eeae12edb143cf4 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Mon, 3 Jun 2024 20:40:03 -0700 Subject: [PATCH 063/160] Fix rpm provides and requires --- pkg/rpm/salt.spec | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pkg/rpm/salt.spec b/pkg/rpm/salt.spec index f1c62cc0480..d783f29c048 100644 --- a/pkg/rpm/salt.spec +++ b/pkg/rpm/salt.spec @@ -10,8 +10,9 @@ %define __brp_python_hardlink /usr/bin/true # Disable private libraries from showing in provides -%global __provides_exclude_from ^.*\\.so.*$ -%global __requires_exclude_from ^.*\\.so.*$ +%global __to_exclude .*\\.so.* +%global __provides_exclude_from ^.*$ +%global __requires_exclude_from ^.*$ %define _source_payload w2.gzdio %define _binary_payload w2.gzdio %define _SALT_GROUP salt From c5a5738cf34ee154f28b7fd263d7636fdc6eb7fc Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Tue, 4 Jun 2024 15:34:27 -0700 Subject: [PATCH 064/160] Add regression test for package meta --- .../pytests/pkg/integration/test_pkg_meta.py | 88 +++++++++++++++++++ 1 file changed, 88 insertions(+) create mode 100644 tests/pytests/pkg/integration/test_pkg_meta.py diff --git a/tests/pytests/pkg/integration/test_pkg_meta.py b/tests/pytests/pkg/integration/test_pkg_meta.py new file mode 100644 index 00000000000..18a8665e670 --- /dev/null +++ b/tests/pytests/pkg/integration/test_pkg_meta.py @@ -0,0 +1,88 @@ +import subprocess + +import pytest +from pytestskipmarkers.utils import platform + +import salt.utils.path +from tests.support.pkg import ARTIFACTS_DIR + + +@pytest.mark.skipif(not salt.utils.path.which("rpm"), reason="rpm is not installed") +def test_provides(install_salt, version): + if install_salt.distro_id not in ( + "almalinux", + "rocky", + "centos", + "redhat", + "amzn", + "fedora", + "photon", + ): + pytest.skip("Only tests rpm packages") + if platform.is_aarch64(): + arch = "arm64" + else: + arch = "x86_64" + name = f"salt-{version}-0.{arch}.rpm" + package = ARTIFACTS_DIR / name + assert package.exists() + valid_provides = [ + f"config: 
config(salt) = {version}-0", + f"manual: salt = {version}", + f"manual: salt = {version}-0", + f"manual: salt({arch.replace('_', '-')}) = {version}-0", + ] + proc = subprocess.run( + ["rpm", "-q", "-v", "-provides", package], capture_output=True, check=True + ) + for line in proc.stdout.decode().splitlines(): + # If we have a provide that does not contain the word "salt" we should + # fail. + assert "salt" in line + # Check sepecific provide lines. + assert line in valid_provides + + +@pytest.mark.skipif(not salt.utils.path.which("rpm"), reason="rpm is not installed") +def test_requires(install_salt, version): + if install_salt.distro_id not in ( + "almalinux", + "rocky", + "centos", + "redhat", + "amzn", + "fedora", + "photon", + ): + pytest.skip("Only tests rpm packages") + if platform.is_aarch64(): + arch = "arm64" + else: + arch = "x86_64" + name = f"salt-{version}-0.{arch}.rpm" + package = ARTIFACTS_DIR / name + assert package.exists() + valid_requires = [ + "manual: /bin/sh", + "pre,interp: /bin/sh", + "post,interp: /bin/sh", + "preun,interp: /bin/sh", + "manual: /usr/sbin/groupadd", + "manual: /usr/sbin/useradd", + "manual: /usr/sbin/usermod", + f"config: config(salt) = {version}-0", + "manual: dmidecode", + "manual: openssl", + "manual: pciutils", + # Not sure how often these will change, if this check causes things to + # break often we'll want to re-factor. + "rpmlib: rpmlib(CompressedFileNames) <= 3.0.4-1", + "rpmlib: rpmlib(FileDigests) <= 4.6.0-1", + "rpmlib: rpmlib(PayloadFilesHavePrefix) <= 4.0-1", + "manual: which", + ] + proc = subprocess.run( + ["rpm", "-q", "-v", "-requires", package], capture_output=True, check=True + ) + for line in proc.stdout.decode().splitlines(): + assert line in valid_requires From d77574fd0b7fea3599652fb7b809a2abc4f345db Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Tue, 4 Jun 2024 21:21:43 -0700 Subject: [PATCH 065/160] Fix rpm name --- tests/pytests/pkg/integration/test_pkg_meta.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/pytests/pkg/integration/test_pkg_meta.py b/tests/pytests/pkg/integration/test_pkg_meta.py index 18a8665e670..b548fdac734 100644 --- a/tests/pytests/pkg/integration/test_pkg_meta.py +++ b/tests/pytests/pkg/integration/test_pkg_meta.py @@ -20,7 +20,7 @@ def test_provides(install_salt, version): ): pytest.skip("Only tests rpm packages") if platform.is_aarch64(): - arch = "arm64" + arch = "aarch64" else: arch = "x86_64" name = f"salt-{version}-0.{arch}.rpm" From a8c4700c72e22c69f7051d2d4c14460c47e0fc67 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Tue, 4 Jun 2024 23:30:04 -0700 Subject: [PATCH 066/160] Fix provides arch --- .../pytests/pkg/integration/test_pkg_meta.py | 41 ++++++++++++------- 1 file changed, 26 insertions(+), 15 deletions(-) diff --git a/tests/pytests/pkg/integration/test_pkg_meta.py b/tests/pytests/pkg/integration/test_pkg_meta.py index b548fdac734..6d382a3e3b1 100644 --- a/tests/pytests/pkg/integration/test_pkg_meta.py +++ b/tests/pytests/pkg/integration/test_pkg_meta.py @@ -7,8 +7,30 @@ import salt.utils.path from tests.support.pkg import ARTIFACTS_DIR +@pytest.fixture +def pkg_arch(): + if platform.is_aarch64(): + return "aarch64" + else: + return "x86_64" + + +@pytest.fixture +def provides_arch(): + if platform.is_aarch64(): + return "aarch-64" + else: + return "x86-64" + + +@pytest.fixture +def package(version, pkg_arch): + name = f"salt-{version}-0.{pkg_arch}.rpm" + return ARTIFACTS_DIR / name + + @pytest.mark.skipif(not salt.utils.path.which("rpm"), reason="rpm is not installed") -def test_provides(install_salt, version): +def test_provides(install_salt, package, version, provides_arch): if install_salt.distro_id not in ( "almalinux", "rocky", @@ -19,18 +41,13 @@ def test_provides(install_salt, version): "photon", ): 
pytest.skip("Only tests rpm packages") - if platform.is_aarch64(): - arch = "aarch64" - else: - arch = "x86_64" - name = f"salt-{version}-0.{arch}.rpm" - package = ARTIFACTS_DIR / name + assert package.exists() valid_provides = [ f"config: config(salt) = {version}-0", f"manual: salt = {version}", f"manual: salt = {version}-0", - f"manual: salt({arch.replace('_', '-')}) = {version}-0", + f"manual: salt({provides_arch}) = {version}-0", ] proc = subprocess.run( ["rpm", "-q", "-v", "-provides", package], capture_output=True, check=True @@ -44,7 +61,7 @@ def test_provides(install_salt, version): @pytest.mark.skipif(not salt.utils.path.which("rpm"), reason="rpm is not installed") -def test_requires(install_salt, version): +def test_requires(install_salt, package, version): if install_salt.distro_id not in ( "almalinux", "rocky", @@ -55,12 +72,6 @@ def test_requires(install_salt, version): "photon", ): pytest.skip("Only tests rpm packages") - if platform.is_aarch64(): - arch = "arm64" - else: - arch = "x86_64" - name = f"salt-{version}-0.{arch}.rpm" - package = ARTIFACTS_DIR / name assert package.exists() valid_requires = [ "manual: /bin/sh", From 3d539e4dd250937ca7727edc4c3401cb1c4aff64 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Tue, 4 Jun 2024 23:33:50 -0700 Subject: [PATCH 067/160] Add changelog for #66604 --- changelog/66604.fixed.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog/66604.fixed.md diff --git a/changelog/66604.fixed.md b/changelog/66604.fixed.md new file mode 100644 index 00000000000..4d1a771ca54 --- /dev/null +++ b/changelog/66604.fixed.md @@ -0,0 +1 @@ +Fix RPM package provides From 580d8b28514edfaf24a4a7c84d355580a7dad334 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Wed, 5 Jun 2024 01:18:11 -0700 Subject: [PATCH 068/160] Skip test on upgrade and downgrade --- tests/pytests/pkg/integration/test_pkg_meta.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/pytests/pkg/integration/test_pkg_meta.py b/tests/pytests/pkg/integration/test_pkg_meta.py index 6d382a3e3b1..28044c0417c 100644 --- a/tests/pytests/pkg/integration/test_pkg_meta.py +++ b/tests/pytests/pkg/integration/test_pkg_meta.py @@ -31,6 +31,8 @@ def package(version, pkg_arch): @pytest.mark.skipif(not salt.utils.path.which("rpm"), reason="rpm is not installed") def test_provides(install_salt, package, version, provides_arch): + if install_salt.downgrade or install_salt.upgrade: + pytest.skip("Only test on install pkg tests") if install_salt.distro_id not in ( "almalinux", "rocky", @@ -62,6 +64,8 @@ def test_provides(install_salt, package, version, provides_arch): @pytest.mark.skipif(not salt.utils.path.which("rpm"), reason="rpm is not installed") def test_requires(install_salt, package, version): + if install_salt.downgrade or install_salt.upgrade: + pytest.skip("Only test on install pkg tests") if install_salt.distro_id not in ( "almalinux", "rocky", From 4523450ddfc62ef564037bbc6021e58537ca19e1 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Wed, 5 Jun 2024 09:11:43 -0700 Subject: [PATCH 069/160] Use artifact version for pkg name --- .../pytests/pkg/integration/test_pkg_meta.py | 23 +++++++++++-------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/tests/pytests/pkg/integration/test_pkg_meta.py b/tests/pytests/pkg/integration/test_pkg_meta.py index 28044c0417c..3ac77a272bb 100644 --- a/tests/pytests/pkg/integration/test_pkg_meta.py +++ b/tests/pytests/pkg/integration/test_pkg_meta.py @@ -24,13 +24,18 @@ def provides_arch(): @pytest.fixture -def package(version, pkg_arch): - name = f"salt-{version}-0.{pkg_arch}.rpm" +def artifact_version(install_salt): + return install_salt.artifact_version + + +@pytest.fixture +def package(artifact_version, pkg_arch): + name = f"salt-{artifact_version}-0.{pkg_arch}.rpm" return ARTIFACTS_DIR / name @pytest.mark.skipif(not salt.utils.path.which("rpm"), reason="rpm is not installed") -def test_provides(install_salt, package, version, provides_arch): +def test_provides(install_salt, package, artifact_version, provides_arch): if install_salt.downgrade or install_salt.upgrade: pytest.skip("Only test on install pkg tests") if install_salt.distro_id not in ( @@ -46,10 +51,10 @@ def test_provides(install_salt, package, version, provides_arch): assert package.exists() valid_provides = [ - f"config: config(salt) = {version}-0", - f"manual: salt = {version}", - f"manual: salt = {version}-0", - f"manual: salt({provides_arch}) = {version}-0", + f"config: config(salt) = {artifact_version}-0", + f"manual: salt = {artifact_version}", + f"manual: salt = {artifact_version}-0", + f"manual: salt({provides_arch}) = {artifact_version}-0", ] proc = subprocess.run( ["rpm", "-q", "-v", "-provides", package], capture_output=True, check=True @@ -63,7 +68,7 @@ def test_provides(install_salt, package, version, provides_arch): @pytest.mark.skipif(not salt.utils.path.which("rpm"), reason="rpm is not installed") -def test_requires(install_salt, package, version): +def 
test_requires(install_salt, package, artifact_version): if install_salt.downgrade or install_salt.upgrade: pytest.skip("Only test on install pkg tests") if install_salt.distro_id not in ( @@ -85,7 +90,7 @@ def test_requires(install_salt, package, version): "manual: /usr/sbin/groupadd", "manual: /usr/sbin/useradd", "manual: /usr/sbin/usermod", - f"config: config(salt) = {version}-0", + f"config: config(salt) = {artifact_version}-0", "manual: dmidecode", "manual: openssl", "manual: pciutils", From 5bb6c0f6386f15d088b7fdbe2afae5d3346c0698 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Wed, 5 Jun 2024 15:45:37 -0700 Subject: [PATCH 070/160] Remove un-needed skip --- tests/pytests/pkg/integration/test_pkg_meta.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/tests/pytests/pkg/integration/test_pkg_meta.py b/tests/pytests/pkg/integration/test_pkg_meta.py index 3ac77a272bb..ead194c5eea 100644 --- a/tests/pytests/pkg/integration/test_pkg_meta.py +++ b/tests/pytests/pkg/integration/test_pkg_meta.py @@ -36,8 +36,6 @@ def package(artifact_version, pkg_arch): @pytest.mark.skipif(not salt.utils.path.which("rpm"), reason="rpm is not installed") def test_provides(install_salt, package, artifact_version, provides_arch): - if install_salt.downgrade or install_salt.upgrade: - pytest.skip("Only test on install pkg tests") if install_salt.distro_id not in ( "almalinux", "rocky", @@ -69,8 +67,6 @@ def test_provides(install_salt, package, artifact_version, provides_arch): @pytest.mark.skipif(not salt.utils.path.which("rpm"), reason="rpm is not installed") def test_requires(install_salt, package, artifact_version): - if install_salt.downgrade or install_salt.upgrade: - pytest.skip("Only test on install pkg tests") if install_salt.distro_id not in ( "almalinux", "rocky", From 19669adde275afa8b798c111a3014d78103b755f Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Mon, 10 Jun 2024 19:40:53 -0700 Subject: [PATCH 071/160] Fix broken gitfs ssh tests on nightly builds --- tests/support/gitfs.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/tests/support/gitfs.py b/tests/support/gitfs.py index 82a5d50d7c0..6ab90f4c8a4 100644 --- a/tests/support/gitfs.py +++ b/tests/support/gitfs.py @@ -6,6 +6,7 @@ import errno import logging import os import shutil +import subprocess import tempfile import textwrap @@ -595,6 +596,12 @@ class GitPillarSSHTestBase(GitPillarTestBase): ) self.make_repo(root_dir, user=self.username) self.make_extra_repo(root_dir, user=self.username) + # Force git repo ownership to prevent "fatal: detected dubious + # ownership in repository" errors. + subprocess.run( + ["chown", "-R", f"{self.username}:users", f"/home/{self.username}"], + check=True, + ) log.info("%s.setUp() complete.", self.__class__.__name__) def get_pillar(self, ext_pillar_conf): From 10dce329d52c58483c7d8a13ca0b44c07dc560d7 Mon Sep 17 00:00:00 2001 From: ScriptAutomate Date: Mon, 10 Jun 2024 14:20:11 -0500 Subject: [PATCH 072/160] Drop CentOS 7 support; Package with Rocky Linux 9 --- .github/workflows/build-packages.yml | 2 +- .github/workflows/ci.yml | 45 ---------------------- .github/workflows/lint-action.yml | 4 +- .github/workflows/nightly.yml | 57 ---------------------------- .github/workflows/scheduled.yml | 45 ---------------------- .github/workflows/staging.yml | 56 --------------------------- changelog/66623.deprecated.md | 1 + changelog/66624.added.md | 1 + changelog/66624.deprecated.md | 1 + cicd/golden-images.json | 20 ---------- tools/ci.py | 11 +++--- tools/pkg/repo/create.py | 4 +- tools/precommit/workflows.py | 11 +----- tools/utils/gh.py | 2 +- 14 files changed, 15 insertions(+), 245 deletions(-) create mode 100644 changelog/66623.deprecated.md create mode 100644 changelog/66624.added.md create mode 100644 changelog/66624.deprecated.md diff --git a/.github/workflows/build-packages.yml 
b/.github/workflows/build-packages.yml index 443c1b12260..06cf4d5337f 100644 --- a/.github/workflows/build-packages.yml +++ b/.github/workflows/build-packages.yml @@ -276,7 +276,7 @@ jobs: - ${{ inputs.source }} container: - image: ghcr.io/saltstack/salt-ci-containers/packaging:centosstream-9 + image: ghcr.io/saltstack/salt-ci-containers/packaging:rockylinux-9 steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 717682ee55d..67eae1bf1dd 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -645,27 +645,6 @@ jobs: skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - centos-7-pkg-tests: - name: CentOS 7 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'centos-7') }} - needs: - - prepare-workflow - - build-pkgs-onedir - - build-ci-deps - uses: ./.github/workflows/test-packages-action-linux.yml - with: - distro-slug: centos-7 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - debian-11-pkg-tests: name: Debian 11 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-11') }} @@ -1558,27 +1537,6 @@ jobs: workflow-slug: ci default-timeout: 180 - centos-7: - name: CentOS 7 Test 
- if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'centos-7') }} - needs: - - prepare-workflow - - build-ci-deps - uses: ./.github/workflows/test-action-linux.yml - with: - distro-slug: centos-7 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - nox-version: 2022.8.7 - gh-actions-python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - workflow-slug: ci - default-timeout: 180 - debian-11: name: Debian 11 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-11') }} @@ -2027,7 +1985,6 @@ jobs: - amazonlinux-2023 - amazonlinux-2023-arm64 - archlinux-lts - - centos-7 - debian-11 - debian-11-arm64 - debian-12 @@ -2195,7 +2152,6 @@ jobs: - amazonlinux-2023 - amazonlinux-2023-arm64 - archlinux-lts - - centos-7 - debian-11 - debian-11-arm64 - debian-12 @@ -2224,7 +2180,6 @@ jobs: - amazonlinux-2-arm64-pkg-tests - amazonlinux-2023-pkg-tests - amazonlinux-2023-arm64-pkg-tests - - centos-7-pkg-tests - debian-11-pkg-tests - debian-11-arm64-pkg-tests - debian-12-pkg-tests diff --git a/.github/workflows/lint-action.yml b/.github/workflows/lint-action.yml index 3e1ce1928c7..3c3df9cfb89 100644 --- a/.github/workflows/lint-action.yml +++ b/.github/workflows/lint-action.yml @@ -22,7 +22,7 @@ jobs: if: ${{ contains(fromJSON('["push", "schedule", "workflow_dispatch"]'), github.event_name) || fromJSON(inputs.changed-files)['salt'] || fromJSON(inputs.changed-files)['lint'] }} container: - image: 
ghcr.io/saltstack/salt-ci-containers/python:3.9 + image: ghcr.io/saltstack/salt-ci-containers/python:3.10 steps: - name: Install System Deps @@ -66,7 +66,7 @@ jobs: if: ${{ contains(fromJSON('["push", "schedule", "workflow_dispatch"]'), github.event_name) || fromJSON(inputs.changed-files)['tests'] || fromJSON(inputs.changed-files)['lint'] }} container: - image: ghcr.io/saltstack/salt-ci-containers/python:3.8 + image: ghcr.io/saltstack/salt-ci-containers/python:3.10 steps: - name: Install System Deps diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 23b276fbfc3..e382a15ede7 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -707,27 +707,6 @@ jobs: skip-code-coverage: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - centos-7-pkg-tests: - name: CentOS 7 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-pkgs-onedir - - build-ci-deps - uses: ./.github/workflows/test-packages-action-linux.yml - with: - distro-slug: centos-7 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14 - skip-code-coverage: false - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - debian-11-pkg-tests: name: Debian 11 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -1620,27 +1599,6 @@ jobs: workflow-slug: nightly default-timeout: 360 - centos-7: - name: CentOS 7 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow 
- - build-ci-deps - uses: ./.github/workflows/test-action-linux.yml - with: - distro-slug: centos-7 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - nox-version: 2022.8.7 - gh-actions-python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14 - skip-code-coverage: false - workflow-slug: nightly - default-timeout: 360 - debian-11: name: Debian 11 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -2089,7 +2047,6 @@ jobs: - amazonlinux-2023 - amazonlinux-2023-arm64 - archlinux-lts - - centos-7 - debian-11 - debian-11-arm64 - debian-12 @@ -2536,18 +2493,6 @@ jobs: distro: photon version: "5" arch: aarch64 - - pkg-type: rpm - distro: redhat - version: "7" - arch: x86_64 - - pkg-type: rpm - distro: redhat - version: "7" - arch: arm64 - - pkg-type: rpm - distro: redhat - version: "7" - arch: aarch64 - pkg-type: rpm distro: redhat version: "8" @@ -3009,7 +2954,6 @@ jobs: - amazonlinux-2023 - amazonlinux-2023-arm64 - archlinux-lts - - centos-7 - debian-11 - debian-11-arm64 - debian-12 @@ -3096,7 +3040,6 @@ jobs: - amazonlinux-2-arm64-pkg-tests - amazonlinux-2023-pkg-tests - amazonlinux-2023-arm64-pkg-tests - - centos-7-pkg-tests - debian-11-pkg-tests - debian-11-arm64-pkg-tests - debian-12-pkg-tests diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index 5c355b5dde5..72d6ec60ac0 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -684,27 +684,6 @@ jobs: skip-code-coverage: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - centos-7-pkg-tests: - name: CentOS 7 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - 
- prepare-workflow - - build-pkgs-onedir - - build-ci-deps - uses: ./.github/workflows/test-packages-action-linux.yml - with: - distro-slug: centos-7 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14 - skip-code-coverage: false - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - debian-11-pkg-tests: name: Debian 11 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -1597,27 +1576,6 @@ jobs: workflow-slug: scheduled default-timeout: 360 - centos-7: - name: CentOS 7 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-ci-deps - uses: ./.github/workflows/test-action-linux.yml - with: - distro-slug: centos-7 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - nox-version: 2022.8.7 - gh-actions-python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14 - skip-code-coverage: false - workflow-slug: scheduled - default-timeout: 360 - debian-11: name: Debian 11 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -2066,7 +2024,6 @@ jobs: - amazonlinux-2023 - amazonlinux-2023-arm64 - archlinux-lts - - centos-7 - debian-11 - debian-11-arm64 - debian-12 @@ -2236,7 +2193,6 @@ jobs: - amazonlinux-2023 - amazonlinux-2023-arm64 - archlinux-lts - - centos-7 - debian-11 - debian-11-arm64 - debian-12 @@ -2265,7 +2221,6 @@ jobs: - amazonlinux-2-arm64-pkg-tests - 
amazonlinux-2023-pkg-tests - amazonlinux-2023-arm64-pkg-tests - - centos-7-pkg-tests - debian-11-pkg-tests - debian-11-arm64-pkg-tests - debian-12-pkg-tests diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 8a462a5ed8b..bfde12f5414 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -692,27 +692,6 @@ jobs: skip-code-coverage: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - centos-7-pkg-tests: - name: CentOS 7 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-pkgs-onedir - - build-ci-deps - uses: ./.github/workflows/test-packages-action-linux.yml - with: - distro-slug: centos-7 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14 - skip-code-coverage: true - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - debian-11-pkg-tests: name: Debian 11 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -1605,27 +1584,6 @@ jobs: workflow-slug: staging default-timeout: 180 - centos-7: - name: CentOS 7 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-ci-deps - uses: ./.github/workflows/test-action-linux.yml - with: - distro-slug: centos-7 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - nox-version: 2022.8.7 - gh-actions-python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - 
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14 - skip-code-coverage: true - workflow-slug: staging - default-timeout: 180 - debian-11: name: Debian 11 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -2360,18 +2318,6 @@ jobs: distro: photon version: "5" arch: aarch64 - - pkg-type: rpm - distro: redhat - version: "7" - arch: x86_64 - - pkg-type: rpm - distro: redhat - version: "7" - arch: arm64 - - pkg-type: rpm - distro: redhat - version: "7" - arch: aarch64 - pkg-type: rpm distro: redhat version: "8" @@ -2967,7 +2913,6 @@ jobs: - amazonlinux-2023 - amazonlinux-2023-arm64 - archlinux-lts - - centos-7 - debian-11 - debian-11-arm64 - debian-12 @@ -2996,7 +2941,6 @@ jobs: - amazonlinux-2-arm64-pkg-tests - amazonlinux-2023-pkg-tests - amazonlinux-2023-arm64-pkg-tests - - centos-7-pkg-tests - debian-11-pkg-tests - debian-11-arm64-pkg-tests - debian-12-pkg-tests diff --git a/changelog/66623.deprecated.md b/changelog/66623.deprecated.md new file mode 100644 index 00000000000..8d829eadec9 --- /dev/null +++ b/changelog/66623.deprecated.md @@ -0,0 +1 @@ +Drop CentOS 7 support diff --git a/changelog/66624.added.md b/changelog/66624.added.md new file mode 100644 index 00000000000..fbc4adf84c7 --- /dev/null +++ b/changelog/66624.added.md @@ -0,0 +1 @@ +Build RPM packages with Rocky Linux 9 (instead of CentOS Stream 9) diff --git a/changelog/66624.deprecated.md b/changelog/66624.deprecated.md new file mode 100644 index 00000000000..10b397bae85 --- /dev/null +++ b/changelog/66624.deprecated.md @@ -0,0 +1 @@ +No longer build RPM packages with CentOS Stream 9 diff --git a/cicd/golden-images.json b/cicd/golden-images.json index ee7a8acc80e..ca7818fdd6b 100644 --- a/cicd/golden-images.json +++ b/cicd/golden-images.json @@ -49,26 +49,6 @@ "is_windows": "false", "ssh_username": "arch" }, - "centos-7-arm64": { - "ami": "ami-0ef52419c91cb0169", - "ami_description": "CI Image 
of CentOS 7 arm64", - "ami_name": "salt-project/ci/centos/7/arm64/20240509.1530", - "arch": "arm64", - "cloudwatch-agent-available": "true", - "instance_type": "m6g.large", - "is_windows": "false", - "ssh_username": "centos" - }, - "centos-7": { - "ami": "ami-0973c8d1b91dcba5c", - "ami_description": "CI Image of CentOS 7 x86_64", - "ami_name": "salt-project/ci/centos/7/x86_64/20240509.1530", - "arch": "x86_64", - "cloudwatch-agent-available": "true", - "instance_type": "t3a.large", - "is_windows": "false", - "ssh_username": "centos" - }, "debian-11-arm64": { "ami": "ami-0eff227d9a94d8692", "ami_description": "CI Image of Debian 11 arm64", diff --git a/tools/ci.py b/tools/ci.py index 635462d282f..0f9f30ca126 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -682,8 +682,8 @@ def matrix( for transport in ("zeromq", "tcp"): if transport == "tcp": if distro_slug not in ( - "centosstream-9", - "centosstream-9-arm64", + "rockylinux-9", + "rockylinux-9-arm64", "photonos-5", "photonos-5-arm64", "ubuntu-22.04", @@ -831,7 +831,7 @@ def pkg_matrix( if name == "amazonlinux": name = "amazon" - elif "centos" in name or name == "rockylinux": + elif name == "rockylinux": name = "redhat" elif "photon" in name: name = "photon" @@ -967,8 +967,8 @@ def get_ci_deps_matrix(ctx: Context): _matrix = { "linux": [ - {"distro-slug": "centos-7", "arch": "x86_64"}, - {"distro-slug": "centos-7-arm64", "arch": "arm64"}, + {"distro-slug": "rockylinux-9", "arch": "x86_64"}, + {"distro-slug": "rockylinux-9-arm64", "arch": "arm64"}, ], "macos": [ {"distro-slug": "macos-12", "arch": "x86_64"}, @@ -1031,7 +1031,6 @@ def get_pkg_downloads_matrix(ctx: Context): rpm_slugs = ( "rockylinux", "amazonlinux", - "centos", "fedora", "photon", ) diff --git a/tools/pkg/repo/create.py b/tools/pkg/repo/create.py index b62daf294f1..ee494430111 100644 --- a/tools/pkg/repo/create.py +++ b/tools/pkg/repo/create.py @@ -435,7 +435,7 @@ def rpm( createrepo = shutil.which("createrepo") if createrepo is None: - container = 
"ghcr.io/saltstack/salt-ci-containers/packaging:centosstream-9" + container = "ghcr.io/saltstack/salt-ci-containers/packaging:rockylinux-9" ctx.info(f"Using docker container '{container}' to call 'createrepo'...") uid = ctx.run("id", "-u", capture=True).stdout.strip().decode() gid = ctx.run("id", "-g", capture=True).stdout.strip().decode() @@ -493,7 +493,7 @@ def rpm( if distro == "amazon": distro_name = "Amazon Linux" elif distro == "redhat": - distro_name = "RHEL/CentOS" + distro_name = "RHEL" else: distro_name = distro.capitalize() diff --git a/tools/precommit/workflows.py b/tools/precommit/workflows.py index d8040430b3b..aa844b904cc 100644 --- a/tools/precommit/workflows.py +++ b/tools/precommit/workflows.py @@ -52,7 +52,6 @@ TEST_SALT_LISTING = PlatformDefinitions( arch="arm64", ), Linux(slug="archlinux-lts", display_name="Arch Linux LTS", arch="x86_64"), - Linux(slug="centos-7", display_name="CentOS 7", arch="x86_64"), Linux(slug="debian-11", display_name="Debian 11", arch="x86_64"), Linux(slug="debian-11-arm64", display_name="Debian 11 Arm64", arch="arm64"), Linux(slug="debian-12", display_name="Debian 12", arch="x86_64"), @@ -245,12 +244,6 @@ def generate_workflows(ctx: Context): arch="arm64", pkg_type="rpm", ), - Linux( - slug="centos-7", - display_name="CentOS 7", - arch="x86_64", - pkg_type="rpm", - ), Linux( slug="debian-11", display_name="Debian 11", @@ -425,9 +418,7 @@ def generate_workflows(ctx: Context): for slug in sorted(tools.utils.get_golden_images()): if slug.endswith("-arm64"): continue - if not slug.startswith( - ("amazonlinux", "rockylinux", "centos", "fedora", "photonos") - ): + if not slug.startswith(("amazonlinux", "rockylinux", "fedora", "photonos")): continue os_name, os_version = slug.split("-") if os_name == "amazonlinux": diff --git a/tools/utils/gh.py b/tools/utils/gh.py index 9360a799fe5..8d030dbc07b 100644 --- a/tools/utils/gh.py +++ b/tools/utils/gh.py @@ -219,7 +219,7 @@ def download_pkgs_artifact( if slug.startswith(("debian", 
"ubuntu")): artifact_name += f"{arch}-deb" elif slug.startswith( - ("rockylinux", "amazonlinux", "centos", "fedora", "opensuse", "photonos") + ("rockylinux", "amazonlinux", "fedora", "opensuse", "photonos") ): artifact_name += f"{arch}-rpm" else: From a108024fcab142ed23984ed95ab3d09346a50cd5 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Tue, 11 Jun 2024 20:37:38 -0700 Subject: [PATCH 073/160] Upgrade relenv to 0.16.1 --- .github/workflows/ci.yml | 8 ++++---- .github/workflows/nightly.yml | 8 ++++---- .github/workflows/scheduled.yml | 8 ++++---- .github/workflows/staging.yml | 8 ++++---- cicd/shared-gh-workflows-context.yml | 2 +- 5 files changed, 17 insertions(+), 17 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 67eae1bf1dd..bd3e777d3f9 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -416,7 +416,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.16.0" + relenv-version: "0.16.1" python-version: "3.10.14" build-salt-onedir: @@ -432,7 +432,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.16.0" + relenv-version: "0.16.1" python-version: "3.10.14" build-pkgs-onedir: @@ -445,7 +445,7 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - relenv-version: "0.16.0" + relenv-version: "0.16.1" python-version: "3.10.14" source: "onedir" @@ -459,7 +459,7 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: 
${{ needs.prepare-workflow.outputs.cache-seed }} - relenv-version: "0.16.0" + relenv-version: "0.16.1" python-version: "3.10.14" source: "src" build-ci-deps: diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index e382a15ede7..4f851c7d93c 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -470,7 +470,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.16.0" + relenv-version: "0.16.1" python-version: "3.10.14" build-salt-onedir: @@ -486,7 +486,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.16.0" + relenv-version: "0.16.1" python-version: "3.10.14" build-pkgs-onedir: @@ -499,7 +499,7 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - relenv-version: "0.16.0" + relenv-version: "0.16.1" python-version: "3.10.14" source: "onedir" environment: nightly @@ -517,7 +517,7 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - relenv-version: "0.16.0" + relenv-version: "0.16.1" python-version: "3.10.14" source: "src" environment: nightly diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index 72d6ec60ac0..ca2e0ab3bd7 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -455,7 +455,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.16.0" + relenv-version: "0.16.1" python-version: "3.10.14" build-salt-onedir: @@ -471,7 +471,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.16.0" + relenv-version: "0.16.1" python-version: "3.10.14" build-pkgs-onedir: @@ -484,7 +484,7 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - relenv-version: "0.16.0" + relenv-version: "0.16.1" python-version: "3.10.14" source: "onedir" @@ -498,7 +498,7 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - relenv-version: "0.16.0" + relenv-version: "0.16.1" python-version: "3.10.14" source: "src" build-ci-deps: diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index bfde12f5414..3a1c39bd4ad 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -455,7 +455,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.16.0" + relenv-version: "0.16.1" python-version: "3.10.14" build-salt-onedir: @@ -471,7 +471,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ 
fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.16.0" + relenv-version: "0.16.1" python-version: "3.10.14" build-pkgs-onedir: @@ -484,7 +484,7 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - relenv-version: "0.16.0" + relenv-version: "0.16.1" python-version: "3.10.14" source: "onedir" environment: staging @@ -502,7 +502,7 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - relenv-version: "0.16.0" + relenv-version: "0.16.1" python-version: "3.10.14" source: "src" environment: staging diff --git a/cicd/shared-gh-workflows-context.yml b/cicd/shared-gh-workflows-context.yml index cb1e77e3664..9b2955f5974 100644 --- a/cicd/shared-gh-workflows-context.yml +++ b/cicd/shared-gh-workflows-context.yml @@ -1,6 +1,6 @@ nox_version: "2022.8.7" python_version: "3.10.14" -relenv_version: "0.16.0" +relenv_version: "0.16.1" mandatory_os_slugs: - rockylinux-9 - amazonlinux-2023-arm64 From 34151d449020b0c037ca17dcc8050aad35316ddc Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Fri, 14 Jun 2024 08:46:15 -0700 Subject: [PATCH 074/160] Add changelog for 66632 --- changelog/66632.fixed.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog/66632.fixed.md diff --git a/changelog/66632.fixed.md b/changelog/66632.fixed.md new file mode 100644 index 00000000000..c50213867ca --- /dev/null +++ b/changelog/66632.fixed.md @@ -0,0 +1 @@ +Upgrade relAenv to 0.16.1. 
This release fixes several package installs for salt-pip From ad253d724ac1af912ad0a92f3f76779d19ec5734 Mon Sep 17 00:00:00 2001 From: ScriptAutomate Date: Wed, 12 Jun 2024 16:57:32 -0500 Subject: [PATCH 075/160] Replace expired pubkey for security reporting --- SECURITY.md | 114 ++++++++++++++----------------- doc/security/index.rst | 151 ++++++++++++++--------------------------- 2 files changed, 102 insertions(+), 163 deletions(-) diff --git a/SECURITY.md b/SECURITY.md index 97afd202de6..eaf22ee8b5e 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -4,77 +4,65 @@ - saltproject-security.pdl@broadcom.com **GPG key ID:** -- 4EA0793D +- 37654A06 **GPG key fingerprint:** -- `8ABE 4EFC F0F4 B24B FF2A AF90 D570 F2D3 4EA0 793D` +- `99EF 26F2 6469 2D24 973A 7007 E8BF 76A7 3765 4A06` **GPG Public Key** ``` -----BEGIN PGP PUBLIC KEY BLOCK----- -mQINBFO15mMBEADa3CfQwk5ED9wAQ8fFDku277CegG3U1hVGdcxqKNvucblwoKCb -hRK6u9ihgaO9V9duV2glwgjytiBI/z6lyWqdaD37YXG/gTL+9Md+qdSDeaOa/9eg -7y+g4P+FvU9HWUlujRVlofUn5Dj/IZgUywbxwEybutuzvvFVTzsn+DFVwTH34Qoh -QIuNzQCSEz3Lhh8zq9LqkNy91ZZQO1ZIUrypafspH6GBHHcE8msBFgYiNBnVcUFH -u0r4j1Rav+621EtD5GZsOt05+NJI8pkaC/dDKjURcuiV6bhmeSpNzLaXUhwx6f29 -Vhag5JhVGGNQxlRTxNEM86HEFp+4zJQ8m/wRDrGX5IAHsdESdhP+ljDVlAAX/ttP -/Ucl2fgpTnDKVHOA00E515Q87ZHv6awJ3GL1veqi8zfsLaag7rw1TuuHyGLOPkDt -t5PAjsS9R3KI7pGnhqI6bTOi591odUdgzUhZChWUUX1VStiIDi2jCvyoOOLMOGS5 -AEYXuWYP7KgujZCDRaTNqRDdgPd93Mh9JI8UmkzXDUgijdzVpzPjYgFaWtyK8lsc -Fizqe3/Yzf9RCVX/lmRbiEH+ql/zSxcWlBQd17PKaL+TisQFXcmQzccYgAxFbj2r -QHp5ABEu9YjFme2Jzun7Mv9V4qo3JF5dmnUk31yupZeAOGZkirIsaWC3hwARAQAB -tDBTYWx0U3RhY2sgU2VjdXJpdHkgVGVhbSA8c2VjdXJpdHlAc2FsdHN0YWNrLmNv -bT6JAj4EEwECACgFAlO15mMCGwMFCQeGH4AGCwkIBwMCBhUIAgkKCwQWAgMBAh4B -AheAAAoJENVw8tNOoHk9z/MP/2vzY27fmVxU5X8joiiturjlgEqQw41IYEmWv1Bw -4WVXYCHP1yu/1MC1uuvOmOd5BlI8YO2C2oyW7d1B0NorguPtz55b7jabCElekVCh -h/H4ZVThiwqgPpthRv/2npXjIm7SLSs/kuaXo6Qy2JpszwDVFw+xCRVL0tH9KJxz -HuNBeVq7abWD5fzIWkmGM9hicG/R2D0RIlco1Q0VNKy8klG+pOFOW886KnwkSPc7 
-JUYp1oUlHsSlhTmkLEG54cyVzrTP/XuZuyMTdtyTc3mfgW0adneAL6MARtC5UB/h -q+v9dqMf4iD3wY6ctu8KWE8Vo5MUEsNNO9EA2dUR88LwFZ3ZnnXdQkizgR/Aa515 -dm17vlNkSoomYCo84eN7GOTfxWcq+iXYSWcKWT4X+h/ra+LmNndQWQBRebVUtbKE -ZDwKmiQz/5LY5EhlWcuU4lVmMSFpWXt5FR/PtzgTdZAo9QKkBjcv97LYbXvsPI69 -El1BLAg+m+1UpE1L7zJT1il6PqVyEFAWBxW46wXCCkGssFsvz2yRp0PDX8A6u4yq -rTkt09uYht1is61joLDJ/kq3+6k8gJWkDOW+2NMrmf+/qcdYCMYXmrtOpg/wF27W -GMNAkbdyzgeX/MbUBCGCMdzhevRuivOI5bu4vT5s3KdshG+yhzV45bapKRd5VN+1 -mZRqiQJVBBMBCAA/AhsDBgsJCAcDAgYVCAIJCgsEFgIDAQIeAQIXgBYhBIq+Tvzw -9LJL/yqvkNVw8tNOoHk9BQJe1uRXBQkPoTz0AAoJENVw8tNOoHk9akAQANKIDIBY -J3DmWH3g6rWURdREQcBVfMkw6j5MHlIEwlGrN3whSaPv2KR3tatRccBCQ0olQeYb -ZeFtPuf0Du+LqGaAePo5DkPNU7GHoba2+ZE/sJ4wZ4CzAQM6+LvH2iLHeLZ1VLlu -ZEftxD1RFKTqpnav8KiyYGkeFuEn4eMSIhbudp/8wkN40sCWL22D141EhVSRvLlO -BMUpTWdtSYTg0F2pgQL5U2A56syuiwUwPXzQb45JEJILmG8zkeJB9s8kGtErypIH -P+qxJXq24woGUFeJjiLdiOhI6/YoVBACUkKmig36CGf/DH5NAeQECeZq3YBNp7XK -tsF1dPitxuTM/UkOHoHUnGhDlBcQMWe9WuBK4rA+7GH9NT8o7M6+2OKhk181tJ+s -Y2kP7RSXOV162thRsNvVImXajAIFTR3ksEDFGVq/4jh85jFoIbNH3x27NxOu6e2p -OIkXNXmSFXLUmwbfEfIk06gqP3xzkaj+eWHcLDkn9bUKblBJhHdhf9Vsy/N2NRW2 -23c64qDutw1NX7msDuN3KXisim+isBzPVVzymkkhkXK+UpjrRR0ePvph3fnGf1bc -NipVtn1KKM7kurSrSjFVLwLi52SGnEHKJnbbhh+AKV09SNYi6IaKL8yw8c1d0K80 -PlBaJEvkC6myzaaRtYcna4pbiIysBaZtwDOOuQINBFO15mMBEAC5UuLii9ZLz6qH -fIJp35IOW9U8SOf7QFhzXR7NZ3DmJsd3f6Nb/habQFIHjm3K9wbpj+FvaW2oWRlF -VvYdzjUq6c82GUUjW1dnqgUvFwdmM8351n0YQ2TonmyaF882RvsRZrbJ65uvy7SQ -xlouXaAYOdqwLsPxBEOyOnMPSktW5V2UIWyxsNP3sADchWIGq9p5D3Y/loyIMsS1 -dj+TjoQZOKSj7CuRT98+8yhGAY8YBEXu9r3I9o6mDkuPpAljuMc8r09Im6az2egt -K/szKt4Hy1bpSSBZU4W/XR7XwQNywmb3wxjmYT6Od3Mwj0jtzc3gQiH8hcEy3+BO -+NNmyzFVyIwOLziwjmEcw62S57wYKUVnHD2nglMsQa8Ve0e6ABBMEY7zGEGStva5 -9rfgeh0jUMJiccGiUDTMs0tdkC6knYKbu/fdRqNYFoNuDcSeLEw4DdCuP01l2W4y -Y+fiK6hAcL25amjzc+yYo9eaaqTn6RATbzdhHQZdpAMxY+vNT0+NhP1Zo5gYBMR6 -5Zp/VhFsf67ijb03FUtdw9N8dHwiR2m8vVA8kO/gCD6wS2p9RdXqrJ9JhnHYWjiV -uXR+f755ZAndyQfRtowMdQIoiXuJEXYw6XN+/BX81gJaynJYc0uw0MnxWQX+A5m8 
-HqEsbIFUXBYXPgbwXTm7c4IHGgXXdwARAQABiQI8BBgBCAAmAhsMFiEEir5O/PD0 -skv/Kq+Q1XDy006geT0FAl7W5K0FCQ+hPUoACgkQ1XDy006geT1Q0Q//atnw1D4J -13nL8Mygk+ANY4Xljub/TeZqKtzmnWGso843XysErLH1adCu1KDX1Dj4/o3WoPOt -0O78uSS81N428ocOPKx+fA63n7q1mRqHHy6pLLVKoT66tmvE1ZN0ObaiPK9IxZkB -ThGlHJk9VaUg0vzAaRznogWeBh1dyZktVrtbUO5u4xDX9iql/unVmCWm+U1R7t4q -fqPEbk8ZnWc7x4bAZf8/vSQ93mAbpnRRuJdDK9tsiuhl8pRz7OyzvMS81rVF75ja -7CcShPofrW4yZ7FqAUMwTbfrvsAraWmDjW17Ao7C2dUA9ViwSKJ6u6Pd5no/hwbm -jVoxtO2RvjGOBxKneD36uENAUMBExjDTkSHmOxUYSknrEKUy7P1OL2ZHLG8/rouN -5ZvIxHiMkz12ukSt29IHvCngn1UB4/7+tvDHqug4ZAZPuwH7TC5Hk6WO0OoK8Eb2 -sQa2QoehQjwK0IakGd5kFEqKgbrwYPPa3my7l58nOZmPHdMcTOzgKvUEYAITjsT4 -oOtocs9Nj+cfCfp6YUn6JeYfiHs+Xhze5igdWIl0ZO5rTmbqcD8A1URKBds0WA+G -FLP9shPC0rS/L3Y1fKhqAc0h+znWBU6xjipTkmzh3FdM8gGT6g9YwGQNbi/x47k5 -vtBIWO4LPeGEvb2Gs65PL2eouOqU6yvBr5Y= -=F/97 +mQINBGZpxDsBEACz8yoRBXaJiifaWz3wd4FLSO18mgH7H/+0iNTbV1ZwhgGEtWTF +Z31HfrsbxVgICoMgFYt8WKnc4MHZLIgDfTuCFQpf7PV/VqRBAknZwQKEAjHfrYNz +Q1vy3CeKC1qcKQISEQr7VFf58sOC8GJ54jLLc2rCsg9cXI6yvUFtGwL9Qv7g/NZn +rtLjc4NZIKdIvSt+/PtooQtsz0jfLMdMpMFa41keH3MknIbydBUnGj7eC8ANN/iD +Re2QHAW2KfQh3Ocuh/DpJ0/dwbzXmXfMWHk30E+s31TfdLiFt1Iz5kZDF8iHrDMq +x39/GGmF10y5rfq43V1Ucxm+1tl5Km0JcX6GpPUtgRpfUYAxwxfGfezt4PjYRYH2 +mNxXXPLsnVTvdWPTvS0msSrcTHmnU5His38I6goXI7dLZm0saqoWi3sqEQ8TPS6/ +DkLtYjpb/+dql+KrXD7erd3j8KKflIXn7AEsv+luNk6czGOKgdG9agkklzOHfEPc +xOGmaFfe/1mu8HxgaCuhNAQWlk79ZC+GAm0sBZIQAQRtABgag5vWr16hVix7BPMG +Fp8+caOVv6qfQ7gBmJ3/aso6OzyOxsluVxQRt94EjPTm0xuwb1aYNJOhEj9cPkjQ +XBjo3KN0rwcAViR/fdUzrIV1sn2hms0v5WZ+TDtz1w0OpLZOwe23BDE1+QARAQAB +tEJTYWx0IFByb2plY3QgU2VjdXJpdHkgVGVhbSA8c2FsdHByb2plY3Qtc2VjdXJp +dHkucGRsQGJyb2FkY29tLmNvbT6JAlcEEwEKAEEWIQSZ7ybyZGktJJc6cAfov3an +N2VKBgUCZmnEOwIbAwUJB4TOAAULCQgHAgIiAgYVCgkICwIEFgIDAQIeBwIXgAAK +CRDov3anN2VKBk7rD/9QdcYdNGfk96W906HlVpb3JCwT0t9T7ElP97Ot0YN6LqMj +vVQpxWYi7riUSyt1FtlCAM+hmghImzILF9LKDRCZ1H5UStI/u9T53cZpUZtVW/8R +bUNBCl495UcgioIZG5DsfZ/GdBOgY+hQfdgh7HC8a8A/owCt2hHbnth970NQ+LHb 
+/0ERLfOHRxozgPBhze8Vqf939KlteM5ljgTw/IkJJIsxJi4C6pQntSHvB3/Bq/Nw +Kf3vk3XYFtVibeQODSVvc6useo+SNGV/wsK/6kvh/vfP9Trv/GMOn/89Bj2aL1PR +M382E6sDB9d22p4ehVgbcOpkwHtr9DGerK9xzfG4aUjLu9qVD5Ep3gqKSsCe+P8z +bpADdVCnk+Vdp3Bi+KI7buSkqfbZ0m9vCY3ei1fMiDiTTjvNliL5QCO6PvYNYiDw ++LLImrQThv55ZRQsRRT7J6A94kwDoI6zcBEalv/aPws0nQHJtgWRUpmy5RcbVu9Z +QBXlUpCzCB+gGaGRE1u0hCfuvkbcG1pXFFBdSUuAK4o4ktiRALVUndELic/PU1nR +jwo/+j0SGw/jTwqVChUfLDZbiAQ2JICoVpZ+e1zQfsxa/yDu2e4D543SvNFHDsxh +bsBeCsopzJSA0n2HAdYvPxOPoWVvZv+U8ZV3EEVOUgsO5//cRJddCgLU89Q4DrkC +DQRmacQ7ARAAsz8jnpfw3DCRxdCVGiqWAtgj8r2gx5n1wJsKsgvyGQdKUtPwlX04 +7w13lIDT2DwoXFozquYsTn9XkIoWbVckqo0NN/V7/QxIZIYTqRcFXouHTbXDJm5C +tsvfDlnTsaplyRawPU2mhYg39/lzIt8zIjvy5zo/pElkRP5m03nG+ItrsHN6CCvf +ZiRxme6EQdn+aoHh2GtICL8+c3HvQzTHYKxFn84Ibt3uNxwt+Mu6YhG9tkYMQQk5 +SkYA4CYAaw2Lc/g0ee36iqw/5d79M8YcQtHhy5zzqgdEvExjFPdowV1hhFIEkNkM +uqIAknXVesqLLw2hPeYmyhYQqeBKIrWmBhBKX9c0vMYkDDH3T/sSylVhH0QAXP6E +WmLja3E1ov6pt6j7j/wWzC9LSMFDJI2yWCeOE1oea5D89tH6XvsGRTiog62zF/9a +77197iIa0+o91chp4iLkzDvuK8pVujPx8bNsK8jlJ+OW73NmliCVg+hecoFLNsri +/TsBngFNVcu79Q1XfyvoDdR2C09ItCBEZGt6LOlq/+ATUw1aBz6L1hvLBtiR3Hfu +X31YlbxdvVPjlzg6O6GXSfnokNTWv2mVXWTRIrP0RrKvMyiNPXVW7EunUuXI0Axk +Xg3E5kAjKXkBXzoCTCVz/sXPLjvjI0x3Z7obgPpcTi9h5DIX6PFyK/kAEQEAAYkC +PAQYAQoAJhYhBJnvJvJkaS0klzpwB+i/dqc3ZUoGBQJmacQ7AhsMBQkHhM4AAAoJ +EOi/dqc3ZUoGDeAQAKbyiHA1sl0fnvcZxoZ3mWA/Qesddp7Nv2aEW8I3hAJoTVml +ZvMxk8leZgsQJtSsVDNnxeyW+WCIUkhxmd95UlkTTj5mpyci1YrxAltPJ2TWioLe +F2doP8Y+4iGnaV+ApzWG33sLr95z37RKVdMuGk/O5nLMeWnSPA7HHWJCxECMm0SH +uI8aby8w2aBZ1kOMFB/ToEEzLBu9fk+zCzG3uH8QhdciMENVhsyBSULIrmwKglyI +VQwj2dXHyekQh7QEHV+CdKMfs3ZOANwm52OwjaK0dVb3IMFGvlUf4UXXfcXwLAkj +vW+Ju4kLGxVQpOlh1EBain9WOaHZGh6EGuTpjJO32PyRq8iSMNb8coeonoPFWrE/ +A5dy3z5x5CZhJ6kyNwYs/9951r30Ct9qNZo9WZwp8AGQVs+J9XEYnZIWXnO1hdKs +dRStPvY7VqS500t8eWqWRfCLgofZAb9Fv7SwTPQ2G7bOuTXmQKAIEkU9vzo5XACu +AtR/9bC9ghNnlNuH4xiViBclrq2dif/I2ZwItpQHjuCDeMKz9kdADRI0tuNPpRHe +QP1YpURW+I+PYZzNgbnwzl6Bxo7jCHFgG6BQ0ih5sVwEDhlXjSejd8CNMYEy3ElL 
+xJLUpltwXLZSrJEXYjtJtnh0om71NXes0OyWE1cL4+U6WA9Hho6xedjk2bai +=pPmt -----END PGP PUBLIC KEY BLOCK----- ``` diff --git a/doc/security/index.rst b/doc/security/index.rst index e5a36381e4e..13a6b71f665 100644 --- a/doc/security/index.rst +++ b/doc/security/index.rst @@ -5,8 +5,8 @@ Security disclosure policy ========================== :email: saltproject-security.pdl@broadcom.com -:gpg key ID: 4EA0793D -:gpg key fingerprint: ``8ABE 4EFC F0F4 B24B FF2A AF90 D570 F2D3 4EA0 793D`` +:gpg key ID: 37654A06 +:gpg key fingerprint: ``99EF 26F2 6469 2D24 973A 7007 E8BF 76A7 3765 4A06`` **gpg public key:** @@ -14,104 +14,55 @@ Security disclosure policy -----BEGIN PGP PUBLIC KEY BLOCK----- - mQINBFO15mMBEADa3CfQwk5ED9wAQ8fFDku277CegG3U1hVGdcxqKNvucblwoKCb - hRK6u9ihgaO9V9duV2glwgjytiBI/z6lyWqdaD37YXG/gTL+9Md+qdSDeaOa/9eg - 7y+g4P+FvU9HWUlujRVlofUn5Dj/IZgUywbxwEybutuzvvFVTzsn+DFVwTH34Qoh - QIuNzQCSEz3Lhh8zq9LqkNy91ZZQO1ZIUrypafspH6GBHHcE8msBFgYiNBnVcUFH - u0r4j1Rav+621EtD5GZsOt05+NJI8pkaC/dDKjURcuiV6bhmeSpNzLaXUhwx6f29 - Vhag5JhVGGNQxlRTxNEM86HEFp+4zJQ8m/wRDrGX5IAHsdESdhP+ljDVlAAX/ttP - /Ucl2fgpTnDKVHOA00E515Q87ZHv6awJ3GL1veqi8zfsLaag7rw1TuuHyGLOPkDt - t5PAjsS9R3KI7pGnhqI6bTOi591odUdgzUhZChWUUX1VStiIDi2jCvyoOOLMOGS5 - AEYXuWYP7KgujZCDRaTNqRDdgPd93Mh9JI8UmkzXDUgijdzVpzPjYgFaWtyK8lsc - Fizqe3/Yzf9RCVX/lmRbiEH+ql/zSxcWlBQd17PKaL+TisQFXcmQzccYgAxFbj2r - QHp5ABEu9YjFme2Jzun7Mv9V4qo3JF5dmnUk31yupZeAOGZkirIsaWC3hwARAQAB - tDBTYWx0U3RhY2sgU2VjdXJpdHkgVGVhbSA8c2VjdXJpdHlAc2FsdHN0YWNrLmNv - bT6JAj4EEwECACgFAlO15mMCGwMFCQeGH4AGCwkIBwMCBhUIAgkKCwQWAgMBAh4B - AheAAAoJENVw8tNOoHk9z/MP/2vzY27fmVxU5X8joiiturjlgEqQw41IYEmWv1Bw - 4WVXYCHP1yu/1MC1uuvOmOd5BlI8YO2C2oyW7d1B0NorguPtz55b7jabCElekVCh - h/H4ZVThiwqgPpthRv/2npXjIm7SLSs/kuaXo6Qy2JpszwDVFw+xCRVL0tH9KJxz - HuNBeVq7abWD5fzIWkmGM9hicG/R2D0RIlco1Q0VNKy8klG+pOFOW886KnwkSPc7 - JUYp1oUlHsSlhTmkLEG54cyVzrTP/XuZuyMTdtyTc3mfgW0adneAL6MARtC5UB/h - q+v9dqMf4iD3wY6ctu8KWE8Vo5MUEsNNO9EA2dUR88LwFZ3ZnnXdQkizgR/Aa515 - 
dm17vlNkSoomYCo84eN7GOTfxWcq+iXYSWcKWT4X+h/ra+LmNndQWQBRebVUtbKE - ZDwKmiQz/5LY5EhlWcuU4lVmMSFpWXt5FR/PtzgTdZAo9QKkBjcv97LYbXvsPI69 - El1BLAg+m+1UpE1L7zJT1il6PqVyEFAWBxW46wXCCkGssFsvz2yRp0PDX8A6u4yq - rTkt09uYht1is61joLDJ/kq3+6k8gJWkDOW+2NMrmf+/qcdYCMYXmrtOpg/wF27W - GMNAkbdyzgeX/MbUBCGCMdzhevRuivOI5bu4vT5s3KdshG+yhzV45bapKRd5VN+1 - mZRqiQJVBBMBAgA/AhsDBgsJCAcDAgYVCAIJCgsEFgIDAQIeAQIXgBYhBIq+Tvzw - 9LJL/yqvkNVw8tNOoHk9BQJb0e5rBQkL3m8IAAoJENVw8tNOoHk9fzMP/ApQtkQD - BmoYEBTF6BH1bywzDw5OHpnBSLbuoYtA3gkhnm/83MzFDcGn22pgo2Fv0MuHltWI - G2oExzje7szmcM6Xg3ZTKapJ3/p2J+P33tkJA1LWpg+DdgdQlqrjlXKwEnikszuB - 9IMhbjoPeBzwiUtsBQmcwbVgwMzbscwoV5DJ/gLDCkgF4rp2uKEYAcBi8s9NGX6p - zQsb9Sb0/bKdCrszAcvUn4WYB6WbAPttvutYHtg/nZfXEeX/SgBueXo3lO9vzFlO - r3Zgk7WeucsEqa9Qo0VLOq28HykixM5mEJKsAQrNIqM1DqXgfDch8RJAHzgMBHFH - Qi9hJXk1/6OA2FPXQGcA9Td5Dt0i1Z7wMrAUMj3s9gNMVCD0hQqEKfUtpyV7KBAj - AO5j8Wr8KafnRm6czBCkcV0SRzHQSHdYyncozWwPgWOaRC9AY9fEDz8lBaSoB/C+ - dyO/xZMTWoaWqkHozVoHIrCc4CAtZTye/5mxFhq15Q1Iy/NjelrMTCD1kql1dNIP - oOgfOYl1xLMQIBwrrCrgeRIvxEgKRf9KOLbSrS7+3vOKoxf+LD4AQfLci8dFyH+I - t0Z43nk93yTOI82RTdz5GwUXIKcvGhsJ8bgNlGTxM1R/Sl8Sg8diE2PRAp/fk7+g - CwOM8VkeyrDM2k1cy64d8USkbR7YtT3otyFQiQJVBBMBCAA/AhsDBgsJCAcDAgYV - CAIJCgsEFgIDAQIeAQIXgBYhBIq+Tvzw9LJL/yqvkNVw8tNOoHk9BQJeapbNBQkN - v4KKAAoJENVw8tNOoHk9BFQP/04a1yQb3aOYbNgx+ER9l54wZbUUlReU+ujmlW03 - 12ZW8fFZ0SN2q7xKtE/I9nNl1gjJ7NHTP3FhZ0eNyG+mJeGyrscVKxaAkTV+71e3 - 7n94/qC2bM753X+2160eR7Md+R/itoljStwmib1583rSTTUld1i4FnUTrEhF7MBt - I/+5l7vUK4Hj1RPovHVeHXYfdbrS6wCBi6GsdOfYGfGacZIfM4XLXTkyjVt4Zg0j - rwZ36P1amHky1QyvQ2stkXjCEtP04h3o3EfC1yupNXarO1VXj10/wWYhoGAz6AT2 - Usk6DiaiJqHPy2RwPfKzv7ZrUlMxKrqjPUHcoBf++EjzFtR3LJ0pY2fLwp6Pk4s4 - 18Xwi7r16HnCH/BZgqZVyXAhDV6+U9rAHab/n4b0hcWWaT2SIhsyZKtEMiTMJeq5 - aAMcRSWX+dHO+MzMIBzNu7BO3b+zODD0+XSMsPqeHp3cqfZ3EHobKQPPFucdfjug - Hx2+dbPD3IwJVIilc9Otfz/+JYG4im5p4N6UCwXHbtiuuREC1SQpU9BqEjQAyIiL - gXlE5MSVqXijkrIpYB+K8cR+44nQ4K2kc4ievNqXR6D7XQ3AE76QN84Lby2b5W86 - 
bbboIy0Bgy+9jgCx0CS7fk1P8zx1dw2FNDVfxZ+s473ZvwP1wdSRZICjZUvM8hx4 - 4kPCiQJVBBMBCAA/AhsDBgsJCAcDAgYVCAIJCgsEFgIDAQIeAQIXgBYhBIq+Tvzw - 9LJL/yqvkNVw8tNOoHk9BQJiOkMeBQkUJ/c7AAoJENVw8tNOoHk9Xx8P/26W8v/v - Exmttzcqh7MlihddXfr2lughSuUBQ8aLsffGHSGIgyqSPlq0Fl5qOCoJ8hYZSBqV - yEfo7iRY7E3K1LGXKDkpup9hC1wMjR0A25eoXwEnD2vEQ/upXXueH05vkcMc165B - cK0kNxas+2amCc3nHJOlfWILXQk4OS+nB0lBWe8H96ppfAaX/G0JiYsa0hjNycZq - 0ftEdCkAJRvSFuu6d3gXH69KLxoNcJOE+99f3wMOuOcX3Xf1k/cwqdJRdEiW8oz8 - Gf5ZRzWcpsXXg6nB2mkahLoRDMM2U+1C6fHbUg4yTvU1AB+F/OYqe1d0hedho0o5 - +WWoTuM/U79+m3NM14qvr0iJP7ytABiEE96nNAz+Q0NDZqA6JoUd7obo8KVjGHEt - 9bRl/8K/zWkdNLoF84tWjEiBCzCKXGEay7lgiIx5f3OvP91CfGL+ILHrk/AZR1eE - M+KI7wB8sJEFF95UoKVua3YzLIFScB4bUEOg6bz8xSSP4a0BWktSm5ws8iCWqOE6 - S9haCppZ7a6k5czQNPJV2bp2eTS4ykFAQLv/mHMS5awIvb8b630Rufn1vZHKCrMf - WdSbBZD7oojxYo1psPlfzN2KUrNXgl7vAUNagJEogMoiYAZ2ML7rTVAC1qnbxQb+ - DeC+r0I98AIY6igIgRbcybH3ccfXYNtcxLUJuQINBFO15mMBEAC5UuLii9ZLz6qH - fIJp35IOW9U8SOf7QFhzXR7NZ3DmJsd3f6Nb/habQFIHjm3K9wbpj+FvaW2oWRlF - VvYdzjUq6c82GUUjW1dnqgUvFwdmM8351n0YQ2TonmyaF882RvsRZrbJ65uvy7SQ - xlouXaAYOdqwLsPxBEOyOnMPSktW5V2UIWyxsNP3sADchWIGq9p5D3Y/loyIMsS1 - dj+TjoQZOKSj7CuRT98+8yhGAY8YBEXu9r3I9o6mDkuPpAljuMc8r09Im6az2egt - K/szKt4Hy1bpSSBZU4W/XR7XwQNywmb3wxjmYT6Od3Mwj0jtzc3gQiH8hcEy3+BO - +NNmyzFVyIwOLziwjmEcw62S57wYKUVnHD2nglMsQa8Ve0e6ABBMEY7zGEGStva5 - 9rfgeh0jUMJiccGiUDTMs0tdkC6knYKbu/fdRqNYFoNuDcSeLEw4DdCuP01l2W4y - Y+fiK6hAcL25amjzc+yYo9eaaqTn6RATbzdhHQZdpAMxY+vNT0+NhP1Zo5gYBMR6 - 5Zp/VhFsf67ijb03FUtdw9N8dHwiR2m8vVA8kO/gCD6wS2p9RdXqrJ9JhnHYWjiV - uXR+f755ZAndyQfRtowMdQIoiXuJEXYw6XN+/BX81gJaynJYc0uw0MnxWQX+A5m8 - HqEsbIFUXBYXPgbwXTm7c4IHGgXXdwARAQABiQI8BBgBAgAmAhsMFiEEir5O/PD0 - skv/Kq+Q1XDy006geT0FAlvR7oMFCQvebyAACgkQ1XDy006geT2Hxw//Zha8j8Uc - 4B+DmHhZIvPmHp9aFI4DWhC7CBDrYKztBz42H6eX+UsBu4p+uBDKdW9xJH+Qt/zF - nf/zB5Bhc/wFceVRCAkWxPdiIQeo5XQGjZeORjle7E9iunTko+5q1q9I7IgqWYrn - jRmulDvRhO7AoUrqGACDrV6t0F1/XPB8seR2i6axFmFlt1qBHasRq11yksdgNYiD - 
KXaovf7csDGPGOCWEKMX7BFGpdK/dWdNYfH0Arfom0U5TqNfvGtP4yRPx2bcs7/1 - VXPj7IqhBgOtA9pwtMjFki8HGkqj7bB2ErFBOnSwqqNnNcbnhiO6D74SHVGAHhKZ - whaMPDg76EvjAezoLHg7KWYOyUkWJSLa+YoM9r4+PJuEuW/XuaZCNbrAhek+p3pD - ywhElvZe/2UFk619qKzwSbTzk7a90rxLQ2wwtd0vxAW/GyjWl4/kOMZhI5+LAk1l - REucE0fSQxzCTeXu2ObvFR9ic02IYGH3Koz8CrGReEI1J05041Y5IhKxdsvGOD2W - e7ymcblYW4Gz8eYFlLeNJkj/38R7qmNZ028XHzAZDCAWDiTFrnCoglyk+U0JRHfg - HTsdvoc8mBdT/s24LhnfAbpLizlrZZquuOF6NLQSkbuLtmIwf+h9ynEEJxEkGGWg - 7JqB1tMjNHLkRpveO/DTYB+iffpba1nCgumJAjwEGAEIACYCGwwWIQSKvk788PSy - S/8qr5DVcPLTTqB5PQUCYjpDOQUJFCf3VgAKCRDVcPLTTqB5PYDiEADaj1aAdXDb - +XrlhzlGCT3e16RDiE4BjSD1KHZX8ZDABI79JDG0iMN2PpWuViXq7AvWuwgNYdac - WjHsZGgHW82UoPVGKnfEVjjf0lQQIIcgdS5dEV8LamkeIo4vKUX/MZY+Mivk6luP - vCec9Euj/XU1nY6gGq6inpwDtZkNoJlCBune/IIGS82dU8RrSGAHNRZoaDJfdfQm - j7YAOWCUqyzn747yMyuMUOc15iJIgOz1dKN5YwDmFkzjlw+616Aswcp8UA0OfOQ+ - e4THli32BgKTSNeOGhGgx1xCDkt+0gP1L0L2Sqhlr6BnqNF65mQ4j2v6UGY1noCo - jYxFchoa1zEdEiZRr/sRO91XlJtK7HyIAI0cUHKVU+Cayoh//OBQBJnbeZlfh9Qn - 4ead1pTz9bcKIeZleAjlzNG249bGY+82WsFghb4/7U9MYJVePz0m1zJKPkdABZ+R - lSDvhf4ImesfH5UuofZFv1UXmQL4yV7PDXXdy2xhma7YLznyZTUobDoJiZbuO72O - g5HJCpYoNfvGx++Z9naomUWufqi9PWigEMxU8lUtiGaLQrDW3inTOZTTmTnsJiAI - Lhku0Jr4SjCqxoEFydXOGvNV5XB4WXvf+A6JhcZI+/S72ai1CeSgMFiJLAEb2MZ+ - fwPKmQ2cKnCBs5ASj1DkgUcz2c8DTUPVqg== - =i1Tf + mQINBGZpxDsBEACz8yoRBXaJiifaWz3wd4FLSO18mgH7H/+0iNTbV1ZwhgGEtWTF + Z31HfrsbxVgICoMgFYt8WKnc4MHZLIgDfTuCFQpf7PV/VqRBAknZwQKEAjHfrYNz + Q1vy3CeKC1qcKQISEQr7VFf58sOC8GJ54jLLc2rCsg9cXI6yvUFtGwL9Qv7g/NZn + rtLjc4NZIKdIvSt+/PtooQtsz0jfLMdMpMFa41keH3MknIbydBUnGj7eC8ANN/iD + Re2QHAW2KfQh3Ocuh/DpJ0/dwbzXmXfMWHk30E+s31TfdLiFt1Iz5kZDF8iHrDMq + x39/GGmF10y5rfq43V1Ucxm+1tl5Km0JcX6GpPUtgRpfUYAxwxfGfezt4PjYRYH2 + mNxXXPLsnVTvdWPTvS0msSrcTHmnU5His38I6goXI7dLZm0saqoWi3sqEQ8TPS6/ + DkLtYjpb/+dql+KrXD7erd3j8KKflIXn7AEsv+luNk6czGOKgdG9agkklzOHfEPc + xOGmaFfe/1mu8HxgaCuhNAQWlk79ZC+GAm0sBZIQAQRtABgag5vWr16hVix7BPMG + Fp8+caOVv6qfQ7gBmJ3/aso6OzyOxsluVxQRt94EjPTm0xuwb1aYNJOhEj9cPkjQ + 
XBjo3KN0rwcAViR/fdUzrIV1sn2hms0v5WZ+TDtz1w0OpLZOwe23BDE1+QARAQAB + tEJTYWx0IFByb2plY3QgU2VjdXJpdHkgVGVhbSA8c2FsdHByb2plY3Qtc2VjdXJp + dHkucGRsQGJyb2FkY29tLmNvbT6JAlcEEwEKAEEWIQSZ7ybyZGktJJc6cAfov3an + N2VKBgUCZmnEOwIbAwUJB4TOAAULCQgHAgIiAgYVCgkICwIEFgIDAQIeBwIXgAAK + CRDov3anN2VKBk7rD/9QdcYdNGfk96W906HlVpb3JCwT0t9T7ElP97Ot0YN6LqMj + vVQpxWYi7riUSyt1FtlCAM+hmghImzILF9LKDRCZ1H5UStI/u9T53cZpUZtVW/8R + bUNBCl495UcgioIZG5DsfZ/GdBOgY+hQfdgh7HC8a8A/owCt2hHbnth970NQ+LHb + /0ERLfOHRxozgPBhze8Vqf939KlteM5ljgTw/IkJJIsxJi4C6pQntSHvB3/Bq/Nw + Kf3vk3XYFtVibeQODSVvc6useo+SNGV/wsK/6kvh/vfP9Trv/GMOn/89Bj2aL1PR + M382E6sDB9d22p4ehVgbcOpkwHtr9DGerK9xzfG4aUjLu9qVD5Ep3gqKSsCe+P8z + bpADdVCnk+Vdp3Bi+KI7buSkqfbZ0m9vCY3ei1fMiDiTTjvNliL5QCO6PvYNYiDw + +LLImrQThv55ZRQsRRT7J6A94kwDoI6zcBEalv/aPws0nQHJtgWRUpmy5RcbVu9Z + QBXlUpCzCB+gGaGRE1u0hCfuvkbcG1pXFFBdSUuAK4o4ktiRALVUndELic/PU1nR + jwo/+j0SGw/jTwqVChUfLDZbiAQ2JICoVpZ+e1zQfsxa/yDu2e4D543SvNFHDsxh + bsBeCsopzJSA0n2HAdYvPxOPoWVvZv+U8ZV3EEVOUgsO5//cRJddCgLU89Q4DrkC + DQRmacQ7ARAAsz8jnpfw3DCRxdCVGiqWAtgj8r2gx5n1wJsKsgvyGQdKUtPwlX04 + 7w13lIDT2DwoXFozquYsTn9XkIoWbVckqo0NN/V7/QxIZIYTqRcFXouHTbXDJm5C + tsvfDlnTsaplyRawPU2mhYg39/lzIt8zIjvy5zo/pElkRP5m03nG+ItrsHN6CCvf + ZiRxme6EQdn+aoHh2GtICL8+c3HvQzTHYKxFn84Ibt3uNxwt+Mu6YhG9tkYMQQk5 + SkYA4CYAaw2Lc/g0ee36iqw/5d79M8YcQtHhy5zzqgdEvExjFPdowV1hhFIEkNkM + uqIAknXVesqLLw2hPeYmyhYQqeBKIrWmBhBKX9c0vMYkDDH3T/sSylVhH0QAXP6E + WmLja3E1ov6pt6j7j/wWzC9LSMFDJI2yWCeOE1oea5D89tH6XvsGRTiog62zF/9a + 77197iIa0+o91chp4iLkzDvuK8pVujPx8bNsK8jlJ+OW73NmliCVg+hecoFLNsri + /TsBngFNVcu79Q1XfyvoDdR2C09ItCBEZGt6LOlq/+ATUw1aBz6L1hvLBtiR3Hfu + X31YlbxdvVPjlzg6O6GXSfnokNTWv2mVXWTRIrP0RrKvMyiNPXVW7EunUuXI0Axk + Xg3E5kAjKXkBXzoCTCVz/sXPLjvjI0x3Z7obgPpcTi9h5DIX6PFyK/kAEQEAAYkC + PAQYAQoAJhYhBJnvJvJkaS0klzpwB+i/dqc3ZUoGBQJmacQ7AhsMBQkHhM4AAAoJ + EOi/dqc3ZUoGDeAQAKbyiHA1sl0fnvcZxoZ3mWA/Qesddp7Nv2aEW8I3hAJoTVml + ZvMxk8leZgsQJtSsVDNnxeyW+WCIUkhxmd95UlkTTj5mpyci1YrxAltPJ2TWioLe + 
F2doP8Y+4iGnaV+ApzWG33sLr95z37RKVdMuGk/O5nLMeWnSPA7HHWJCxECMm0SH + uI8aby8w2aBZ1kOMFB/ToEEzLBu9fk+zCzG3uH8QhdciMENVhsyBSULIrmwKglyI + VQwj2dXHyekQh7QEHV+CdKMfs3ZOANwm52OwjaK0dVb3IMFGvlUf4UXXfcXwLAkj + vW+Ju4kLGxVQpOlh1EBain9WOaHZGh6EGuTpjJO32PyRq8iSMNb8coeonoPFWrE/ + A5dy3z5x5CZhJ6kyNwYs/9951r30Ct9qNZo9WZwp8AGQVs+J9XEYnZIWXnO1hdKs + dRStPvY7VqS500t8eWqWRfCLgofZAb9Fv7SwTPQ2G7bOuTXmQKAIEkU9vzo5XACu + AtR/9bC9ghNnlNuH4xiViBclrq2dif/I2ZwItpQHjuCDeMKz9kdADRI0tuNPpRHe + QP1YpURW+I+PYZzNgbnwzl6Bxo7jCHFgG6BQ0ih5sVwEDhlXjSejd8CNMYEy3ElL + xJLUpltwXLZSrJEXYjtJtnh0om71NXes0OyWE1cL4+U6WA9Hho6xedjk2bai + =pPmt -----END PGP PUBLIC KEY BLOCK----- The SaltStack Security Team is available at saltproject-security.pdl@broadcom.com for From 6fe58ffd30560ef83b8bd40342a81b3e1f6c228d Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Thu, 23 May 2024 15:05:16 -0700 Subject: [PATCH 076/160] Enable fips supported algorithms --- doc/ref/configuration/master.rst | 14 + doc/ref/configuration/minion.rst | 22 + salt/channel/client.py | 36 +- salt/channel/server.py | 147 +++--- salt/config/__init__.py | 9 + salt/crypt.py | 445 ++++++++++-------- salt/master.py | 4 +- salt/transport/zeromq.py | 14 +- tests/pytests/unit/crypt/__init__.py | 306 ++++++++++++ .../unit/crypt/test_crypt_cryptography.py | 286 +++++++++++ 10 files changed, 986 insertions(+), 297 deletions(-) create mode 100644 tests/pytests/unit/crypt/test_crypt_cryptography.py diff --git a/doc/ref/configuration/master.rst b/doc/ref/configuration/master.rst index e59efd47f71..1eb1f2b08b6 100644 --- a/doc/ref/configuration/master.rst +++ b/doc/ref/configuration/master.rst @@ -2025,6 +2025,20 @@ The number of seconds between AES key rotations on the master. .. conf_master:: ssl + +``publish_signing_algorithm`` +----------------------------- + +.. versionadded:: 3006.9 + +Default: PKCS1v15-SHA1 + +Then RSA signing algorithm used by this minion when connecting to the +master's request channel. 
Valid values are ``PKCS1v15-SHA1`` and +``PKCS1v15-SHA224``. Minions must be at version ``3006.9`` or greater if this +is changed from the default setting. + + ``ssl`` ------- diff --git a/doc/ref/configuration/minion.rst b/doc/ref/configuration/minion.rst index 925b54cc47f..826c705b988 100644 --- a/doc/ref/configuration/minion.rst +++ b/doc/ref/configuration/minion.rst @@ -3171,6 +3171,28 @@ constant names without ssl module prefix: ``CERT_REQUIRED`` or ``PROTOCOL_SSLv23 certfile: ssl_version: PROTOCOL_TLSv1_2 +``encryption_algorithm`` +------------------------ + +.. versionadded:: 3006.9 + +Default: OAEP-SHA1 + +Then RSA encryption algorithm used by this minion when connecting to the +master's request channel. Valid values are ``OAEP-SHA1`` and ``OAEP-SHA224`` + + +``signing_algorithm`` +------------------------ + +.. versionadded:: 3006.9 + +Default: PKCS1v15-SHA1 + +Then RSA signing algorithm used by this minion when connecting to the +master's request channel. Valid values are ``PKCS1v15-SHA1`` and +``PKCS1v15-SHA224`` + Reactor Settings ================ diff --git a/salt/channel/client.py b/salt/channel/client.py index 310804c84cf..c60a41c063a 100644 --- a/salt/channel/client.py +++ b/salt/channel/client.py @@ -22,21 +22,6 @@ import salt.utils.stringutils import salt.utils.verify from salt.utils.asynchronous import SyncWrapper -try: - from M2Crypto import RSA - - HAS_M2 = True -except ImportError: - HAS_M2 = False - try: - from Cryptodome.Cipher import PKCS1_OAEP - except ImportError: - try: - from Crypto.Cipher import PKCS1_OAEP # nosec - except ImportError: - pass - - log = logging.getLogger(__name__) REQUEST_CHANNEL_TIMEOUT = 60 @@ -166,11 +151,15 @@ class AsyncReqChannel: return self.transport.ttype def _package_load(self, load): - return { + ret = { "enc": self.crypt, "load": load, "version": 2, } + if self.crypt == "aes": + ret["enc_algo"] = self.opts["encryption_algorithm"] + ret["sig_algo"] = self.opts["signing_algorithm"] + return ret 
@salt.ext.tornado.gen.coroutine def _send_with_retry(self, load, tries, timeout): @@ -221,11 +210,7 @@ class AsyncReqChannel: tries, timeout, ) - if HAS_M2: - aes = key.private_decrypt(ret["key"], RSA.pkcs1_oaep_padding) - else: - cipher = PKCS1_OAEP.new(key) # pylint: disable=used-before-assignment - aes = cipher.decrypt(ret["key"]) + aes = key.decrypt(ret["key"], self.opts["encryption_algorithm"]) # Decrypt using the public key. pcrypt = salt.crypt.Crypticle(self.opts, aes) @@ -248,7 +233,9 @@ class AsyncReqChannel: raise salt.ext.tornado.gen.Return(data["pillar"]) def verify_signature(self, data, sig): - return salt.crypt.verify_signature(self.master_pubkey_path, data, sig) + return salt.crypt.PublicKey(self.master_pubkey_path).verify( + data, sig, self.opts["signing_algorithm"] + ) @salt.ext.tornado.gen.coroutine def _crypted_transfer(self, load, timeout, raw=False): @@ -583,7 +570,10 @@ class AsyncPubChannel: # Verify that the signature is valid if not salt.crypt.verify_signature( - self.master_pubkey_path, payload["load"], payload.get("sig") + self.master_pubkey_path, + payload["load"], + payload.get("sig"), + algorithm=payload["sig_algo"], ): raise salt.crypt.AuthenticationError( "Message signature failed to validate." 
diff --git a/salt/channel/server.py b/salt/channel/server.py index 14d3da30125..7ffb19dde44 100644 --- a/salt/channel/server.py +++ b/salt/channel/server.py @@ -25,18 +25,6 @@ import salt.utils.verify from salt.exceptions import SaltDeserializationError from salt.utils.cache import CacheCli -try: - from M2Crypto import RSA - - HAS_M2 = True -except ImportError: - HAS_M2 = False - try: - from Cryptodome.Cipher import PKCS1_OAEP - except ImportError: - from Crypto.Cipher import PKCS1_OAEP # nosec - - log = logging.getLogger(__name__) @@ -188,13 +176,24 @@ class ReqServerChannel: req_opts["tgt"], nonce, sign_messages, + payload.get("enc_algo", salt.crypt.OAEP_SHA1), + payload.get("sig_algo", salt.crypt.PKCS1v15_SHA1), ), ) log.error("Unknown req_fun %s", req_fun) # always attempt to return an error to the minion raise salt.ext.tornado.gen.Return("Server-side exception handling payload") - def _encrypt_private(self, ret, dictkey, target, nonce=None, sign_messages=True): + def _encrypt_private( + self, + ret, + dictkey, + target, + nonce=None, + sign_messages=True, + encryption_algorithm=salt.crypt.OAEP_SHA1, + signing_algorithm=salt.crypt.PKCS1v15_SHA1, + ): """ The server equivalent of ReqChannel.crypted_transfer_decode_dictentry """ @@ -203,19 +202,14 @@ class ReqServerChannel: key = salt.crypt.Crypticle.generate_key_string() pcrypt = salt.crypt.Crypticle(self.opts, key) try: - pub = salt.crypt.get_rsa_pub_key(pubfn) + pub = salt.crypt.PublicKey(pubfn) except (ValueError, IndexError, TypeError): return self.crypticle.dumps({}) except OSError: log.error("AES key not found") return {"error": "AES key not found"} pret = {} - key = salt.utils.stringutils.to_bytes(key) - if HAS_M2: - pret["key"] = pub.public_encrypt(key, RSA.pkcs1_oaep_padding) - else: - cipher = PKCS1_OAEP.new(pub) # pylint: disable=used-before-assignment - pret["key"] = cipher.encrypt(key) + pret["key"] = pub.encrypt(key, encryption_algorithm) if ret is False: ret = {} if sign_messages: @@ -227,20 
+221,24 @@ class ReqServerChannel: master_pem_path = os.path.join(self.opts["pki_dir"], "master.pem") signed_msg = { "data": tosign, - "sig": salt.crypt.sign_message(master_pem_path, tosign), + "sig": salt.crypt.PrivateKey(master_pem_path).sign( + tosign, algorithm=signing_algorithm + ), } pret[dictkey] = pcrypt.dumps(signed_msg) else: pret[dictkey] = pcrypt.dumps(ret) return pret - def _clear_signed(self, load): + def _clear_signed(self, load, algorithm): master_pem_path = os.path.join(self.opts["pki_dir"], "master.pem") tosign = salt.payload.dumps(load) return { "enc": "clear", "load": tosign, - "sig": salt.crypt.sign_message(master_pem_path, tosign), + "sig": salt.crypt.PrivateKey(master_pem_path).sign( + tosign, algorithm=algorithm + ), } def _update_aes(self): @@ -297,10 +295,15 @@ class ReqServerChannel: """ import salt.master + enc_algo = load.get("enc_algo", salt.crypt.OAEP_SHA1) + sig_algo = load.get("sig_algo", salt.crypt.PKCS1v15_SHA1) + if not salt.utils.verify.valid_id(self.opts, load["id"]): log.info("Authentication request from invalid id %s", load["id"]) if sign_messages: - return self._clear_signed({"ret": False, "nonce": load["nonce"]}) + return self._clear_signed( + {"ret": False, "nonce": load["nonce"]}, sig_algo + ) else: return {"enc": "clear", "load": {"ret": False}} log.info("Authentication request from %s", load["id"]) @@ -342,7 +345,7 @@ class ReqServerChannel: ) if sign_messages: return self._clear_signed( - {"ret": "full", "nonce": load["nonce"]} + {"ret": "full", "nonce": load["nonce"]}, sig_algo ) else: return {"enc": "clear", "load": {"ret": "full"}} @@ -373,7 +376,9 @@ class ReqServerChannel: if self.opts.get("auth_events") is True: self.event.fire_event(eload, salt.utils.event.tagify(prefix="auth")) if sign_messages: - return self._clear_signed({"ret": False, "nonce": load["nonce"]}) + return self._clear_signed( + {"ret": False, "nonce": load["nonce"]}, sig_algo + ) else: return {"enc": "clear", "load": {"ret": False}} elif 
os.path.isfile(pubfn): @@ -401,7 +406,7 @@ class ReqServerChannel: ) if sign_messages: return self._clear_signed( - {"ret": False, "nonce": load["nonce"]} + {"ret": False, "nonce": load["nonce"]}, sig_algo ) else: return {"enc": "clear", "load": {"ret": False}} @@ -415,7 +420,9 @@ class ReqServerChannel: if self.opts.get("auth_events") is True: self.event.fire_event(eload, salt.utils.event.tagify(prefix="auth")) if sign_messages: - return self._clear_signed({"ret": False, "nonce": load["nonce"]}) + return self._clear_signed( + {"ret": False, "nonce": load["nonce"]}, sig_algo + ) else: return {"enc": "clear", "load": {"ret": False}} @@ -450,7 +457,8 @@ class ReqServerChannel: self.event.fire_event(eload, salt.utils.event.tagify(prefix="auth")) if sign_messages: return self._clear_signed( - {"ret": key_result, "nonce": load["nonce"]} + {"ret": key_result, "nonce": load["nonce"]}, + sig_algo, ) else: return {"enc": "clear", "load": {"ret": key_result}} @@ -478,7 +486,9 @@ class ReqServerChannel: if self.opts.get("auth_events") is True: self.event.fire_event(eload, salt.utils.event.tagify(prefix="auth")) if sign_messages: - return self._clear_signed({"ret": False, "nonce": load["nonce"]}) + return self._clear_signed( + {"ret": False, "nonce": load["nonce"]}, sig_algo + ) else: return {"enc": "clear", "load": {"ret": False}} @@ -510,7 +520,7 @@ class ReqServerChannel: ) if sign_messages: return self._clear_signed( - {"ret": False, "nonce": load["nonce"]} + {"ret": False, "nonce": load["nonce"]}, sig_algo ) else: return {"enc": "clear", "load": {"ret": False}} @@ -534,7 +544,7 @@ class ReqServerChannel: ) if sign_messages: return self._clear_signed( - {"ret": True, "nonce": load["nonce"]} + {"ret": True, "nonce": load["nonce"]}, sig_algo ) else: return {"enc": "clear", "load": {"ret": True}} @@ -561,7 +571,7 @@ class ReqServerChannel: ) if sign_messages: return self._clear_signed( - {"ret": False, "nonce": load["nonce"]} + {"ret": False, "nonce": load["nonce"]}, sig_algo 
) else: return {"enc": "clear", "load": {"ret": False}} @@ -575,7 +585,9 @@ class ReqServerChannel: if self.opts.get("auth_events") is True: self.event.fire_event(eload, salt.utils.event.tagify(prefix="auth")) if sign_messages: - return self._clear_signed({"ret": False, "nonce": load["nonce"]}) + return self._clear_signed( + {"ret": False, "nonce": load["nonce"]}, sig_algo + ) else: return {"enc": "clear", "load": {"ret": False}} @@ -597,7 +609,9 @@ class ReqServerChannel: elif not load["pub"]: log.error("Public key is empty: %s", load["id"]) if sign_messages: - return self._clear_signed({"ret": False, "nonce": load["nonce"]}) + return self._clear_signed( + {"ret": False, "nonce": load["nonce"]}, sig_algo + ) else: return {"enc": "clear", "load": {"ret": False}} @@ -610,16 +624,16 @@ class ReqServerChannel: # The key payload may sometimes be corrupt when using auto-accept # and an empty request comes in try: - pub = salt.crypt.get_rsa_pub_key(pubfn) + pub = salt.crypt.PublicKey(pubfn) except salt.crypt.InvalidKeyError as err: log.error('Corrupt public key "%s": %s', pubfn, err) if sign_messages: - return self._clear_signed({"ret": False, "nonce": load["nonce"]}) + return self._clear_signed( + {"ret": False, "nonce": load["nonce"]}, sig_algo + ) else: return {"enc": "clear", "load": {"ret": False}} - if not HAS_M2: - cipher = PKCS1_OAEP.new(pub) ret = { "enc": "pub", "pub_key": self.master_key.get_pub_str(), @@ -642,61 +656,42 @@ class ReqServerChannel: key_pass = salt.utils.sdb.sdb_get( self.opts["signing_key_pass"], self.opts ) - log.debug("Signing master public key before sending") pub_sign = salt.crypt.sign_message( - self.master_key.get_sign_paths()[1], ret["pub_key"], key_pass + self.master_key.get_sign_paths()[1], + ret["pub_key"], + key_pass, + algorithm=sig_algo, ) ret.update({"pub_sig": binascii.b2a_base64(pub_sign)}) - if not HAS_M2: - mcipher = PKCS1_OAEP.new(self.master_key.key) if self.opts["auth_mode"] >= 2: if "token" in load: try: - if HAS_M2: - 
mtoken = self.master_key.key.private_decrypt( - load["token"], RSA.pkcs1_oaep_padding - ) - else: - mtoken = mcipher.decrypt(load["token"]) + mtoken = self.master_key.key.decrypt(load["token"], enc_algo) aes = "{}_|-{}".format( salt.master.SMaster.secrets["aes"]["secret"].value, mtoken ) - except Exception: # pylint: disable=broad-except + except Exception as exc: # pylint: disable=broad-except + log.warning("Token failed to decrypt %s", exc) # Token failed to decrypt, send back the salty bacon to # support older minions - pass else: aes = salt.master.SMaster.secrets["aes"]["secret"].value - if HAS_M2: - ret["aes"] = pub.public_encrypt(aes, RSA.pkcs1_oaep_padding) - else: - ret["aes"] = cipher.encrypt(aes) + ret["aes"] = pub.encrypt(aes, enc_algo) else: if "token" in load: try: - if HAS_M2: - mtoken = self.master_key.key.private_decrypt( - load["token"], RSA.pkcs1_oaep_padding - ) - ret["token"] = pub.public_encrypt( - mtoken, RSA.pkcs1_oaep_padding - ) - else: - mtoken = mcipher.decrypt(load["token"]) - ret["token"] = cipher.encrypt(mtoken) - except Exception: # pylint: disable=broad-except + mtoken = self.master_key.key.decrypt(load["token"], enc_algo) + ret["token"] = pub.encrypt(mtoken, enc_algo) + except Exception as exc: # pylint: disable=broad-except # Token failed to decrypt, send back the salty bacon to # support older minions - pass + log.warning("Token failed to decrypt: %s", exc) aes = salt.master.SMaster.secrets["aes"]["secret"].value - if HAS_M2: - ret["aes"] = pub.public_encrypt(aes, RSA.pkcs1_oaep_padding) - else: - ret["aes"] = cipher.encrypt(aes) + ret["aes"] = pub.encrypt(aes, enc_algo) # Be aggressive about the signature digest = salt.utils.stringutils.to_bytes(hashlib.sha256(aes).hexdigest()) @@ -706,7 +701,7 @@ class ReqServerChannel: self.event.fire_event(eload, salt.utils.event.tagify(prefix="auth")) if sign_messages: ret["nonce"] = load["nonce"] - return self._clear_signed(ret) + return self._clear_signed(ret, sig_algo) return ret def 
close(self): @@ -761,6 +756,7 @@ class PubServerChannel: self.event = salt.utils.event.get_event("master", opts=self.opts, listen=False) self.ckminions = salt.utils.minions.CkMinions(self.opts) self.present = {} + self.master_key = salt.crypt.MasterKeys(self.opts) def close(self): self.transport.close() @@ -792,6 +788,7 @@ class PubServerChannel: secrets = kwargs.get("secrets", None) if secrets is not None: salt.master.SMaster.secrets = secrets + self.master_key = salt.crypt.MasterKeys(self.opts) self.transport.publish_daemon( self.publish_payload, self.presence_callback, self.remove_presence_callback ) @@ -883,7 +880,11 @@ class PubServerChannel: if self.opts["sign_pub_messages"]: master_pem_path = os.path.join(self.opts["pki_dir"], "master.pem") log.debug("Signing data packet") - payload["sig"] = salt.crypt.sign_message(master_pem_path, payload["load"]) + payload["sig_algo"] = self.opts["publish_signing_algorithm"] + payload["sig"] = salt.crypt.PrivateKey( + self.master_key.rsa_path, + ).sign(payload["load"], self.opts["publish_signing_algorithm"]) + int_payload = {"payload": salt.payload.dumps(payload)} # If topics are upported, target matching has to happen master side diff --git a/salt/config/__init__.py b/salt/config/__init__.py index a3e5a8bfdb7..b80820f9745 100644 --- a/salt/config/__init__.py +++ b/salt/config/__init__.py @@ -995,6 +995,12 @@ VALID_OPTS = immutabletypes.freeze( "fileserver_interval": int, "request_channel_timeout": int, "request_channel_tries": int, + # RSA encryption for minion + "encryption_algorithm": str, + # RSA signing for minion + "signing_algorithm": str, + # Master publish channel signing + "publish_signing_algorithm": str, } ) @@ -1303,6 +1309,8 @@ DEFAULT_MINION_OPTS = immutabletypes.freeze( "reactor_niceness": None, "fips_mode": False, "features": {}, + "encryption_algorithm": "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA1", } ) @@ -1649,6 +1657,7 @@ DEFAULT_MASTER_OPTS = immutabletypes.freeze( "maintenance_interval": 3600, 
"fileserver_interval": 3600, "features": {}, + "publish_signing_algorithm": "PKCS1v15-SHA224", } ) diff --git a/salt/crypt.py b/salt/crypt.py index 976cb027c92..deaa871eef7 100644 --- a/salt/crypt.py +++ b/salt/crypt.py @@ -43,42 +43,46 @@ from salt.exceptions import ( ) try: - from M2Crypto import BIO, EVP, RSA + import cryptography.exceptions + from cryptography.hazmat.primitives import hashes, serialization + from cryptography.hazmat.primitives.asymmetric import padding, rsa + from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes - HAS_M2 = True + HAS_CRYPTOGRAPHY = True except ImportError: - HAS_M2 = False - -if not HAS_M2: - try: - from Cryptodome import Random - from Cryptodome.Cipher import AES, PKCS1_OAEP - from Cryptodome.Cipher import PKCS1_v1_5 as PKCS1_v1_5_CIPHER - from Cryptodome.Hash import SHA - from Cryptodome.PublicKey import RSA - from Cryptodome.Signature import PKCS1_v1_5 - - HAS_CRYPTO = True - except ImportError: - HAS_CRYPTO = False - -if not HAS_M2 and not HAS_CRYPTO: - try: - # let this be imported, if possible - from Crypto import Random # nosec - from Crypto.Cipher import AES, PKCS1_OAEP # nosec - from Crypto.Cipher import PKCS1_v1_5 as PKCS1_v1_5_CIPHER # nosec - from Crypto.Hash import SHA # nosec - from Crypto.PublicKey import RSA # nosec - from Crypto.Signature import PKCS1_v1_5 # nosec - - HAS_CRYPTO = True - except ImportError: - HAS_CRYPTO = False + HAS_CRYPTOGRAPHY = False log = logging.getLogger(__name__) +OAEP = "OAEP" +PKCS1v15 = "PKCS1v15" + +SHA1 = "SHA1" +SHA224 = "SHA224" + +OAEP_SHA1 = f"{OAEP}-{SHA1}" +OAEP_SHA224 = f"{OAEP}-{SHA224}" + +PKCS1v15_SHA1 = f"{PKCS1v15}-{SHA1}" +PKCS1v15_SHA224 = f"{PKCS1v15}-{SHA224}" + + +VALID_HASHES = ( + SHA1, + SHA224, +) + +VALID_PADDING_FOR_SIGNING = (PKCS1v15,) +VALID_PADDING_FOR_ENCRYPTION = (OAEP,) + + +def fips_enabled(): + if HAS_CRYPTOGRAPHY: + import cryptography.hazmat.backends.openssl.backend + + return 
cryptography.hazmat.backends.openssl.backend._fips_enabled + def clean_key(key): """ @@ -114,7 +118,7 @@ def dropfile(cachedir, user=None): pass -def gen_keys(keydir, keyname, keysize, user=None, passphrase=None): +def gen_keys(keydir, keyname, keysize, user=None, passphrase=None, e=65537): """ Generate a RSA public keypair for use with salt @@ -131,11 +135,8 @@ def gen_keys(keydir, keyname, keysize, user=None, passphrase=None): priv = f"{base}.pem" pub = f"{base}.pub" - if HAS_M2: - gen = RSA.gen_key(keysize, 65537, lambda: None) - else: - salt.utils.crypt.reinit_crypto() - gen = RSA.generate(bits=keysize, e=65537) + gen = rsa.generate_private_key(e, keysize) + if os.path.isfile(priv): # Between first checking and the generation another process has made # a key! Use the winner's key @@ -150,24 +151,30 @@ def gen_keys(keydir, keyname, keysize, user=None, passphrase=None): ) with salt.utils.files.set_umask(0o277): - if HAS_M2: - # if passphrase is empty or None use no cipher - if not passphrase: - gen.save_pem(priv, cipher=None) + with salt.utils.files.fopen(priv, "wb+") as f: + if passphrase: + enc = serialization.BestAvailableEncryption(passphrase.encode()) + _format = serialization.PrivateFormat.TraditionalOpenSSL + if fips_enabled(): + _format = serialization.PrivateFormat.PKCS8 else: - gen.save_pem( - priv, - cipher="des_ede3_cbc", - callback=lambda x: salt.utils.stringutils.to_bytes(passphrase), - ) - else: - with salt.utils.files.fopen(priv, "wb+") as f: - f.write(gen.exportKey("PEM", passphrase)) - if HAS_M2: - gen.save_pub_key(pub) - else: - with salt.utils.files.fopen(pub, "wb+") as f: - f.write(gen.publickey().exportKey("PEM")) + enc = serialization.NoEncryption() + _format = serialization.PrivateFormat.TraditionalOpenSSL + pem = gen.private_bytes( + encoding=serialization.Encoding.PEM, + format=_format, + encryption_algorithm=enc, + ) + f.write(pem) + + pubkey = gen.public_key() + with salt.utils.files.fopen(pub, "wb+") as f: + pem = pubkey.public_bytes( 
+ encoding=serialization.Encoding.PEM, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ) + f.write(pem) + os.chmod(priv, 0o400) if user: try: @@ -183,6 +190,105 @@ def gen_keys(keydir, keyname, keysize, user=None, passphrase=None): return priv +class BaseKey: + + @staticmethod + def parse_padding_for_signing(algorithm): + _pad, _hash = algorithm.split("-", 1) + if _pad not in VALID_PADDING_FOR_SIGNING: + raise Exception("Invalid padding algorithm") + return getattr(padding, _pad) + + @staticmethod + def parse_padding_for_encryption(algorithm): + _pad, _hash = algorithm.split("-", 1) + if _pad not in VALID_PADDING_FOR_ENCRYPTION: + raise Exception("Invalid padding algorithm") + return getattr(padding, _pad) + + @staticmethod + def parse_hash(algorithm): + _pad, _hash = algorithm.split("-", 1) + if _hash not in VALID_HASHES: + raise Exception("Invalid hashing algorithm") + return getattr(hashes, _hash) + + +class PrivateKey(BaseKey): + + def __init__(self, path, passphrase=None): + self.key = get_rsa_key(path, passphrase) + + def encrypt(self, data): + pem = self.key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.TraditionalOpenSSL, + encryption_algorithm=serialization.NoEncryption(), + ) + return salt.utils.rsax931.RSAX931Signer(pem).sign(data) + + def sign(self, data, algorithm=PKCS1v15_SHA1): + _padding = self.parse_padding_for_signing(algorithm) + _hash = self.parse_hash(algorithm) + return self.key.sign(salt.utils.stringutils.to_bytes(data), _padding(), _hash()) + + def decrypt(self, data, algorithm=OAEP_SHA1): + _padding = self.parse_padding_for_encryption(algorithm) + _hash = self.parse_hash(algorithm) + return self.key.decrypt( + data, + _padding( + mgf=padding.MGF1(algorithm=_hash()), + algorithm=_hash(), + label=None, + ), + ) + + +class PublicKey(BaseKey): + def __init__(self, path): + with salt.utils.files.fopen(path, "rb") as fp: + try: + self.key = serialization.load_pem_public_key(fp.read()) + 
except ValueError as exc: + raise InvalidKeyError("Invalid key") + + def encrypt(self, data, algorithm=OAEP_SHA1): + _padding = self.parse_padding_for_encryption(algorithm) + _hash = self.parse_hash(algorithm) + bdata = salt.utils.stringutils.to_bytes(data) + return self.key.encrypt( + bdata, + _padding( + mgf=padding.MGF1(algorithm=_hash()), + algorithm=_hash(), + label=None, + ), + ) + + def verify(self, data, signature, algorithm=PKCS1v15_SHA1): + _padding = self.parse_padding_for_signing(algorithm) + _hash = self.parse_hash(algorithm) + try: + self.key.verify( + salt.utils.stringutils.to_bytes(signature), + salt.utils.stringutils.to_bytes(data), + _padding(), + _hash(), + ) + except cryptography.exceptions.InvalidSignature: + return False + return True + + def decrypt(self, data): + pem = self.key.public_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ) + verifier = salt.utils.rsax931.RSAX931Verifier(pem) + return verifier.verify(data) + + @salt.utils.decorators.memoize def _get_key_with_evict(path, timestamp, passphrase): """ @@ -193,12 +299,26 @@ def _get_key_with_evict(path, timestamp, passphrase): modified then the params are different and the key is loaded from disk. 
""" log.debug("salt.crypt._get_key_with_evict: Loading private key") - if HAS_M2: - key = RSA.load_key(path, lambda x: bytes(passphrase)) + if passphrase: + password = passphrase.encode() else: - with salt.utils.files.fopen(path) as f: - key = RSA.importKey(f.read(), passphrase) - return key + password = None + with salt.utils.files.fopen(path, "rb") as f: + try: + return serialization.load_pem_private_key( + f.read(), + password=password, + ) + except BaseException as exc: + log.error("Exception is %r", exc) + if ( + exc.__class__.__module__ == "pyo3_runtime" + and exc.__class__.__name__ == "PanicException" + ): + if 'reason: "unsupported"' in exc.args[0]: + log.error("Unsupported key") + raise InvalidKeyError("Unsupported encryption algorithm") + raise def get_rsa_key(path, passphrase): @@ -221,61 +341,29 @@ def get_rsa_pub_key(path): Read a public key off the disk. """ log.debug("salt.crypt.get_rsa_pub_key: Loading public key") - if HAS_M2: - with salt.utils.files.fopen(path, "rb") as f: - data = f.read().replace(b"RSA ", b"") - bio = BIO.MemoryBuffer(data) - try: - key = RSA.load_pub_key_bio(bio) - except RSA.RSAError: - raise InvalidKeyError("Encountered bad RSA public key") - else: - with salt.utils.files.fopen(path) as f: - try: - key = RSA.importKey(f.read()) - except (ValueError, IndexError, TypeError): - raise InvalidKeyError("Encountered bad RSA public key") - return key + try: + with salt.utils.files.fopen(path, "rb") as fp: + return serialization.load_pem_public_key(fp.read()) + except ValueError: + raise InvalidKeyError("Encountered bad RSA public key") + except cryptography.exceptions.UnsupportedAlgorithm: + raise InvalidKeyError("Unsupported key algorithm") -def sign_message(privkey_path, message, passphrase=None): +def sign_message(privkey_path, message, passphrase=None, algorithm=PKCS1v15_SHA1): """ Use Crypto.Signature.PKCS1_v1_5 to sign a message. Returns the signature. 
""" - key = get_rsa_key(privkey_path, passphrase) - log.debug("salt.crypt.sign_message: Signing message.") - if HAS_M2: - md = EVP.MessageDigest("sha1") - md.update(salt.utils.stringutils.to_bytes(message)) - digest = md.final() - return key.sign(digest) - else: - signer = PKCS1_v1_5.new(key) - return signer.sign(SHA.new(salt.utils.stringutils.to_bytes(message))) + return PrivateKey(privkey_path, passphrase).sign(message, algorithm) -def verify_signature(pubkey_path, message, signature): +def verify_signature(pubkey_path, message, signature, algorithm=PKCS1v15_SHA1): """ Use Crypto.Signature.PKCS1_v1_5 to verify the signature on a message. Returns True for valid signature. """ log.debug("salt.crypt.verify_signature: Loading public key") - pubkey = get_rsa_pub_key(pubkey_path) - log.debug("salt.crypt.verify_signature: Verifying signature") - if HAS_M2: - md = EVP.MessageDigest("sha1") - md.update(salt.utils.stringutils.to_bytes(message)) - digest = md.final() - try: - return pubkey.verify(digest, signature) - except RSA.RSAError as exc: - log.debug("Signature verification failed: %s", exc.args[0]) - return False - else: - verifier = PKCS1_v1_5.new(pubkey) - return verifier.verify( - SHA.new(salt.utils.stringutils.to_bytes(message)), signature - ) + return PublicKey(pubkey_path).verify(message, signature, algorithm) def gen_signature(priv_path, pub_path, sign_path, passphrase=None): @@ -318,40 +406,15 @@ def private_encrypt(key, message): :rtype: str :return: The signature, or an empty string if the signature operation failed """ - if HAS_M2: - return key.private_encrypt(message, salt.utils.rsax931.RSA_X931_PADDING) - else: - signer = salt.utils.rsax931.RSAX931Signer(key.exportKey("PEM")) - return signer.sign(message) - - -def public_decrypt(pub, message): - """ - Verify an M2Crypto-compatible signature - - :param Crypto.PublicKey.RSA._RSAobj key: The RSA public key object - :param str message: The signed message to verify - :rtype: str - :return: The message (or 
digest) recovered from the signature, or an - empty string if the verification failed - """ - if HAS_M2: - return pub.public_decrypt(message, salt.utils.rsax931.RSA_X931_PADDING) - else: - verifier = salt.utils.rsax931.RSAX931Verifier(pub.exportKey("PEM")) - return verifier.verify(message) + return key.encrypt(message) def pwdata_decrypt(rsa_key, pwdata): - if HAS_M2: - key = RSA.load_key_string(salt.utils.stringutils.to_bytes(rsa_key, "ascii")) - password = key.private_decrypt(pwdata, RSA.pkcs1_padding) - else: - dsize = SHA.digest_size - sentinel = Random.new().read(15 + dsize) - key_obj = RSA.importKey(rsa_key) - key_obj = PKCS1_v1_5_CIPHER.new(key_obj) - password = key_obj.decrypt(pwdata, sentinel) + key = serialization.load_pem_private_key(rsa_key.encode(), password=None) + password = key.decrypt( + pwdata, + padding.PKCS1v15(), + ) return salt.utils.stringutils.to_unicode(password) @@ -442,18 +505,21 @@ class MasterKeys(dict): self.opts.get("user"), passphrase, ) - if HAS_M2: - key_error = RSA.RSAError - else: - key_error = ValueError try: - key = get_rsa_key(path, passphrase) - except key_error as e: + key = PrivateKey(path, passphrase) + except ValueError as e: + message = f"Unable to read key: {path}; file may be corrupt" + except TypeError as e: message = f"Unable to read key: {path}; passphrase may be incorrect" - log.error(message) - raise MasterExit(message) - log.debug("Loaded %s key: %s", name, path) - return key + except InvalidKeyError as e: + message = f"Unable to read key: {path}; key contains unsupported algorithm" + except cryptography.exceptions.UnsupportedAlgorithm as e: + message = f"Unable to read key: {path}; key contains unsupported algorithm" + else: + log.debug("Loaded %s key: %s", name, path) + return key + log.error(message) + raise MasterExit(message) def get_pub_str(self, name="master"): """ @@ -462,12 +528,14 @@ class MasterKeys(dict): """ path = os.path.join(self.opts["pki_dir"], name + ".pub") if not os.path.isfile(path): - key = 
self.__get_keys() - if HAS_M2: - key.save_pub_key(path) - else: - with salt.utils.files.fopen(path, "wb+") as wfh: - wfh.write(key.publickey().exportKey("PEM")) + pubkey = self.key.public_key() + with salt.utils.files.fopen(path, "wb+") as f: + f.write( + pubkey.public_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ) + ) with salt.utils.files.fopen(path) as rfh: return clean_key(rfh.read()) @@ -815,7 +883,10 @@ class AsyncAuth: master_pubkey_path = os.path.join(self.opts["pki_dir"], self.mpub) if os.path.exists(master_pubkey_path) and not verify_signature( - master_pubkey_path, clear_signed_data, clear_signature + master_pubkey_path, + clear_signed_data, + clear_signature, + algorithm=self.opts["signing_algorithm"], ): log.critical("The payload signature did not validate.") raise SaltClientError("Invalid signature") @@ -903,7 +974,7 @@ class AsyncAuth: self.opts["keysize"], self.opts.get("user"), ) - key = get_rsa_key(self.rsa_path, None) + key = PrivateKey(self.rsa_path, None) log.debug("Loaded minion key: %s", self.rsa_path) return key @@ -931,6 +1002,8 @@ class AsyncAuth: payload["cmd"] = "_auth" payload["id"] = self.opts["id"] payload["nonce"] = uuid.uuid4().hex + payload["enc_algo"] = self.opts["encryption_algorithm"] + payload["sig_algo"] = self.opts["signing_algorithm"] if "autosign_grains" in self.opts: autosign_grains = {} for grain in self.opts["autosign_grains"]: @@ -938,16 +1011,14 @@ class AsyncAuth: payload["autosign_grains"] = autosign_grains try: pubkey_path = os.path.join(self.opts["pki_dir"], self.mpub) - pub = get_rsa_pub_key(pubkey_path) - if HAS_M2: - payload["token"] = pub.public_encrypt( - self.token, RSA.pkcs1_oaep_padding - ) - else: - cipher = PKCS1_OAEP.new(pub) - payload["token"] = cipher.encrypt(self.token) - except Exception: # pylint: disable=broad-except - pass + pub = PublicKey(pubkey_path) + payload["token"] = pub.encrypt( + self.token, self.opts["encryption_algorithm"] + ) 
+ except FileNotFoundError: + log.debug("Master public key not found") + except Exception as exc: # pylint: disable=broad-except + log.debug("Exception while encrypting token %s", exc) with salt.utils.files.fopen(self.pub_path) as f: payload["pub"] = clean_key(f.read()) return payload @@ -979,25 +1050,19 @@ class AsyncAuth: log.warning("Auth Called: %s", "".join(traceback.format_stack())) else: log.debug("Decrypting the current master AES key") + key = self.get_keys() - if HAS_M2: - key_str = key.private_decrypt(payload["aes"], RSA.pkcs1_oaep_padding) - else: - cipher = PKCS1_OAEP.new(key) - key_str = cipher.decrypt(payload["aes"]) + key_str = key.decrypt(payload["aes"], self.opts["encryption_algorithm"]) if "sig" in payload: m_path = os.path.join(self.opts["pki_dir"], self.mpub) if os.path.exists(m_path): try: - mkey = get_rsa_pub_key(m_path) + mkey = PublicKey(m_path) except Exception: # pylint: disable=broad-except return "", "" digest = hashlib.sha256(key_str).hexdigest() digest = salt.utils.stringutils.to_bytes(digest) - if HAS_M2: - m_digest = public_decrypt(mkey, payload["sig"]) - else: - m_digest = public_decrypt(mkey.publickey(), payload["sig"]) + m_digest = mkey.decrypt(payload["sig"]) if m_digest != digest: return "", "" else: @@ -1009,12 +1074,7 @@ class AsyncAuth: return key_str.split("_|-") else: if "token" in payload: - if HAS_M2: - token = key.private_decrypt( - payload["token"], RSA.pkcs1_oaep_padding - ) - else: - token = cipher.decrypt(payload["token"]) + token = key.decrypt(payload["token"], self.opts["encryption_algorithm"]) return key_str, token elif not master_pub: return key_str, "" @@ -1034,7 +1094,12 @@ class AsyncAuth: ) if os.path.isfile(path): - res = verify_signature(path, message, binascii.a2b_base64(sig)) + res = verify_signature( + path, + message, + binascii.a2b_base64(sig), + algorithm=self.opts["signing_algorithm"], + ) else: log.error( "Verification public key %s does not exist. 
You need to " @@ -1464,15 +1529,10 @@ class Crypticle: pad = self.AES_BLOCK_SIZE - len(data) % self.AES_BLOCK_SIZE data = data + salt.utils.stringutils.to_bytes(pad * chr(pad)) iv_bytes = os.urandom(self.AES_BLOCK_SIZE) - if HAS_M2: - cypher = EVP.Cipher( - alg="aes_192_cbc", key=aes_key, iv=iv_bytes, op=1, padding=False - ) - encr = cypher.update(data) - encr += cypher.final() - else: - cypher = AES.new(aes_key, AES.MODE_CBC, iv_bytes) - encr = cypher.encrypt(data) + cipher = Cipher(algorithms.AES(aes_key), modes.CBC(iv_bytes)) + encryptor = cipher.encryptor() + encr = encryptor.update(data) + encr += encryptor.finalize() data = iv_bytes + encr sig = hmac.new(hmac_key, data, hashlib.sha256).digest() return data + sig @@ -1491,7 +1551,6 @@ class Crypticle: log.debug("Failed to authenticate message") raise AuthenticationError("message authentication failed") result = 0 - for zipped_x, zipped_y in zip(mac_bytes, sig): result |= zipped_x ^ zipped_y if result != 0: @@ -1499,15 +1558,9 @@ class Crypticle: raise AuthenticationError("message authentication failed") iv_bytes = data[: self.AES_BLOCK_SIZE] data = data[self.AES_BLOCK_SIZE :] - if HAS_M2: - cypher = EVP.Cipher( - alg="aes_192_cbc", key=aes_key, iv=iv_bytes, op=0, padding=False - ) - encr = cypher.update(data) - data = encr + cypher.final() - else: - cypher = AES.new(aes_key, AES.MODE_CBC, iv_bytes) - data = cypher.decrypt(data) + cipher = Cipher(algorithms.AES(aes_key), modes.CBC(iv_bytes)) + decryptor = cipher.decryptor() + data = decryptor.update(data) + decryptor.finalize() return data[: -data[-1]] def dumps(self, obj, nonce=None): diff --git a/salt/master.py b/salt/master.py index 23c0de8465e..4937274a41b 100644 --- a/salt/master.py +++ b/salt/master.py @@ -1275,7 +1275,7 @@ class AESFuncs(TransportMethods): pub_path = os.path.join(self.opts["pki_dir"], "minions", id_) try: - pub = salt.crypt.get_rsa_pub_key(pub_path) + pub = salt.crypt.PublicKey(pub_path) except OSError: log.warning( "Salt minion claiming 
to be %s attempted to communicate with " @@ -1286,7 +1286,7 @@ class AESFuncs(TransportMethods): except (ValueError, IndexError, TypeError) as err: log.error('Unable to load public key "%s": %s', pub_path, err) try: - if salt.crypt.public_decrypt(pub, token) == b"salt": + if pub.decrypt(token) == b"salt": return True except ValueError as err: log.error("Unable to decrypt token: %s", err) diff --git a/salt/transport/zeromq.py b/salt/transport/zeromq.py index 64776d204da..f2e96d473d1 100644 --- a/salt/transport/zeromq.py +++ b/salt/transport/zeromq.py @@ -746,9 +746,17 @@ class PublishServer(salt.transport.base.DaemonizedPublishServer): @salt.ext.tornado.gen.coroutine def on_recv(packages): - for package in packages: - payload = salt.payload.loads(package) - yield publish_payload(payload) + try: + for package in packages: + payload = salt.payload.loads(package) + log.error("on recv") + yield publish_payload(payload) + except Exception as exc: # pylint: disable=broad-except + log.error( + "Un-handled error in publisher %s", + exc, + exc_info_on_loglevel=logging.DEBUG, + ) pull_sock.on_recv(on_recv) try: diff --git a/tests/pytests/unit/crypt/__init__.py b/tests/pytests/unit/crypt/__init__.py index d8427ce59ac..e60c70e2509 100644 --- a/tests/pytests/unit/crypt/__init__.py +++ b/tests/pytests/unit/crypt/__init__.py @@ -1,3 +1,196 @@ +import getpass +import logging +import os + +import salt.utils.files +import salt.utils.stringutils +from salt.exceptions import InvalidKeyError + +try: + from M2Crypto import BIO, EVP, RSA + + HAS_M2 = True +except ImportError: + HAS_M2 = False + +if not HAS_M2: + try: + from Cryptodome import Random + from Cryptodome.Cipher import AES, PKCS1_OAEP + from Cryptodome.Cipher import PKCS1_v1_5 as PKCS1_v1_5_CIPHER + from Cryptodome.Hash import SHA + from Cryptodome.PublicKey import RSA + from Cryptodome.Signature import PKCS1_v1_5 + + HAS_CRYPTO = True + except ImportError: + HAS_CRYPTO = False + +if not HAS_M2 and not HAS_CRYPTO: + try: + # 
let this be imported, if possible + from Crypto import Random # nosec + from Crypto.Cipher import AES, PKCS1_OAEP # nosec + from Crypto.Cipher import PKCS1_v1_5 as PKCS1_v1_5_CIPHER # nosec + from Crypto.Hash import SHA # nosec + from Crypto.PublicKey import RSA # nosec + from Crypto.Signature import PKCS1_v1_5 # nosec + + HAS_CRYPTO = True + except ImportError: + HAS_CRYPTO = False + +log = logging.getLogger(__name__) + + +def legacy_gen_keys(keydir, keyname, keysize, user=None, passphrase=None): + """ + Generate a RSA public keypair for use with salt + + :param str keydir: The directory to write the keypair to + :param str keyname: The type of salt server for whom this key should be written. (i.e. 'master' or 'minion') + :param int keysize: The number of bits in the key + :param str user: The user on the system who should own this keypair + :param str passphrase: The passphrase which should be used to encrypt the private key + + :rtype: str + :return: Path on the filesystem to the RSA private key + """ + base = os.path.join(keydir, keyname) + priv = f"{base}.pem" + pub = f"{base}.pub" + + # gen = rsa.generate_private_key(e, keysize) + if HAS_M2: + gen = RSA.gen_key(keysize, 65537, lambda: None) + else: + salt.utils.crypt.reinit_crypto() + gen = RSA.generate(bits=keysize, e=65537) + + if os.path.isfile(priv): + # Between first checking and the generation another process has made + # a key! Use the winner's key + return priv + + # Do not try writing anything, if directory has no permissions. 
+ if not os.access(keydir, os.W_OK): + raise OSError( + 'Write access denied to "{}" for user "{}".'.format( + os.path.abspath(keydir), getpass.getuser() + ) + ) + + with salt.utils.files.set_umask(0o277): + if HAS_M2: + # if passphrase is empty or None use no cipher + if not passphrase: + gen.save_pem(priv, cipher=None) + else: + gen.save_pem( + priv, + cipher="des_ede3_cbc", + callback=lambda x: salt.utils.stringutils.to_bytes(passphrase), + ) + else: + with salt.utils.files.fopen(priv, "wb+") as f: + f.write(gen.exportKey("PEM", passphrase)) + if HAS_M2: + gen.save_pub_key(pub) + else: + with salt.utils.files.fopen(pub, "wb+") as f: + f.write(gen.publickey().exportKey("PEM")) + os.chmod(priv, 0o400) + if user: + try: + import pwd + + uid = pwd.getpwnam(user).pw_uid + os.chown(priv, uid, -1) + os.chown(pub, uid, -1) + except (KeyError, ImportError, OSError): + # The specified user was not found, allow the backup systems to + # report the error + pass + return priv + + +class LegacyPrivateKey: + def __init__(self, path, passphrase=None): + if HAS_M2: + self.key = RSA.load_key(path, lambda x: bytes(passphrase)) + else: + with salt.utils.files.fopen(path) as f: + self.key = RSA.importKey(f.read(), passphrase) + + def encrypt(self, data): + if HAS_M2: + return self.key.private_encrypt(data, salt.utils.rsax931.RSA_X931_PADDING) + else: + return salt.utils.rsax931.RSAX931Signer(self.key.exportKey("PEM")).sign( + data + ) + + def sign(self, data): + if HAS_M2: + md = EVP.MessageDigest("sha1") + md.update(salt.utils.stringutils.to_bytes(data)) + digest = md.final() + return self.key.sign(digest) + else: + signer = PKCS1_v1_5.new(self.key) + return signer.sign(SHA.new(salt.utils.stringutils.to_bytes(data))) + + +class LegacyPublicKey: + def __init__(self, path, _HAS_M2=HAS_M2): + self._HAS_M2 = _HAS_M2 + if self._HAS_M2: + with salt.utils.files.fopen(path, "rb") as f: + data = f.read().replace(b"RSA ", b"") + bio = BIO.MemoryBuffer(data) + try: + self.key = 
RSA.load_pub_key_bio(bio) + except RSA.RSAError: + raise InvalidKeyError("Encountered bad RSA public key") + else: + with salt.utils.files.fopen(path) as f: + try: + self.key = RSA.importKey(f.read()) + except (ValueError, IndexError, TypeError): + raise InvalidKeyError("Encountered bad RSA public key") + + def encrypt(self, data): + bdata = salt.utils.stringutils.to_bytes(data) + if self._HAS_M2: + return self.key.public_encrypt(bdata, RSA.pkcs1_oaep_padding) + else: + return PKCS1_OAEP.new(self.key).encrypt(bdata) + + def verify(self, data, signature): + if self._HAS_M2: + md = EVP.MessageDigest("sha1") + md.update(salt.utils.stringutils.to_bytes(data)) + digest = md.final() + try: + return self.key.verify(digest, signature) + except RSA.RSAError as exc: + log.debug("Signature verification failed: %s", exc.args[0]) + return False + else: + verifier = PKCS1_v1_5.new(self.key) + return verifier.verify( + SHA.new(salt.utils.stringutils.to_bytes(data)), signature + ) + + def decrypt(self, data): + data = salt.utils.stringutils.to_bytes(data) + if HAS_M2: + return self.key.public_decrypt(data, salt.utils.rsax931.RSA_X931_PADDING) + else: + verifier = salt.utils.rsax931.RSAX931Verifier(self.key.exportKey("PEM")) + return verifier.verify(data) + + PRIVKEY_DATA = ( "-----BEGIN RSA PRIVATE KEY-----\n" "MIIEpAIBAAKCAQEA75GR6ZTv5JOv90Vq8tKhKC7YQnhDIo2hM0HVziTEk5R4UQBW\n" @@ -56,3 +249,116 @@ SIG = ( b"\x98\x8a\x8a&#\xb9(#?\x80\x15\x9eW\xb5\x12\xd1\x95S\xf2\xc3A\xed\x86x~\xcfU\xd5Q\xfe~\x10\xd2\x9b" ) + +SIGNATURE = ( + b"w\xac\xfe18o\xeb\xfb\x14+\x9e\xd1\xb7\x7fe}\xec\xd6\xe1P\x9e\xab" + b"\xb5\x07\xe0\xc1\xfd\xda#\x04Z\x8d\x7f\x0b\x1f}:~\xb2s\x860u\x02N" + b'\xd4q"\xb7\x86*\x8f\x1f\xd0\x9d\x11\x92\xc5~\xa68\xac>\x12H\xc2%y,' + b"\xe6\xceU\x1e\xa3?\x0c,\xf0u\xbb\xd0[g_\xdd\x8b\xb0\x95:Y\x18\xa5*" + b"\x99\xfd\xf3K\x92\x92 ({\xd1\xff\xd9F\xc8\xd6K\x86e\xf9\xa8\xad\xb0z" + b"\xe3\x9dD\xf5k\x8b_<\xe7\xe7\xec\xf3\"'\xd5\xd2M\xb4\xce\x1a\xe3$" + 
b"\x9c\x81\xad\xf9\x11\xf6\xf5>)\xc7\xdd\x03&\xf7\x86@ks\xa6\x05\xc2" + b"\xd0\xbd\x1a7\xfc\xde\xe6\xb0\xad!\x12#\xc86Y\xea\xc5\xe3\xe2\xb3" + b"\xc9\xaf\xfa\x0c\xf2?\xbf\x93w\x18\x9e\x0b\xa2a\x10:M\x05\x89\xe2W.Q" + b"\xe8;yGT\xb1\xf2\xc6A\xd2\xc4\xbeN\xb3\xcfS\xaf\x03f\xe2\xb4)\xe7\xf6" + b'\xdbs\xd0Z}8\xa4\xd2\x1fW*\xe6\x1c"\x8b\xd0\x18w\xb9\x7f\x9e\x96\xa3' + b"\xd9v\xf7\x833\x8e\x01" +) + +TEST_KEY = ( + "-----BEGIN RSA PUBLIC KEY-----\n" + "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAzLtFhsvfbFDFaUgulSEX\n" + "Gl12XriL1DT78Ef2/u8HHaSMmPie37BLWas/zaHwI6066bIyYQJ/nUCahTaoHM7L\n" + "GlWc0wOU6zyfpihCRQHil05Y6F+olFBoZuYbFPvtp7/hJx/D7I/0n2o/c7M5i3Y2\n" + "3sBxAYNooIQHXHUmPQW6C9iu95ylZDW8JQzYy/EI4vCC8yQMdTK8jK1FQV0Sbwny\n" + "qcMxSyAWDoFbnhh2P2TnO8HOWuUOaXR8ZHOJzVcDl+a6ew+medW090x3K5O1f80D\n" + "+WjgnG6b2HG7VQpOCfM2GALD/FrxicPilvZ38X1aLhJuwjmVE4LAAv8DVNJXohaO\n" + "WQIDAQAB\n" + "-----END RSA PUBLIC KEY-----\n" +) + +PRIV_KEY = """ +-----BEGIN RSA PRIVATE KEY----- +MIIEogIBAAKCAQEAoAsMPt+4kuIG6vKyw9r3+OuZrVBee/2vDdVetW+Js5dTlgrJ +aghWWn3doGmKlEjqh7E4UTa+t2Jd6w8RSLnyHNJ/HpVhMG0M07MF6FMfILtDrrt8 +ZX7eDVt8sx5gCEpYI+XG8Y07Ga9i3Hiczt+fu6HYwu96HggmG2pqkOrn3iGfqBvV +YVFJzSZYe7e4c1PeEs0xYcrA4k+apyGsMtpef8vRUrNicRLc7dAcvfhtgt2DXEZ2 +d72t/CR4ygtUvPXzisaTPW0G7OWAheCloqvTIIPQIjR8htFxGTz02STVXfnhnJ0Z +k8KhqKF2v1SQvIYxsZU7jaDgl5i3zpeh58cYOwIDAQABAoIBABZUJEO7Y91+UnfC +H6XKrZEZkcnH7j6/UIaOD9YhdyVKxhsnax1zh1S9vceNIgv5NltzIsfV6vrb6v2K +Dx/F7Z0O0zR5o+MlO8ZncjoNKskex10gBEWG00Uqz/WPlddiQ/TSMJTv3uCBAzp+ +S2Zjdb4wYPUlgzSgb2ygxrhsRahMcSMG9PoX6klxMXFKMD1JxiY8QfAHahPzQXy9 +F7COZ0fCVo6BE+MqNuQ8tZeIxu8mOULQCCkLFwXmkz1FpfK/kNRmhIyhxwvCS+z4 +JuErW3uXfE64RLERiLp1bSxlDdpvRO2R41HAoNELTsKXJOEt4JANRHm/CeyA5wsh +NpscufUCgYEAxhgPfcMDy2v3nL6KtkgYjdcOyRvsAF50QRbEa8ldO+87IoMDD/Oe +osFERJ5hhyyEO78QnaLVegnykiw5DWEF02RKMhD/4XU+1UYVhY0wJjKQIBadsufB +2dnaKjvwzUhPh5BrBqNHl/FXwNCRDiYqXa79eWCPC9OFbZcUWWq70s8CgYEAztOI +61zRfmXJ7f70GgYbHg+GA7IrsAcsGRITsFR82Ho0lqdFFCxz7oK8QfL6bwMCGKyk 
+nzk+twh6hhj5UNp18KN8wktlo02zTgzgemHwaLa2cd6xKgmAyuPiTgcgnzt5LVNG +FOjIWkLwSlpkDTl7ZzY2QSy7t+mq5d750fpIrtUCgYBWXZUbcpPL88WgDB7z/Bjg +dlvW6JqLSqMK4b8/cyp4AARbNp12LfQC55o5BIhm48y/M70tzRmfvIiKnEc/gwaE +NJx4mZrGFFURrR2i/Xx5mt/lbZbRsmN89JM+iKWjCpzJ8PgIi9Wh9DIbOZOUhKVB +9RJEAgo70LvCnPTdS0CaVwKBgDJW3BllAvw/rBFIH4OB/vGnF5gosmdqp3oGo1Ik +jipmPAx6895AH4tquIVYrUl9svHsezjhxvjnkGK5C115foEuWXw0u60uiTiy+6Pt +2IS0C93VNMulenpnUrppE7CN2iWFAiaura0CY9fE/lsVpYpucHAWgi32Kok+ZxGL +WEttAoGAN9Ehsz4LeQxEj3x8wVeEMHF6OsznpwYsI2oVh6VxpS4AjgKYqeLVcnNi +TlZFsuQcqgod8OgzA91tdB+Rp86NygmWD5WzeKXpCOg9uA+y/YL+0sgZZHsuvbK6 +PllUgXdYxqClk/hdBFB7v9AQoaj7K9Ga22v32msftYDQRJ94xOI= +-----END RSA PRIVATE KEY----- +""" + + +PUB_KEY = """ +-----BEGIN PUBLIC KEY----- +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAoAsMPt+4kuIG6vKyw9r3 ++OuZrVBee/2vDdVetW+Js5dTlgrJaghWWn3doGmKlEjqh7E4UTa+t2Jd6w8RSLny +HNJ/HpVhMG0M07MF6FMfILtDrrt8ZX7eDVt8sx5gCEpYI+XG8Y07Ga9i3Hiczt+f +u6HYwu96HggmG2pqkOrn3iGfqBvVYVFJzSZYe7e4c1PeEs0xYcrA4k+apyGsMtpe +f8vRUrNicRLc7dAcvfhtgt2DXEZ2d72t/CR4ygtUvPXzisaTPW0G7OWAheCloqvT +IIPQIjR8htFxGTz02STVXfnhnJ0Zk8KhqKF2v1SQvIYxsZU7jaDgl5i3zpeh58cY +OwIDAQAB +-----END PUBLIC KEY----- +""" + +PRIV_KEY2 = """ +-----BEGIN RSA PRIVATE KEY----- +MIIEogIBAAKCAQEAp+8cTxguO6Vg+YO92VfHgNld3Zy8aM3JbZvpJcjTnis+YFJ7 +Zlkcc647yPRRwY9nYBNywahnt5kIeuT1rTvTsMBZWvmUoEVUj1Xg8XXQkBvb9Ozy +Gqy/G/p8KDDpzMP/U+XCnUeHiXTZrgnqgBIc2cKeCVvWFqDi0GRFGzyaXLaX3PPm +M7DJ0MIPL1qgmcDq6+7Ze0gJ9SrDYFAeLmbuT1OqDfufXWQl/82JXeiwU2cOpqWq +7n5fvPOWim7l1tzQ+dSiMRRm0xa6uNexCJww3oJSwvMbAmgzvOhqqhlqv+K7u0u7 +FrFFojESsL36Gq4GBrISnvu2tk7u4GGNTYYQbQIDAQABAoIBAADrqWDQnd5DVZEA +lR+WINiWuHJAy/KaIC7K4kAMBgbxrz2ZbiY9Ok/zBk5fcnxIZDVtXd1sZicmPlro +GuWodIxdPZAnWpZ3UtOXUayZK/vCP1YsH1agmEqXuKsCu6Fc+K8VzReOHxLUkmXn +FYM+tixGahXcjEOi/aNNTWitEB6OemRM1UeLJFzRcfyXiqzHpHCIZwBpTUAsmzcG +QiVDkMTKubwo/m+PVXburX2CGibUydctgbrYIc7EJvyx/cpRiPZXo1PhHQWdu4Y1 +SOaC66WLsP/wqvtHo58JQ6EN/gjSsbAgGGVkZ1xMo66nR+pLpR27coS7o03xCks6 
+DY/0mukCgYEAuLIGgBnqoh7YsOBLd/Bc1UTfDMxJhNseo+hZemtkSXz2Jn51322F +Zw/FVN4ArXgluH+XsOhvG/MFFpojwZSrb0Qq5b1MRdo9qycq8lGqNtlN1WHqosDQ +zW29kpL0tlRrSDpww3wRESsN9rH5XIrJ1b3ZXuO7asR+KBVQMy/+NcUCgYEA6MSC +c+fywltKPgmPl5j0DPoDe5SXE/6JQy7w/vVGrGfWGf/zEJmhzS2R+CcfTTEqaT0T +Yw8+XbFgKAqsxwtE9MUXLTVLI3sSUyE4g7blCYscOqhZ8ItCUKDXWkSpt++rG0Um +1+cEJP/0oCazG6MWqvBC4NpQ1nzh46QpjWqMwokCgYAKDLXJ1p8rvx3vUeUJW6zR +dfPlEGCXuAyMwqHLxXgpf4EtSwhC5gSyPOtx2LqUtcrnpRmt6JfTH4ARYMW9TMef +QEhNQ+WYj213mKP/l235mg1gJPnNbUxvQR9lkFV8bk+AGJ32JRQQqRUTbU+yN2MQ +HEptnVqfTp3GtJIultfwOQKBgG+RyYmu8wBP650izg33BXu21raEeYne5oIqXN+I +R5DZ0JjzwtkBGroTDrVoYyuH1nFNEh7YLqeQHqvyufBKKYo9cid8NQDTu+vWr5UK +tGvHnwdKrJmM1oN5JOAiq0r7+QMAOWchVy449VNSWWV03aeftB685iR5BXkstbIQ +EVopAoGAfcGBTAhmceK/4Q83H/FXBWy0PAa1kZGg/q8+Z0KY76AqyxOVl0/CU/rB +3tO3sKhaMTHPME/MiQjQQGoaK1JgPY6JHYvly2KomrJ8QTugqNGyMzdVJkXAK2AM +GAwC8ivAkHf8CHrHa1W7l8t2IqBjW1aRt7mOW92nfG88Hck0Mbo= +-----END RSA PRIVATE KEY----- +""" + + +PUB_KEY2 = """ +-----BEGIN PUBLIC KEY----- +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAp+8cTxguO6Vg+YO92VfH +gNld3Zy8aM3JbZvpJcjTnis+YFJ7Zlkcc647yPRRwY9nYBNywahnt5kIeuT1rTvT +sMBZWvmUoEVUj1Xg8XXQkBvb9OzyGqy/G/p8KDDpzMP/U+XCnUeHiXTZrgnqgBIc +2cKeCVvWFqDi0GRFGzyaXLaX3PPmM7DJ0MIPL1qgmcDq6+7Ze0gJ9SrDYFAeLmbu +T1OqDfufXWQl/82JXeiwU2cOpqWq7n5fvPOWim7l1tzQ+dSiMRRm0xa6uNexCJww +3oJSwvMbAmgzvOhqqhlqv+K7u0u7FrFFojESsL36Gq4GBrISnvu2tk7u4GGNTYYQ +bQIDAQAB +-----END PUBLIC KEY----- +""" diff --git a/tests/pytests/unit/crypt/test_crypt_cryptography.py b/tests/pytests/unit/crypt/test_crypt_cryptography.py new file mode 100644 index 00000000000..c66c8776d43 --- /dev/null +++ b/tests/pytests/unit/crypt/test_crypt_cryptography.py @@ -0,0 +1,286 @@ +import hashlib +import hmac +import os + +import pytest +from cryptography.hazmat.backends.openssl import backend +from cryptography.hazmat.primitives import serialization + +import salt.crypt as crypt +import salt.utils.files +import salt.utils.stringutils +from tests.conftest import FIPS_TESTRUN +from 
tests.support.mock import mock_open, patch + +from . import ( + HAS_M2, + MSG, + PRIVKEY_DATA, + PUBKEY_DATA, + SIG, + LegacyPrivateKey, + LegacyPublicKey, + legacy_gen_keys, +) + +if HAS_M2: + from . import EVP, RSA +else: + from . import AES, PKCS1_OAEP + + +@pytest.fixture +def passphrase(): + return "pass1234" + + +@pytest.fixture +def signing_algorithm(): + if FIPS_TESTRUN: + return salt.crypt.PKCS1v15_SHA224 + return salt.crypt.PKCS1v15_SHA1 + + +@pytest.fixture +def encryption_algorithm(): + if FIPS_TESTRUN: + return salt.crypt.OAEP_SHA224 + return salt.crypt.OAEP_SHA1 + + +SIG_SHA224 = ( + b'\x18)\xc3E|\x15\xebF\x0f\xe6\xc0\x10\xca\xd9~\x1d\xf14t\xc7\x14}\xda6Fk"#' + b'Hl\x06\x13\xa9\xe3QlL\\\xf4`r\x88\x85\xc6#s\xcb"6\x1c\xdd\x07t\xd4\x84g' + b"n\x0f\xcc\x1c\xee\xe7\x84T\xb7\xd1\xc80~\xdd\xf7+\x972b6\xf1\xe1\x00P" + b"E\xb8\x86\xb3i\xa6*\xd2\xac\xb5\xcbStg\xfb*E9+\xf7\xc5\xc6X\x1e\xb9vY\xb7" + b"kT[a\xe8\xe1\xd8\xdf'u\x00k\x13\xff\xe2\xd1\x91M\xa7U\xc9\x90z\xf0" + b"\x03\xb2\xf3\x1bR\xbd\xc8\xe4B\xadJ\x91\x1e\x98\xea\x17\xa8;\x01\xcb" + b"1\x07\x7f\xa2\xf3\xe6\x83\xed\x03m\xad\t&\x95\xc2Q\xfcs\xcbV\xd4\xa4\xc9n" + b"\x8a\xbe\xcc3?.N\x1f8d{B\x8cp\xf8\xc8\x17\x90\x0e\x0c\x1a\x8dF\xb8" + b'\x18\xf7\x97\xf0\x04L\xe6\xfb\xc1\xb0}\xa9\xb6?\xc0\xbd\x8a<\xac"5\xee@x' + b"\xea\x1d\xa3\xffB\xa5\xbdt`\xa5\xe8p\xa3/\x18+\xec5\xb3]\x92\xaa\xd7\x9c" + b"\x0b\x03`~\x00\r%\xc8" +) + + +@pytest.fixture +def signature(): + if FIPS_TESTRUN: + return SIG_SHA224 + return SIG + + +@pytest.fixture +def private_key(passphrase, tmp_path): + keypath = tmp_path / "keys" + keypath.mkdir() + keyname = "test" + keysize = 2048 + return crypt.gen_keys(str(keypath), keyname, keysize, passphrase=passphrase) + + +def test_fips_mode(): + assert backend._fips_enabled == FIPS_TESTRUN + + +@pytest.mark.skipif(FIPS_TESTRUN, reason="Legacy key can not be loaded in FIPS mode") +def test_gen_keys_legacy(tmp_path): + keypath = tmp_path / "keys" + keypath.mkdir() + passphrase = "pass1234" + keyname = 
"test" + keysize = 2048 + ret = legacy_gen_keys(str(keypath), keyname, keysize, passphrase=passphrase) + with salt.utils.files.fopen(ret, "rb") as fp: + keybytes = fp.read() + assert keybytes.startswith(b"-----BEGIN RSA PRIVATE KEY-----\n") + priv = serialization.load_pem_private_key(keybytes, passphrase.encode()) + with salt.utils.files.fopen(ret.replace(".pem", ".pub"), "rb") as fp: + keybytes = fp.read() + assert keybytes.startswith(b"-----BEGIN PUBLIC KEY-----\n") + + +def test_gen_keys(tmp_path): + keypath = tmp_path / "keys" + keypath.mkdir() + passphrase = "pass1234" + keyname = "test" + keysize = 2048 + ret = crypt.gen_keys(str(keypath), keyname, keysize, passphrase=passphrase) + with salt.utils.files.fopen(ret, "rb") as fp: + keybytes = fp.read() + if FIPS_TESTRUN: + assert keybytes.startswith(b"-----BEGIN ENCRYPTED PRIVATE KEY-----\n") + else: + assert keybytes.startswith(b"-----BEGIN RSA PRIVATE KEY-----\n") + priv = serialization.load_pem_private_key(keybytes, passphrase.encode()) + with salt.utils.files.fopen(ret.replace(".pem", ".pub"), "rb") as fp: + keybytes = fp.read() + assert keybytes.startswith(b"-----BEGIN PUBLIC KEY-----\n") + + +def test_legacy_private_key_loading(private_key, passphrase): + priv = LegacyPrivateKey(private_key.encode(), passphrase.encode()) + assert priv.key + + +def test_private_key_loading(private_key, passphrase): + priv = crypt.PrivateKey(private_key, passphrase) + assert priv.key + + +@pytest.mark.skipif(FIPS_TESTRUN, reason="Legacy key can not be loaded in FIPS mode") +def test_private_key_signing(private_key, passphrase): + lpriv = LegacyPrivateKey(private_key.encode(), passphrase.encode()) + priv = crypt.PrivateKey(private_key, passphrase) + data = b"meh" + signature = priv.sign(data) + lsignature = lpriv.sign(data) + assert lsignature == signature + + +@pytest.mark.skipif(FIPS_TESTRUN, reason="Legacy key can not be loaded in FIPS mode") +def test_legacy_public_key_verify(private_key, passphrase): + lpriv = 
crypt.PrivateKey(private_key, passphrase) + data = b"meh" + signature = lpriv.sign(data) + pubkey = LegacyPublicKey(private_key.replace(".pem", ".pub")) + assert pubkey.verify(data, signature) + + +@pytest.mark.skipif(FIPS_TESTRUN, reason="Legacy key can not be loaded in FIPS mode") +def test_public_key_verify(private_key, passphrase): + lpriv = LegacyPrivateKey(private_key.encode(), passphrase.encode()) + data = b"meh" + signature = lpriv.sign(data) + pubkey = crypt.PublicKey(private_key.replace(".pem", ".pub")) + assert pubkey.verify(data, signature) + + +@pytest.mark.skipif(FIPS_TESTRUN, reason="Legacy key can not be loaded in FIPS mode") +def test_public_key_encrypt(private_key, passphrase): + pubkey = crypt.PublicKey(private_key.replace(".pem", ".pub")) + data = b"meh" + enc = pubkey.encrypt(data) + + lpriv = LegacyPrivateKey(private_key.encode(), passphrase.encode()) + if HAS_M2: + dec = lpriv.key.private_decrypt(enc, RSA.pkcs1_oaep_padding) + else: + cipher = PKCS1_OAEP.new(lpriv.key) + dec = cipher.decrypt(enc) + + assert data == dec + + +@pytest.mark.skipif(FIPS_TESTRUN, reason="Legacy key can not be loaded in FIPS mode") +def test_private_key_decrypt(private_key, passphrase): + lpubkey = LegacyPublicKey(private_key.replace(".pem", ".pub")) + data = b"meh" + enc = lpubkey.encrypt(data) + priv = crypt.PrivateKey(private_key, passphrase) + dec = priv.key.decrypt( + enc, + crypt.padding.OAEP( + mgf=crypt.padding.MGF1(algorithm=crypt.hashes.SHA1()), + algorithm=crypt.hashes.SHA1(), + label=None, + ), + ) + + assert data == dec + + +def test_legacy_aes_encrypt(): + """ + Test that the legacy aes encryption can be decrypted by cryptography + """ + orig_data = b"meh" + crypticle = salt.crypt.Crypticle({}, salt.crypt.Crypticle.generate_key_string()) + aes_key, hmac_key = crypticle.keys + pad = crypticle.AES_BLOCK_SIZE - len(orig_data) % crypticle.AES_BLOCK_SIZE + data = orig_data + salt.utils.stringutils.to_bytes(pad * chr(pad)) + iv_bytes = 
os.urandom(crypticle.AES_BLOCK_SIZE) + iv_bytes = data[: crypticle.AES_BLOCK_SIZE] + if HAS_M2: + cypher = EVP.Cipher( + alg="aes_192_cbc", key=aes_key, iv=iv_bytes, op=1, padding=False + ) + encr = cypher.update(data) + encr += cypher.final() + else: + cypher = AES.new(aes_key, AES.MODE_CBC, iv_bytes) + encr = cypher.encrypt(data) + + data = iv_bytes + encr + sig = hmac.new(hmac_key, data, hashlib.sha256).digest() + assert orig_data == crypticle.decrypt(data + sig) + + +def test_aes_encrypt(): + """ + Test that cryptography aes encryption can be decrypted by the legacy libraries + """ + orig_data = b"meh" + crypticle = salt.crypt.Crypticle({}, salt.crypt.Crypticle.generate_key_string()) + + data = crypticle.encrypt(orig_data) + aes_key, hmac_key = crypticle.keys + sig = data[-crypticle.SIG_SIZE :] + data = data[: -crypticle.SIG_SIZE] + if not isinstance(data, bytes): + data = salt.utils.stringutils.to_bytes(data) + mac_bytes = hmac.new(hmac_key, data, hashlib.sha256).digest() + result = 0 + for zipped_x, zipped_y in zip(mac_bytes, sig): + result |= zipped_x ^ zipped_y + iv_bytes = data[: crypticle.AES_BLOCK_SIZE] + data = data[crypticle.AES_BLOCK_SIZE :] + if HAS_M2: + cypher = EVP.Cipher( + alg="aes_192_cbc", key=aes_key, iv=iv_bytes, op=0, padding=False + ) + encr = cypher.update(data) + data = encr + cypher.final() + else: + cypher = AES.new(aes_key, AES.MODE_CBC, iv_bytes) + data = cypher.decrypt(data) + data = data[: -data[-1]] + assert orig_data == data + + +def test_encrypt_decrypt(private_key, passphrase, encryption_algorithm): + pubkey = crypt.PublicKey(private_key.replace(".pem", ".pub")) + enc = pubkey.encrypt(b"meh", algorithm=encryption_algorithm) + privkey = crypt.PrivateKey(private_key, passphrase) + assert privkey.decrypt(enc, algorithm=encryption_algorithm) == b"meh" + + +def test_sign_message(signature, signing_algorithm): + key = salt.crypt.serialization.load_pem_private_key(PRIVKEY_DATA.encode(), None) + with patch("salt.crypt.get_rsa_key", 
return_value=key): + assert ( + salt.crypt.sign_message( + "/keydir/keyname.pem", MSG, algorithm=signing_algorithm + ) + == signature + ) + + +def test_sign_message_with_passphrase(signature, signing_algorithm): + key = salt.crypt.serialization.load_pem_private_key(PRIVKEY_DATA.encode(), None) + with patch("salt.crypt.get_rsa_key", return_value=key): + assert ( + salt.crypt.sign_message( + "/keydir/keyname.pem", + MSG, + passphrase="password", + algorithm=signing_algorithm, + ) + == signature + ) + + +def test_verify_signature(): + with patch("salt.utils.files.fopen", mock_open(read_data=PUBKEY_DATA.encode())): + assert salt.crypt.verify_signature("/keydir/keyname.pub", MSG, SIG) From b32f09b65516a457e9efd0e66d7d5f4a9873f6b9 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Sat, 25 May 2024 01:27:41 -0700 Subject: [PATCH 077/160] Clean up un-needed re-init crypto and test fix --- salt/channel/server.py | 5 +- salt/cloud/__init__.py | 14 - salt/crypt.py | 2 - salt/master.py | 2 - salt/minion.py | 2 - salt/modules/inspectlib/collector.py | 6 +- salt/utils/crypt.py | 43 - tests/conftest.py | 10 + tests/pytests/conftest.py | 7 + tests/pytests/unit/conftest.py | 9 + .../unit/crypt/test_crypt_cryptography.py | 88 ++ tests/pytests/unit/transport/test_zeromq.py | 781 ++++++++++-------- tests/pytests/unit/utils/test_crypt.py | 45 - 13 files changed, 553 insertions(+), 461 deletions(-) diff --git a/salt/channel/server.py b/salt/channel/server.py index 7ffb19dde44..ca5de7bf2d5 100644 --- a/salt/channel/server.py +++ b/salt/channel/server.py @@ -53,7 +53,10 @@ class ReqServerChannel: def __init__(self, opts, transport): self.opts = opts self.transport = transport - self.event = None + self.event = salt.utils.event.get_master_event( + self.opts, self.opts["sock_dir"], listen=False + ) + self.master_key = salt.crypt.MasterKeys(self.opts) def pre_fork(self, process_manager): """ diff --git a/salt/cloud/__init__.py b/salt/cloud/__init__.py index cad276b853a..f205ee6c920 
100644 --- a/salt/cloud/__init__.py +++ b/salt/cloud/__init__.py @@ -35,14 +35,6 @@ from salt.exceptions import ( ) from salt.template import compile_template -try: - import Cryptodome.Random -except ImportError: - try: - import Crypto.Random # nosec - except ImportError: - pass # pycrypto < 2.1 - log = logging.getLogger(__name__) @@ -2288,8 +2280,6 @@ def create_multiprocessing(parallel_data, queue=None): This function will be called from another process when running a map in parallel mode. The result from the create is always a json object. """ - salt.utils.crypt.reinit_crypto() - parallel_data["opts"]["output"] = "json" cloud = Cloud(parallel_data["opts"]) try: @@ -2318,8 +2308,6 @@ def destroy_multiprocessing(parallel_data, queue=None): This function will be called from another process when running a map in parallel mode. The result from the destroy is always a json object. """ - salt.utils.crypt.reinit_crypto() - parallel_data["opts"]["output"] = "json" clouds = salt.loader.clouds(parallel_data["opts"]) @@ -2350,8 +2338,6 @@ def run_parallel_map_providers_query(data, queue=None): This function will be called from another process when building the providers map. 
""" - salt.utils.crypt.reinit_crypto() - cloud = Cloud(data["opts"]) try: with salt.utils.context.func_globals_inject( diff --git a/salt/crypt.py b/salt/crypt.py index deaa871eef7..f4e397a9bba 100644 --- a/salt/crypt.py +++ b/salt/crypt.py @@ -622,8 +622,6 @@ class AsyncAuth: self.get_keys() self.io_loop = io_loop or salt.ext.tornado.ioloop.IOLoop.current() - - salt.utils.crypt.reinit_crypto() key = self.__key(self.opts) # TODO: if we already have creds for this key, lets just re-use if key in AsyncAuth.creds_map: diff --git a/salt/master.py b/salt/master.py index 4937274a41b..e6ba5f1d8d8 100644 --- a/salt/master.py +++ b/salt/master.py @@ -37,7 +37,6 @@ import salt.serializers.msgpack import salt.state import salt.utils.args import salt.utils.atomicfile -import salt.utils.crypt import salt.utils.event import salt.utils.files import salt.utils.gitfs @@ -1156,7 +1155,6 @@ class MWorker(salt.utils.process.SignalHandlingProcess): ) self.clear_funcs.connect() self.aes_funcs = AESFuncs(self.opts) - salt.utils.crypt.reinit_crypto() self.__bind() diff --git a/salt/minion.py b/salt/minion.py index 12c9a86ba2a..cb4ef919b30 100644 --- a/salt/minion.py +++ b/salt/minion.py @@ -39,7 +39,6 @@ import salt.syspaths import salt.transport import salt.utils.args import salt.utils.context -import salt.utils.crypt import salt.utils.data import salt.utils.dictdiffer import salt.utils.dictupdate @@ -1811,7 +1810,6 @@ class Minion(MinionBase): name=name, args=(instance, self.opts, data, self.connected, creds_map), ) - process.register_after_fork_method(salt.utils.crypt.reinit_crypto) else: process = threading.Thread( target=self._target, diff --git a/salt/modules/inspectlib/collector.py b/salt/modules/inspectlib/collector.py index d92646c9fc3..9d22808434a 100644 --- a/salt/modules/inspectlib/collector.py +++ b/salt/modules/inspectlib/collector.py @@ -17,7 +17,6 @@ import os import subprocess import sys -import salt.utils.crypt import salt.utils.files import salt.utils.fsutils import 
salt.utils.path @@ -579,10 +578,9 @@ if __name__ == "__main__": # Double-fork stuff try: if os.fork() > 0: - salt.utils.crypt.reinit_crypto() sys.exit(0) else: - salt.utils.crypt.reinit_crypto() + pass except OSError as ex: sys.exit(1) @@ -592,7 +590,6 @@ if __name__ == "__main__": try: pid = os.fork() if pid > 0: - salt.utils.crypt.reinit_crypto() with salt.utils.files.fopen( os.path.join(pidfile, EnvLoader.PID_FILE), "w" ) as fp_: @@ -601,5 +598,4 @@ if __name__ == "__main__": except OSError as ex: sys.exit(1) - salt.utils.crypt.reinit_crypto() main(dbfile, pidfile, mode) diff --git a/salt/utils/crypt.py b/salt/utils/crypt.py index 044eebe7a77..5505c0eacf0 100644 --- a/salt/utils/crypt.py +++ b/salt/utils/crypt.py @@ -12,35 +12,6 @@ from salt.exceptions import SaltInvocationError log = logging.getLogger(__name__) -try: - import M2Crypto # pylint: disable=unused-import - - Random = None - HAS_M2CRYPTO = True -except ImportError: - HAS_M2CRYPTO = False - -if not HAS_M2CRYPTO: - try: - from Cryptodome import Random - - HAS_CRYPTODOME = True - except ImportError: - HAS_CRYPTODOME = False -else: - HAS_CRYPTODOME = False - -if not HAS_M2CRYPTO and not HAS_CRYPTODOME: - try: - from Crypto import Random # nosec - - HAS_CRYPTO = True - except ImportError: - HAS_CRYPTO = False -else: - HAS_CRYPTO = False - - def decrypt( data, rend, translate_newlines=False, renderers=None, opts=None, valid_rend=None ): @@ -117,20 +88,6 @@ def decrypt( return rend_func(data, translate_newlines=translate_newlines) -def reinit_crypto(): - """ - When a fork arises, pycrypto needs to reinit - From its doc:: - - Caveat: For the random number generator to work correctly, - you must call Random.atfork() in both the parent and - child processes after using os.fork() - - """ - if HAS_CRYPTODOME or HAS_CRYPTO: - Random.atfork() - - def pem_finger(path=None, key=None, sum_type="sha256"): """ Pass in either a raw pem string, or the path on disk to the location of a diff --git a/tests/conftest.py 
b/tests/conftest.py index bb37b7b748a..04b885a8f9f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -996,6 +996,9 @@ def salt_syndic_master_factory( config_overrides = { "log_level_logfile": "quiet", "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), } ext_pillar = [] if salt.utils.platform.is_windows(): @@ -1112,6 +1115,9 @@ def salt_master_factory( config_overrides = { "log_level_logfile": "quiet", "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), } ext_pillar = [] if salt.utils.platform.is_windows(): @@ -1221,6 +1227,8 @@ def salt_minion_factory(salt_master_factory): "file_roots": salt_master_factory.config["file_roots"].copy(), "pillar_roots": salt_master_factory.config["pillar_roots"].copy(), "fips_mode": FIPS_TESTRUN, + "rsa_encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "rsa_signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", } virtualenv_binary = get_virtualenv_binary_path() @@ -1253,6 +1261,8 @@ def salt_sub_minion_factory(salt_master_factory): "file_roots": salt_master_factory.config["file_roots"].copy(), "pillar_roots": salt_master_factory.config["pillar_roots"].copy(), "fips_mode": FIPS_TESTRUN, + "rsa_encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "rsa_signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", } virtualenv_binary = get_virtualenv_binary_path() diff --git a/tests/pytests/conftest.py b/tests/pytests/conftest.py index 591f1baafce..fe5f7c755ed 100644 --- a/tests/pytests/conftest.py +++ b/tests/pytests/conftest.py @@ -191,6 +191,9 @@ def salt_master_factory( config_overrides = { "pytest-master": {"log": {"level": "DEBUG"}}, "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + ), } ext_pillar = [] if salt.utils.platform.is_windows(): 
@@ -321,6 +324,8 @@ def salt_minion_factory(salt_master_factory, salt_minion_id, sdb_etcd_port, vaul "file_roots": salt_master_factory.config["file_roots"].copy(), "pillar_roots": salt_master_factory.config["pillar_roots"].copy(), "fips_mode": FIPS_TESTRUN, + "rsa_encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "rsa_signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", } virtualenv_binary = get_virtualenv_binary_path() @@ -352,6 +357,8 @@ def salt_sub_minion_factory(salt_master_factory, salt_sub_minion_id): "file_roots": salt_master_factory.config["file_roots"].copy(), "pillar_roots": salt_master_factory.config["pillar_roots"].copy(), "fips_mode": FIPS_TESTRUN, + "rsa_encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "rsa_signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", } virtualenv_binary = get_virtualenv_binary_path() diff --git a/tests/pytests/unit/conftest.py b/tests/pytests/unit/conftest.py index c7152f3d2d1..444c2a147d4 100644 --- a/tests/pytests/unit/conftest.py +++ b/tests/pytests/unit/conftest.py @@ -3,6 +3,7 @@ import os import pytest import salt.config +from tests.conftest import FIPS_TESTRUN @pytest.fixture @@ -10,6 +11,7 @@ def minion_opts(tmp_path): """ Default minion configuration with relative temporary paths to not require root permissions. 
""" + print(f"WTF {FIPS_TESTRUN}") root_dir = tmp_path / "minion" opts = salt.config.DEFAULT_MINION_OPTS.copy() opts["__role"] = "minion" @@ -23,6 +25,9 @@ def minion_opts(tmp_path): opts[name] = str(dirpath) opts["log_file"] = "logs/minion.log" opts["conf_file"] = os.path.join(opts["conf_dir"], "minion") + opts["fips_mode"] = FIPS_TESTRUN + opts["encryption_algorithm"] = "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1" + opts["signing_algorithm"] = "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" return opts @@ -41,6 +46,10 @@ def master_opts(tmp_path): opts[name] = str(dirpath) opts["log_file"] = "logs/master.log" opts["conf_file"] = os.path.join(opts["conf_dir"], "master") + opts["fips_mode"] = FIPS_TESTRUN + opts["publish_signing_algorithm"] = ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ) return opts diff --git a/tests/pytests/unit/crypt/test_crypt_cryptography.py b/tests/pytests/unit/crypt/test_crypt_cryptography.py index c66c8776d43..f0620f5ee69 100644 --- a/tests/pytests/unit/crypt/test_crypt_cryptography.py +++ b/tests/pytests/unit/crypt/test_crypt_cryptography.py @@ -2,6 +2,7 @@ import hashlib import hmac import os +import cryptography.exceptions import pytest from cryptography.hazmat.backends.openssl import backend from cryptography.hazmat.primitives import serialization @@ -29,6 +30,45 @@ else: from . 
import AES, PKCS1_OAEP +OPENSSL_ENCRYPTED_KEY = """ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: AES-256-CBC,12E2FE8CA93672B629477073867DE7E4 + +3wgKQlgjT3G+8rIQt97AXMDByh5u9JMZPYOB9/wg3iC3qoJXfoFAsCNUjODJBnkI +j9Zgj/bOCaSM3UshMQXmYY+2Rfi1SVQPnETlqEH/plMZS38tB8mN5pBdthgGTw6c +rhpj/S23eZT5d+z5ODeVYlWCVhx8CtE8OQEzOQk8dxLWbVHhvgC3uJGWOPR3P7VM +BlxH5LxWRCrC8vzbnwwaJnX8BTQ7fc4qeGwHlXBjpnxQhxO27pEj08NQ0/lfKh1b +seX5uiCjuQhHFKNGTuA16rQIe6BkYRHIWCDhySftl/lqSfLQif0OAXaHEdL2EdIS +ySD12RYyNUDotEzYFF+qzJ5OAtraqvc8kYror7oN52bHKCjJyrp4+DWA4/N7FjTV ++FqUyJNKqw1DAQAxlZlq+GgNyi8+g/Zs2TKTc/ZXaPLjtWYOQEQkYaNBoaD3ydY3 +c7x+uQtJLVW9BF9FSl9A7BItpqZQWKiHiGtUdhYOkemlR+zMatjBe/eTq8LrnEDa +IyI+rRo1PSDAz3n1pdEAzGAOeqwT+j7YG9O8+dybMY5FcAtiiPX21nIpmP+Rtx4X +GqzHsT7nM6QG4O8GPKuK6TniG+Q0doNWomwuau/cjQgL4C+yFiX3kIPpHz9kA/aS +NL1SJqSsvc3D/KlRbHXaJZJyhmzDuEbQynkaAdvejiajlJWAwA3BZWw1RUK7Wn8m +XcNPJL3g02uKq8SUDgVQl/cx4QawuWri2Xh8/xakNYWzNU2feoWBmV+gN2qDSxyz +Qi+xu3CzdJVrPs71lW0rEAIQvU3K3Umava9M4CUF6R7N9+Zv+m1EuMQs0IGt8VCY +Wo1KY5PAb/V718d1C3I6kXvLSDXG8xqyEleilPLhKCRGPK+2g0nGYu562EV1i6by +gr+PLnFJTfgHEzwIfsqfNoR8ReQ6AJKJoniQr4vqex9xtifuhes0odpqmUB4/B2C +UfY/SpJR6tzdrGndpB/vb1vjHumHklHHWrLONtz70BhR8Zaisc7SCmL5bFgWqzMC +MJKPulRRGQCPAzy5OI/ZULY8+dzlva1MyoCYlWjeUtcUAy+9dyA8GZv75ez9g71b +10nNINDcvGG7zWShSYrAKrvLlsoE7eZ+flG+XhI2CfiC9/zHBzy/slbaH9H+1tlO +VWKiw6iBb2TEvBk4Wpk2nUFlWKtkkBVAlgbShbE2K8pTHrJeIRv5J897k693NFZE +DjVVJirzMv/OiZTami0qBQ4nDtUZpH8FsFZ8DtREkhROsDmrjq9PGkOVaxEyF/ke +avJT34gp4OoNWj7/Rd1YNbGiWjMEB3zi9y1Q6Ufiod9ZlK3RQb4tNrpzDn/msdJo +pIkuByWjXjF4YQRKtAoeCn+CWiY7L/Qi8X7jmX27JLILlZPtTJ+aNp3eCr6ZX0dW +y0uhN89sgMewlvDA/pduL/VJRHUBZC2eD8FbD7p6K+yRKhdciS9A8F6aIhx615s6 +mngRBfwzh8ST6yQgLwCgle/XaRYTWJKjzAe3lkaIBBhHpeuq1UMAjunoS8JnLNiy +xQJ0PznzY57sYKpIiClwMjfpnX47nTU2DFWuPEXvBtG1xMjacGPbVrUslesY5bii +-----END RSA PRIVATE KEY----- +""" + + +@pytest.fixture +def openssl_encrypted_key(): + return OPENSSL_ENCRYPTED_KEY + + @pytest.fixture def passphrase(): return "pass1234" @@ 
-284,3 +324,51 @@ def test_sign_message_with_passphrase(signature, signing_algorithm): def test_verify_signature(): with patch("salt.utils.files.fopen", mock_open(read_data=PUBKEY_DATA.encode())): assert salt.crypt.verify_signature("/keydir/keyname.pub", MSG, SIG) + + +def test_loading_encrypted_openssl_format(openssl_encrypted_key, passphrase, tmp_path): + path = tmp_path / "key" + path.write_text(openssl_encrypted_key) + if FIPS_TESTRUN: + with pytest.raises(ValueError): + salt.crypt.get_rsa_key(path, passphrase) + else: + try: + salt.crypt.get_rsa_key(path, passphrase) + # BaseException to catch errors bubbling up from the cryptography's + # rust layer. + except BaseException as exc: # pylint: disable=broad-except + pytest.fail(f"Unexpected exception: {exc}") + + +@pytest.mark.skipif(not FIPS_TESTRUN, reason="Only valid when in FIPS mode") +def test_fips_bad_signing_algo(private_key, passphrase): + key = salt.crypt.PrivateKey(private_key, passphrase) + with pytest.raises(cryptography.exceptions.UnsupportedAlgorithm): + key.sign("meh", salt.crypt.PKCS1v15_SHA1) + + +@pytest.mark.skipif(not FIPS_TESTRUN, reason="Only valid when in FIPS mode") +def test_fips_bad_signing_algo_verification(private_key, passphrase): + lpriv = LegacyPrivateKey(private_key.encode(), passphrase.encode()) + data = b"meh" + signature = lpriv.sign(data) + pubkey = salt.crypt.PublicKey(private_key.replace(".pem", ".pub")) + # cryptography silently returns False on unsupported algorithm + assert pubkey.verify(signature, salt.crypt.PKCS1v15_SHA1) is False + + +@pytest.mark.skipif(not FIPS_TESTRUN, reason="Only valid when in FIPS mode") +def test_fips_bad_encryption_algo(private_key, passphrase): + key = salt.crypt.PublicKey(private_key.replace(".pem", ".pub")) + with pytest.raises(cryptography.exceptions.UnsupportedAlgorithm): + key.encrypt("meh", salt.crypt.OAEP_SHA1) + + +@pytest.mark.skipif(not FIPS_TESTRUN, reason="Only valid when in FIPS mode") +def 
test_fips_bad_decryption_algo(private_key, passphrase): + pubkey = LegacyPublicKey(private_key.replace(".pem", ".pub")) + data = pubkey.encrypt("meh") + key = salt.crypt.PrivateKey(private_key, passphrase) + with pytest.raises(cryptography.exceptions.UnsupportedAlgorithm): + key.decrypt(data) diff --git a/tests/pytests/unit/transport/test_zeromq.py b/tests/pytests/unit/transport/test_zeromq.py index 5ce76bdd19f..b9de5f514aa 100644 --- a/tests/pytests/unit/transport/test_zeromq.py +++ b/tests/pytests/unit/transport/test_zeromq.py @@ -28,19 +28,9 @@ import salt.utils.platform import salt.utils.process import salt.utils.stringutils from salt.master import SMaster +from tests.conftest import FIPS_TESTRUN from tests.support.mock import AsyncMock, MagicMock, patch -try: - from M2Crypto import RSA - - HAS_M2 = True -except ImportError: - HAS_M2 = False - try: - from Cryptodome.Cipher import PKCS1_OAEP - except ImportError: - from Crypto.Cipher import PKCS1_OAEP # nosec - log = logging.getLogger(__name__) @@ -224,6 +214,20 @@ oQIDAQAB AES_KEY = "8wxWlOaMMQ4d3yT74LL4+hGrGTf65w8VgrcNjLJeLRQ2Q6zMa8ItY2EQUgMKKDb7JY+RnPUxbB0=" +@pytest.fixture +def signing_algorithm(): + if FIPS_TESTRUN: + return salt.crypt.PKCS1v15_SHA224 + return salt.crypt.PKCS1v15_SHA1 + + +@pytest.fixture +def encryption_algorithm(): + if FIPS_TESTRUN: + return salt.crypt.OAEP_SHA224 + return salt.crypt.OAEP_SHA1 + + @pytest.fixture def pki_dir(tmp_path): _pki_dir = tmp_path / "pki" @@ -605,37 +609,44 @@ def test_zeromq_async_pub_channel_filtering_decode_message( assert res.result()["enc"] == "aes" -def test_req_server_chan_encrypt_v2(pki_dir): +def test_req_server_chan_encrypt_v2( + pki_dir, encryption_algorithm, signing_algorithm, master_opts +): loop = salt.ext.tornado.ioloop.IOLoop.current() - opts = { - "worker_threads": 1, - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "zmq_monitor": False, - "mworker_queue_niceness": False, - "sock_dir": 
".", - "pki_dir": str(pki_dir.joinpath("master")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - } - server = salt.channel.server.ReqServerChannel.factory(opts) + master_opts.update( + { + "worker_threads": 1, + "master_uri": "tcp://127.0.0.1:4506", + "interface": "127.0.0.1", + "ret_port": 4506, + "ipv6": False, + "zmq_monitor": False, + "mworker_queue_niceness": False, + "sock_dir": ".", + "pki_dir": str(pki_dir.joinpath("master")), + "id": "minion", + "__role": "minion", + "keysize": 4096, + } + ) + server = salt.channel.server.ReqServerChannel.factory(master_opts) dictkey = "pillar" nonce = "abcdefg" pillar_data = {"pillar1": "meh"} - ret = server._encrypt_private(pillar_data, dictkey, "minion", nonce) + ret = server._encrypt_private( + pillar_data, + dictkey, + "minion", + nonce, + encryption_algorithm=encryption_algorithm, + signing_algorithm=signing_algorithm, + ) assert "key" in ret assert dictkey in ret - key = salt.crypt.get_rsa_key(str(pki_dir.joinpath("minion", "minion.pem")), None) - if HAS_M2: - aes = key.private_decrypt(ret["key"], RSA.pkcs1_oaep_padding) - else: - cipher = PKCS1_OAEP.new(key) # pylint: disable=used-before-assignment - aes = cipher.decrypt(ret["key"]) - pcrypt = salt.crypt.Crypticle(opts, aes) + key = salt.crypt.PrivateKey(str(pki_dir.joinpath("minion", "minion.pem"))) + aes = key.decrypt(ret["key"], encryption_algorithm) + pcrypt = salt.crypt.Crypticle(master_opts, aes) signed_msg = pcrypt.loads(ret[dictkey]) assert "sig" in signed_msg @@ -649,93 +660,105 @@ def test_req_server_chan_encrypt_v2(pki_dir): assert data["pillar"] == pillar_data -def test_req_server_chan_encrypt_v1(pki_dir): +def test_req_server_chan_encrypt_v1(pki_dir, encryption_algorithm, master_opts): loop = salt.ext.tornado.ioloop.IOLoop.current() - opts = { - "worker_threads": 1, - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "zmq_monitor": False, - "mworker_queue_niceness": False, - 
"sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("master")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - } - server = salt.channel.server.ReqServerChannel.factory(opts) + master_opts.update( + { + "worker_threads": 1, + "master_uri": "tcp://127.0.0.1:4506", + "interface": "127.0.0.1", + "ret_port": 4506, + "ipv6": False, + "zmq_monitor": False, + "mworker_queue_niceness": False, + "sock_dir": ".", + "pki_dir": str(pki_dir.joinpath("master")), + "id": "minion", + "__role": "minion", + "keysize": 4096, + } + ) + server = salt.channel.server.ReqServerChannel.factory(master_opts) dictkey = "pillar" nonce = "abcdefg" pillar_data = {"pillar1": "meh"} - ret = server._encrypt_private(pillar_data, dictkey, "minion", sign_messages=False) + ret = server._encrypt_private( + pillar_data, + dictkey, + "minion", + sign_messages=False, + encryption_algorithm=encryption_algorithm, + ) assert "key" in ret assert dictkey in ret - key = salt.crypt.get_rsa_key(str(pki_dir.joinpath("minion", "minion.pem")), None) - if HAS_M2: - aes = key.private_decrypt(ret["key"], RSA.pkcs1_oaep_padding) - else: - cipher = PKCS1_OAEP.new(key) - aes = cipher.decrypt(ret["key"]) - pcrypt = salt.crypt.Crypticle(opts, aes) + key = salt.crypt.PrivateKey(str(pki_dir.joinpath("minion", "minion.pem"))) + aes = key.decrypt(ret["key"], encryption_algorithm) + pcrypt = salt.crypt.Crypticle(master_opts, aes) data = pcrypt.loads(ret[dictkey]) assert data == pillar_data -def test_req_chan_decode_data_dict_entry_v1(pki_dir): +def test_req_chan_decode_data_dict_entry_v1( + pki_dir, encryption_algorithm, minion_opts, master_opts +): mockloop = MagicMock() - opts = { - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("minion")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - "acceptance_wait_time": 3, - "acceptance_wait_time_max": 3, - } - master_opts = dict(opts, 
pki_dir=str(pki_dir.joinpath("master"))) + minion_opts.update( + { + "master_uri": "tcp://127.0.0.1:4506", + "interface": "127.0.0.1", + "ret_port": 4506, + "ipv6": False, + "sock_dir": ".", + "pki_dir": str(pki_dir.joinpath("minion")), + "id": "minion", + "__role": "minion", + "keysize": 4096, + "acceptance_wait_time": 3, + "acceptance_wait_time_max": 3, + } + ) + master_opts = dict(master_opts, pki_dir=str(pki_dir.joinpath("master"))) server = salt.channel.server.ReqServerChannel.factory(master_opts) - client = salt.channel.client.ReqChannel.factory(opts, io_loop=mockloop) + client = salt.channel.client.ReqChannel.factory(minion_opts, io_loop=mockloop) dictkey = "pillar" target = "minion" pillar_data = {"pillar1": "meh"} - ret = server._encrypt_private(pillar_data, dictkey, target, sign_messages=False) + ret = server._encrypt_private( + pillar_data, + dictkey, + target, + sign_messages=False, + encryption_algorithm=encryption_algorithm, + ) key = client.auth.get_keys() - if HAS_M2: - aes = key.private_decrypt(ret["key"], RSA.pkcs1_oaep_padding) - else: - cipher = PKCS1_OAEP.new(key) - aes = cipher.decrypt(ret["key"]) + aes = key.decrypt(ret["key"], encryption_algorithm) pcrypt = salt.crypt.Crypticle(client.opts, aes) ret_pillar_data = pcrypt.loads(ret[dictkey]) assert ret_pillar_data == pillar_data -async def test_req_chan_decode_data_dict_entry_v2(pki_dir): +async def test_req_chan_decode_data_dict_entry_v2(minion_opts, master_opts, pki_dir): mockloop = MagicMock() - opts = { - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("minion")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - "acceptance_wait_time": 3, - "acceptance_wait_time_max": 3, - } - master_opts = dict(opts, pki_dir=str(pki_dir.joinpath("master"))) + minion_opts.update( + { + "master_uri": "tcp://127.0.0.1:4506", + "interface": "127.0.0.1", + "ret_port": 4506, + "ipv6": False, + 
"sock_dir": ".", + "pki_dir": str(pki_dir.joinpath("minion")), + "id": "minion", + "__role": "minion", + "keysize": 4096, + "acceptance_wait_time": 3, + "acceptance_wait_time_max": 3, + } + ) + master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) server = salt.channel.server.ReqServerChannel.factory(master_opts) - client = salt.channel.client.AsyncReqChannel.factory(opts, io_loop=mockloop) + client = salt.channel.client.AsyncReqChannel.factory(minion_opts, io_loop=mockloop) dictkey = "pillar" target = "minion" @@ -743,7 +766,7 @@ async def test_req_chan_decode_data_dict_entry_v2(pki_dir): # Mock auth and message client. auth = client.auth - auth._crypticle = salt.crypt.Crypticle(opts, AES_KEY) + auth._crypticle = salt.crypt.Crypticle(minion_opts, AES_KEY) client.auth = MagicMock() client.auth.mpub = auth.mpub client.auth.authenticated = True @@ -752,12 +775,20 @@ async def test_req_chan_decode_data_dict_entry_v2(pki_dir): client.auth.crypticle.loads = auth.crypticle.loads client.transport = MagicMock() + print(minion_opts["encryption_algorithm"]) + @salt.ext.tornado.gen.coroutine def mocksend(msg, timeout=60, tries=3): client.transport.msg = msg load = client.auth.crypticle.loads(msg["load"]) ret = server._encrypt_private( - pillar_data, dictkey, target, nonce=load["nonce"], sign_messages=True + pillar_data, + dictkey, + target, + nonce=load["nonce"], + sign_messages=True, + encryption_algorithm=minion_opts["encryption_algorithm"], + signing_algorithm=minion_opts["signing_algorithm"], ) raise salt.ext.tornado.gen.Return(ret) @@ -784,24 +815,28 @@ async def test_req_chan_decode_data_dict_entry_v2(pki_dir): assert ret == {"pillar1": "meh"} -async def test_req_chan_decode_data_dict_entry_v2_bad_nonce(pki_dir): +async def test_req_chan_decode_data_dict_entry_v2_bad_nonce( + pki_dir, minion_opts, master_opts +): mockloop = MagicMock() - opts = { - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "sock_dir": 
".", - "pki_dir": str(pki_dir.joinpath("minion")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - "acceptance_wait_time": 3, - "acceptance_wait_time_max": 3, - } - master_opts = dict(opts, pki_dir=str(pki_dir.joinpath("master"))) + minion_opts.update( + { + "master_uri": "tcp://127.0.0.1:4506", + "interface": "127.0.0.1", + "ret_port": 4506, + "ipv6": False, + "sock_dir": ".", + "pki_dir": str(pki_dir.joinpath("minion")), + "id": "minion", + "__role": "minion", + "keysize": 4096, + "acceptance_wait_time": 3, + "acceptance_wait_time_max": 3, + } + ) + master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) server = salt.channel.server.ReqServerChannel.factory(master_opts) - client = salt.channel.client.AsyncReqChannel.factory(opts, io_loop=mockloop) + client = salt.channel.client.AsyncReqChannel.factory(minion_opts, io_loop=mockloop) dictkey = "pillar" badnonce = "abcdefg" @@ -810,7 +845,7 @@ async def test_req_chan_decode_data_dict_entry_v2_bad_nonce(pki_dir): # Mock auth and message client. auth = client.auth - auth._crypticle = salt.crypt.Crypticle(opts, AES_KEY) + auth._crypticle = salt.crypt.Crypticle(minion_opts, AES_KEY) client.auth = MagicMock() client.auth.mpub = auth.mpub client.auth.authenticated = True @@ -819,7 +854,13 @@ async def test_req_chan_decode_data_dict_entry_v2_bad_nonce(pki_dir): client.auth.crypticle.loads = auth.crypticle.loads client.transport = MagicMock() ret = server._encrypt_private( - pillar_data, dictkey, target, nonce=badnonce, sign_messages=True + pillar_data, + dictkey, + target, + nonce=badnonce, + sign_messages=True, + encryption_algorithm=minion_opts["encryption_algorithm"], + signing_algorithm=minion_opts["signing_algorithm"], ) @salt.ext.tornado.gen.coroutine @@ -850,24 +891,28 @@ async def test_req_chan_decode_data_dict_entry_v2_bad_nonce(pki_dir): assert "Pillar nonce verification failed." 
== excinfo.value.message -async def test_req_chan_decode_data_dict_entry_v2_bad_signature(pki_dir): +async def test_req_chan_decode_data_dict_entry_v2_bad_signature( + pki_dir, minion_opts, master_opts +): mockloop = MagicMock() - opts = { - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("minion")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - "acceptance_wait_time": 3, - "acceptance_wait_time_max": 3, - } - master_opts = dict(opts, pki_dir=str(pki_dir.joinpath("master"))) + minion_opts.update( + { + "master_uri": "tcp://127.0.0.1:4506", + "interface": "127.0.0.1", + "ret_port": 4506, + "ipv6": False, + "sock_dir": ".", + "pki_dir": str(pki_dir.joinpath("minion")), + "id": "minion", + "__role": "minion", + "keysize": 4096, + "acceptance_wait_time": 3, + "acceptance_wait_time_max": 3, + } + ) + master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) server = salt.channel.server.ReqServerChannel.factory(master_opts) - client = salt.channel.client.AsyncReqChannel.factory(opts, io_loop=mockloop) + client = salt.channel.client.AsyncReqChannel.factory(minion_opts, io_loop=mockloop) dictkey = "pillar" badnonce = "abcdefg" @@ -876,7 +921,7 @@ async def test_req_chan_decode_data_dict_entry_v2_bad_signature(pki_dir): # Mock auth and message client. 
auth = client.auth - auth._crypticle = salt.crypt.Crypticle(opts, AES_KEY) + auth._crypticle = salt.crypt.Crypticle(minion_opts, AES_KEY) client.auth = MagicMock() client.auth.mpub = auth.mpub client.auth.authenticated = True @@ -890,15 +935,17 @@ async def test_req_chan_decode_data_dict_entry_v2_bad_signature(pki_dir): client.transport.msg = msg load = client.auth.crypticle.loads(msg["load"]) ret = server._encrypt_private( - pillar_data, dictkey, target, nonce=load["nonce"], sign_messages=True + pillar_data, + dictkey, + target, + nonce=load["nonce"], + sign_messages=True, + encryption_algorithm=minion_opts["encryption_algorithm"], + signing_algorithm=minion_opts["signing_algorithm"], ) key = client.auth.get_keys() - if HAS_M2: - aes = key.private_decrypt(ret["key"], RSA.pkcs1_oaep_padding) - else: - cipher = PKCS1_OAEP.new(key) - aes = cipher.decrypt(ret["key"]) + aes = key.decrypt(ret["key"], minion_opts["encryption_algorithm"]) pcrypt = salt.crypt.Crypticle(client.opts, aes) signed_msg = pcrypt.loads(ret[dictkey]) # Changing the pillar data will cause the signature verification to @@ -932,24 +979,28 @@ async def test_req_chan_decode_data_dict_entry_v2_bad_signature(pki_dir): assert "Pillar payload signature failed to validate." 
== excinfo.value.message -async def test_req_chan_decode_data_dict_entry_v2_bad_key(pki_dir): +async def test_req_chan_decode_data_dict_entry_v2_bad_key( + pki_dir, minion_opts, master_opts +): mockloop = MagicMock() - opts = { - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("minion")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - "acceptance_wait_time": 3, - "acceptance_wait_time_max": 3, - } - master_opts = dict(opts, pki_dir=str(pki_dir.joinpath("master"))) + minion_opts.update( + { + "master_uri": "tcp://127.0.0.1:4506", + "interface": "127.0.0.1", + "ret_port": 4506, + "ipv6": False, + "sock_dir": ".", + "pki_dir": str(pki_dir.joinpath("minion")), + "id": "minion", + "__role": "minion", + "keysize": 4096, + "acceptance_wait_time": 3, + "acceptance_wait_time_max": 3, + } + ) + master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) server = salt.channel.server.ReqServerChannel.factory(master_opts) - client = salt.channel.client.AsyncReqChannel.factory(opts, io_loop=mockloop) + client = salt.channel.client.AsyncReqChannel.factory(minion_opts, io_loop=mockloop) dictkey = "pillar" badnonce = "abcdefg" @@ -958,7 +1009,7 @@ async def test_req_chan_decode_data_dict_entry_v2_bad_key(pki_dir): # Mock auth and message client. 
auth = client.auth - auth._crypticle = salt.crypt.Crypticle(opts, AES_KEY) + auth._crypticle = salt.crypt.Crypticle(master_opts, AES_KEY) client.auth = MagicMock() client.auth.mpub = auth.mpub client.auth.authenticated = True @@ -972,30 +1023,28 @@ async def test_req_chan_decode_data_dict_entry_v2_bad_key(pki_dir): client.transport.msg = msg load = client.auth.crypticle.loads(msg["load"]) ret = server._encrypt_private( - pillar_data, dictkey, target, nonce=load["nonce"], sign_messages=True + pillar_data, + dictkey, + target, + nonce=load["nonce"], + sign_messages=True, + encryption_algorithm=minion_opts["encryption_algorithm"], + signing_algorithm=minion_opts["signing_algorithm"], ) - key = client.auth.get_keys() - if HAS_M2: - aes = key.private_decrypt(ret["key"], RSA.pkcs1_oaep_padding) - else: - cipher = PKCS1_OAEP.new(key) - aes = cipher.decrypt(ret["key"]) + mkey = client.auth.get_keys() + aes = mkey.decrypt(ret["key"], minion_opts["encryption_algorithm"]) pcrypt = salt.crypt.Crypticle(client.opts, aes) signed_msg = pcrypt.loads(ret[dictkey]) # Now encrypt with a different key key = salt.crypt.Crypticle.generate_key_string() - pcrypt = salt.crypt.Crypticle(opts, key) + pcrypt = salt.crypt.Crypticle(master_opts, key) pubfn = os.path.join(master_opts["pki_dir"], "minions", "minion") - pub = salt.crypt.get_rsa_pub_key(pubfn) + pub = salt.crypt.PublicKey(pubfn) ret[dictkey] = pcrypt.dumps(signed_msg) key = salt.utils.stringutils.to_bytes(key) - if HAS_M2: - ret["key"] = pub.public_encrypt(key, RSA.pkcs1_oaep_padding) - else: - cipher = PKCS1_OAEP.new(pub) - ret["key"] = cipher.encrypt(key) + ret["key"] = pub.encrypt(key, minion_opts["encryption_algorithm"]) raise salt.ext.tornado.gen.Return(ret) client.transport.send = mocksend @@ -1013,33 +1062,39 @@ async def test_req_chan_decode_data_dict_entry_v2_bad_key(pki_dir): "cmd": "_pillar", } - with pytest.raises(salt.crypt.AuthenticationError) as excinfo: - await client.crypted_transfer_decode_dictentry( - load, - 
dictkey="pillar", - ) - assert "Key verification failed." == excinfo.value.message + try: + with pytest.raises(salt.crypt.AuthenticationError) as excinfo: + await client.crypted_transfer_decode_dictentry( + load, + dictkey="pillar", + ) + assert "Key verification failed." == excinfo.value.message + finally: + client.close() + server.close() -async def test_req_serv_auth_v1(pki_dir): - opts = { - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("minion")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - "max_minions": 0, - "auto_accept": False, - "open_mode": False, - "key_pass": None, - "master_sign_pubkey": False, - "publish_port": 4505, - "auth_mode": 1, - } +async def test_req_serv_auth_v1(pki_dir, minion_opts, master_opts): + minion_opts.update( + { + "master_uri": "tcp://127.0.0.1:4506", + "interface": "127.0.0.1", + "ret_port": 4506, + "ipv6": False, + "sock_dir": ".", + "pki_dir": str(pki_dir.joinpath("minion")), + "id": "minion", + "__role": "minion", + "keysize": 4096, + "max_minions": 0, + "auto_accept": False, + "open_mode": False, + "key_pass": None, + "master_sign_pubkey": False, + "publish_port": 4505, + "auth_mode": 1, + } + ) SMaster.secrets["aes"] = { "secret": multiprocessing.Array( ctypes.c_char, @@ -1047,7 +1102,7 @@ async def test_req_serv_auth_v1(pki_dir): ), "reload": salt.crypt.Crypticle.generate_key_string, } - master_opts = dict(opts, pki_dir=str(pki_dir.joinpath("master"))) + master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) server = salt.channel.server.ReqServerChannel.factory(master_opts) server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) server.cache_cli = False @@ -1069,30 +1124,34 @@ async def test_req_serv_auth_v1(pki_dir): "id": "minion", "token": token, "pub": pub_key, + "enc_algo": minion_opts["encryption_algorithm"], + "sig_algo": minion_opts["signing_algorithm"], } ret = server._auth(load, 
sign_messages=False) assert "load" not in ret -async def test_req_serv_auth_v2(pki_dir): - opts = { - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("minion")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - "max_minions": 0, - "auto_accept": False, - "open_mode": False, - "key_pass": None, - "master_sign_pubkey": False, - "publish_port": 4505, - "auth_mode": 1, - } +async def test_req_serv_auth_v2(pki_dir, minion_opts, master_opts): + minion_opts.update( + { + "master_uri": "tcp://127.0.0.1:4506", + "interface": "127.0.0.1", + "ret_port": 4506, + "ipv6": False, + "sock_dir": ".", + "pki_dir": str(pki_dir.joinpath("minion")), + "id": "minion", + "__role": "minion", + "keysize": 4096, + "max_minions": 0, + "auto_accept": False, + "open_mode": False, + "key_pass": None, + "master_sign_pubkey": False, + "publish_port": 4505, + "auth_mode": 1, + } + ) SMaster.secrets["aes"] = { "secret": multiprocessing.Array( ctypes.c_char, @@ -1100,7 +1159,7 @@ async def test_req_serv_auth_v2(pki_dir): ), "reload": salt.crypt.Crypticle.generate_key_string, } - master_opts = dict(opts, pki_dir=str(pki_dir.joinpath("master"))) + master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) server = salt.channel.server.ReqServerChannel.factory(master_opts) server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) server.cache_cli = False @@ -1123,32 +1182,36 @@ async def test_req_serv_auth_v2(pki_dir): "nonce": nonce, "token": token, "pub": pub_key, + "enc_algo": minion_opts["encryption_algorithm"], + "sig_algo": minion_opts["signing_algorithm"], } ret = server._auth(load, sign_messages=True) assert "sig" in ret assert "load" in ret -async def test_req_chan_auth_v2(pki_dir, io_loop): - opts = { - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("minion")), - 
"id": "minion", - "__role": "minion", - "keysize": 4096, - "max_minions": 0, - "auto_accept": False, - "open_mode": False, - "key_pass": None, - "publish_port": 4505, - "auth_mode": 1, - "acceptance_wait_time": 3, - "acceptance_wait_time_max": 3, - } +async def test_req_chan_auth_v2(pki_dir, io_loop, minion_opts, master_opts): + minion_opts.update( + { + "master_uri": "tcp://127.0.0.1:4506", + "interface": "127.0.0.1", + "ret_port": 4506, + "ipv6": False, + "sock_dir": ".", + "pki_dir": str(pki_dir.joinpath("minion")), + "id": "minion", + "__role": "minion", + "keysize": 4096, + "max_minions": 0, + "auto_accept": False, + "open_mode": False, + "key_pass": None, + "publish_port": 4505, + "auth_mode": 1, + "acceptance_wait_time": 3, + "acceptance_wait_time_max": 3, + } + ) SMaster.secrets["aes"] = { "secret": multiprocessing.Array( ctypes.c_char, @@ -1156,15 +1219,15 @@ async def test_req_chan_auth_v2(pki_dir, io_loop): ), "reload": salt.crypt.Crypticle.generate_key_string, } - master_opts = dict(opts, pki_dir=str(pki_dir.joinpath("master"))) + master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) master_opts["master_sign_pubkey"] = False server = salt.channel.server.ReqServerChannel.factory(master_opts) server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) server.cache_cli = False server.master_key = salt.crypt.MasterKeys(server.opts) - opts["verify_master_pubkey_sign"] = False - opts["always_verify_signature"] = False - client = salt.channel.client.AsyncReqChannel.factory(opts, io_loop=io_loop) + minion_opts["verify_master_pubkey_sign"] = False + minion_opts["always_verify_signature"] = False + client = salt.channel.client.AsyncReqChannel.factory(minion_opts, io_loop=io_loop) signin_payload = client.auth.minion_sign_in_payload() pload = client._package_load(signin_payload) assert "version" in pload @@ -1178,26 +1241,30 @@ async def test_req_chan_auth_v2(pki_dir, io_loop): assert "publish_port" in ret -async def 
test_req_chan_auth_v2_with_master_signing(pki_dir, io_loop): - opts = { - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("minion")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - "max_minions": 0, - "auto_accept": False, - "open_mode": False, - "key_pass": None, - "publish_port": 4505, - "auth_mode": 1, - "acceptance_wait_time": 3, - "acceptance_wait_time_max": 3, - } +async def test_req_chan_auth_v2_with_master_signing( + pki_dir, io_loop, minion_opts, master_opts +): + minion_opts.update( + { + "master_uri": "tcp://127.0.0.1:4506", + "interface": "127.0.0.1", + "ret_port": 4506, + "ipv6": False, + "sock_dir": ".", + "pki_dir": str(pki_dir.joinpath("minion")), + "id": "minion", + "__role": "minion", + "keysize": 4096, + "max_minions": 0, + "auto_accept": False, + "open_mode": False, + "key_pass": None, + "publish_port": 4505, + "auth_mode": 1, + "acceptance_wait_time": 3, + "acceptance_wait_time_max": 3, + } + ) SMaster.secrets["aes"] = { "secret": multiprocessing.Array( ctypes.c_char, @@ -1205,26 +1272,26 @@ async def test_req_chan_auth_v2_with_master_signing(pki_dir, io_loop): ), "reload": salt.crypt.Crypticle.generate_key_string, } - master_opts = dict(opts, pki_dir=str(pki_dir.joinpath("master"))) + master_opts = dict(master_opts, pki_dir=str(pki_dir.joinpath("master"))) master_opts["master_sign_pubkey"] = True master_opts["master_use_pubkey_signature"] = False - master_opts["signing_key_pass"] = True + master_opts["signing_key_pass"] = "" master_opts["master_sign_key_name"] = "master_sign" server = salt.channel.server.ReqServerChannel.factory(master_opts) server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) server.cache_cli = False server.master_key = salt.crypt.MasterKeys(server.opts) - opts["verify_master_pubkey_sign"] = True - opts["always_verify_signature"] = True - opts["master_sign_key_name"] = "master_sign" - 
opts["master"] = "master" + minion_opts["verify_master_pubkey_sign"] = True + minion_opts["always_verify_signature"] = True + minion_opts["master_sign_key_name"] = "master_sign" + minion_opts["master"] = "master" assert ( pki_dir.joinpath("minion", "minion_master.pub").read_text() == pki_dir.joinpath("master", "master.pub").read_text() ) - client = salt.channel.client.AsyncReqChannel.factory(opts, io_loop=io_loop) + client = salt.channel.client.AsyncReqChannel.factory(minion_opts, io_loop=io_loop) signin_payload = client.auth.minion_sign_in_payload() pload = client._package_load(signin_payload) assert "version" in pload @@ -1269,28 +1336,32 @@ async def test_req_chan_auth_v2_with_master_signing(pki_dir, io_loop): ) -async def test_req_chan_auth_v2_new_minion_with_master_pub(pki_dir, io_loop): +async def test_req_chan_auth_v2_new_minion_with_master_pub( + pki_dir, io_loop, minion_opts, master_opts +): pki_dir.joinpath("master", "minions", "minion").unlink() - opts = { - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("minion")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - "max_minions": 0, - "auto_accept": False, - "open_mode": False, - "key_pass": None, - "publish_port": 4505, - "auth_mode": 1, - "acceptance_wait_time": 3, - "acceptance_wait_time_max": 3, - } + minion_opts.update( + { + "master_uri": "tcp://127.0.0.1:4506", + "interface": "127.0.0.1", + "ret_port": 4506, + "ipv6": False, + "sock_dir": ".", + "pki_dir": str(pki_dir.joinpath("minion")), + "id": "minion", + "__role": "minion", + "keysize": 4096, + "max_minions": 0, + "auto_accept": False, + "open_mode": False, + "key_pass": None, + "publish_port": 4505, + "auth_mode": 1, + "acceptance_wait_time": 3, + "acceptance_wait_time_max": 3, + } + ) SMaster.secrets["aes"] = { "secret": multiprocessing.Array( ctypes.c_char, @@ -1298,15 +1369,15 @@ async def 
test_req_chan_auth_v2_new_minion_with_master_pub(pki_dir, io_loop): ), "reload": salt.crypt.Crypticle.generate_key_string, } - master_opts = dict(opts, pki_dir=str(pki_dir.joinpath("master"))) + master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) master_opts["master_sign_pubkey"] = False server = salt.channel.server.ReqServerChannel.factory(master_opts) server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) server.cache_cli = False server.master_key = salt.crypt.MasterKeys(server.opts) - opts["verify_master_pubkey_sign"] = False - opts["always_verify_signature"] = False - client = salt.channel.client.AsyncReqChannel.factory(opts, io_loop=io_loop) + minion_opts["verify_master_pubkey_sign"] = False + minion_opts["always_verify_signature"] = False + client = salt.channel.client.AsyncReqChannel.factory(minion_opts, io_loop=io_loop) signin_payload = client.auth.minion_sign_in_payload() pload = client._package_load(signin_payload) assert "version" in pload @@ -1318,7 +1389,9 @@ async def test_req_chan_auth_v2_new_minion_with_master_pub(pki_dir, io_loop): assert ret == "retry" -async def test_req_chan_auth_v2_new_minion_with_master_pub_bad_sig(pki_dir, io_loop): +async def test_req_chan_auth_v2_new_minion_with_master_pub_bad_sig( + pki_dir, io_loop, minion_opts, master_opts +): pki_dir.joinpath("master", "minions", "minion").unlink() @@ -1330,25 +1403,27 @@ async def test_req_chan_auth_v2_new_minion_with_master_pub_bad_sig(pki_dir, io_l mapub.unlink() mapub.write_text(MASTER2_PUB_KEY.strip()) - opts = { - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("minion")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - "max_minions": 0, - "auto_accept": False, - "open_mode": False, - "key_pass": None, - "publish_port": 4505, - "auth_mode": 1, - "acceptance_wait_time": 3, - "acceptance_wait_time_max": 3, - } + minion_opts.update( + { + 
"master_uri": "tcp://127.0.0.1:4506", + "interface": "127.0.0.1", + "ret_port": 4506, + "ipv6": False, + "sock_dir": ".", + "pki_dir": str(pki_dir.joinpath("minion")), + "id": "minion", + "__role": "minion", + "keysize": 4096, + "max_minions": 0, + "auto_accept": False, + "open_mode": False, + "key_pass": None, + "publish_port": 4505, + "auth_mode": 1, + "acceptance_wait_time": 3, + "acceptance_wait_time_max": 3, + } + ) SMaster.secrets["aes"] = { "secret": multiprocessing.Array( ctypes.c_char, @@ -1356,15 +1431,16 @@ async def test_req_chan_auth_v2_new_minion_with_master_pub_bad_sig(pki_dir, io_l ), "reload": salt.crypt.Crypticle.generate_key_string, } - master_opts = dict(opts, pki_dir=str(pki_dir.joinpath("master"))) - master_opts["master_sign_pubkey"] = False + master_opts.update( + pki_dir=str(pki_dir.joinpath("master")), master_sign_pubkey=False + ) server = salt.channel.server.ReqServerChannel.factory(master_opts) server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) server.cache_cli = False server.master_key = salt.crypt.MasterKeys(server.opts) - opts["verify_master_pubkey_sign"] = False - opts["always_verify_signature"] = False - client = salt.channel.client.AsyncReqChannel.factory(opts, io_loop=io_loop) + minion_opts["verify_master_pubkey_sign"] = False + minion_opts["always_verify_signature"] = False + client = salt.channel.client.AsyncReqChannel.factory(minion_opts, io_loop=io_loop) signin_payload = client.auth.minion_sign_in_payload() pload = client._package_load(signin_payload) assert "version" in pload @@ -1376,29 +1452,36 @@ async def test_req_chan_auth_v2_new_minion_with_master_pub_bad_sig(pki_dir, io_l ret = client.auth.handle_signin_response(signin_payload, ret) -async def test_req_chan_auth_v2_new_minion_without_master_pub(pki_dir, io_loop): +async def test_req_chan_auth_v2_new_minion_without_master_pub( + minion_opts, + master_opts, + pki_dir, + io_loop, +): pki_dir.joinpath("master", "minions", "minion").unlink() 
pki_dir.joinpath("minion", "minion_master.pub").unlink() - opts = { - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("minion")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - "max_minions": 0, - "auto_accept": False, - "open_mode": False, - "key_pass": None, - "publish_port": 4505, - "auth_mode": 1, - "acceptance_wait_time": 3, - "acceptance_wait_time_max": 3, - } + minion_opts.update( + { + "master_uri": "tcp://127.0.0.1:4506", + "interface": "127.0.0.1", + "ret_port": 4506, + "ipv6": False, + "sock_dir": ".", + "pki_dir": str(pki_dir.joinpath("minion")), + "id": "minion", + "__role": "minion", + "keysize": 4096, + "max_minions": 0, + "auto_accept": False, + "open_mode": False, + "key_pass": None, + "publish_port": 4505, + "auth_mode": 1, + "acceptance_wait_time": 3, + "acceptance_wait_time_max": 3, + } + ) SMaster.secrets["aes"] = { "secret": multiprocessing.Array( ctypes.c_char, @@ -1406,24 +1489,28 @@ async def test_req_chan_auth_v2_new_minion_without_master_pub(pki_dir, io_loop): ), "reload": salt.crypt.Crypticle.generate_key_string, } - master_opts = dict(opts, pki_dir=str(pki_dir.joinpath("master"))) + master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) master_opts["master_sign_pubkey"] = False server = salt.channel.server.ReqServerChannel.factory(master_opts) server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) server.cache_cli = False server.master_key = salt.crypt.MasterKeys(server.opts) - opts["verify_master_pubkey_sign"] = False - opts["always_verify_signature"] = False - client = salt.channel.client.AsyncReqChannel.factory(opts, io_loop=io_loop) + minion_opts["verify_master_pubkey_sign"] = False + minion_opts["always_verify_signature"] = False + client = salt.channel.client.AsyncReqChannel.factory(minion_opts, io_loop=io_loop) signin_payload = client.auth.minion_sign_in_payload() pload = 
client._package_load(signin_payload) - assert "version" in pload - assert pload["version"] == 2 + try: + assert "version" in pload + assert pload["version"] == 2 - ret = server._auth(pload["load"], sign_messages=True) - assert "sig" in ret - ret = client.auth.handle_signin_response(signin_payload, ret) - assert ret == "retry" + ret = server._auth(pload["load"], sign_messages=True) + assert "sig" in ret + ret = client.auth.handle_signin_response(signin_payload, ret) + assert ret == "retry" + finally: + client.close() + server.close() async def test_req_server_garbage_request(io_loop): diff --git a/tests/pytests/unit/utils/test_crypt.py b/tests/pytests/unit/utils/test_crypt.py index ccf2cfbf46e..3aa1c409751 100644 --- a/tests/pytests/unit/utils/test_crypt.py +++ b/tests/pytests/unit/utils/test_crypt.py @@ -5,29 +5,6 @@ Unit tests for salt.utils.crypt.py import pytest import salt.utils.crypt -from tests.support.mock import patch - -try: - import M2Crypto # pylint: disable=unused-import - - HAS_M2CRYPTO = True -except ImportError: - HAS_M2CRYPTO = False - -try: - from Cryptodome import Random as CryptodomeRandom - - HAS_CYPTODOME = True -except ImportError: - HAS_CYPTODOME = False - - -try: - from Crypto import Random as CryptoRandom # nosec - - HAS_CRYPTO = True -except ImportError: - HAS_CRYPTO = False @pytest.fixture @@ -45,28 +22,6 @@ def pub_key_data(): ] -def test_random(): - # make sure the right library is used for random - if HAS_M2CRYPTO: - assert None is salt.utils.crypt.Random - elif HAS_CYPTODOME: - assert CryptodomeRandom is salt.utils.crypt.Random - elif HAS_CRYPTO: - assert CryptoRandom is salt.utils.crypt.Random - - -def test_reinit_crypto(): - # make sure reinit crypto does not crash - salt.utils.crypt.reinit_crypto() - - # make sure reinit does not crash when no crypt is found - with patch("salt.utils.crypt.HAS_M2CRYPTO", False): - with patch("salt.utils.crypt.HAS_CRYPTODOME", False): - with patch("salt.utils.crypt.HAS_CRYPTO", False): - with 
patch("salt.utils.crypt.Random", None): - salt.utils.crypt.reinit_crypto() - - @pytest.mark.parametrize("line_ending", ["\n", "\r\n"]) def test_pem_finger_file_line_endings(tmp_path, pub_key_data, line_ending): key_file = tmp_path / "master_crlf.pub" From 277e56b1139f868f89455ccf368e20a336eb2ee5 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Sat, 25 May 2024 14:24:36 -0700 Subject: [PATCH 078/160] Fix encryption config for tests --- doc/ref/configuration/minion.rst | 4 ++-- salt/crypt.py | 19 ++++--------------- salt/transport/zeromq.py | 1 - tests/conftest.py | 8 ++++---- tests/pytests/conftest.py | 8 ++++---- .../functional/transport/server/conftest.py | 5 +++++ .../transport/server/test_req_channel.py | 3 +++ tests/pytests/unit/cloud/clouds/test_ec2.py | 4 +--- tests/pytests/unit/conftest.py | 1 - 9 files changed, 23 insertions(+), 30 deletions(-) diff --git a/doc/ref/configuration/minion.rst b/doc/ref/configuration/minion.rst index 826c705b988..50dadf93481 100644 --- a/doc/ref/configuration/minion.rst +++ b/doc/ref/configuration/minion.rst @@ -3178,7 +3178,7 @@ constant names without ssl module prefix: ``CERT_REQUIRED`` or ``PROTOCOL_SSLv23 Default: OAEP-SHA1 -Then RSA encryption algorithm used by this minion when connecting to the +The RSA encryption algorithm used by this minion when connecting to the master's request channel. Valid values are ``OAEP-SHA1`` and ``OAEP-SHA224`` @@ -3189,7 +3189,7 @@ master's request channel. Valid values are ``OAEP-SHA1`` and ``OAEP-SHA224`` Default: PKCS1v15-SHA1 -Then RSA signing algorithm used by this minion when connecting to the +The RSA signing algorithm used by this minion when connecting to the master's request channel. 
Valid values are ``PKCS1v15-SHA1`` and ``PKCS1v15-SHA224`` diff --git a/salt/crypt.py b/salt/crypt.py index f4e397a9bba..2ba06360dac 100644 --- a/salt/crypt.py +++ b/salt/crypt.py @@ -304,21 +304,10 @@ def _get_key_with_evict(path, timestamp, passphrase): else: password = None with salt.utils.files.fopen(path, "rb") as f: - try: - return serialization.load_pem_private_key( - f.read(), - password=password, - ) - except BaseException as exc: - log.error("Exception is %r", exc) - if ( - exc.__class__.__module__ == "pyo3_runtime" - and exc.__class__.__name__ == "PanicException" - ): - if 'reason: "unsupported"' in exc.args[0]: - log.error("Unsupported key") - raise InvalidKeyError("Unsupported encryption algorithm") - raise + return serialization.load_pem_private_key( + f.read(), + password=password, + ) def get_rsa_key(path, passphrase): diff --git a/salt/transport/zeromq.py b/salt/transport/zeromq.py index f2e96d473d1..fb66f5015ad 100644 --- a/salt/transport/zeromq.py +++ b/salt/transport/zeromq.py @@ -749,7 +749,6 @@ class PublishServer(salt.transport.base.DaemonizedPublishServer): try: for package in packages: payload = salt.payload.loads(package) - log.error("on recv") yield publish_payload(payload) except Exception as exc: # pylint: disable=broad-except log.error( diff --git a/tests/conftest.py b/tests/conftest.py index 04b885a8f9f..9d673a3092c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1227,8 +1227,8 @@ def salt_minion_factory(salt_master_factory): "file_roots": salt_master_factory.config["file_roots"].copy(), "pillar_roots": salt_master_factory.config["pillar_roots"].copy(), "fips_mode": FIPS_TESTRUN, - "rsa_encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", - "rsa_signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", } virtualenv_binary = 
get_virtualenv_binary_path() @@ -1261,8 +1261,8 @@ def salt_sub_minion_factory(salt_master_factory): "file_roots": salt_master_factory.config["file_roots"].copy(), "pillar_roots": salt_master_factory.config["pillar_roots"].copy(), "fips_mode": FIPS_TESTRUN, - "rsa_encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", - "rsa_signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", } virtualenv_binary = get_virtualenv_binary_path() diff --git a/tests/pytests/conftest.py b/tests/pytests/conftest.py index fe5f7c755ed..985e34e0816 100644 --- a/tests/pytests/conftest.py +++ b/tests/pytests/conftest.py @@ -324,8 +324,8 @@ def salt_minion_factory(salt_master_factory, salt_minion_id, sdb_etcd_port, vaul "file_roots": salt_master_factory.config["file_roots"].copy(), "pillar_roots": salt_master_factory.config["pillar_roots"].copy(), "fips_mode": FIPS_TESTRUN, - "rsa_encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", - "rsa_signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", } virtualenv_binary = get_virtualenv_binary_path() @@ -357,8 +357,8 @@ def salt_sub_minion_factory(salt_master_factory, salt_sub_minion_id): "file_roots": salt_master_factory.config["file_roots"].copy(), "pillar_roots": salt_master_factory.config["pillar_roots"].copy(), "fips_mode": FIPS_TESTRUN, - "rsa_encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", - "rsa_signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", } virtualenv_binary = 
get_virtualenv_binary_path() diff --git a/tests/pytests/functional/transport/server/conftest.py b/tests/pytests/functional/transport/server/conftest.py index 1f0303c9fe4..cc012275d99 100644 --- a/tests/pytests/functional/transport/server/conftest.py +++ b/tests/pytests/functional/transport/server/conftest.py @@ -1,6 +1,8 @@ import pytest from saltfactories.utils import random_string +from tests.conftest import FIPS_TESTRUN + def transport_ids(value): return f"Transport({value})" @@ -34,6 +36,9 @@ def salt_minion(salt_master, transport): "auth_timeout": 5, "auth_tries": 1, "master_uri": "tcp://127.0.0.1:{}".format(salt_master.config["ret_port"]), + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", } factory = salt_master.salt_minion_daemon( random_string(f"server-{transport}-minion-"), diff --git a/tests/pytests/functional/transport/server/test_req_channel.py b/tests/pytests/functional/transport/server/test_req_channel.py index 2eac76d352c..2fb3b569d54 100644 --- a/tests/pytests/functional/transport/server/test_req_channel.py +++ b/tests/pytests/functional/transport/server/test_req_channel.py @@ -31,6 +31,7 @@ pytestmark = [ class ReqServerChannelProcess(salt.utils.process.SignalHandlingProcess): + def __init__(self, config, req_channel_crypt): super().__init__() self._closing = False @@ -104,6 +105,7 @@ class ReqServerChannelProcess(salt.utils.process.SignalHandlingProcess): @pytest.fixture def req_server_channel(salt_master, req_channel_crypt): + print(f"master pub {salt_master.config['publish_signing_algorithm']}") req_server_channel_process = ReqServerChannelProcess( salt_master.config.copy(), req_channel_crypt ) @@ -127,6 +129,7 @@ def req_channel_crypt(request): @pytest.fixture def push_channel(req_server_channel, salt_minion, req_channel_crypt): + print(f"minion encryption {salt_minion.config['encryption_algorithm']}") with 
salt.channel.client.ReqChannel.factory( salt_minion.config, crypt=req_channel_crypt ) as _req_channel: diff --git a/tests/pytests/unit/cloud/clouds/test_ec2.py b/tests/pytests/unit/cloud/clouds/test_ec2.py index d538bff06b9..694171cd4d2 100644 --- a/tests/pytests/unit/cloud/clouds/test_ec2.py +++ b/tests/pytests/unit/cloud/clouds/test_ec2.py @@ -189,9 +189,7 @@ def test__validate_key_path_and_mode(): ec2._validate_key_path_and_mode("key_file") -@pytest.mark.skipif( - not salt.crypt.HAS_M2 and not salt.crypt.HAS_CRYPTO, reason="Needs crypto library" -) +@pytest.mark.skipif(not salt.crypt.HAS_CRYPTOGRAPHY, reason="Needs crypto library") def test_get_password_data(tmp_path): key_file = str(tmp_path / "keyfile.pem") diff --git a/tests/pytests/unit/conftest.py b/tests/pytests/unit/conftest.py index 444c2a147d4..965af00e89d 100644 --- a/tests/pytests/unit/conftest.py +++ b/tests/pytests/unit/conftest.py @@ -11,7 +11,6 @@ def minion_opts(tmp_path): """ Default minion configuration with relative temporary paths to not require root permissions. """ - print(f"WTF {FIPS_TESTRUN}") root_dir = tmp_path / "minion" opts = salt.config.DEFAULT_MINION_OPTS.copy() opts["__role"] = "minion" From 7d35efe5b96e67dbbbebd4b5e7f1a0e8da0d6a9b Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Sat, 25 May 2024 15:51:16 -0700 Subject: [PATCH 079/160] validate minion crypto config --- salt/config/__init__.py | 12 ++++++++++++ salt/crypt.py | 8 ++++++++ 2 files changed, 20 insertions(+) diff --git a/salt/config/__init__.py b/salt/config/__init__.py index b80820f9745..993c7fdfcca 100644 --- a/salt/config/__init__.py +++ b/salt/config/__init__.py @@ -13,6 +13,7 @@ import types import urllib.parse from copy import deepcopy +import salt.crypt import salt.defaults.exitcodes import salt.exceptions import salt.features @@ -3855,6 +3856,17 @@ def apply_minion_config( _update_ssl_config(opts) _update_discovery_config(opts) + if opts["encryption_algorithm"] not in salt.crypt.VALID_ENCRYPTION_ALGORITHMS: + raise salt.exceptions.SaltConfigurationError( + f"The encryption algorithm '{opts['encryption_algorithm']}' is not valid. " + f"Please specify one of {','.join(salt.crypt.VALID_ENCRYPTION_ALGORITHMS)}." + ) + if opts["signing_algorithm"] not in salt.crypt.VALID_SIGNING_ALGORITHMS: + raise salt.exceptions.SaltConfigurationError( + f"The signging algorithm '{opts['signing_algorithm']}' is not valid. " + f"Please specify one of {','.join(salt.crypt.VALID_SIGNING_ALGORITHMS)}." + ) + return opts diff --git a/salt/crypt.py b/salt/crypt.py index 2ba06360dac..6f44da60198 100644 --- a/salt/crypt.py +++ b/salt/crypt.py @@ -75,6 +75,14 @@ VALID_HASHES = ( VALID_PADDING_FOR_SIGNING = (PKCS1v15,) VALID_PADDING_FOR_ENCRYPTION = (OAEP,) +VALID_ENCRYPTION_ALGORITHMS = ( + OAEP_SHA1, + OAEP_SHA224, +) +VALID_SIGNING_ALGORITHMS = ( + PKCS1v15_SHA1, + PKCS1v15_SHA224, +) def fips_enabled(): From 7eb4fb6cd3fad5dee87b9d37f20741c89e616a7f Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Sat, 25 May 2024 15:51:32 -0700 Subject: [PATCH 080/160] Remove reinit_crypto calls --- salt/utils/process.py | 4 ---- salt/utils/vt.py | 2 -- 2 files changed, 6 deletions(-) diff --git a/salt/utils/process.py b/salt/utils/process.py index 3578005266e..ce3a8be7c87 100644 --- a/salt/utils/process.py +++ b/salt/utils/process.py @@ -73,7 +73,6 @@ def daemonize(redirect_out=True): pid = os.fork() if pid > 0: # exit first parent - salt.utils.crypt.reinit_crypto() os._exit(salt.defaults.exitcodes.EX_OK) except OSError as exc: log.error("fork #1 failed: %s (%s)", exc.errno, exc) @@ -89,14 +88,11 @@ def daemonize(redirect_out=True): try: pid = os.fork() if pid > 0: - salt.utils.crypt.reinit_crypto() sys.exit(salt.defaults.exitcodes.EX_OK) except OSError as exc: log.error("fork #2 failed: %s (%s)", exc.errno, exc) sys.exit(salt.defaults.exitcodes.EX_GENERIC) - salt.utils.crypt.reinit_crypto() - # A normal daemonization redirects the process output to /dev/null. # Unfortunately when a python multiprocess is called the output is # not cleanly redirected and the parent process dies when the diff --git a/salt/utils/vt.py b/salt/utils/vt.py index 3ffe45af2bf..06867861201 100644 --- a/salt/utils/vt.py +++ b/salt/utils/vt.py @@ -461,8 +461,6 @@ class Terminal: else: os.close(tty_fd) - salt.utils.crypt.reinit_crypto() - if preexec_fn is not None: preexec_fn() From f6a880a2f11a617c7239d8accd5f15d7799520e1 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Sat, 25 May 2024 15:55:23 -0700 Subject: [PATCH 081/160] Fix master default publish_signing_algorithm --- salt/config/__init__.py | 2 +- tests/pytests/functional/transport/server/conftest.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/salt/config/__init__.py b/salt/config/__init__.py index 993c7fdfcca..823e6ebaa7a 100644 --- a/salt/config/__init__.py +++ b/salt/config/__init__.py @@ -1658,7 +1658,7 @@ DEFAULT_MASTER_OPTS = immutabletypes.freeze( "maintenance_interval": 3600, "fileserver_interval": 3600, "features": {}, - "publish_signing_algorithm": "PKCS1v15-SHA224", + "publish_signing_algorithm": "PKCS1v15-SHA1", } ) diff --git a/tests/pytests/functional/transport/server/conftest.py b/tests/pytests/functional/transport/server/conftest.py index cc012275d99..43c934a1077 100644 --- a/tests/pytests/functional/transport/server/conftest.py +++ b/tests/pytests/functional/transport/server/conftest.py @@ -19,6 +19,7 @@ def salt_master(salt_factories, transport): "transport": transport, "auto_accept": True, "sign_pub_messages": False, + "publish_signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", } factory = salt_factories.salt_master_daemon( random_string(f"server-{transport}-master-"), From a40196be9e0c283e60e6d0592e84b56fb85dfb74 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Sat, 25 May 2024 15:56:16 -0700 Subject: [PATCH 082/160] Fix typo --- doc/ref/configuration/master.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/ref/configuration/master.rst b/doc/ref/configuration/master.rst index 1eb1f2b08b6..f056680cac5 100644 --- a/doc/ref/configuration/master.rst +++ b/doc/ref/configuration/master.rst @@ -2033,7 +2033,7 @@ The number of seconds between AES key rotations on the master. Default: PKCS1v15-SHA1 -Then RSA signing algorithm used by this minion when connecting to the +The RSA signing algorithm used by this minion when connecting to the master's request channel. 
Valid values are ``PKCS1v15-SHA1`` and ``PKCS1v15-SHA224``. Minions must be at version ``3006.9`` or greater if this is changed from the default setting. From 6d1cb70a37ead46458f22e86b4eb486b5eab0a22 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Sat, 25 May 2024 16:00:18 -0700 Subject: [PATCH 083/160] fix black --- tests/pytests/functional/transport/server/conftest.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/pytests/functional/transport/server/conftest.py b/tests/pytests/functional/transport/server/conftest.py index 43c934a1077..c0664643386 100644 --- a/tests/pytests/functional/transport/server/conftest.py +++ b/tests/pytests/functional/transport/server/conftest.py @@ -19,7 +19,9 @@ def salt_master(salt_factories, transport): "transport": transport, "auto_accept": True, "sign_pub_messages": False, - "publish_signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), } factory = salt_factories.salt_master_daemon( random_string(f"server-{transport}-master-"), From 73d35c9d548808e047eb55cff581e4b015a714e3 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Sat, 25 May 2024 21:29:40 -0700 Subject: [PATCH 084/160] More test update to support fips --- salt/cloud/clouds/ec2.py | 4 +- .../pytests/scenarios/multimaster/conftest.py | 15 ++++++ tests/pytests/unit/crypt/__init__.py | 2 +- .../unit/crypt/test_crypt_cryptodome.py | 46 ------------------- tests/pytests/unit/test_minion.py | 33 ------------- tests/pytests/unit/transport/test_tcp.py | 9 +++- 6 files changed, 25 insertions(+), 84 deletions(-) diff --git a/salt/cloud/clouds/ec2.py b/salt/cloud/clouds/ec2.py index 91e9eddd6b2..47eb71af300 100644 --- a/salt/cloud/clouds/ec2.py +++ b/salt/cloud/clouds/ec2.py @@ -192,7 +192,7 @@ def get_dependencies(): """ deps = { "requests": HAS_REQUESTS, - "pycrypto or m2crypto": salt.crypt.HAS_M2 or salt.crypt.HAS_CRYPTO, + "cryptography": salt.crypt.HAS_CRYPTOGRAPHY, } return config.check_driver_dependencies(__virtualname__, deps) @@ -4929,7 +4929,7 @@ def get_password_data( for item in data: ret[next(iter(item.keys()))] = next(iter(item.values())) - if not salt.crypt.HAS_M2 and not salt.crypt.HAS_CRYPTO: + if not salt.crypt.HAS_CRYPTOGRAPHY: if "key" in kwargs or "key_file" in kwargs: log.warning("No crypto library is installed, can not decrypt password") return ret diff --git a/tests/pytests/scenarios/multimaster/conftest.py b/tests/pytests/scenarios/multimaster/conftest.py index 1bfc830cb00..5a09a5ecd7b 100644 --- a/tests/pytests/scenarios/multimaster/conftest.py +++ b/tests/pytests/scenarios/multimaster/conftest.py @@ -8,6 +8,7 @@ import pytest from pytestshellutils.exceptions import FactoryTimeout import salt.utils.platform +from tests.conftest import FIPS_TESTRUN log = logging.getLogger(__name__) @@ -20,6 +21,10 @@ def salt_mm_master_1(request, salt_factories): } config_overrides = { "interface": "127.0.0.1", + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + ), } factory = salt_factories.salt_master_daemon( @@ -48,6 +53,10 @@ def 
salt_mm_master_2(salt_factories, salt_mm_master_1): } config_overrides = { "interface": "127.0.0.2", + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + ), } # Use the same ports for both masters, they are binding to different interfaces @@ -95,6 +104,9 @@ def salt_mm_minion_1(salt_mm_master_1, salt_mm_master_2): f"{mm_master_2_addr}:{mm_master_2_port}", ], "test.foo": "baz", + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", } factory = salt_mm_master_1.salt_minion_daemon( "mm-minion-1", @@ -122,6 +134,9 @@ def salt_mm_minion_2(salt_mm_master_1, salt_mm_master_2): f"{mm_master_2_addr}:{mm_master_2_port}", ], "test.foo": "baz", + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", } factory = salt_mm_master_2.salt_minion_daemon( "mm-minion-2", diff --git a/tests/pytests/unit/crypt/__init__.py b/tests/pytests/unit/crypt/__init__.py index e60c70e2509..0f08bb7c6c1 100644 --- a/tests/pytests/unit/crypt/__init__.py +++ b/tests/pytests/unit/crypt/__init__.py @@ -64,7 +64,6 @@ def legacy_gen_keys(keydir, keyname, keysize, user=None, passphrase=None): if HAS_M2: gen = RSA.gen_key(keysize, 65537, lambda: None) else: - salt.utils.crypt.reinit_crypto() gen = RSA.generate(bits=keysize, e=65537) if os.path.isfile(priv): @@ -115,6 +114,7 @@ def legacy_gen_keys(keydir, keyname, keysize, user=None, passphrase=None): class LegacyPrivateKey: + def __init__(self, path, passphrase=None): if HAS_M2: self.key = RSA.load_key(path, lambda x: bytes(passphrase)) diff --git a/tests/pytests/unit/crypt/test_crypt_cryptodome.py b/tests/pytests/unit/crypt/test_crypt_cryptodome.py index dd9f891aa49..281fdf2b991 100644 --- a/tests/pytests/unit/crypt/test_crypt_cryptodome.py 
+++ b/tests/pytests/unit/crypt/test_crypt_cryptodome.py @@ -5,33 +5,6 @@ import pytest import salt.crypt from tests.support.mock import MagicMock, MockCall, mock_open, patch -from . import MSG, PRIVKEY_DATA, PUBKEY_DATA, SIG - -try: - import M2Crypto # pylint: disable=unused-import - - HAS_M2 = True -except ImportError: - HAS_M2 = False -try: - from Cryptodome.PublicKey import RSA - - HAS_PYCRYPTO_RSA = True -except ImportError: - HAS_PYCRYPTO_RSA = False -if not HAS_PYCRYPTO_RSA: - try: - from Crypto.PublicKey import RSA # nosec - - HAS_PYCRYPTO_RSA = True - except ImportError: - HAS_PYCRYPTO_RSA = False - -pytestmark = [ - pytest.mark.skipif(not HAS_PYCRYPTO_RSA, reason="pycrypto >= 2.6 is not available"), - pytest.mark.skipif(HAS_M2, reason="m2crypto is used by salt.crypt if installed"), -] - @pytest.mark.slow_test def test_gen_keys(): @@ -87,22 +60,3 @@ def test_gen_keys_with_passphrase(): salt.crypt.gen_keys(key_path, "keyname", 2048) assert open_priv_wb in m_open.calls assert open_pub_wb in m_open.calls - - -def test_sign_message(): - key = RSA.importKey(PRIVKEY_DATA) - with patch("salt.crypt.get_rsa_key", return_value=key): - assert SIG == salt.crypt.sign_message("/keydir/keyname.pem", MSG) - - -def test_sign_message_with_passphrase(): - key = RSA.importKey(PRIVKEY_DATA) - with patch("salt.crypt.get_rsa_key", return_value=key): - assert SIG == salt.crypt.sign_message( - "/keydir/keyname.pem", MSG, passphrase="password" - ) - - -def test_verify_signature(): - with patch("salt.utils.files.fopen", mock_open(read_data=PUBKEY_DATA)): - assert salt.crypt.verify_signature("/keydir/keyname.pub", MSG, SIG) diff --git a/tests/pytests/unit/test_minion.py b/tests/pytests/unit/test_minion.py index e5ca73a3f9a..3d43ed9d808 100644 --- a/tests/pytests/unit/test_minion.py +++ b/tests/pytests/unit/test_minion.py @@ -738,39 +738,6 @@ def test_gen_modules_executors(minion_opts): minion.destroy() -def test_reinit_crypto_on_fork(minion_opts): - """ - Ensure 
salt.utils.crypt.reinit_crypto() is executed when forking for new job - """ - minion_opts["multiprocessing"] = True - with patch("salt.utils.process.default_signals"): - - io_loop = salt.ext.tornado.ioloop.IOLoop() - io_loop.make_current() - minion = salt.minion.Minion(minion_opts, io_loop=io_loop) - - job_data = {"jid": "test-jid", "fun": "test.ping"} - - def mock_start(self): - # pylint: disable=comparison-with-callable - assert ( - len( - [ - x - for x in self._after_fork_methods - if x[0] == salt.utils.crypt.reinit_crypto - ] - ) - == 1 - ) - # pylint: enable=comparison-with-callable - - with patch.object( - salt.utils.process.SignalHandlingProcess, "start", mock_start - ): - io_loop.run_sync(lambda: minion._handle_decoded_payload(job_data)) - - def test_minion_manage_schedule(minion_opts): """ Tests that the manage_schedule will call the add function, adding diff --git a/tests/pytests/unit/transport/test_tcp.py b/tests/pytests/unit/transport/test_tcp.py index 17c7e072749..37e14678732 100644 --- a/tests/pytests/unit/transport/test_tcp.py +++ b/tests/pytests/unit/transport/test_tcp.py @@ -404,10 +404,11 @@ async def test_when_async_req_channel_with_syndic_role_should_use_syndic_master_ "transport": "tcp", "acceptance_wait_time": 30, "acceptance_wait_time_max": 30, + "signing_algorithm": "MOCK", } client = salt.channel.client.ReqChannel.factory(opts, io_loop=mockloop) assert client.master_pubkey_path == expected_pubkey_path - with patch("salt.crypt.verify_signature") as mock: + with patch("salt.crypt.PublicKey", return_value=MagicMock()) as mock: client.verify_signature("mockdata", "mocksig") assert mock.call_args_list[0][0][0] == expected_pubkey_path @@ -432,7 +433,11 @@ async def test_mixin_should_use_correct_path_when_syndic( } client = salt.channel.client.AsyncPubChannel.factory(opts, io_loop=mockloop) client.master_pubkey_path = expected_pubkey_path - payload = {"sig": "abc", "load": {"foo": "bar"}} + payload = { + "sig": "abc", + "load": {"foo": "bar"}, + 
"sig_algo": salt.crypt.PKCS1v15_SHA224, + } with patch("salt.crypt.verify_signature") as mock: client._verify_master_signature(payload) assert mock.call_args_list[0][0][0] == expected_pubkey_path From 7322f3796bf678e58198e6a520bf6684dfe71aaa Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Sat, 25 May 2024 21:32:45 -0700 Subject: [PATCH 085/160] Enforce valid publish_signing_algorithm config --- salt/config/__init__.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/salt/config/__init__.py b/salt/config/__init__.py index 823e6ebaa7a..0af8c0c1f46 100644 --- a/salt/config/__init__.py +++ b/salt/config/__init__.py @@ -4129,6 +4129,12 @@ def apply_master_config(overrides=None, defaults=None): _update_ssl_config(opts) _update_discovery_config(opts) + if opts["publish_signing_algorithm"] not in salt.crypt.VALID_SIGNING_ALGORITHMS: + raise salt.exceptions.SaltConfigurationError( + f"The publish signging algorithm '{opts['publish_signing_algorithm']}' is not valid. " + f"Please specify one of {','.join(salt.crypt.VALID_SIGNING_ALGORITHMS)}." + ) + return opts From 7b3a89c82129f41ae724c24d331d9dd8651ba114 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Sun, 26 May 2024 21:28:55 -0700 Subject: [PATCH 086/160] Fix tests undef fips --- .../pytests/functional/channel/test_server.py | 60 +++++++++++++------ .../pytests/integration/runners/test_vault.py | 32 +++++++++- tests/pytests/unit/crypt/test_crypt.py | 3 + tests/unit/test_config.py | 6 ++ tests/unit/test_crypt.py | 16 ----- 5 files changed, 81 insertions(+), 36 deletions(-) diff --git a/tests/pytests/functional/channel/test_server.py b/tests/pytests/functional/channel/test_server.py index 01cf43ac50c..277a5a58bdc 100644 --- a/tests/pytests/functional/channel/test_server.py +++ b/tests/pytests/functional/channel/test_server.py @@ -21,6 +21,7 @@ import salt.master import salt.utils.platform import salt.utils.process import salt.utils.stringutils +from tests.conftest import FIPS_TESTRUN log = logging.getLogger(__name__) @@ -63,14 +64,32 @@ def transport(request): @pytest.fixture def master_config(root_dir, transport): master_conf = salt.config.master_config("") - master_conf["transport"] = transport - master_conf["id"] = "master" - master_conf["root_dir"] = str(root_dir) - master_conf["sock_dir"] = str(root_dir) - master_conf["interface"] = "127.0.0.1" - master_conf["publish_port"] = ports.get_unused_localhost_port() - master_conf["ret_port"] = ports.get_unused_localhost_port() - master_conf["pki_dir"] = str(root_dir / "pki") + master_conf.update( + transport=transport, + id="master", + root_dir=str(root_dir), + sock_dir=str(root_dir), + interface="127.0.0.1", + publish_port=ports.get_unused_localhost_port(), + ret_port=ports.get_unused_localhost_port(), + pki_dir=str(root_dir / "pki"), + fips_mode=FIPS_TESTRUN, + publish_signing_algorithm=( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + ), + ) + # master_conf["transport"] = transport + # master_conf["id"] = "master" + # master_conf["root_dir"] = str(root_dir) + # master_conf["sock_dir"] = str(root_dir) + # master_conf["interface"] = "127.0.0.1" + # master_conf["publish_port"] = 
ports.get_unused_localhost_port() + # master_conf["ret_port"] = ports.get_unused_localhost_port() + # master_conf["pki_dir"] = str(root_dir / "pki") + # "fips_mode": FIPS_TESTRUN, + # "publish_signing_algorithm": ( + # "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + # ), os.makedirs(master_conf["pki_dir"]) salt.crypt.gen_keys(master_conf["pki_dir"], "master", 4096) minions_keys = os.path.join(master_conf["pki_dir"], "minions") @@ -83,17 +102,22 @@ def minion_config(master_config, channel_minion_id): minion_conf = salt.config.minion_config( "", minion_id=channel_minion_id, cache_minion_id=False ) - minion_conf["transport"] = master_config["transport"] - minion_conf["root_dir"] = master_config["root_dir"] - minion_conf["id"] = channel_minion_id - minion_conf["sock_dir"] = master_config["sock_dir"] - minion_conf["ret_port"] = master_config["ret_port"] - minion_conf["interface"] = "127.0.0.1" - minion_conf["pki_dir"] = os.path.join(master_config["root_dir"], "pki_minion") + minion_conf.update( + transport=master_config["transport"], + root_dir=master_config["root_dir"], + id=channel_minion_id, + sock_dir=master_config["sock_dir"], + ret_port=master_config["ret_port"], + interface="127.0.0.1", + pki_dir=os.path.join(master_config["root_dir"], "pki_minion"), + master_port=master_config["ret_port"], + master_ip="127.0.0.1", + master_uri="tcp://127.0.0.1:{}".format(master_config["ret_port"]), + fips_mode=FIPS_TESTRUN, + encryption_algorithm="OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + signing_algorithm="PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + ) os.makedirs(minion_conf["pki_dir"]) - minion_conf["master_port"] = master_config["ret_port"] - minion_conf["master_ip"] = "127.0.0.1" - minion_conf["master_uri"] = "tcp://127.0.0.1:{}".format(master_config["ret_port"]) salt.crypt.gen_keys(minion_conf["pki_dir"], "minion", 4096) minion_pub = os.path.join(minion_conf["pki_dir"], "minion.pub") pub_on_master = os.path.join(master_config["pki_dir"], 
"minions", channel_minion_id) diff --git a/tests/pytests/integration/runners/test_vault.py b/tests/pytests/integration/runners/test_vault.py index e1d932abeb0..f14df4f70b3 100644 --- a/tests/pytests/integration/runners/test_vault.py +++ b/tests/pytests/integration/runners/test_vault.py @@ -7,6 +7,8 @@ import shutil import pytest +from tests.conftest import FIPS_TESTRUN + log = logging.getLogger(__name__) @@ -47,8 +49,16 @@ def pillar_salt_master(salt_factories, pillar_state_tree): }, "minion_data_cache": False, } + config_overrides = { + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + ), + } factory = salt_factories.salt_master_daemon( - "vault-policy-int-master-uncached", defaults=config_defaults + "vault-policy-int-master-uncached", + defaults=config_defaults, + overrides=config_overrides, ) with factory.started(): yield factory @@ -72,8 +82,16 @@ def pillar_caching_salt_master(salt_factories, pillar_state_tree): }, "minion_data_cache": True, } + config_overrides = { + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + ), + } factory = salt_factories.salt_master_daemon( - "vault-policy-int-master-cached", defaults=config_defaults + "vault-policy-int-master-cached", + defaults=config_defaults, + overrides=config_overrides, ) with factory.started(): yield factory @@ -85,6 +103,11 @@ def pillar_salt_minion(pillar_salt_master): factory = pillar_salt_master.salt_minion_daemon( "vault-policy-int-minion-uncached-1", defaults={"open_mode": True, "grains": {"foo": "bar"}}, + overrides={ + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + }, ) with factory.started(): # Sync All @@ -100,6 +123,11 @@ def pillar_caching_salt_minion(pillar_caching_salt_master): factory = 
pillar_caching_salt_master.salt_minion_daemon( "vault-policy-int-minion-cached-1", defaults={"open_mode": True, "grains": {"foo": "bar"}}, + overrides={ + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + }, ) with factory.started(): # Sync All diff --git a/tests/pytests/unit/crypt/test_crypt.py b/tests/pytests/unit/crypt/test_crypt.py index e3c98ab6366..8ff8fcb8d14 100644 --- a/tests/pytests/unit/crypt/test_crypt.py +++ b/tests/pytests/unit/crypt/test_crypt.py @@ -12,6 +12,7 @@ import pytest import salt.crypt import salt.master import salt.utils.files +from tests.conftest import FIPS_TESTRUN PRIV_KEY = """ -----BEGIN RSA PRIVATE KEY----- @@ -152,6 +153,7 @@ def test_cryptical_dumps_invalid_nonce(): assert master_crypt.loads(ret, nonce="abcde") +@pytest.mark.skipif(FIPS_TESTRUN, reason="Legacy key can not be loaded in FIPS mode") def test_verify_signature(tmp_path): tmp_path.joinpath("foo.pem").write_text(PRIV_KEY.strip()) tmp_path.joinpath("foo.pub").write_text(PUB_KEY.strip()) @@ -162,6 +164,7 @@ def test_verify_signature(tmp_path): assert salt.crypt.verify_signature(str(tmp_path.joinpath("foo.pub")), msg, sig) +@pytest.mark.skipif(FIPS_TESTRUN, reason="Legacy key can not be loaded in FIPS mode") def test_verify_signature_bad_sig(tmp_path): tmp_path.joinpath("foo.pem").write_text(PRIV_KEY.strip()) tmp_path.joinpath("foo.pub").write_text(PUB_KEY.strip()) diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py index c5196372b32..a67ae52bbb9 100644 --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -9,6 +9,7 @@ import textwrap import pytest import salt.config +import salt.crypt import salt.minion import salt.syspaths import salt.utils.files @@ -1797,6 +1798,11 @@ class ConfigTestCase(TestCase, AdaptedConfigurationTestCaseMixin): "worker_threads": 5, "hash_type": "sha256", "log_file": "foo.log", + # Crypto 
config for minion + "encryption_algorithm": salt.crypt.OAEP_SHA1, + "signing_algorithm": salt.crypt.PKCS1v15_SHA1, + # Crypto config for master + "publish_signing_algorithm": salt.crypt.PKCS1v15_SHA1, } ret.update(kwargs) return ret diff --git a/tests/unit/test_crypt.py b/tests/unit/test_crypt.py index 89ec83294b9..cd41442ae1f 100644 --- a/tests/unit/test_crypt.py +++ b/tests/unit/test_crypt.py @@ -116,22 +116,6 @@ class TestBadCryptodomePubKey(TestCase): def tearDown(self): shutil.rmtree(self.test_dir) - @pytest.mark.skipif(not HAS_M2, reason="Skip when m2crypto is not installed") - def test_m2_bad_key(self): - """ - Load public key with an invalid header using m2crypto and validate it - """ - key = salt.crypt.get_rsa_pub_key(self.key_path) - assert key.check_key() == 1 - - @pytest.mark.skipif(HAS_M2, reason="Skip when m2crypto is installed") - def test_crypto_bad_key(self): - """ - Load public key with an invalid header and validate it without m2crypto - """ - key = salt.crypt.get_rsa_pub_key(self.key_path) - assert key.can_encrypt() - class TestM2CryptoRegression47124(TestCase): From 20312c732d4e3ae91c3e452d8ad5dd1f4fe50f2b Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Sun, 26 May 2024 23:46:31 -0700 Subject: [PATCH 087/160] Add cryptography to Salt versions report --- salt/version.py | 1 + 1 file changed, 1 insertion(+) diff --git a/salt/version.py b/salt/version.py index 53522e3f9a2..c7d33f29e9f 100644 --- a/salt/version.py +++ b/salt/version.py @@ -697,6 +697,7 @@ def dependency_information(include_salt_cloud=False): ("msgpack-pure", "msgpack_pure", "version"), ("pycrypto", "Crypto", "__version__"), ("pycryptodome", "Cryptodome", "version_info"), + ("cryptography", "cryptography", "__version__"), ("PyYAML", "yaml", "__version__"), ("PyZMQ", "zmq", "__version__"), ("ZMQ", "zmq", "zmq_version"), From 09e09741d420d58accf2ccd1355085d73707a717 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Sun, 26 May 2024 23:50:30 -0700 Subject: [PATCH 088/160] Skip tests that will not succeed when FIPS provider is used --- tests/pytests/integration/runners/test_vault.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/tests/pytests/integration/runners/test_vault.py b/tests/pytests/integration/runners/test_vault.py index f14df4f70b3..f7253011397 100644 --- a/tests/pytests/integration/runners/test_vault.py +++ b/tests/pytests/integration/runners/test_vault.py @@ -269,6 +269,9 @@ class TestVaultPillarPolicyTemplatesWithoutCache: ) assert ret.data == ["salt_minion", f"salt_minion_{pillar_salt_minion.id}"] + @pytest.mark.skipif( + FIPS_TESTRUN, reason="Signing with SHA1 not supported in FIPS mode." + ) def test_policy_compilation_prevents_loop_for_execution_module( self, pillar_salt_run_cli, @@ -290,6 +293,9 @@ class TestVaultPillarPolicyTemplatesWithoutCache: assert "Pillar render error: Rendering SLS 'exe_loop' failed" in ret.stderr assert "Cyclic dependency detected while refreshing pillar" in ret.stderr + @pytest.mark.skipif( + FIPS_TESTRUN, reason="Signing with SHA1 not supported in FIPS mode." + ) def test_policy_compilation_prevents_loop_for_sdb_module( self, pillar_salt_run_cli, From 58fe224eeb92b5fdf1ef13c2663b52e182cddc4d Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Mon, 27 May 2024 00:26:02 -0700 Subject: [PATCH 089/160] More fips test fixes --- tests/pytests/conftest.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/tests/pytests/conftest.py b/tests/pytests/conftest.py index 985e34e0816..ad7c6a9f035 100644 --- a/tests/pytests/conftest.py +++ b/tests/pytests/conftest.py @@ -383,6 +383,9 @@ def salt_proxy_factory(salt_master_factory): config_overrides = { "file_roots": salt_master_factory.config["file_roots"].copy(), "pillar_roots": salt_master_factory.config["pillar_roots"].copy(), + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", } factory = salt_master_factory.salt_proxy_minion_daemon( @@ -421,9 +424,15 @@ def salt_delta_proxy_factory(salt_factories, salt_master_factory): "metaproxy": "deltaproxy", "master": "127.0.0.1", } + config_overrides = { + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + } factory = salt_master_factory.salt_proxy_minion_daemon( proxy_minion_id, defaults=config_defaults, + overrides=config_overrides, extra_cli_arguments_after_first_start_failure=["--log-level=info"], start_timeout=240, ) @@ -450,9 +459,16 @@ def temp_salt_master( "open_mode": True, "transport": request.config.getoption("--transport"), } + config_overrides = { + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + ), + } factory = salt_factories.salt_master_daemon( random_string("temp-master-"), defaults=config_defaults, + overrides=config_overrides, extra_cli_arguments_after_first_start_failure=["--log-level=info"], ) return factory @@ -464,9 +480,15 @@ def temp_salt_minion(temp_salt_master): "open_mode": True, "transport": temp_salt_master.config["transport"], } + 
config_overrides = { + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + } factory = temp_salt_master.salt_minion_daemon( random_string("temp-minion-"), defaults=config_defaults, + overrides=config_overrides, extra_cli_arguments_after_first_start_failure=["--log-level=info"], ) factory.after_terminate( From db823848f40ff374cdd427e4dd5a35107f71f923 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Tue, 28 May 2024 00:33:59 -0700 Subject: [PATCH 090/160] Attempt windows fix --- salt/channel/server.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/salt/channel/server.py b/salt/channel/server.py index ca5de7bf2d5..de190987a5b 100644 --- a/salt/channel/server.py +++ b/salt/channel/server.py @@ -53,10 +53,10 @@ class ReqServerChannel: def __init__(self, opts, transport): self.opts = opts self.transport = transport - self.event = salt.utils.event.get_master_event( - self.opts, self.opts["sock_dir"], listen=False - ) - self.master_key = salt.crypt.MasterKeys(self.opts) + # self.event = salt.utils.event.get_master_event( + # self.opts, self.opts["sock_dir"], listen=False + # ) + # self.master_key = salt.crypt.MasterKeys(self.opts) def pre_fork(self, process_manager): """ From 1d0fcee9c152af429cbcba035ada0c6f10663199 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Tue, 28 May 2024 14:49:06 -0700 Subject: [PATCH 091/160] Fix tests for fips runs --- salt/channel/server.py | 1 + .../integration/pillar/cache/conftest.py | 18 ++++++++++++- .../integration/states/test_x509_v2.py | 21 ++++++++++++++- tests/pytests/scenarios/swarm/conftest.py | 9 +++++++ tests/pytests/unit/transport/test_zeromq.py | 26 ++++++++++++++++++- 5 files changed, 72 insertions(+), 3 deletions(-) diff --git a/salt/channel/server.py b/salt/channel/server.py index de190987a5b..a43b3addd71 100644 --- a/salt/channel/server.py +++ b/salt/channel/server.py @@ -53,6 +53,7 @@ class ReqServerChannel: def __init__(self, opts, transport): self.opts = opts self.transport = transport + self.event = None # self.event = salt.utils.event.get_master_event( # self.opts, self.opts["sock_dir"], listen=False # ) diff --git a/tests/pytests/integration/pillar/cache/conftest.py b/tests/pytests/integration/pillar/cache/conftest.py index 54850985575..0998ed4801d 100644 --- a/tests/pytests/integration/pillar/cache/conftest.py +++ b/tests/pytests/integration/pillar/cache/conftest.py @@ -2,6 +2,8 @@ import shutil import pytest +from tests.conftest import FIPS_TESTRUN + @pytest.fixture(scope="package") def pillar_state_tree(tmp_path_factory): @@ -22,8 +24,16 @@ def pillar_salt_master(salt_factories, pillar_state_tree): {"extra_minion_data_in_pillar": "*"}, ], } + config_overrides = { + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + ), + } factory = salt_factories.salt_master_daemon( - "pillar-cache-functional-master", defaults=config_defaults + "pillar-cache-functional-master", + defaults=config_defaults, + overrides=config_overrides, ) with factory.started(): yield factory @@ -32,9 +42,15 @@ def pillar_salt_master(salt_factories, pillar_state_tree): @pytest.fixture(scope="package") def pillar_salt_minion(pillar_salt_master): assert pillar_salt_master.is_running() + config_overrides = { + 
"fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + } factory = pillar_salt_master.salt_minion_daemon( "pillar-cache-functional-minion-1", defaults={"open_mode": True, "hi": "there", "pass_to_ext_pillars": ["hi"]}, + overrides=config_overrides, ) with factory.started(): # Sync All diff --git a/tests/pytests/integration/states/test_x509_v2.py b/tests/pytests/integration/states/test_x509_v2.py index 51418fb230e..bbf27302a26 100644 --- a/tests/pytests/integration/states/test_x509_v2.py +++ b/tests/pytests/integration/states/test_x509_v2.py @@ -11,6 +11,7 @@ import pytest from saltfactories.utils import random_string import salt.utils.x509 as x509util +from tests.conftest import FIPS_TESTRUN try: import cryptography @@ -123,8 +124,14 @@ def x509_data( @pytest.fixture(scope="module") def x509_salt_master(salt_factories, ca_minion_id, x509_master_config): + config_overrides = { + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + ), + } factory = salt_factories.salt_master_daemon( - "x509-master", defaults=x509_master_config + "x509-master", defaults=x509_master_config, overrides=config_overrides ) with factory.started(): yield factory @@ -184,9 +191,15 @@ def ca_minion_config(x509_minion_id, ca_cert, ca_key_enc, rsa_privkey, ca_new_ce @pytest.fixture(scope="module", autouse=True) def x509ca_salt_minion(x509_salt_master, ca_minion_id, ca_minion_config): assert x509_salt_master.is_running() + config_overrides = { + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + } factory = x509_salt_master.salt_minion_daemon( ca_minion_id, defaults=ca_minion_config, + overrides=config_overrides, ) with factory.started(): # Sync All @@ -199,6 +212,11 @@ def 
x509ca_salt_minion(x509_salt_master, ca_minion_id, ca_minion_config): @pytest.fixture(scope="module") def x509_salt_minion(x509_salt_master, x509_minion_id): assert x509_salt_master.is_running() + config_overrides = { + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + } factory = x509_salt_master.salt_minion_daemon( x509_minion_id, defaults={ @@ -206,6 +224,7 @@ def x509_salt_minion(x509_salt_master, x509_minion_id): "features": {"x509_v2": True}, "grains": {"testgrain": "foo"}, }, + overrides=config_overrides, ) with factory.started(): # Sync All diff --git a/tests/pytests/scenarios/swarm/conftest.py b/tests/pytests/scenarios/swarm/conftest.py index 8147eb8049d..136bb6735c6 100644 --- a/tests/pytests/scenarios/swarm/conftest.py +++ b/tests/pytests/scenarios/swarm/conftest.py @@ -4,12 +4,21 @@ from contextlib import ExitStack import pytest from saltfactories.utils import random_string +from tests.conftest import FIPS_TESTRUN + @pytest.fixture(scope="package") def salt_master_factory(salt_factories): + config_overrides = { + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + ), + } factory = salt_factories.salt_master_daemon( random_string("swarm-master-"), extra_cli_arguments_after_first_start_failure=["--log-level=info"], + overrides=config_overrides, ) return factory diff --git a/tests/pytests/unit/transport/test_zeromq.py b/tests/pytests/unit/transport/test_zeromq.py index b9de5f514aa..f3c9b811e7d 100644 --- a/tests/pytests/unit/transport/test_zeromq.py +++ b/tests/pytests/unit/transport/test_zeromq.py @@ -1061,7 +1061,6 @@ async def test_req_chan_decode_data_dict_entry_v2_bad_key( "ver": "2", "cmd": "_pillar", } - try: with pytest.raises(salt.crypt.AuthenticationError) as excinfo: await client.crypted_transfer_decode_dictentry( @@ -1104,8 +1103,12 @@ async def 
test_req_serv_auth_v1(pki_dir, minion_opts, master_opts): } master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) server = salt.channel.server.ReqServerChannel.factory(master_opts) + server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) server.cache_cli = False + server.event = salt.utils.event.get_master_event( + master_opts, master_opts["sock_dir"], listen=False + ) server.master_key = salt.crypt.MasterKeys(server.opts) pub = salt.crypt.get_rsa_pub_key(str(pki_dir.joinpath("minion", "minion.pub"))) @@ -1163,6 +1166,9 @@ async def test_req_serv_auth_v2(pki_dir, minion_opts, master_opts): server = salt.channel.server.ReqServerChannel.factory(master_opts) server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) server.cache_cli = False + server.event = salt.utils.event.get_master_event( + master_opts, master_opts["sock_dir"], listen=False + ) server.master_key = salt.crypt.MasterKeys(server.opts) pub = salt.crypt.get_rsa_pub_key(str(pki_dir.joinpath("minion", "minion.pub"))) @@ -1224,6 +1230,9 @@ async def test_req_chan_auth_v2(pki_dir, io_loop, minion_opts, master_opts): server = salt.channel.server.ReqServerChannel.factory(master_opts) server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) server.cache_cli = False + server.event = salt.utils.event.get_master_event( + master_opts, master_opts["sock_dir"], listen=False + ) server.master_key = salt.crypt.MasterKeys(server.opts) minion_opts["verify_master_pubkey_sign"] = False minion_opts["always_verify_signature"] = False @@ -1280,6 +1289,9 @@ async def test_req_chan_auth_v2_with_master_signing( server = salt.channel.server.ReqServerChannel.factory(master_opts) server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) server.cache_cli = False + server.event = salt.utils.event.get_master_event( + master_opts, master_opts["sock_dir"], listen=False + ) server.master_key = salt.crypt.MasterKeys(server.opts) minion_opts["verify_master_pubkey_sign"] = True 
minion_opts["always_verify_signature"] = True @@ -1319,6 +1331,9 @@ async def test_req_chan_auth_v2_with_master_signing( server = salt.channel.server.ReqServerChannel.factory(master_opts) server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) server.cache_cli = False + server.event = salt.utils.event.get_master_event( + master_opts, master_opts["sock_dir"], listen=False + ) server.master_key = salt.crypt.MasterKeys(server.opts) signin_payload = client.auth.minion_sign_in_payload() @@ -1374,6 +1389,9 @@ async def test_req_chan_auth_v2_new_minion_with_master_pub( server = salt.channel.server.ReqServerChannel.factory(master_opts) server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) server.cache_cli = False + server.event = salt.utils.event.get_master_event( + master_opts, master_opts["sock_dir"], listen=False + ) server.master_key = salt.crypt.MasterKeys(server.opts) minion_opts["verify_master_pubkey_sign"] = False minion_opts["always_verify_signature"] = False @@ -1437,6 +1455,9 @@ async def test_req_chan_auth_v2_new_minion_with_master_pub_bad_sig( server = salt.channel.server.ReqServerChannel.factory(master_opts) server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) server.cache_cli = False + server.event = salt.utils.event.get_master_event( + master_opts, master_opts["sock_dir"], listen=False + ) server.master_key = salt.crypt.MasterKeys(server.opts) minion_opts["verify_master_pubkey_sign"] = False minion_opts["always_verify_signature"] = False @@ -1494,6 +1515,9 @@ async def test_req_chan_auth_v2_new_minion_without_master_pub( server = salt.channel.server.ReqServerChannel.factory(master_opts) server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) server.cache_cli = False + server.event = salt.utils.event.get_master_event( + master_opts, master_opts["sock_dir"], listen=False + ) server.master_key = salt.crypt.MasterKeys(server.opts) minion_opts["verify_master_pubkey_sign"] = False minion_opts["always_verify_signature"] = False From 
96395966dad8a1ec8284dbb23703bf6219b81dbd Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Wed, 29 May 2024 01:40:10 -0700 Subject: [PATCH 092/160] Fix scenarios tests under FIPS --- tests/pytests/scenarios/blackout/conftest.py | 26 +++++++++++++++++-- tests/pytests/scenarios/compat/conftest.py | 5 ++++ .../scenarios/compat/test_with_versions.py | 8 ++++++ tests/pytests/scenarios/daemons/conftest.py | 6 +++++ tests/pytests/scenarios/dns/conftest.py | 9 +++++++ .../scenarios/dns/multimaster/conftest.py | 13 ++++++++++ .../failover/multimaster/conftest.py | 15 +++++++++++ .../multimaster/test_failover_master.py | 5 ++++ .../pytests/scenarios/multimaster/conftest.py | 1 - tests/pytests/scenarios/reauth/conftest.py | 13 ++++++++++ tests/pytests/scenarios/swarm/conftest.py | 9 +++++++ 11 files changed, 107 insertions(+), 3 deletions(-) diff --git a/tests/pytests/scenarios/blackout/conftest.py b/tests/pytests/scenarios/blackout/conftest.py index a75c20f30ed..0e872ae34d4 100644 --- a/tests/pytests/scenarios/blackout/conftest.py +++ b/tests/pytests/scenarios/blackout/conftest.py @@ -5,6 +5,8 @@ import time import attr import pytest +from tests.conftest import FIPS_TESTRUN + @attr.s class BlackoutPillar: @@ -126,9 +128,17 @@ def salt_master(salt_factories, pillar_state_tree): "pillar_roots": {"base": [str(pillar_state_tree)]}, "open_mode": True, } + config_overrides = { + "interface": "127.0.0.1", + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + ), + } factory = salt_factories.salt_master_daemon( "blackout-master", defaults=config_defaults, + overrides=config_overrides, extra_cli_arguments_after_first_start_failure=["--log-level=info"], ) with factory.started(): @@ -138,7 +148,13 @@ def salt_master(salt_factories, pillar_state_tree): @pytest.fixture(scope="package") def salt_minion_1(salt_master): factory = salt_master.salt_minion_daemon( - "blackout-minion-1", defaults={"open_mode": True} + 
"blackout-minion-1", + defaults={"open_mode": True}, + overrides={ + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + }, ) with factory.started(): yield factory @@ -147,7 +163,13 @@ def salt_minion_1(salt_master): @pytest.fixture(scope="package") def salt_minion_2(salt_master): factory = salt_master.salt_minion_daemon( - "blackout-minion-2", defaults={"open_mode": True} + "blackout-minion-2", + defaults={"open_mode": True}, + overrides={ + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + }, ) with factory.started(): yield factory diff --git a/tests/pytests/scenarios/compat/conftest.py b/tests/pytests/scenarios/compat/conftest.py index e42c4c9259a..899f35f2cdf 100644 --- a/tests/pytests/scenarios/compat/conftest.py +++ b/tests/pytests/scenarios/compat/conftest.py @@ -14,6 +14,7 @@ from saltfactories.daemons.container import Container from saltfactories.utils import random_string import salt.utils.path +from tests.conftest import FIPS_TESTRUN from tests.support.runtests import RUNTIME_VARS from tests.support.sminion import create_sminion @@ -135,6 +136,10 @@ def salt_master( "log_level_logfile": "quiet", # We also want to scrutinize the key acceptance "open_mode": False, + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + ), } # We need to copy the extension modules into the new master root_dir or diff --git a/tests/pytests/scenarios/compat/test_with_versions.py b/tests/pytests/scenarios/compat/test_with_versions.py index cc61a01f509..d79a608e0f5 100644 --- a/tests/pytests/scenarios/compat/test_with_versions.py +++ b/tests/pytests/scenarios/compat/test_with_versions.py @@ -13,6 +13,7 @@ from saltfactories.daemons.container import 
SaltMinion from saltfactories.utils import random_string import salt.utils.platform +from tests.conftest import FIPS_TESTRUN from tests.support.runtests import RUNTIME_VARS docker = pytest.importorskip("docker") @@ -77,6 +78,9 @@ def salt_minion( }, # We also want to scrutinize the key acceptance "open_mode": False, + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", } factory = salt_master.salt_minion_daemon( minion_id, @@ -148,12 +152,14 @@ def populated_state_tree(minion_id, package_name, state_tree): yield +@pytest.mark.skip_on_fips_enabled_platform def test_ping(salt_cli, salt_minion): ret = salt_cli.run("test.ping", minion_tgt=salt_minion.id) assert ret.returncode == 0, ret assert ret.data is True +@pytest.mark.skip_on_fips_enabled_platform @pytest.mark.usefixtures("populated_state_tree") def test_highstate(salt_cli, salt_minion, package_name): """ @@ -167,6 +173,7 @@ def test_highstate(salt_cli, salt_minion, package_name): assert package_name in state_return["changes"], state_return +@pytest.mark.skip_on_fips_enabled_platform @pytest.fixture def cp_file_source(): source = pathlib.Path(RUNTIME_VARS.BASE_FILES) / "cheese" @@ -175,6 +182,7 @@ def cp_file_source(): yield pathlib.Path(temp_file) +@pytest.mark.skip_on_fips_enabled_platform def test_cp(salt_cp_cli, salt_minion, artifacts_path, cp_file_source): """ Assert proper behaviour for salt-cp with a newer master and older minions. 
diff --git a/tests/pytests/scenarios/daemons/conftest.py b/tests/pytests/scenarios/daemons/conftest.py index 2433376d34c..94695d73e4a 100644 --- a/tests/pytests/scenarios/daemons/conftest.py +++ b/tests/pytests/scenarios/daemons/conftest.py @@ -1,6 +1,8 @@ import pytest from saltfactories.utils import random_string +from tests.conftest import FIPS_TESTRUN + @pytest.fixture(scope="package") def salt_master_factory(request, salt_factories): @@ -10,6 +12,10 @@ def salt_master_factory(request, salt_factories): } config_overrides = { "interface": "127.0.0.1", + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + ), } return salt_factories.salt_master_daemon( diff --git a/tests/pytests/scenarios/dns/conftest.py b/tests/pytests/scenarios/dns/conftest.py index 254e8ee9a28..ae3f0f410f5 100644 --- a/tests/pytests/scenarios/dns/conftest.py +++ b/tests/pytests/scenarios/dns/conftest.py @@ -4,6 +4,8 @@ import subprocess import pytest +from tests.conftest import FIPS_TESTRUN + log = logging.getLogger(__name__) @@ -53,6 +55,10 @@ def master(request, salt_factories): } config_overrides = { "interface": "0.0.0.0", + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + ), } factory = salt_factories.salt_master_daemon( "master", @@ -84,6 +90,9 @@ def minion(master, master_alive_interval): "master": f"master.local:{port}", "publish_port": master.config["publish_port"], "master_alive_interval": master_alive_interval, + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", } factory = master.salt_minion_daemon( "minion", diff --git a/tests/pytests/scenarios/dns/multimaster/conftest.py b/tests/pytests/scenarios/dns/multimaster/conftest.py index 3b50ed65c60..6951e51bf93 100644 --- 
a/tests/pytests/scenarios/dns/multimaster/conftest.py +++ b/tests/pytests/scenarios/dns/multimaster/conftest.py @@ -5,6 +5,8 @@ import subprocess import pytest +from tests.conftest import FIPS_TESTRUN + log = logging.getLogger(__name__) @@ -20,6 +22,10 @@ def salt_mm_master_1(request, salt_factories): config_overrides = { "interface": "0.0.0.0", "master_sign_pubkey": True, + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + ), } factory = salt_factories.salt_master_daemon( "mm-master-1", @@ -55,6 +61,10 @@ def salt_mm_master_2(salt_factories, salt_mm_master_1): config_overrides = { "interface": "0.0.0.0", "master_sign_pubkey": True, + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + ), } # Use the same ports for both masters, they are binding to different interfaces @@ -103,6 +113,9 @@ def salt_mm_minion_1(salt_mm_master_1, salt_mm_master_2, master_alive_interval): "master_tries": -1, "verify_master_pubkey_sign": True, "retry_dns": True, + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", } factory = salt_mm_master_1.salt_minion_daemon( "mm-minion-1", diff --git a/tests/pytests/scenarios/failover/multimaster/conftest.py b/tests/pytests/scenarios/failover/multimaster/conftest.py index 970c1e59137..166e164c13b 100644 --- a/tests/pytests/scenarios/failover/multimaster/conftest.py +++ b/tests/pytests/scenarios/failover/multimaster/conftest.py @@ -8,6 +8,7 @@ import pytest from pytestshellutils.exceptions import FactoryNotStarted, FactoryTimeout import salt.utils.platform +from tests.conftest import FIPS_TESTRUN log = logging.getLogger(__name__) @@ -21,6 +22,10 @@ def salt_mm_failover_master_1(request, salt_factories): config_overrides = { "interface": "127.0.0.1", "master_sign_pubkey": True, + 
"fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + ), } factory = salt_factories.salt_master_daemon( "mm-failover-master-1", @@ -49,6 +54,10 @@ def salt_mm_failover_master_2(salt_factories, salt_mm_failover_master_1): config_overrides = { "interface": "127.0.0.2", "master_sign_pubkey": True, + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + ), } # Use the same ports for both masters, they are binding to different interfaces @@ -100,6 +109,9 @@ def salt_mm_failover_minion_1(salt_mm_failover_master_1, salt_mm_failover_master "master_tries": -1, "verify_master_pubkey_sign": True, "retry_dns": 1, + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", } factory = salt_mm_failover_master_1.salt_minion_daemon( "mm-failover-minion-1", @@ -138,6 +150,9 @@ def salt_mm_failover_minion_2(salt_mm_failover_master_1, salt_mm_failover_master "master_tries": -1, "verify_master_pubkey_sign": True, "retry_dns": 1, + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", } factory = salt_mm_failover_master_2.salt_minion_daemon( "mm-failover-minion-2", diff --git a/tests/pytests/scenarios/failover/multimaster/test_failover_master.py b/tests/pytests/scenarios/failover/multimaster/test_failover_master.py index 84ab7548ff4..e996469789c 100644 --- a/tests/pytests/scenarios/failover/multimaster/test_failover_master.py +++ b/tests/pytests/scenarios/failover/multimaster/test_failover_master.py @@ -5,6 +5,8 @@ import time import pytest +from tests.conftest import FIPS_TESTRUN + pytestmark = [ pytest.mark.core_test, pytest.mark.skip_on_freebsd(reason="Processes are not properly killed on FreeBSD"), @@ -36,6 
+38,9 @@ def test_pki(salt_mm_failover_master_1, salt_mm_failover_master_2, caplog): "master_alive_interval": 5, "master_tries": -1, "verify_master_pubkey_sign": True, + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", } factory = salt_mm_failover_master_1.salt_minion_daemon( "mm-failover-pki-minion-1", diff --git a/tests/pytests/scenarios/multimaster/conftest.py b/tests/pytests/scenarios/multimaster/conftest.py index 5a09a5ecd7b..77654f05460 100644 --- a/tests/pytests/scenarios/multimaster/conftest.py +++ b/tests/pytests/scenarios/multimaster/conftest.py @@ -26,7 +26,6 @@ def salt_mm_master_1(request, salt_factories): "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" ), } - factory = salt_factories.salt_master_daemon( "mm-master-1", defaults=config_defaults, diff --git a/tests/pytests/scenarios/reauth/conftest.py b/tests/pytests/scenarios/reauth/conftest.py index bbefa71e181..35a7003a585 100644 --- a/tests/pytests/scenarios/reauth/conftest.py +++ b/tests/pytests/scenarios/reauth/conftest.py @@ -1,12 +1,20 @@ import pytest from saltfactories.utils import random_string +from tests.conftest import FIPS_TESTRUN + @pytest.fixture(scope="package") def salt_master_factory(salt_factories): factory = salt_factories.salt_master_daemon( random_string("reauth-master-"), extra_cli_arguments_after_first_start_failure=["--log-level=info"], + overrides={ + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + ), + }, ) return factory @@ -22,6 +30,11 @@ def salt_minion_factory(salt_master): factory = salt_master.salt_minion_daemon( random_string("reauth-minion-"), extra_cli_arguments_after_first_start_failure=["--log-level=info"], + overrides={ + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": 
"PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + }, ) return factory diff --git a/tests/pytests/scenarios/swarm/conftest.py b/tests/pytests/scenarios/swarm/conftest.py index 136bb6735c6..b5a4d473138 100644 --- a/tests/pytests/scenarios/swarm/conftest.py +++ b/tests/pytests/scenarios/swarm/conftest.py @@ -64,6 +64,15 @@ def minion_swarm(salt_master, minion_count): minion_factory = salt_master.salt_minion_daemon( random_string(f"swarm-minion-{idx}-"), extra_cli_arguments_after_first_start_failure=["--log-level=info"], + overrides={ + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": ( + "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1" + ), + "signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), + }, ) stack.enter_context(minion_factory.started()) minions.append(minion_factory) From 63bab25e1cdcb987c74cca903598a939a604850c Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Wed, 29 May 2024 14:42:42 -0700 Subject: [PATCH 093/160] Fix more FIPS tests --- .../integration/cli/test_salt_deltaproxy.py | 37 +++++++++++++++++++ tests/pytests/integration/conftest.py | 3 ++ .../integration/minion/test_job_return.py | 12 ++++++ .../pytests/integration/minion/test_reauth.py | 17 ++++++++- .../integration/minion/test_return_retries.py | 12 ++++++ tests/pytests/pkg/conftest.py | 7 +++- 6 files changed, 85 insertions(+), 3 deletions(-) diff --git a/tests/pytests/integration/cli/test_salt_deltaproxy.py b/tests/pytests/integration/cli/test_salt_deltaproxy.py index 1d8eea0e997..8c04fc2f61a 100644 --- a/tests/pytests/integration/cli/test_salt_deltaproxy.py +++ b/tests/pytests/integration/cli/test_salt_deltaproxy.py @@ -10,6 +10,7 @@ from pytestshellutils.exceptions import FactoryNotStarted from saltfactories.utils import random_string import salt.defaults.exitcodes +from tests.conftest import FIPS_TESTRUN from tests.support.helpers import PRE_PYTEST_SKIP_REASON log = logging.getLogger(__name__) @@ -129,6 +130,11 @@ def 
test_exit_status_correct_usage( config_defaults = { "metaproxy": "deltaproxy", } + config_overrides = { + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + } proxy_one = "dummy_proxy_one" proxy_two = "dummy_proxy_two" @@ -184,6 +190,7 @@ def test_exit_status_correct_usage( factory = salt_master.salt_proxy_minion_daemon( proxy_minion_id, defaults=config_defaults, + overrides=config_overrides, extra_cli_arguments_after_first_start_failure=["--log-level=info"], start_timeout=320, ) @@ -246,6 +253,11 @@ def test_missing_pillar_file( config_defaults = { "metaproxy": "deltaproxy", } + config_overrides = { + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + } proxy_one = "dummy_proxy_one" proxy_two = "dummy_proxy_two" @@ -287,6 +299,7 @@ def test_missing_pillar_file( factory = salt_master.salt_proxy_minion_daemon( proxy_minion_id, defaults=config_defaults, + overrides=config_overrides, extra_cli_arguments_after_first_start_failure=["--log-level=info"], start_timeout=320, ) @@ -344,6 +357,11 @@ def test_invalid_connection( config_defaults = { "metaproxy": "deltaproxy", } + config_overrides = { + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + } proxy_one = "dummy_proxy_one" broken_proxy_one = "broken_proxy_one" broken_proxy_two = "broken_proxy_two" @@ -415,6 +433,7 @@ def test_invalid_connection( factory = salt_master.salt_proxy_minion_daemon( proxy_minion_id, defaults=config_defaults, + overrides=config_overrides, extra_cli_arguments_after_first_start_failure=["--log-level=info"], start_timeout=320, ) @@ -474,6 +493,11 @@ def test_custom_proxy_module( config_defaults = { 
"metaproxy": "deltaproxy", } + config_overrides = { + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + } proxy_one = "custom_dummy_proxy_one" proxy_two = "custom_dummy_proxy_two" @@ -548,6 +572,7 @@ def ping(): factory = salt_master.salt_proxy_minion_daemon( proxy_minion_id, defaults=config_defaults, + overrides=config_overrides, extra_cli_arguments_after_first_start_failure=["--log-level=info"], start_timeout=320, ) @@ -611,6 +636,11 @@ def test_custom_proxy_module_raise_exception( config_defaults = { "metaproxy": "deltaproxy", } + config_overrides = { + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + } proxy_one = "custom_dummy_proxy_one" proxy_two = "custom_dummy_proxy_two" @@ -685,6 +715,7 @@ def ping(): factory = salt_master.salt_proxy_minion_daemon( proxy_minion_id, defaults=config_defaults, + overrides=config_overrides, extra_cli_arguments_after_first_start_failure=["--log-level=info"], start_timeout=320, ) @@ -750,6 +781,11 @@ def test_exit_status_correct_usage_large_number_of_minions( config_defaults = { "metaproxy": "deltaproxy", } + config_overrides = { + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + } sub_proxies = [ "proxy_one", "proxy_two", @@ -826,6 +862,7 @@ def test_exit_status_correct_usage_large_number_of_minions( factory = salt_master.salt_proxy_minion_daemon( proxy_minion_id, defaults=config_defaults, + overrides=config_overrides, extra_cli_arguments_after_first_start_failure=["--log-level=info"], start_timeout=320, ) diff --git a/tests/pytests/integration/conftest.py b/tests/pytests/integration/conftest.py index e505e55a20a..096bc28e0f2 100644 --- 
a/tests/pytests/integration/conftest.py +++ b/tests/pytests/integration/conftest.py @@ -17,6 +17,9 @@ def salt_master(salt_master_factory): """ A running salt-master fixture """ + print("*" * 80) + print(repr(salt_master_factory)) + print("*" * 80) with salt_master_factory.started(): yield salt_master_factory diff --git a/tests/pytests/integration/minion/test_job_return.py b/tests/pytests/integration/minion/test_job_return.py index dc345eb2771..5200af8c20c 100644 --- a/tests/pytests/integration/minion/test_job_return.py +++ b/tests/pytests/integration/minion/test_job_return.py @@ -5,6 +5,7 @@ import subprocess import pytest import salt.utils.platform +from tests.conftest import FIPS_TESTRUN @pytest.fixture @@ -15,6 +16,10 @@ def salt_master_1(request, salt_factories): } config_overrides = { "interface": "127.0.0.1", + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + ), } factory = salt_factories.salt_master_daemon( @@ -38,6 +43,10 @@ def salt_master_2(salt_factories, salt_master_1): } config_overrides = { "interface": "127.0.0.2", + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + ), } # Use the same ports for both masters, they are binding to different interfaces @@ -80,6 +89,9 @@ def salt_minion_1(salt_master_1, salt_master_2): f"{master_2_addr}:{master_2_port}", ], "test.foo": "baz", + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", } factory = salt_master_1.salt_minion_daemon( "minion-1", diff --git a/tests/pytests/integration/minion/test_reauth.py b/tests/pytests/integration/minion/test_reauth.py index 24b256502e8..177f5ed21f7 100644 --- a/tests/pytests/integration/minion/test_reauth.py +++ b/tests/pytests/integration/minion/test_reauth.py @@ -1,5 +1,7 @@ import time +from tests.conftest 
import FIPS_TESTRUN + def test_reauth(salt_master_factory, event_listener): """ @@ -23,12 +25,23 @@ def test_reauth(salt_master_factory, event_listener): event_listener.register_auth_event_handler("test_reauth-master", handler) master = salt_master_factory.salt_master_daemon( "test_reauth-master", - overrides={"log_level": "info"}, + overrides={ + "log_level": "info", + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + ), + }, ) sls_tempfile = master.state_tree.base.temp_file(f"{sls_name}.sls", sls_contents) minion = master.salt_minion_daemon( "test_reauth-minion", - overrides={"log_level": "info"}, + overrides={ + "log_level": "info", + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + }, ) cli = master.salt_cli() start_time = time.time() diff --git a/tests/pytests/integration/minion/test_return_retries.py b/tests/pytests/integration/minion/test_return_retries.py index e6f4efe96b0..321387f5692 100644 --- a/tests/pytests/integration/minion/test_return_retries.py +++ b/tests/pytests/integration/minion/test_return_retries.py @@ -4,6 +4,7 @@ import pytest from saltfactories.utils import random_string import salt.utils.files +from tests.conftest import FIPS_TESTRUN @pytest.fixture(scope="function") @@ -13,6 +14,10 @@ def salt_minion_retry(salt_master, salt_minion_id): "return_retry_timer_max": 0, "return_retry_timer": 5, "return_retry_tries": 30, + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + ), } factory = salt_master.salt_minion_daemon( random_string("retry-minion-"), @@ -71,11 +76,18 @@ def test_pillar_timeout(salt_master_factory, tmp_path): "worker_threads": 2, "peer": True, "minion_data_cache": False, + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if 
FIPS_TESTRUN else "PKCS1v15-SHA224" + ), } minion_overrides = { "auth_timeout": 20, "request_channel_timeout": 5, "request_channel_tries": 1, + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", } sls_name = "issue-50221" sls_contents = """ diff --git a/tests/pytests/pkg/conftest.py b/tests/pytests/pkg/conftest.py index 5bcd544c119..8d2555e73d8 100644 --- a/tests/pytests/pkg/conftest.py +++ b/tests/pytests/pkg/conftest.py @@ -12,7 +12,7 @@ from saltfactories.utils import random_string import salt.config import salt.utils.files -from tests.conftest import CODE_DIR +from tests.conftest import CODE_DIR, FIPS_TESTRUN from tests.support.pkg import ApiRequest, SaltMaster, SaltMasterWindows, SaltPkgInstall log = logging.getLogger(__name__) @@ -298,6 +298,9 @@ def salt_master(salt_factories, install_salt, pkg_tests_account): }, }, "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + ), "open_mode": True, } salt_user_in_config_file = False @@ -449,6 +452,8 @@ def salt_minion(salt_factories, salt_master, install_salt): "file_roots": salt_master.config["file_roots"].copy(), "pillar_roots": salt_master.config["pillar_roots"].copy(), "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", "open_mode": True, } if platform.is_windows(): From a5787031b09114295c8112f1c18db03851c83c9e Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Wed, 29 May 2024 15:19:49 -0700 Subject: [PATCH 094/160] Fix publish_signing_algorithm conditional --- tests/pytests/conftest.py | 4 ++-- tests/pytests/functional/channel/test_server.py | 4 ++-- tests/pytests/integration/minion/test_job_return.py | 4 ++-- tests/pytests/integration/minion/test_reauth.py | 2 +- tests/pytests/integration/minion/test_return_retries.py | 4 ++-- tests/pytests/integration/pillar/cache/conftest.py | 2 +- tests/pytests/integration/runners/test_vault.py | 4 ++-- tests/pytests/integration/states/test_x509_v2.py | 2 +- tests/pytests/pkg/conftest.py | 2 +- tests/pytests/scenarios/blackout/conftest.py | 2 +- tests/pytests/scenarios/compat/conftest.py | 2 +- tests/pytests/scenarios/daemons/conftest.py | 2 +- tests/pytests/scenarios/dns/conftest.py | 2 +- tests/pytests/scenarios/dns/multimaster/conftest.py | 4 ++-- tests/pytests/scenarios/failover/multimaster/conftest.py | 4 ++-- tests/pytests/scenarios/multimaster/conftest.py | 4 ++-- tests/pytests/scenarios/reauth/conftest.py | 2 +- tests/pytests/scenarios/swarm/conftest.py | 2 +- 18 files changed, 26 insertions(+), 26 deletions(-) diff --git a/tests/pytests/conftest.py b/tests/pytests/conftest.py index ad7c6a9f035..542e742aa9f 100644 --- a/tests/pytests/conftest.py +++ b/tests/pytests/conftest.py @@ -192,7 +192,7 @@ def salt_master_factory( "pytest-master": {"log": {"level": "DEBUG"}}, "fips_mode": FIPS_TESTRUN, "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" ), } ext_pillar = [] @@ -462,7 +462,7 @@ def temp_salt_master( config_overrides = { "fips_mode": FIPS_TESTRUN, "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" ), } factory = salt_factories.salt_master_daemon( diff --git a/tests/pytests/functional/channel/test_server.py b/tests/pytests/functional/channel/test_server.py index 
277a5a58bdc..e05e7e38937 100644 --- a/tests/pytests/functional/channel/test_server.py +++ b/tests/pytests/functional/channel/test_server.py @@ -75,7 +75,7 @@ def master_config(root_dir, transport): pki_dir=str(root_dir / "pki"), fips_mode=FIPS_TESTRUN, publish_signing_algorithm=( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" ), ) # master_conf["transport"] = transport @@ -88,7 +88,7 @@ def master_config(root_dir, transport): # master_conf["pki_dir"] = str(root_dir / "pki") # "fips_mode": FIPS_TESTRUN, # "publish_signing_algorithm": ( - # "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + # "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" # ), os.makedirs(master_conf["pki_dir"]) salt.crypt.gen_keys(master_conf["pki_dir"], "master", 4096) diff --git a/tests/pytests/integration/minion/test_job_return.py b/tests/pytests/integration/minion/test_job_return.py index 5200af8c20c..c91748597dc 100644 --- a/tests/pytests/integration/minion/test_job_return.py +++ b/tests/pytests/integration/minion/test_job_return.py @@ -18,7 +18,7 @@ def salt_master_1(request, salt_factories): "interface": "127.0.0.1", "fips_mode": FIPS_TESTRUN, "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" ), } @@ -45,7 +45,7 @@ def salt_master_2(salt_factories, salt_master_1): "interface": "127.0.0.2", "fips_mode": FIPS_TESTRUN, "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" ), } diff --git a/tests/pytests/integration/minion/test_reauth.py b/tests/pytests/integration/minion/test_reauth.py index 177f5ed21f7..2e9962a087c 100644 --- a/tests/pytests/integration/minion/test_reauth.py +++ b/tests/pytests/integration/minion/test_reauth.py @@ -29,7 +29,7 @@ def test_reauth(salt_master_factory, event_listener): "log_level": "info", 
"fips_mode": FIPS_TESTRUN, "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" ), }, ) diff --git a/tests/pytests/integration/minion/test_return_retries.py b/tests/pytests/integration/minion/test_return_retries.py index 321387f5692..03f6146fb6c 100644 --- a/tests/pytests/integration/minion/test_return_retries.py +++ b/tests/pytests/integration/minion/test_return_retries.py @@ -16,7 +16,7 @@ def salt_minion_retry(salt_master, salt_minion_id): "return_retry_tries": 30, "fips_mode": FIPS_TESTRUN, "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" ), } factory = salt_master.salt_minion_daemon( @@ -78,7 +78,7 @@ def test_pillar_timeout(salt_master_factory, tmp_path): "minion_data_cache": False, "fips_mode": FIPS_TESTRUN, "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" ), } minion_overrides = { diff --git a/tests/pytests/integration/pillar/cache/conftest.py b/tests/pytests/integration/pillar/cache/conftest.py index 0998ed4801d..ca4ba16b2ba 100644 --- a/tests/pytests/integration/pillar/cache/conftest.py +++ b/tests/pytests/integration/pillar/cache/conftest.py @@ -27,7 +27,7 @@ def pillar_salt_master(salt_factories, pillar_state_tree): config_overrides = { "fips_mode": FIPS_TESTRUN, "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" ), } factory = salt_factories.salt_master_daemon( diff --git a/tests/pytests/integration/runners/test_vault.py b/tests/pytests/integration/runners/test_vault.py index f7253011397..f628d7cea90 100644 --- a/tests/pytests/integration/runners/test_vault.py +++ b/tests/pytests/integration/runners/test_vault.py @@ -52,7 +52,7 @@ def pillar_salt_master(salt_factories, 
pillar_state_tree): config_overrides = { "fips_mode": FIPS_TESTRUN, "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" ), } factory = salt_factories.salt_master_daemon( @@ -85,7 +85,7 @@ def pillar_caching_salt_master(salt_factories, pillar_state_tree): config_overrides = { "fips_mode": FIPS_TESTRUN, "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" ), } factory = salt_factories.salt_master_daemon( diff --git a/tests/pytests/integration/states/test_x509_v2.py b/tests/pytests/integration/states/test_x509_v2.py index bbf27302a26..ad28feeaaa8 100644 --- a/tests/pytests/integration/states/test_x509_v2.py +++ b/tests/pytests/integration/states/test_x509_v2.py @@ -127,7 +127,7 @@ def x509_salt_master(salt_factories, ca_minion_id, x509_master_config): config_overrides = { "fips_mode": FIPS_TESTRUN, "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" ), } factory = salt_factories.salt_master_daemon( diff --git a/tests/pytests/pkg/conftest.py b/tests/pytests/pkg/conftest.py index 8d2555e73d8..59d02c3a60d 100644 --- a/tests/pytests/pkg/conftest.py +++ b/tests/pytests/pkg/conftest.py @@ -299,7 +299,7 @@ def salt_master(salt_factories, install_salt, pkg_tests_account): }, "fips_mode": FIPS_TESTRUN, "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" ), "open_mode": True, } diff --git a/tests/pytests/scenarios/blackout/conftest.py b/tests/pytests/scenarios/blackout/conftest.py index 0e872ae34d4..52e21b79504 100644 --- a/tests/pytests/scenarios/blackout/conftest.py +++ b/tests/pytests/scenarios/blackout/conftest.py @@ -132,7 +132,7 @@ def salt_master(salt_factories, pillar_state_tree): "interface": 
"127.0.0.1", "fips_mode": FIPS_TESTRUN, "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" ), } factory = salt_factories.salt_master_daemon( diff --git a/tests/pytests/scenarios/compat/conftest.py b/tests/pytests/scenarios/compat/conftest.py index 899f35f2cdf..46e11d40bdf 100644 --- a/tests/pytests/scenarios/compat/conftest.py +++ b/tests/pytests/scenarios/compat/conftest.py @@ -138,7 +138,7 @@ def salt_master( "open_mode": False, "fips_mode": FIPS_TESTRUN, "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" ), } diff --git a/tests/pytests/scenarios/daemons/conftest.py b/tests/pytests/scenarios/daemons/conftest.py index 94695d73e4a..634314a2a9d 100644 --- a/tests/pytests/scenarios/daemons/conftest.py +++ b/tests/pytests/scenarios/daemons/conftest.py @@ -14,7 +14,7 @@ def salt_master_factory(request, salt_factories): "interface": "127.0.0.1", "fips_mode": FIPS_TESTRUN, "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" ), } diff --git a/tests/pytests/scenarios/dns/conftest.py b/tests/pytests/scenarios/dns/conftest.py index ae3f0f410f5..cfa4efda125 100644 --- a/tests/pytests/scenarios/dns/conftest.py +++ b/tests/pytests/scenarios/dns/conftest.py @@ -57,7 +57,7 @@ def master(request, salt_factories): "interface": "0.0.0.0", "fips_mode": FIPS_TESTRUN, "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" ), } factory = salt_factories.salt_master_daemon( diff --git a/tests/pytests/scenarios/dns/multimaster/conftest.py b/tests/pytests/scenarios/dns/multimaster/conftest.py index 6951e51bf93..cda109d5a16 100644 --- a/tests/pytests/scenarios/dns/multimaster/conftest.py +++ 
b/tests/pytests/scenarios/dns/multimaster/conftest.py @@ -24,7 +24,7 @@ def salt_mm_master_1(request, salt_factories): "master_sign_pubkey": True, "fips_mode": FIPS_TESTRUN, "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" ), } factory = salt_factories.salt_master_daemon( @@ -63,7 +63,7 @@ def salt_mm_master_2(salt_factories, salt_mm_master_1): "master_sign_pubkey": True, "fips_mode": FIPS_TESTRUN, "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" ), } diff --git a/tests/pytests/scenarios/failover/multimaster/conftest.py b/tests/pytests/scenarios/failover/multimaster/conftest.py index 166e164c13b..3c8f89a8ba7 100644 --- a/tests/pytests/scenarios/failover/multimaster/conftest.py +++ b/tests/pytests/scenarios/failover/multimaster/conftest.py @@ -24,7 +24,7 @@ def salt_mm_failover_master_1(request, salt_factories): "master_sign_pubkey": True, "fips_mode": FIPS_TESTRUN, "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" ), } factory = salt_factories.salt_master_daemon( @@ -56,7 +56,7 @@ def salt_mm_failover_master_2(salt_factories, salt_mm_failover_master_1): "master_sign_pubkey": True, "fips_mode": FIPS_TESTRUN, "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" ), } diff --git a/tests/pytests/scenarios/multimaster/conftest.py b/tests/pytests/scenarios/multimaster/conftest.py index 77654f05460..84e7a9a3ceb 100644 --- a/tests/pytests/scenarios/multimaster/conftest.py +++ b/tests/pytests/scenarios/multimaster/conftest.py @@ -23,7 +23,7 @@ def salt_mm_master_1(request, salt_factories): "interface": "127.0.0.1", "fips_mode": FIPS_TESTRUN, "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if 
FIPS_TESTRUN else "PKCS1v15-SHA224" + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" ), } factory = salt_factories.salt_master_daemon( @@ -54,7 +54,7 @@ def salt_mm_master_2(salt_factories, salt_mm_master_1): "interface": "127.0.0.2", "fips_mode": FIPS_TESTRUN, "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" ), } diff --git a/tests/pytests/scenarios/reauth/conftest.py b/tests/pytests/scenarios/reauth/conftest.py index 35a7003a585..c91760bc8ca 100644 --- a/tests/pytests/scenarios/reauth/conftest.py +++ b/tests/pytests/scenarios/reauth/conftest.py @@ -12,7 +12,7 @@ def salt_master_factory(salt_factories): overrides={ "fips_mode": FIPS_TESTRUN, "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" ), }, ) diff --git a/tests/pytests/scenarios/swarm/conftest.py b/tests/pytests/scenarios/swarm/conftest.py index b5a4d473138..cd2f50c4124 100644 --- a/tests/pytests/scenarios/swarm/conftest.py +++ b/tests/pytests/scenarios/swarm/conftest.py @@ -12,7 +12,7 @@ def salt_master_factory(salt_factories): config_overrides = { "fips_mode": FIPS_TESTRUN, "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA224" + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" ), } factory = salt_factories.salt_master_daemon( From 389aac96638f5481ab45cdf496ba9a05376e9c18 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Thu, 30 May 2024 17:11:44 -0700 Subject: [PATCH 095/160] More fips test fixes --- .../functional/transport/zeromq/conftest.py | 19 +++++++++- tests/pytests/integration/cli/test_salt.py | 6 +++ .../integration/cli/test_salt_minion.py | 27 +++++++++++++- .../integration/cli/test_salt_proxy.py | 37 +++++++++++++++++-- tests/pytests/integration/conftest.py | 3 -- .../integration/minion/test_return_retries.py | 5 +-- .../pytests/integration/modules/test_virt.py | 15 +++++++- .../integration/modules/test_x509_v2.py | 20 +++++++++- tests/pytests/integration/states/test_file.py | 8 ++++ 9 files changed, 124 insertions(+), 16 deletions(-) diff --git a/tests/pytests/functional/transport/zeromq/conftest.py b/tests/pytests/functional/transport/zeromq/conftest.py index ea6dbd66472..9e8f059e889 100644 --- a/tests/pytests/functional/transport/zeromq/conftest.py +++ b/tests/pytests/functional/transport/zeromq/conftest.py @@ -1,6 +1,8 @@ import pytest from saltfactories.utils import random_string +from tests.conftest import FIPS_TESTRUN + @pytest.fixture def salt_master(salt_factories): @@ -10,7 +12,14 @@ def salt_master(salt_factories): "sign_pub_messages": False, } factory = salt_factories.salt_master_daemon( - random_string("zeromq-master-"), defaults=config_defaults + random_string("zeromq-master-"), + defaults=config_defaults, + overrides={ + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), + }, ) return factory @@ -26,6 +35,12 @@ def salt_minion(salt_master): "master_uri": "tcp://127.0.0.1:{}".format(salt_master.config["ret_port"]), } factory = salt_master.salt_minion_daemon( - random_string("zeromq-minion-"), defaults=config_defaults + random_string("zeromq-minion-"), + defaults=config_defaults, + overrides={ + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", 
+ }, ) return factory diff --git a/tests/pytests/integration/cli/test_salt.py b/tests/pytests/integration/cli/test_salt.py index 035c93dd427..37925160ca6 100644 --- a/tests/pytests/integration/cli/test_salt.py +++ b/tests/pytests/integration/cli/test_salt.py @@ -16,6 +16,7 @@ from pytestshellutils.utils.processes import ProcessResult, terminate_process import salt.defaults.exitcodes import salt.utils.path +from tests.conftest import FIPS_TESTRUN log = logging.getLogger(__name__) @@ -32,6 +33,11 @@ def salt_minion_2(salt_master): """ factory = salt_master.salt_minion_daemon( "minion-2", + overrides={ + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + }, extra_cli_arguments_after_first_start_failure=["--log-level=info"], ) with factory.started(start_timeout=120): diff --git a/tests/pytests/integration/cli/test_salt_minion.py b/tests/pytests/integration/cli/test_salt_minion.py index c0d60134746..ad623bd30f5 100644 --- a/tests/pytests/integration/cli/test_salt_minion.py +++ b/tests/pytests/integration/cli/test_salt_minion.py @@ -6,6 +6,7 @@ from pytestshellutils.exceptions import FactoryNotStarted from saltfactories.utils import random_string import salt.defaults.exitcodes +from tests.conftest import FIPS_TESTRUN from tests.support.helpers import PRE_PYTEST_SKIP_REASON pytestmark = [ @@ -39,7 +40,15 @@ def test_exit_status_unknown_user(salt_master, minion_id): """ with pytest.raises(FactoryNotStarted) as exc: factory = salt_master.salt_minion_daemon( - minion_id, overrides={"user": "unknown-user"} + minion_id, + overrides={ + "user": "unknown-user", + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), + }, ) factory.start(start_timeout=10, max_start_attempts=1) @@ -52,7 +61,16 @@ def 
test_exit_status_unknown_argument(salt_master, minion_id): Ensure correct exit status when an unknown argument is passed to salt-minion. """ with pytest.raises(FactoryNotStarted) as exc: - factory = salt_master.salt_minion_daemon(minion_id) + factory = salt_master.salt_minion_daemon( + minion_id, + overrides={ + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), + }, + ) factory.start("--unknown-argument", start_timeout=10, max_start_attempts=1) assert exc.value.process_result.returncode == salt.defaults.exitcodes.EX_USAGE @@ -66,6 +84,11 @@ def test_exit_status_correct_usage(salt_master, minion_id, salt_cli): minion_id, extra_cli_arguments_after_first_start_failure=["--log-level=info"], defaults={"transport": salt_master.config["transport"]}, + overrides={ + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + }, ) factory.start() assert factory.is_running() diff --git a/tests/pytests/integration/cli/test_salt_proxy.py b/tests/pytests/integration/cli/test_salt_proxy.py index fbf39e30438..37024d2d76a 100644 --- a/tests/pytests/integration/cli/test_salt_proxy.py +++ b/tests/pytests/integration/cli/test_salt_proxy.py @@ -9,6 +9,7 @@ from pytestshellutils.exceptions import FactoryNotStarted from saltfactories.utils import random_string import salt.defaults.exitcodes +from tests.conftest import FIPS_TESTRUN from tests.support.helpers import PRE_PYTEST_SKIP_REASON log = logging.getLogger(__name__) @@ -32,7 +33,15 @@ def test_exit_status_no_proxyid(salt_master, proxy_minion_id): """ with pytest.raises(FactoryNotStarted) as exc: factory = salt_master.salt_proxy_minion_daemon( - proxy_minion_id, include_proxyid_cli_flag=False + proxy_minion_id, + include_proxyid_cli_flag=False, + overrides={ + "fips_mode": 
FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), + }, ) factory.start(start_timeout=10, max_start_attempts=1) @@ -50,7 +59,15 @@ def test_exit_status_unknown_user(salt_master, proxy_minion_id): """ with pytest.raises(FactoryNotStarted) as exc: factory = salt_master.salt_proxy_minion_daemon( - proxy_minion_id, overrides={"user": "unknown-user"} + proxy_minion_id, + overrides={ + "user": "unknown-user", + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), + }, ) factory.start(start_timeout=10, max_start_attempts=1) @@ -65,7 +82,16 @@ def test_exit_status_unknown_argument(salt_master, proxy_minion_id): salt-proxy. """ with pytest.raises(FactoryNotStarted) as exc: - factory = salt_master.salt_proxy_minion_daemon(proxy_minion_id) + factory = salt_master.salt_proxy_minion_daemon( + proxy_minion_id, + overrides={ + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), + }, + ) factory.start("--unknown-argument", start_timeout=10, max_start_attempts=1) assert exc.value.process_result.returncode == salt.defaults.exitcodes.EX_USAGE @@ -86,6 +112,11 @@ def test_exit_status_correct_usage(salt_master, proxy_minion_id, salt_cli): proxy_minion_id, extra_cli_arguments_after_first_start_failure=["--log-level=info"], defaults={"transport": salt_master.config["transport"]}, + overrides={ + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + }, ) factory.start() assert factory.is_running() diff --git a/tests/pytests/integration/conftest.py 
b/tests/pytests/integration/conftest.py index 096bc28e0f2..e505e55a20a 100644 --- a/tests/pytests/integration/conftest.py +++ b/tests/pytests/integration/conftest.py @@ -17,9 +17,6 @@ def salt_master(salt_master_factory): """ A running salt-master fixture """ - print("*" * 80) - print(repr(salt_master_factory)) - print("*" * 80) with salt_master_factory.started(): yield salt_master_factory diff --git a/tests/pytests/integration/minion/test_return_retries.py b/tests/pytests/integration/minion/test_return_retries.py index 03f6146fb6c..35e236dcf18 100644 --- a/tests/pytests/integration/minion/test_return_retries.py +++ b/tests/pytests/integration/minion/test_return_retries.py @@ -15,9 +15,8 @@ def salt_minion_retry(salt_master, salt_minion_id): "return_retry_timer": 5, "return_retry_tries": 30, "fips_mode": FIPS_TESTRUN, - "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" - ), + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", } factory = salt_master.salt_minion_daemon( random_string("retry-minion-"), diff --git a/tests/pytests/integration/modules/test_virt.py b/tests/pytests/integration/modules/test_virt.py index a1ce0054189..de31109152f 100644 --- a/tests/pytests/integration/modules/test_virt.py +++ b/tests/pytests/integration/modules/test_virt.py @@ -8,6 +8,7 @@ from xml.etree import ElementTree import pytest +from tests.conftest import FIPS_TESTRUN from tests.support.virt import SaltVirtMinionContainerFactory docker = pytest.importorskip("docker") @@ -42,7 +43,12 @@ def virt_minion_0( "open_mode": True, "transport": salt_master.config["transport"], } - config_overrides = {"user": "root"} + config_overrides = { + "user": "root", + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + } factory = 
salt_master.salt_minion_daemon( virt_minion_0_id, name=virt_minion_0_id, @@ -79,7 +85,12 @@ def virt_minion_1( "open_mode": True, "transport": salt_master.config["transport"], } - config_overrides = {"user": "root"} + config_overrides = { + "user": "root", + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + } factory = salt_master.salt_minion_daemon( virt_minion_1_id, name=virt_minion_1_id, diff --git a/tests/pytests/integration/modules/test_x509_v2.py b/tests/pytests/integration/modules/test_x509_v2.py index 398e4350c41..68bb6edc623 100644 --- a/tests/pytests/integration/modules/test_x509_v2.py +++ b/tests/pytests/integration/modules/test_x509_v2.py @@ -12,6 +12,7 @@ import pytest from saltfactories.utils import random_string import salt.utils.x509 as x509util +from tests.conftest import FIPS_TESTRUN try: import cryptography @@ -60,7 +61,14 @@ def x509_data( @pytest.fixture(scope="module") def x509_salt_master(salt_factories, ca_minion_id, x509_master_config): factory = salt_factories.salt_master_daemon( - "x509-master", defaults=x509_master_config + "x509-master", + defaults=x509_master_config, + overrides={ + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), + }, ) with factory.started(): yield factory @@ -172,6 +180,11 @@ def x509ca_salt_minion(x509_salt_master, ca_minion_id, ca_minion_config): factory = x509_salt_master.salt_minion_daemon( ca_minion_id, defaults=ca_minion_config, + overrides={ + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + }, ) with factory.started(): # Sync All @@ -191,6 +204,11 @@ def x509_salt_minion(x509_salt_master, x509_minion_id): "features": {"x509_v2": True}, "grains": {"testgrain": "foo"}, }, + 
overrides={ + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + }, ) with factory.started(): # Sync All diff --git a/tests/pytests/integration/states/test_file.py b/tests/pytests/integration/states/test_file.py index 14c15c0d6a6..d30b746dba3 100644 --- a/tests/pytests/integration/states/test_file.py +++ b/tests/pytests/integration/states/test_file.py @@ -17,6 +17,7 @@ import salt.utils.files import salt.utils.path import salt.utils.platform from salt.utils.versions import Version +from tests.conftest import FIPS_TESTRUN log = logging.getLogger(__name__) @@ -236,6 +237,10 @@ def salt_secondary_master(request, salt_factories): "fileserver_followsymlinks": False, "publish_port": publish_port, "ret_port": ret_port, + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), } factory = salt_factories.salt_master_daemon( @@ -258,6 +263,9 @@ def salt_secondary_minion(salt_secondary_master): config_overrides = { "master": salt_secondary_master.config["interface"], "master_port": salt_secondary_master.config["ret_port"], + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", } factory = salt_secondary_master.salt_minion_daemon( From a10aa05f41781183e38248dded5c9d01394a153c Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Fri, 31 May 2024 14:33:52 -0700 Subject: [PATCH 096/160] Ensure key is bytes not string --- salt/channel/server.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/salt/channel/server.py b/salt/channel/server.py index a43b3addd71..67f2e293b94 100644 --- a/salt/channel/server.py +++ b/salt/channel/server.py @@ -213,7 +213,9 @@ class ReqServerChannel: log.error("AES key not found") return {"error": "AES key not found"} pret = {} - pret["key"] = pub.encrypt(key, encryption_algorithm) + pret["key"] = pub.encrypt( + salt.utils.stringutils.to_bytes(key), encryption_algorithm + ) if ret is False: ret = {} if sign_messages: From e2e3dc9634788cbfab5b4cf3199b514fcba66940 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Fri, 31 May 2024 18:38:16 -0700 Subject: [PATCH 097/160] Windows master tests fix --- salt/channel/server.py | 1 - 1 file changed, 1 deletion(-) diff --git a/salt/channel/server.py b/salt/channel/server.py index 67f2e293b94..173930906b3 100644 --- a/salt/channel/server.py +++ b/salt/channel/server.py @@ -762,7 +762,6 @@ class PubServerChannel: self.event = salt.utils.event.get_event("master", opts=self.opts, listen=False) self.ckminions = salt.utils.minions.CkMinions(self.opts) self.present = {} - self.master_key = salt.crypt.MasterKeys(self.opts) def close(self): self.transport.close() From 80a2f65e58609198578340724030a6bb4ebc545d Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Sat, 1 Jun 2024 14:59:23 -0700 Subject: [PATCH 098/160] Attempt to fix windows package tests --- salt/utils/win_functions.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/salt/utils/win_functions.py b/salt/utils/win_functions.py index 66327a88007..24c319749f8 100644 --- a/salt/utils/win_functions.py +++ b/salt/utils/win_functions.py @@ -11,15 +11,21 @@ from salt.exceptions import CommandExecutionError try: import psutil - import pywintypes import win32api import win32net import win32security from win32con import HWND_BROADCAST, SMTO_ABORTIFHUNG, WM_SETTINGCHANGE + import pywintypes # isort:skip + HAS_WIN32 = True except ImportError: - HAS_WIN32 = False + try: + import psutil + from win32 import pywintypes, win32api, win32net, win32security + from win32con import HWND_BROADCAST, SMTO_ABORTIFHUNG, WM_SETTINGCHANGE + except ImportError: + HAS_WIN32 = False # Although utils are often directly imported, it is also possible to use the From 534bf76463dc217d752b2f4c63d1f100eea74567 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Mon, 3 Jun 2024 12:59:54 -0700 Subject: [PATCH 099/160] Helpful log messages for when things go wrong Provide helpful log messages on the master and minions if a minion uses signing or encryption that is not suppoted by the master. 
--- salt/channel/server.py | 43 ++++++++---- salt/crypt.py | 65 ++++++++++++++----- salt/exceptions.py | 6 ++ .../unit/crypt/test_crypt_cryptography.py | 7 +- 4 files changed, 89 insertions(+), 32 deletions(-) diff --git a/salt/channel/server.py b/salt/channel/server.py index 173930906b3..f7b73b20dc3 100644 --- a/salt/channel/server.py +++ b/salt/channel/server.py @@ -22,7 +22,7 @@ import salt.utils.minions import salt.utils.platform import salt.utils.stringutils import salt.utils.verify -from salt.exceptions import SaltDeserializationError +from salt.exceptions import SaltDeserializationError, UnsupportedAlgorithm from salt.utils.cache import CacheCli log = logging.getLogger(__name__) @@ -237,15 +237,22 @@ class ReqServerChannel: return pret def _clear_signed(self, load, algorithm): - master_pem_path = os.path.join(self.opts["pki_dir"], "master.pem") - tosign = salt.payload.dumps(load) - return { - "enc": "clear", - "load": tosign, - "sig": salt.crypt.PrivateKey(master_pem_path).sign( - tosign, algorithm=algorithm - ), - } + try: + master_pem_path = os.path.join(self.opts["pki_dir"], "master.pem") + tosign = salt.payload.dumps(load) + return { + "enc": "clear", + "load": tosign, + "sig": salt.crypt.PrivateKey(master_pem_path).sign( + tosign, algorithm=algorithm + ), + } + except UnsupportedAlgorithm: + log.info( + "Minion tried to authenticate with unsupported signing algorithm: %s", + algorithm, + ) + return {"enc": "clear", "load": {"ret": "bad sig algo"}} def _update_aes(self): """ @@ -678,6 +685,13 @@ class ReqServerChannel: aes = "{}_|-{}".format( salt.master.SMaster.secrets["aes"]["secret"].value, mtoken ) + except UnsupportedAlgorithm as exc: + log.info( + "Minion %s tried to authenticate with unsupported encryption algorithm: %s", + load["id"], + enc_algo, + ) + return {"enc": "clear", "load": {"ret": "bad enc algo"}} except Exception as exc: # pylint: disable=broad-except log.warning("Token failed to decrypt %s", exc) # Token failed to decrypt, send back 
the salty bacon to @@ -691,10 +705,17 @@ class ReqServerChannel: try: mtoken = self.master_key.key.decrypt(load["token"], enc_algo) ret["token"] = pub.encrypt(mtoken, enc_algo) + except UnsupportedAlgorithm as exc: + log.info( + "Minion %s tried to authenticate with unsupported encryption algorithm: %s", + load["id"], + enc_algo, + ) + return {"enc": "clear", "load": {"ret": "bad enc algo"}} except Exception as exc: # pylint: disable=broad-except # Token failed to decrypt, send back the salty bacon to # support older minions - log.warning("Token failed to decrypt: %s", exc) + log.warning("Token failed to decrypt: %r", exc) aes = salt.master.SMaster.secrets["aes"]["secret"].value ret["aes"] = pub.encrypt(aes, enc_algo) diff --git a/salt/crypt.py b/salt/crypt.py index 6f44da60198..f30c3b6b628 100644 --- a/salt/crypt.py +++ b/salt/crypt.py @@ -40,6 +40,7 @@ from salt.exceptions import ( MasterExit, SaltClientError, SaltReqTimeoutError, + UnsupportedAlgorithm, ) try: @@ -238,19 +239,27 @@ class PrivateKey(BaseKey): def sign(self, data, algorithm=PKCS1v15_SHA1): _padding = self.parse_padding_for_signing(algorithm) _hash = self.parse_hash(algorithm) - return self.key.sign(salt.utils.stringutils.to_bytes(data), _padding(), _hash()) + try: + return self.key.sign( + salt.utils.stringutils.to_bytes(data), _padding(), _hash() + ) + except cryptography.exceptions.UnsupportedAlgorithm: + raise UnsupportedAlgorithm(f"Unsupported algorithm: {algorithm}") def decrypt(self, data, algorithm=OAEP_SHA1): _padding = self.parse_padding_for_encryption(algorithm) _hash = self.parse_hash(algorithm) - return self.key.decrypt( - data, - _padding( - mgf=padding.MGF1(algorithm=_hash()), - algorithm=_hash(), - label=None, - ), - ) + try: + return self.key.decrypt( + data, + _padding( + mgf=padding.MGF1(algorithm=_hash()), + algorithm=_hash(), + label=None, + ), + ) + except cryptography.exceptions.UnsupportedAlgorithm: + raise UnsupportedAlgorithm(f"Unsupported algorithm: {algorithm}") class 
PublicKey(BaseKey): @@ -265,14 +274,17 @@ class PublicKey(BaseKey): _padding = self.parse_padding_for_encryption(algorithm) _hash = self.parse_hash(algorithm) bdata = salt.utils.stringutils.to_bytes(data) - return self.key.encrypt( - bdata, - _padding( - mgf=padding.MGF1(algorithm=_hash()), - algorithm=_hash(), - label=None, - ), - ) + try: + return self.key.encrypt( + bdata, + _padding( + mgf=padding.MGF1(algorithm=_hash()), + algorithm=_hash(), + label=None, + ), + ) + except cryptography.exceptions.UnsupportedAlgorithm: + raise UnsupportedAlgorithm(f"Unsupported algorithm: {algorithm}") def verify(self, data, signature, algorithm=PKCS1v15_SHA1): _padding = self.parse_padding_for_signing(algorithm) @@ -752,6 +764,18 @@ class AsyncAuth: "Authentication wait time is %s", acceptance_wait_time ) continue + elif creds == "bad enc algo": + log.error( + "This minion is using a encryption algorithm that is " + "not supported by it's Master. Please check your minion configutation." + ) + break + elif creds == "bad sig algo": + log.error( + "This minion is using a signing algorithm that is " + "not supported by it's Master. Please check your minion configutation." 
+ ) + break break if not isinstance(creds, dict) or "aes" not in creds: if self.opts.get("detect_mode") is True: @@ -853,6 +877,13 @@ class AsyncAuth: if not isinstance(payload, dict) or "load" not in payload: log.error("Sign-in attempt failed: %s", payload) return False + elif isinstance(payload["load"], dict) and "ret" in payload["load"]: + if payload["load"]["ret"] == "bad enc algo": + log.error("Sign-in attempt failed: %s", payload) + return "bad enc algo" + elif payload["load"]["ret"] == "bad sig algo": + log.error("Sign-in attempt failed: %s", payload) + return "bad sig algo" clear_signed_data = payload["load"] clear_signature = payload["sig"] diff --git a/salt/exceptions.py b/salt/exceptions.py index 57a6175de2d..9dd9175281a 100644 --- a/salt/exceptions.py +++ b/salt/exceptions.py @@ -362,6 +362,12 @@ class AuthorizationError(SaltException): """ +class UnsupportedAlgorithm(SaltException): + """ + Thrown when a requested encryption or signing algorithm is un-supported. + """ + + class SaltDaemonNotRunning(SaltException): """ Throw when a running master/minion/syndic is not running but is needed to diff --git a/tests/pytests/unit/crypt/test_crypt_cryptography.py b/tests/pytests/unit/crypt/test_crypt_cryptography.py index f0620f5ee69..9a641b292d5 100644 --- a/tests/pytests/unit/crypt/test_crypt_cryptography.py +++ b/tests/pytests/unit/crypt/test_crypt_cryptography.py @@ -2,7 +2,6 @@ import hashlib import hmac import os -import cryptography.exceptions import pytest from cryptography.hazmat.backends.openssl import backend from cryptography.hazmat.primitives import serialization @@ -344,7 +343,7 @@ def test_loading_encrypted_openssl_format(openssl_encrypted_key, passphrase, tmp @pytest.mark.skipif(not FIPS_TESTRUN, reason="Only valid when in FIPS mode") def test_fips_bad_signing_algo(private_key, passphrase): key = salt.crypt.PrivateKey(private_key, passphrase) - with pytest.raises(cryptography.exceptions.UnsupportedAlgorithm): + with 
pytest.raises(salt.exceptions.UnsupportedAlgorithm): key.sign("meh", salt.crypt.PKCS1v15_SHA1) @@ -361,7 +360,7 @@ def test_fips_bad_signing_algo_verification(private_key, passphrase): @pytest.mark.skipif(not FIPS_TESTRUN, reason="Only valid when in FIPS mode") def test_fips_bad_encryption_algo(private_key, passphrase): key = salt.crypt.PublicKey(private_key.replace(".pem", ".pub")) - with pytest.raises(cryptography.exceptions.UnsupportedAlgorithm): + with pytest.raises(salt.exceptions.UnsupportedAlgorithm): key.encrypt("meh", salt.crypt.OAEP_SHA1) @@ -370,5 +369,5 @@ def test_fips_bad_decryption_algo(private_key, passphrase): pubkey = LegacyPublicKey(private_key.replace(".pem", ".pub")) data = pubkey.encrypt("meh") key = salt.crypt.PrivateKey(private_key, passphrase) - with pytest.raises(cryptography.exceptions.UnsupportedAlgorithm): + with pytest.raises(salt.exceptions.UnsupportedAlgorithm): key.decrypt(data) From 9d24b796377da7382171398662894eab0db286f9 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Mon, 3 Jun 2024 13:02:36 -0700 Subject: [PATCH 100/160] Clean up cruft and add comment --- salt/channel/server.py | 6 ++---- tests/pytests/functional/channel/test_server.py | 12 ------------ 2 files changed, 2 insertions(+), 16 deletions(-) diff --git a/salt/channel/server.py b/salt/channel/server.py index f7b73b20dc3..898e5dc9cee 100644 --- a/salt/channel/server.py +++ b/salt/channel/server.py @@ -53,11 +53,9 @@ class ReqServerChannel: def __init__(self, opts, transport): self.opts = opts self.transport = transport + # The event and master_key attributes will be populated after fork. 
self.event = None - # self.event = salt.utils.event.get_master_event( - # self.opts, self.opts["sock_dir"], listen=False - # ) - # self.master_key = salt.crypt.MasterKeys(self.opts) + self.master_key = None def pre_fork(self, process_manager): """ diff --git a/tests/pytests/functional/channel/test_server.py b/tests/pytests/functional/channel/test_server.py index e05e7e38937..749800760f4 100644 --- a/tests/pytests/functional/channel/test_server.py +++ b/tests/pytests/functional/channel/test_server.py @@ -78,18 +78,6 @@ def master_config(root_dir, transport): "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" ), ) - # master_conf["transport"] = transport - # master_conf["id"] = "master" - # master_conf["root_dir"] = str(root_dir) - # master_conf["sock_dir"] = str(root_dir) - # master_conf["interface"] = "127.0.0.1" - # master_conf["publish_port"] = ports.get_unused_localhost_port() - # master_conf["ret_port"] = ports.get_unused_localhost_port() - # master_conf["pki_dir"] = str(root_dir / "pki") - # "fips_mode": FIPS_TESTRUN, - # "publish_signing_algorithm": ( - # "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" - # ), os.makedirs(master_conf["pki_dir"]) salt.crypt.gen_keys(master_conf["pki_dir"], "master", 4096) minions_keys = os.path.join(master_conf["pki_dir"], "minions") From c408cddfaf10a318b7d16ae2a3e136ca25c0a606 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Mon, 3 Jun 2024 13:08:46 -0700 Subject: [PATCH 101/160] Remove un-needed print statments --- tests/pytests/functional/transport/server/test_req_channel.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/tests/pytests/functional/transport/server/test_req_channel.py b/tests/pytests/functional/transport/server/test_req_channel.py index 2fb3b569d54..ae31f8f171d 100644 --- a/tests/pytests/functional/transport/server/test_req_channel.py +++ b/tests/pytests/functional/transport/server/test_req_channel.py @@ -105,7 +105,6 @@ class ReqServerChannelProcess(salt.utils.process.SignalHandlingProcess): @pytest.fixture def req_server_channel(salt_master, req_channel_crypt): - print(f"master pub {salt_master.config['publish_signing_algorithm']}") req_server_channel_process = ReqServerChannelProcess( salt_master.config.copy(), req_channel_crypt ) @@ -129,7 +128,6 @@ def req_channel_crypt(request): @pytest.fixture def push_channel(req_server_channel, salt_minion, req_channel_crypt): - print(f"minion encryption {salt_minion.config['encryption_algorithm']}") with salt.channel.client.ReqChannel.factory( salt_minion.config, crypt=req_channel_crypt ) as _req_channel: From d4b4067ee4ea91abcb6c0357d77806e5fe49ac77 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Wed, 5 Jun 2024 15:42:49 -0700 Subject: [PATCH 102/160] Add tests for garbage input --- salt/crypt.py | 10 +- tests/pytests/unit/transport/test_zeromq.py | 300 ++++++++++++++++++++ 2 files changed, 308 insertions(+), 2 deletions(-) diff --git a/salt/crypt.py b/salt/crypt.py index f30c3b6b628..6180da228ef 100644 --- a/salt/crypt.py +++ b/salt/crypt.py @@ -203,20 +203,26 @@ class BaseKey: @staticmethod def parse_padding_for_signing(algorithm): + if algorithm not in VALID_SIGNING_ALGORITHMS: + raise UnsupportedAlgorithm(f"Invalid signing algorithm: {algorithm}") _pad, _hash = algorithm.split("-", 1) if _pad not in VALID_PADDING_FOR_SIGNING: - raise Exception("Invalid padding algorithm") + raise UnsupportedAlgorithm(f"Invalid padding algorithm: {_pad}") return getattr(padding, _pad) @staticmethod def parse_padding_for_encryption(algorithm): + if algorithm not in VALID_ENCRYPTION_ALGORITHMS: + raise UnsupportedAlgorithm(f"Invalid encryption algorithm: {algorithm}") _pad, _hash = algorithm.split("-", 1) if _pad not in VALID_PADDING_FOR_ENCRYPTION: - raise Exception("Invalid padding algorithm") + raise UnsupportedAlgorithm(f"Invalid padding algorithm: {_pad}") return getattr(padding, _pad) @staticmethod def parse_hash(algorithm): + if "-" not in algorithm: + raise UnsupportedAlgorithm(f"Invalid encryption algorithm: {algorithm}") _pad, _hash = algorithm.split("-", 1) if _hash not in VALID_HASHES: raise Exception("Invalid hashing algorithm") diff --git a/tests/pytests/unit/transport/test_zeromq.py b/tests/pytests/unit/transport/test_zeromq.py index f3c9b811e7d..7def553a6bd 100644 --- a/tests/pytests/unit/transport/test_zeromq.py +++ b/tests/pytests/unit/transport/test_zeromq.py @@ -1688,3 +1688,303 @@ async def test_unclosed_publish_client(minion_opts, io_loop): client.__del__() # pylint: disable=unnecessary-dunder-call finally: client.close() + + +@pytest.mark.skipif(not FIPS_TESTRUN, reason="Only run on fips enabled platforms") +def 
test_req_server_auth_unsupported_sig_algo( + pki_dir, minion_opts, master_opts, caplog +): + minion_opts.update( + { + "master_uri": "tcp://127.0.0.1:4506", + "interface": "127.0.0.1", + "ret_port": 4506, + "ipv6": False, + "sock_dir": ".", + "pki_dir": str(pki_dir.joinpath("minion")), + "id": "minion", + "__role": "minion", + "keysize": 4096, + "max_minions": 0, + "auto_accept": False, + "open_mode": False, + "key_pass": None, + "master_sign_pubkey": False, + "publish_port": 4505, + "auth_mode": 1, + } + ) + SMaster.secrets["aes"] = { + "secret": multiprocessing.Array( + ctypes.c_char, + salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), + ), + "reload": salt.crypt.Crypticle.generate_key_string, + } + master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) + server = salt.channel.server.ReqServerChannel.factory(master_opts) + + server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) + server.cache_cli = False + server.event = salt.utils.event.get_master_event( + master_opts, master_opts["sock_dir"], listen=False + ) + server.master_key = salt.crypt.MasterKeys(server.opts) + pub = salt.crypt.PublicKey(str(pki_dir.joinpath("master", "master.pub"))) + token = pub.encrypt( + salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), + algorithm=minion_opts["encryption_algorithm"], + ) + nonce = uuid.uuid4().hex + + # We need to read the public key with fopen otherwise the newlines might + # not match on windows. 
+ with salt.utils.files.fopen( + str(pki_dir.joinpath("minion", "minion.pub")), "r" + ) as fp: + pub_key = salt.crypt.clean_key(fp.read()) + + load = { + "version": 2, + "cmd": "_auth", + "id": "minion", + "token": token, + "pub": pub_key, + "nonce": "asdfse", + "enc_algo": minion_opts["encryption_algorithm"], + "sig_algo": salt.crypt.PKCS1v15_SHA1, + } + with caplog.at_level(logging.INFO): + ret = server._auth(load, sign_messages=True) + assert ( + "Minion tried to authenticate with unsupported signing algorithm: PKCS1v15-SHA1" + in caplog.text + ) + assert "load" in ret + assert "ret" in ret["load"] + assert ret["load"]["ret"] == "bad sig algo" + + +def test_req_server_auth_garbage_sig_algo(pki_dir, minion_opts, master_opts, caplog): + minion_opts.update( + { + "master_uri": "tcp://127.0.0.1:4506", + "interface": "127.0.0.1", + "ret_port": 4506, + "ipv6": False, + "sock_dir": ".", + "pki_dir": str(pki_dir.joinpath("minion")), + "id": "minion", + "__role": "minion", + "keysize": 4096, + "max_minions": 0, + "auto_accept": False, + "open_mode": False, + "key_pass": None, + "master_sign_pubkey": False, + "publish_port": 4505, + "auth_mode": 1, + } + ) + SMaster.secrets["aes"] = { + "secret": multiprocessing.Array( + ctypes.c_char, + salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), + ), + "reload": salt.crypt.Crypticle.generate_key_string, + } + master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) + server = salt.channel.server.ReqServerChannel.factory(master_opts) + + server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) + server.cache_cli = False + server.event = salt.utils.event.get_master_event( + master_opts, master_opts["sock_dir"], listen=False + ) + server.master_key = salt.crypt.MasterKeys(server.opts) + pub = salt.crypt.PublicKey(str(pki_dir.joinpath("master", "master.pub"))) + token = pub.encrypt( + salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), + 
algorithm=minion_opts["encryption_algorithm"], + ) + nonce = uuid.uuid4().hex + + # We need to read the public key with fopen otherwise the newlines might + # not match on windows. + with salt.utils.files.fopen( + str(pki_dir.joinpath("minion", "minion.pub")), "r" + ) as fp: + pub_key = salt.crypt.clean_key(fp.read()) + + load = { + "version": 2, + "cmd": "_auth", + "id": "minion", + "token": token, + "pub": pub_key, + "nonce": "asdfse", + "enc_algo": minion_opts["encryption_algorithm"], + "sig_algo": "IAMNOTANALGO", + } + with caplog.at_level(logging.INFO): + ret = server._auth(load, sign_messages=True) + assert ( + "Minion tried to authenticate with unsupported signing algorithm: IAMNOTANALGO" + in caplog.text + ) + assert "load" in ret + assert "ret" in ret["load"] + assert ret["load"]["ret"] == "bad sig algo" + + +@pytest.mark.skipif(not FIPS_TESTRUN, reason="Only run on fips enabled platforms") +def test_req_server_auth_unsupported_enc_algo( + pki_dir, minion_opts, master_opts, caplog +): + minion_opts.update( + { + "master_uri": "tcp://127.0.0.1:4506", + "interface": "127.0.0.1", + "ret_port": 4506, + "ipv6": False, + "sock_dir": ".", + "pki_dir": str(pki_dir.joinpath("minion")), + "id": "minion", + "__role": "minion", + "keysize": 4096, + "max_minions": 0, + "auto_accept": False, + "open_mode": False, + "key_pass": None, + "master_sign_pubkey": False, + "publish_port": 4505, + "auth_mode": 1, + } + ) + SMaster.secrets["aes"] = { + "secret": multiprocessing.Array( + ctypes.c_char, + salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), + ), + "reload": salt.crypt.Crypticle.generate_key_string, + } + master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) + server = salt.channel.server.ReqServerChannel.factory(master_opts) + + server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) + server.cache_cli = False + server.event = salt.utils.event.get_master_event( + master_opts, master_opts["sock_dir"], listen=False + ) + 
server.master_key = salt.crypt.MasterKeys(server.opts) + import tests.pytests.unit.crypt + + pub = tests.pytests.unit.crypt.LegacyPublicKey( + str(pki_dir.joinpath("master", "master.pub")) + ) + token = pub.encrypt( + salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), + ) + nonce = uuid.uuid4().hex + + # We need to read the public key with fopen otherwise the newlines might + # not match on windows. + with salt.utils.files.fopen( + str(pki_dir.joinpath("minion", "minion.pub")), "r" + ) as fp: + pub_key = salt.crypt.clean_key(fp.read()) + + load = { + "version": 2, + "cmd": "_auth", + "id": "minion", + "token": token, + "pub": pub_key, + "nonce": "asdfse", + "enc_algo": "OAEP-SHA1", + "sig_algo": minion_opts["signing_algorithm"], + } + with caplog.at_level(logging.INFO): + ret = server._auth(load, sign_messages=True) + assert ( + "Minion minion tried to authenticate with unsupported encryption algorithm: OAEP-SHA1" + in caplog.text + ) + assert "load" in ret + assert "ret" in ret["load"] + assert ret["load"]["ret"] == "bad enc algo" + + +def test_req_server_auth_garbage_enc_algo(pki_dir, minion_opts, master_opts, caplog): + minion_opts.update( + { + "master_uri": "tcp://127.0.0.1:4506", + "interface": "127.0.0.1", + "ret_port": 4506, + "ipv6": False, + "sock_dir": ".", + "pki_dir": str(pki_dir.joinpath("minion")), + "id": "minion", + "__role": "minion", + "keysize": 4096, + "max_minions": 0, + "auto_accept": False, + "open_mode": False, + "key_pass": None, + "master_sign_pubkey": False, + "publish_port": 4505, + "auth_mode": 1, + } + ) + SMaster.secrets["aes"] = { + "secret": multiprocessing.Array( + ctypes.c_char, + salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), + ), + "reload": salt.crypt.Crypticle.generate_key_string, + } + master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) + server = salt.channel.server.ReqServerChannel.factory(master_opts) + + server.auto_key = 
salt.daemons.masterapi.AutoKey(server.opts) + server.cache_cli = False + server.event = salt.utils.event.get_master_event( + master_opts, master_opts["sock_dir"], listen=False + ) + server.master_key = salt.crypt.MasterKeys(server.opts) + import tests.pytests.unit.crypt + + pub = tests.pytests.unit.crypt.LegacyPublicKey( + str(pki_dir.joinpath("master", "master.pub")) + ) + token = pub.encrypt( + salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), + ) + nonce = uuid.uuid4().hex + + # We need to read the public key with fopen otherwise the newlines might + # not match on windows. + with salt.utils.files.fopen( + str(pki_dir.joinpath("minion", "minion.pub")), "r" + ) as fp: + pub_key = salt.crypt.clean_key(fp.read()) + + load = { + "version": 2, + "cmd": "_auth", + "id": "minion", + "token": token, + "pub": pub_key, + "nonce": "asdfse", + "enc_algo": "IAMNOTAENCALGO", + "sig_algo": minion_opts["signing_algorithm"], + } + with caplog.at_level(logging.INFO): + ret = server._auth(load, sign_messages=True) + assert ( + "Minion minion tried to authenticate with unsupported encryption algorithm: IAMNOTAENCALGO" + in caplog.text + ) + assert "load" in ret + assert "ret" in ret["load"] + assert ret["load"]["ret"] == "bad enc algo" From ecd92059af0ec64f2ef8ad461f8563872f9fd54c Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Wed, 5 Jun 2024 15:51:49 -0700 Subject: [PATCH 103/160] Add changelog for fips fix --- changelog/66579.fixed.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog/66579.fixed.md diff --git a/changelog/66579.fixed.md b/changelog/66579.fixed.md new file mode 100644 index 00000000000..ccef663b846 --- /dev/null +++ b/changelog/66579.fixed.md @@ -0,0 +1 @@ +Fix support for FIPS approved encryption and signing algorithms. 
From 1b39837f34f99cee9bf6663573d96ffab4db74b4 Mon Sep 17 00:00:00 2001 From: ScriptAutomate Date: Fri, 14 Jun 2024 15:40:57 -0500 Subject: [PATCH 104/160] Use older Linux OS for wider CI deps compatibility --- tools/ci.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tools/ci.py b/tools/ci.py index 0f9f30ca126..c5cf22cc851 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -967,8 +967,8 @@ def get_ci_deps_matrix(ctx: Context): _matrix = { "linux": [ - {"distro-slug": "rockylinux-9", "arch": "x86_64"}, - {"distro-slug": "rockylinux-9-arm64", "arch": "arm64"}, + {"distro-slug": "amazonlinux-2", "arch": "x86_64"}, + {"distro-slug": "amazonlinux-2-arm64", "arch": "arm64"}, ], "macos": [ {"distro-slug": "macos-12", "arch": "x86_64"}, From 44200b16cccd9aadf201bad0e61fa635ccfc92bf Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Wed, 19 Jun 2024 03:48:01 -0700 Subject: [PATCH 105/160] Skip package meta tests on old rpm cli versions --- .../pytests/pkg/integration/test_pkg_meta.py | 29 +++++++++++++++++-- 1 file changed, 27 insertions(+), 2 deletions(-) diff --git a/tests/pytests/pkg/integration/test_pkg_meta.py b/tests/pytests/pkg/integration/test_pkg_meta.py index ead194c5eea..078b07f6518 100644 --- a/tests/pytests/pkg/integration/test_pkg_meta.py +++ b/tests/pytests/pkg/integration/test_pkg_meta.py @@ -1,5 +1,6 @@ import subprocess +import packaging import pytest from pytestskipmarkers.utils import platform @@ -23,6 +24,17 @@ def provides_arch(): return "x86-64" +@pytest.fixture +def rpm_version(): + proc = subprocess.run(["rpm", "--version"], capture_output=True, check=True) + return packaging.version.Version(proc.stdout.decode().rsplit(" ", 1)[-1]) + + +@pytest.fixture +def required_version(): + return packaging.version.Version("4.12") + + @pytest.fixture def artifact_version(install_salt): return install_salt.artifact_version @@ -35,7 +47,14 @@ def package(artifact_version, pkg_arch): @pytest.mark.skipif(not salt.utils.path.which("rpm"), 
reason="rpm is not installed") -def test_provides(install_salt, package, artifact_version, provides_arch): +def test_provides( + install_salt, + package, + artifact_version, + provides_arch, + rpm_version, + required_version, +): if install_salt.distro_id not in ( "almalinux", "rocky", @@ -46,6 +65,8 @@ def test_provides(install_salt, package, artifact_version, provides_arch): "photon", ): pytest.skip("Only tests rpm packages") + if rpm_version < required_version: + pytest.skip(f"Test requires rpm version {required_version}") assert package.exists() valid_provides = [ @@ -66,7 +87,9 @@ def test_provides(install_salt, package, artifact_version, provides_arch): @pytest.mark.skipif(not salt.utils.path.which("rpm"), reason="rpm is not installed") -def test_requires(install_salt, package, artifact_version): +def test_requires( + install_salt, package, artifact_version, rpm_version, required_version +): if install_salt.distro_id not in ( "almalinux", "rocky", @@ -77,6 +100,8 @@ def test_requires(install_salt, package, artifact_version): "photon", ): pytest.skip("Only tests rpm packages") + if rpm_version < required_version: + pytest.skip(f"Test requires rpm version {required_version}") assert package.exists() valid_requires = [ "manual: /bin/sh", From ae459fa73a9dd6d51278347fefa0fbe24760faf3 Mon Sep 17 00:00:00 2001 From: David Murphy Date: Fri, 14 Jun 2024 14:45:52 -0600 Subject: [PATCH 106/160] Update to include croniter in pkg requirements --- requirements/base.txt | 1 + requirements/static/ci/common.in | 2 +- requirements/static/ci/py3.10/cloud.txt | 6 ++++-- requirements/static/ci/py3.10/darwin.txt | 7 +++++-- requirements/static/ci/py3.10/docs.txt | 10 ++++++++++ requirements/static/ci/py3.10/freebsd.txt | 7 +++++-- requirements/static/ci/py3.10/lint.txt | 6 ++++-- requirements/static/ci/py3.10/linux.txt | 7 +++++-- requirements/static/ci/py3.11/cloud.txt | 6 ++++-- requirements/static/ci/py3.11/darwin.txt | 7 +++++-- requirements/static/ci/py3.11/docs.txt | 10 
++++++++++ requirements/static/ci/py3.11/freebsd.txt | 7 +++++-- requirements/static/ci/py3.11/lint.txt | 6 ++++-- requirements/static/ci/py3.11/linux.txt | 7 +++++-- requirements/static/ci/py3.12/cloud.txt | 6 ++++-- requirements/static/ci/py3.12/darwin.txt | 7 +++++-- requirements/static/ci/py3.12/docs.txt | 10 ++++++++++ requirements/static/ci/py3.12/freebsd.txt | 7 +++++-- requirements/static/ci/py3.12/lint.txt | 6 ++++-- requirements/static/ci/py3.12/linux.txt | 7 +++++-- requirements/static/ci/py3.7/cloud.txt | 6 ++++-- requirements/static/ci/py3.7/docs.txt | 10 ++++++++++ requirements/static/ci/py3.7/freebsd.txt | 7 +++++-- requirements/static/ci/py3.7/linux.txt | 7 +++++-- requirements/static/ci/py3.8/cloud.txt | 6 ++++-- requirements/static/ci/py3.8/docs.txt | 10 ++++++++++ requirements/static/ci/py3.8/freebsd.txt | 7 +++++-- requirements/static/ci/py3.8/lint.txt | 6 ++++-- requirements/static/ci/py3.8/linux.txt | 7 +++++-- requirements/static/ci/py3.9/cloud.txt | 6 ++++-- requirements/static/ci/py3.9/darwin.txt | 7 +++++-- requirements/static/ci/py3.9/docs.txt | 10 ++++++++++ requirements/static/ci/py3.9/freebsd.txt | 7 +++++-- requirements/static/ci/py3.9/lint.txt | 6 ++++-- requirements/static/ci/py3.9/linux.txt | 7 +++++-- requirements/static/pkg/py3.10/darwin.txt | 10 ++++++++-- requirements/static/pkg/py3.10/freebsd.txt | 10 ++++++++-- requirements/static/pkg/py3.10/linux.txt | 10 ++++++++-- requirements/static/pkg/py3.11/darwin.txt | 10 ++++++++-- requirements/static/pkg/py3.11/freebsd.txt | 10 ++++++++-- requirements/static/pkg/py3.11/linux.txt | 10 ++++++++-- requirements/static/pkg/py3.12/darwin.txt | 10 ++++++++-- requirements/static/pkg/py3.12/freebsd.txt | 10 ++++++++-- requirements/static/pkg/py3.12/linux.txt | 10 ++++++++-- requirements/static/pkg/py3.7/freebsd.txt | 10 ++++++++-- requirements/static/pkg/py3.7/linux.txt | 10 ++++++++-- requirements/static/pkg/py3.8/freebsd.txt | 10 ++++++++-- requirements/static/pkg/py3.8/linux.txt | 10 
++++++++-- requirements/static/pkg/py3.9/darwin.txt | 10 ++++++++-- requirements/static/pkg/py3.9/freebsd.txt | 10 ++++++++-- requirements/static/pkg/py3.9/linux.txt | 10 ++++++++-- 51 files changed, 314 insertions(+), 87 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index f1cc4adf560..32a8b0f6221 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -10,5 +10,6 @@ distro>=1.0.1 psutil>=5.0.0 packaging>=21.3 looseversion +croniter>=0.3.0,!=0.3.22"; sys_platform != 'win32' # We need contextvars for salt-ssh contextvars diff --git a/requirements/static/ci/common.in b/requirements/static/ci/common.in index da673b5bbef..761e34a08a8 100644 --- a/requirements/static/ci/common.in +++ b/requirements/static/ci/common.in @@ -11,7 +11,7 @@ certifi>=2022.12.07 cffi>=1.14.6 cherrypy>=17.4.1 clustershell -croniter>=0.3.0,!=0.3.22"; sys_platform != 'win32' +# croniter>=0.3.0,!=0.3.22"; sys_platform != 'win32' dnspython etcd3-py==0.1.6 gitpython>=3.1.37 diff --git a/requirements/static/ci/py3.10/cloud.txt b/requirements/static/ci/py3.10/cloud.txt index b43f5f3143e..0eb2d5177d6 100644 --- a/requirements/static/ci/py3.10/cloud.txt +++ b/requirements/static/ci/py3.10/cloud.txt @@ -118,10 +118,11 @@ contextvars==2.4 # -c requirements/static/ci/../pkg/py3.10/linux.txt # -c requirements/static/ci/py3.10/linux.txt # -r requirements/base.txt -croniter==0.3.29 ; sys_platform != "win32" +croniter==2.0.5 ; sys_platform != "win32" # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt # -c requirements/static/ci/py3.10/linux.txt - # -r requirements/static/ci/common.in + # -r requirements/base.txt cryptography==42.0.5 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt @@ -535,6 +536,7 @@ pytz==2022.1 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt # -c requirements/static/ci/py3.10/linux.txt + # croniter # moto # tempora pyvmomi==6.7.1.2018.12 diff --git a/requirements/static/ci/py3.10/darwin.txt 
b/requirements/static/ci/py3.10/darwin.txt index dacd02681b4..129898b242a 100644 --- a/requirements/static/ci/py3.10/darwin.txt +++ b/requirements/static/ci/py3.10/darwin.txt @@ -86,8 +86,10 @@ contextvars==2.4 # via # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/base.txt -croniter==0.3.29 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in +croniter==2.0.5 ; sys_platform != "win32" + # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt + # -r requirements/base.txt cryptography==42.0.5 # via # -c requirements/static/ci/../pkg/py3.10/darwin.txt @@ -376,6 +378,7 @@ python-gnupg==0.4.8 pytz==2022.1 # via # -c requirements/static/ci/../pkg/py3.10/darwin.txt + # croniter # moto # tempora pyvmomi==6.7.1.2018.12 diff --git a/requirements/static/ci/py3.10/docs.txt b/requirements/static/ci/py3.10/docs.txt index 9bece493c21..8f04442954a 100644 --- a/requirements/static/ci/py3.10/docs.txt +++ b/requirements/static/ci/py3.10/docs.txt @@ -28,6 +28,10 @@ contextvars==2.4 # via # -c requirements/static/ci/py3.10/linux.txt # -r requirements/base.txt +croniter==2.0.5 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/base.txt distro==1.5.0 # via # -c requirements/static/ci/py3.10/linux.txt @@ -125,10 +129,15 @@ pyenchant==3.2.2 # via sphinxcontrib-spelling pygments==2.17.2 # via sphinx +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/py3.10/linux.txt + # croniter pytz==2022.1 # via # -c requirements/static/ci/py3.10/linux.txt # babel + # croniter # tempora pyyaml==6.0.1 # via @@ -149,6 +158,7 @@ six==1.16.0 # -c requirements/static/ci/py3.10/linux.txt # cheroot # more-itertools + # python-dateutil # sphinxcontrib.httpdomain snowballstemmer==2.1.0 # via sphinx diff --git a/requirements/static/ci/py3.10/freebsd.txt b/requirements/static/ci/py3.10/freebsd.txt index cfaa9ee68e3..bea5ec1ca29 100644 --- a/requirements/static/ci/py3.10/freebsd.txt +++ 
b/requirements/static/ci/py3.10/freebsd.txt @@ -83,8 +83,10 @@ contextvars==2.4 # via # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r requirements/base.txt -croniter==0.3.29 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in +croniter==2.0.5 ; sys_platform != "win32" + # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt + # -r requirements/base.txt cryptography==42.0.5 # via # -c requirements/static/ci/../pkg/py3.10/freebsd.txt @@ -369,6 +371,7 @@ python-gnupg==0.4.8 pytz==2022.1 # via # -c requirements/static/ci/../pkg/py3.10/freebsd.txt + # croniter # moto # tempora pyvmomi==6.7.1.2018.12 diff --git a/requirements/static/ci/py3.10/lint.txt b/requirements/static/ci/py3.10/lint.txt index 4dc1b40f8af..b9c4b66f8d1 100644 --- a/requirements/static/ci/py3.10/lint.txt +++ b/requirements/static/ci/py3.10/lint.txt @@ -130,10 +130,11 @@ contextvars==2.4 # -c requirements/static/ci/../pkg/py3.10/linux.txt # -c requirements/static/ci/py3.10/linux.txt # -r requirements/base.txt -croniter==0.3.29 ; sys_platform != "win32" +croniter==2.0.5 ; sys_platform != "win32" # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt # -c requirements/static/ci/py3.10/linux.txt - # -r requirements/static/ci/common.in + # -r requirements/base.txt cryptography==42.0.5 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt @@ -499,6 +500,7 @@ pytz==2022.1 # -c requirements/static/ci/../pkg/py3.10/linux.txt # -c requirements/static/ci/py3.10/linux.txt # apscheduler + # croniter # moto # python-telegram-bot # tempora diff --git a/requirements/static/ci/py3.10/linux.txt b/requirements/static/ci/py3.10/linux.txt index 61ffeccd08c..2641bd1d25f 100644 --- a/requirements/static/ci/py3.10/linux.txt +++ b/requirements/static/ci/py3.10/linux.txt @@ -92,8 +92,10 @@ contextvars==2.4 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt # -r requirements/base.txt -croniter==0.3.29 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in 
+croniter==2.0.5 ; sys_platform != "win32" + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -r requirements/base.txt cryptography==42.0.5 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt @@ -391,6 +393,7 @@ pytz==2022.1 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt # apscheduler + # croniter # moto # python-telegram-bot # tempora diff --git a/requirements/static/ci/py3.11/cloud.txt b/requirements/static/ci/py3.11/cloud.txt index 0ff243302ee..4a745cd4f31 100644 --- a/requirements/static/ci/py3.11/cloud.txt +++ b/requirements/static/ci/py3.11/cloud.txt @@ -114,10 +114,11 @@ contextvars==2.4 # -c requirements/static/ci/../pkg/py3.11/linux.txt # -c requirements/static/ci/py3.11/linux.txt # -r requirements/base.txt -croniter==0.3.29 ; sys_platform != "win32" +croniter==2.0.5 ; sys_platform != "win32" # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt # -c requirements/static/ci/py3.11/linux.txt - # -r requirements/static/ci/common.in + # -r requirements/base.txt cryptography==42.0.5 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt @@ -499,6 +500,7 @@ pytz==2022.1 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt # -c requirements/static/ci/py3.11/linux.txt + # croniter # moto # tempora pyvmomi==6.7.1.2018.12 diff --git a/requirements/static/ci/py3.11/darwin.txt b/requirements/static/ci/py3.11/darwin.txt index 3818583b3e7..b32c62f48eb 100644 --- a/requirements/static/ci/py3.11/darwin.txt +++ b/requirements/static/ci/py3.11/darwin.txt @@ -82,8 +82,10 @@ contextvars==2.4 # via # -c requirements/static/ci/../pkg/py3.11/darwin.txt # -r requirements/base.txt -croniter==0.3.29 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in +croniter==2.0.5 ; sys_platform != "win32" + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/base.txt cryptography==42.0.5 # via # -c requirements/static/ci/../pkg/py3.11/darwin.txt @@ -347,6 +349,7 @@ python-gnupg==0.4.8 pytz==2022.1 # via 
# -c requirements/static/ci/../pkg/py3.11/darwin.txt + # croniter # moto # tempora pyvmomi==6.7.1.2018.12 diff --git a/requirements/static/ci/py3.11/docs.txt b/requirements/static/ci/py3.11/docs.txt index d00eefe0b60..8a55c4b94f7 100644 --- a/requirements/static/ci/py3.11/docs.txt +++ b/requirements/static/ci/py3.11/docs.txt @@ -28,6 +28,10 @@ contextvars==2.4 # via # -c requirements/static/ci/py3.11/linux.txt # -r requirements/base.txt +croniter==2.0.5 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt distro==1.5.0 # via # -c requirements/static/ci/py3.11/linux.txt @@ -125,10 +129,15 @@ pyenchant==3.2.2 # via sphinxcontrib-spelling pygments==2.17.2 # via sphinx +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # croniter pytz==2022.1 # via # -c requirements/static/ci/py3.11/linux.txt # babel + # croniter # tempora pyyaml==6.0.1 # via @@ -149,6 +158,7 @@ six==1.16.0 # -c requirements/static/ci/py3.11/linux.txt # cheroot # more-itertools + # python-dateutil # sphinxcontrib.httpdomain snowballstemmer==2.1.0 # via sphinx diff --git a/requirements/static/ci/py3.11/freebsd.txt b/requirements/static/ci/py3.11/freebsd.txt index fb2079a8dbb..c0649b96f54 100644 --- a/requirements/static/ci/py3.11/freebsd.txt +++ b/requirements/static/ci/py3.11/freebsd.txt @@ -81,8 +81,10 @@ contextvars==2.4 # via # -c requirements/static/ci/../pkg/py3.11/freebsd.txt # -r requirements/base.txt -croniter==0.3.29 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in +croniter==2.0.5 ; sys_platform != "win32" + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/base.txt cryptography==42.0.5 # via # -c requirements/static/ci/../pkg/py3.11/freebsd.txt @@ -346,6 +348,7 @@ python-gnupg==0.4.8 pytz==2022.1 # via # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # croniter # moto # tempora pyvmomi==6.7.1.2018.12 diff --git 
a/requirements/static/ci/py3.11/lint.txt b/requirements/static/ci/py3.11/lint.txt index cbc4ac82192..65aef229e1e 100644 --- a/requirements/static/ci/py3.11/lint.txt +++ b/requirements/static/ci/py3.11/lint.txt @@ -126,10 +126,11 @@ contextvars==2.4 # -c requirements/static/ci/../pkg/py3.11/linux.txt # -c requirements/static/ci/py3.11/linux.txt # -r requirements/base.txt -croniter==0.3.29 ; sys_platform != "win32" +croniter==2.0.5 ; sys_platform != "win32" # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt # -c requirements/static/ci/py3.11/linux.txt - # -r requirements/static/ci/common.in + # -r requirements/base.txt cryptography==42.0.5 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt @@ -466,6 +467,7 @@ pytz==2022.1 # -c requirements/static/ci/../pkg/py3.11/linux.txt # -c requirements/static/ci/py3.11/linux.txt # apscheduler + # croniter # moto # python-telegram-bot # tempora diff --git a/requirements/static/ci/py3.11/linux.txt b/requirements/static/ci/py3.11/linux.txt index 9139f5b9c5d..a386c6d95b0 100644 --- a/requirements/static/ci/py3.11/linux.txt +++ b/requirements/static/ci/py3.11/linux.txt @@ -90,8 +90,10 @@ contextvars==2.4 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt # -r requirements/base.txt -croniter==0.3.29 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in +croniter==2.0.5 ; sys_platform != "win32" + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt cryptography==42.0.5 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt @@ -368,6 +370,7 @@ pytz==2022.1 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt # apscheduler + # croniter # moto # python-telegram-bot # tempora diff --git a/requirements/static/ci/py3.12/cloud.txt b/requirements/static/ci/py3.12/cloud.txt index 336862055ad..09f3d25a844 100644 --- a/requirements/static/ci/py3.12/cloud.txt +++ b/requirements/static/ci/py3.12/cloud.txt @@ -114,10 +114,11 @@ contextvars==2.4 # -c 
requirements/static/ci/../pkg/py3.12/linux.txt # -c requirements/static/ci/py3.12/linux.txt # -r requirements/base.txt -croniter==0.3.29 ; sys_platform != "win32" +croniter==2.0.5 ; sys_platform != "win32" # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt # -c requirements/static/ci/py3.12/linux.txt - # -r requirements/static/ci/common.in + # -r requirements/base.txt cryptography==42.0.5 # via # -c requirements/static/ci/../pkg/py3.12/linux.txt @@ -499,6 +500,7 @@ pytz==2022.1 # via # -c requirements/static/ci/../pkg/py3.12/linux.txt # -c requirements/static/ci/py3.12/linux.txt + # croniter # moto # tempora pyvmomi==6.7.1.2018.12 diff --git a/requirements/static/ci/py3.12/darwin.txt b/requirements/static/ci/py3.12/darwin.txt index 1d560cd8ca5..c8227a620ca 100644 --- a/requirements/static/ci/py3.12/darwin.txt +++ b/requirements/static/ci/py3.12/darwin.txt @@ -82,8 +82,10 @@ contextvars==2.4 # via # -c requirements/static/ci/../pkg/py3.12/darwin.txt # -r requirements/base.txt -croniter==0.3.29 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in +croniter==2.0.5 ; sys_platform != "win32" + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt cryptography==42.0.5 # via # -c requirements/static/ci/../pkg/py3.12/darwin.txt @@ -347,6 +349,7 @@ python-gnupg==0.4.8 pytz==2022.1 # via # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # croniter # moto # tempora pyvmomi==6.7.1.2018.12 diff --git a/requirements/static/ci/py3.12/docs.txt b/requirements/static/ci/py3.12/docs.txt index b0cc780a3e4..ef54a0f5fec 100644 --- a/requirements/static/ci/py3.12/docs.txt +++ b/requirements/static/ci/py3.12/docs.txt @@ -28,6 +28,10 @@ contextvars==2.4 # via # -c requirements/static/ci/py3.12/linux.txt # -r requirements/base.txt +croniter==2.0.5 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt distro==1.5.0 # via # -c requirements/static/ci/py3.12/linux.txt @@ 
-125,10 +129,15 @@ pyenchant==3.2.2 # via sphinxcontrib-spelling pygments==2.17.2 # via sphinx +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # croniter pytz==2022.1 # via # -c requirements/static/ci/py3.12/linux.txt # babel + # croniter # tempora pyyaml==6.0.1 # via @@ -149,6 +158,7 @@ six==1.16.0 # -c requirements/static/ci/py3.12/linux.txt # cheroot # more-itertools + # python-dateutil # sphinxcontrib.httpdomain snowballstemmer==2.1.0 # via sphinx diff --git a/requirements/static/ci/py3.12/freebsd.txt b/requirements/static/ci/py3.12/freebsd.txt index 8c7effb3e9b..776d62349ba 100644 --- a/requirements/static/ci/py3.12/freebsd.txt +++ b/requirements/static/ci/py3.12/freebsd.txt @@ -81,8 +81,10 @@ contextvars==2.4 # via # -c requirements/static/ci/../pkg/py3.12/freebsd.txt # -r requirements/base.txt -croniter==0.3.29 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in +croniter==2.0.5 ; sys_platform != "win32" + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt cryptography==42.0.5 # via # -c requirements/static/ci/../pkg/py3.12/freebsd.txt @@ -346,6 +348,7 @@ python-gnupg==0.4.8 pytz==2022.1 # via # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # croniter # moto # tempora pyvmomi==6.7.1.2018.12 diff --git a/requirements/static/ci/py3.12/lint.txt b/requirements/static/ci/py3.12/lint.txt index 90e2db426b2..18a938f6c3a 100644 --- a/requirements/static/ci/py3.12/lint.txt +++ b/requirements/static/ci/py3.12/lint.txt @@ -126,10 +126,11 @@ contextvars==2.4 # -c requirements/static/ci/../pkg/py3.12/linux.txt # -c requirements/static/ci/py3.12/linux.txt # -r requirements/base.txt -croniter==0.3.29 ; sys_platform != "win32" +croniter==2.0.5 ; sys_platform != "win32" # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt # -c requirements/static/ci/py3.12/linux.txt - # -r requirements/static/ci/common.in + # -r requirements/base.txt cryptography==42.0.5 # via # -c 
requirements/static/ci/../pkg/py3.12/linux.txt @@ -466,6 +467,7 @@ pytz==2022.1 # -c requirements/static/ci/../pkg/py3.12/linux.txt # -c requirements/static/ci/py3.12/linux.txt # apscheduler + # croniter # moto # python-telegram-bot # tempora diff --git a/requirements/static/ci/py3.12/linux.txt b/requirements/static/ci/py3.12/linux.txt index b357af52a8b..2ebfab472b2 100644 --- a/requirements/static/ci/py3.12/linux.txt +++ b/requirements/static/ci/py3.12/linux.txt @@ -90,8 +90,10 @@ contextvars==2.4 # via # -c requirements/static/ci/../pkg/py3.12/linux.txt # -r requirements/base.txt -croniter==0.3.29 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in +croniter==2.0.5 ; sys_platform != "win32" + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/base.txt cryptography==42.0.5 # via # -c requirements/static/ci/../pkg/py3.12/linux.txt @@ -368,6 +370,7 @@ pytz==2022.1 # via # -c requirements/static/ci/../pkg/py3.12/linux.txt # apscheduler + # croniter # moto # python-telegram-bot # tempora diff --git a/requirements/static/ci/py3.7/cloud.txt b/requirements/static/ci/py3.7/cloud.txt index e23530c06c7..57c077481ab 100644 --- a/requirements/static/ci/py3.7/cloud.txt +++ b/requirements/static/ci/py3.7/cloud.txt @@ -132,10 +132,11 @@ contextvars==2.4 # -c requirements/static/ci/../pkg/py3.7/linux.txt # -c requirements/static/ci/py3.7/linux.txt # -r requirements/base.txt -croniter==0.3.29 ; sys_platform != "win32" +croniter==2.0.5 ; sys_platform != "win32" # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt # -c requirements/static/ci/py3.7/linux.txt - # -r requirements/static/ci/common.in + # -r requirements/base.txt cryptography==42.0.5 # via # -c requirements/static/ci/../pkg/py3.7/linux.txt @@ -582,6 +583,7 @@ pytz==2022.1 # via # -c requirements/static/ci/../pkg/py3.7/linux.txt # -c requirements/static/ci/py3.7/linux.txt + # croniter # moto # tempora pyvmomi==6.7.1.2018.12 diff --git 
a/requirements/static/ci/py3.7/docs.txt b/requirements/static/ci/py3.7/docs.txt index fe2340ca4c1..512e5ca8f36 100644 --- a/requirements/static/ci/py3.7/docs.txt +++ b/requirements/static/ci/py3.7/docs.txt @@ -28,6 +28,10 @@ contextvars==2.4 # via # -c requirements/static/ci/py3.7/linux.txt # -r requirements/base.txt +croniter==2.0.5 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/base.txt distro==1.5.0 # via # -c requirements/static/ci/py3.7/linux.txt @@ -129,10 +133,15 @@ pyenchant==3.2.2 # via sphinxcontrib-spelling pygments==2.17.2 # via sphinx +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/py3.7/linux.txt + # croniter pytz==2022.1 # via # -c requirements/static/ci/py3.7/linux.txt # babel + # croniter # tempora pyyaml==6.0.1 # via @@ -153,6 +162,7 @@ six==1.16.0 # -c requirements/static/ci/py3.7/linux.txt # cheroot # more-itertools + # python-dateutil # sphinxcontrib.httpdomain snowballstemmer==2.1.0 # via sphinx diff --git a/requirements/static/ci/py3.7/freebsd.txt b/requirements/static/ci/py3.7/freebsd.txt index 72262e524fb..37a74ed3f86 100644 --- a/requirements/static/ci/py3.7/freebsd.txt +++ b/requirements/static/ci/py3.7/freebsd.txt @@ -93,8 +93,10 @@ contextvars==2.4 # via # -c requirements/static/ci/../pkg/py3.7/freebsd.txt # -r requirements/base.txt -croniter==0.3.29 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in +croniter==2.0.5 ; sys_platform != "win32" + # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt + # -r requirements/base.txt cryptography==42.0.5 # via # -c requirements/static/ci/../pkg/py3.7/freebsd.txt @@ -409,6 +411,7 @@ python-gnupg==0.4.8 pytz==2022.1 # via # -c requirements/static/ci/../pkg/py3.7/freebsd.txt + # croniter # moto # tempora pyvmomi==6.7.1.2018.12 diff --git a/requirements/static/ci/py3.7/linux.txt b/requirements/static/ci/py3.7/linux.txt index a141ce6f364..594ffb2e9de 100644 --- a/requirements/static/ci/py3.7/linux.txt +++ 
b/requirements/static/ci/py3.7/linux.txt @@ -100,8 +100,10 @@ contextvars==2.4 # via # -c requirements/static/ci/../pkg/py3.7/linux.txt # -r requirements/base.txt -croniter==0.3.29 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in +croniter==2.0.5 ; sys_platform != "win32" + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -r requirements/base.txt cryptography==42.0.5 # via # -c requirements/static/ci/../pkg/py3.7/linux.txt @@ -426,6 +428,7 @@ pytz==2022.1 # via # -c requirements/static/ci/../pkg/py3.7/linux.txt # apscheduler + # croniter # moto # python-telegram-bot # tempora diff --git a/requirements/static/ci/py3.8/cloud.txt b/requirements/static/ci/py3.8/cloud.txt index 6e839778e86..e1a3e034ca2 100644 --- a/requirements/static/ci/py3.8/cloud.txt +++ b/requirements/static/ci/py3.8/cloud.txt @@ -127,10 +127,11 @@ contextvars==2.4 # -c requirements/static/ci/../pkg/py3.8/linux.txt # -c requirements/static/ci/py3.8/linux.txt # -r requirements/base.txt -croniter==0.3.29 ; sys_platform != "win32" +croniter==2.0.5 ; sys_platform != "win32" # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt # -c requirements/static/ci/py3.8/linux.txt - # -r requirements/static/ci/common.in + # -r requirements/base.txt cryptography==42.0.5 # via # -c requirements/static/ci/../pkg/py3.8/linux.txt @@ -569,6 +570,7 @@ pytz==2022.1 # via # -c requirements/static/ci/../pkg/py3.8/linux.txt # -c requirements/static/ci/py3.8/linux.txt + # croniter # moto # tempora pyvmomi==6.7.1.2018.12 diff --git a/requirements/static/ci/py3.8/docs.txt b/requirements/static/ci/py3.8/docs.txt index f9b0d6c2c5b..e4a7834692a 100644 --- a/requirements/static/ci/py3.8/docs.txt +++ b/requirements/static/ci/py3.8/docs.txt @@ -28,6 +28,10 @@ contextvars==2.4 # via # -c requirements/static/ci/py3.8/linux.txt # -r requirements/base.txt +croniter==2.0.5 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/base.txt distro==1.5.0 # via 
# -c requirements/static/ci/py3.8/linux.txt @@ -125,10 +129,15 @@ pyenchant==3.2.2 # via sphinxcontrib-spelling pygments==2.17.2 # via sphinx +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/py3.8/linux.txt + # croniter pytz==2022.1 # via # -c requirements/static/ci/py3.8/linux.txt # babel + # croniter # tempora pyyaml==6.0.1 # via @@ -149,6 +158,7 @@ six==1.16.0 # -c requirements/static/ci/py3.8/linux.txt # cheroot # more-itertools + # python-dateutil # sphinxcontrib.httpdomain snowballstemmer==2.1.0 # via sphinx diff --git a/requirements/static/ci/py3.8/freebsd.txt b/requirements/static/ci/py3.8/freebsd.txt index d20d09a828e..13b9753e92d 100644 --- a/requirements/static/ci/py3.8/freebsd.txt +++ b/requirements/static/ci/py3.8/freebsd.txt @@ -88,8 +88,10 @@ contextvars==2.4 # via # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # -r requirements/base.txt -croniter==0.3.29 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in +croniter==2.0.5 ; sys_platform != "win32" + # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt + # -r requirements/base.txt cryptography==42.0.5 # via # -c requirements/static/ci/../pkg/py3.8/freebsd.txt @@ -396,6 +398,7 @@ python-gnupg==0.4.8 pytz==2022.1 # via # -c requirements/static/ci/../pkg/py3.8/freebsd.txt + # croniter # moto # tempora pyvmomi==6.7.1.2018.12 diff --git a/requirements/static/ci/py3.8/lint.txt b/requirements/static/ci/py3.8/lint.txt index ba5e838f4a7..e5c14f3986c 100644 --- a/requirements/static/ci/py3.8/lint.txt +++ b/requirements/static/ci/py3.8/lint.txt @@ -135,10 +135,11 @@ contextvars==2.4 # -c requirements/static/ci/../pkg/py3.8/linux.txt # -c requirements/static/ci/py3.8/linux.txt # -r requirements/base.txt -croniter==0.3.29 ; sys_platform != "win32" +croniter==2.0.5 ; sys_platform != "win32" # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt # -c requirements/static/ci/py3.8/linux.txt - # -r requirements/static/ci/common.in + # -r requirements/base.txt 
cryptography==42.0.5 # via # -c requirements/static/ci/../pkg/py3.8/linux.txt @@ -526,6 +527,7 @@ pytz==2022.1 # -c requirements/static/ci/../pkg/py3.8/linux.txt # -c requirements/static/ci/py3.8/linux.txt # apscheduler + # croniter # moto # python-telegram-bot # tempora diff --git a/requirements/static/ci/py3.8/linux.txt b/requirements/static/ci/py3.8/linux.txt index dc90201dabe..c0773244b1b 100644 --- a/requirements/static/ci/py3.8/linux.txt +++ b/requirements/static/ci/py3.8/linux.txt @@ -95,8 +95,10 @@ contextvars==2.4 # via # -c requirements/static/ci/../pkg/py3.8/linux.txt # -r requirements/base.txt -croniter==0.3.29 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in +croniter==2.0.5 ; sys_platform != "win32" + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -r requirements/base.txt cryptography==42.0.5 # via # -c requirements/static/ci/../pkg/py3.8/linux.txt @@ -413,6 +415,7 @@ pytz==2022.1 # via # -c requirements/static/ci/../pkg/py3.8/linux.txt # apscheduler + # croniter # moto # python-telegram-bot # tempora diff --git a/requirements/static/ci/py3.9/cloud.txt b/requirements/static/ci/py3.9/cloud.txt index c6e054da965..c4175c31613 100644 --- a/requirements/static/ci/py3.9/cloud.txt +++ b/requirements/static/ci/py3.9/cloud.txt @@ -127,10 +127,11 @@ contextvars==2.4 # -c requirements/static/ci/../pkg/py3.9/linux.txt # -c requirements/static/ci/py3.9/linux.txt # -r requirements/base.txt -croniter==0.3.29 ; sys_platform != "win32" +croniter==2.0.5 ; sys_platform != "win32" # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt # -c requirements/static/ci/py3.9/linux.txt - # -r requirements/static/ci/common.in + # -r requirements/base.txt cryptography==42.0.5 # via # -c requirements/static/ci/../pkg/py3.9/linux.txt @@ -571,6 +572,7 @@ pytz==2022.1 # via # -c requirements/static/ci/../pkg/py3.9/linux.txt # -c requirements/static/ci/py3.9/linux.txt + # croniter # moto # tempora pyvmomi==6.7.1.2018.12 diff --git 
a/requirements/static/ci/py3.9/darwin.txt b/requirements/static/ci/py3.9/darwin.txt index 33130f7c391..56b95b03d9b 100644 --- a/requirements/static/ci/py3.9/darwin.txt +++ b/requirements/static/ci/py3.9/darwin.txt @@ -91,8 +91,10 @@ contextvars==2.4 # via # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/base.txt -croniter==0.3.29 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in +croniter==2.0.5 ; sys_platform != "win32" + # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt + # -r requirements/base.txt cryptography==42.0.5 # via # -c requirements/static/ci/../pkg/py3.9/darwin.txt @@ -405,6 +407,7 @@ python-gnupg==0.4.8 pytz==2022.1 # via # -c requirements/static/ci/../pkg/py3.9/darwin.txt + # croniter # moto # tempora pyvmomi==6.7.1.2018.12 diff --git a/requirements/static/ci/py3.9/docs.txt b/requirements/static/ci/py3.9/docs.txt index de9c15d9fe7..19f80df0096 100644 --- a/requirements/static/ci/py3.9/docs.txt +++ b/requirements/static/ci/py3.9/docs.txt @@ -28,6 +28,10 @@ contextvars==2.4 # via # -c requirements/static/ci/py3.9/linux.txt # -r requirements/base.txt +croniter==2.0.5 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/base.txt distro==1.5.0 # via # -c requirements/static/ci/py3.9/linux.txt @@ -129,10 +133,15 @@ pyenchant==3.2.2 # via sphinxcontrib-spelling pygments==2.17.2 # via sphinx +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/py3.9/linux.txt + # croniter pytz==2022.1 # via # -c requirements/static/ci/py3.9/linux.txt # babel + # croniter # tempora pyyaml==6.0.1 # via @@ -153,6 +162,7 @@ six==1.16.0 # -c requirements/static/ci/py3.9/linux.txt # cheroot # more-itertools + # python-dateutil # sphinxcontrib.httpdomain snowballstemmer==2.1.0 # via sphinx diff --git a/requirements/static/ci/py3.9/freebsd.txt b/requirements/static/ci/py3.9/freebsd.txt index d2f97c974e8..7a94ca7c62a 100644 --- a/requirements/static/ci/py3.9/freebsd.txt +++ 
b/requirements/static/ci/py3.9/freebsd.txt @@ -88,8 +88,10 @@ contextvars==2.4 # via # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/base.txt -croniter==0.3.29 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in +croniter==2.0.5 ; sys_platform != "win32" + # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt + # -r requirements/base.txt cryptography==42.0.5 # via # -c requirements/static/ci/../pkg/py3.9/freebsd.txt @@ -398,6 +400,7 @@ python-gnupg==0.4.8 pytz==2022.1 # via # -c requirements/static/ci/../pkg/py3.9/freebsd.txt + # croniter # moto # tempora pyvmomi==6.7.1.2018.12 diff --git a/requirements/static/ci/py3.9/lint.txt b/requirements/static/ci/py3.9/lint.txt index 8072024c420..2a5da53eb93 100644 --- a/requirements/static/ci/py3.9/lint.txt +++ b/requirements/static/ci/py3.9/lint.txt @@ -131,10 +131,11 @@ contextvars==2.4 # -c requirements/static/ci/../pkg/py3.9/linux.txt # -c requirements/static/ci/py3.9/linux.txt # -r requirements/base.txt -croniter==0.3.29 ; sys_platform != "win32" +croniter==2.0.5 ; sys_platform != "win32" # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt # -c requirements/static/ci/py3.9/linux.txt - # -r requirements/static/ci/common.in + # -r requirements/base.txt cryptography==42.0.5 # via # -c requirements/static/ci/../pkg/py3.9/linux.txt @@ -524,6 +525,7 @@ pytz==2022.1 # -c requirements/static/ci/../pkg/py3.9/linux.txt # -c requirements/static/ci/py3.9/linux.txt # apscheduler + # croniter # moto # python-telegram-bot # tempora diff --git a/requirements/static/ci/py3.9/linux.txt b/requirements/static/ci/py3.9/linux.txt index f446f6269a5..563bd28600d 100644 --- a/requirements/static/ci/py3.9/linux.txt +++ b/requirements/static/ci/py3.9/linux.txt @@ -93,8 +93,10 @@ contextvars==2.4 # via # -c requirements/static/ci/../pkg/py3.9/linux.txt # -r requirements/base.txt -croniter==0.3.29 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in +croniter==2.0.5 ; 
sys_platform != "win32" + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -r requirements/base.txt cryptography==42.0.5 # via # -c requirements/static/ci/../pkg/py3.9/linux.txt @@ -413,6 +415,7 @@ pytz==2022.1 # via # -c requirements/static/ci/../pkg/py3.9/linux.txt # apscheduler + # croniter # moto # python-telegram-bot # tempora diff --git a/requirements/static/pkg/py3.10/darwin.txt b/requirements/static/pkg/py3.10/darwin.txt index f3a7993a1a3..b860eda060a 100644 --- a/requirements/static/pkg/py3.10/darwin.txt +++ b/requirements/static/pkg/py3.10/darwin.txt @@ -18,6 +18,8 @@ cherrypy==18.6.1 # via -r requirements/darwin.txt contextvars==2.4 # via -r requirements/base.txt +croniter==2.0.5 ; sys_platform != "win32" + # via -r requirements/base.txt cryptography==42.0.5 # via # -r requirements/darwin.txt @@ -84,11 +86,15 @@ pycryptodomex==3.19.1 pyopenssl==24.0.0 # via -r requirements/darwin.txt python-dateutil==2.8.0 - # via -r requirements/darwin.txt + # via + # -r requirements/darwin.txt + # croniter python-gnupg==0.4.8 # via -r requirements/darwin.txt pytz==2022.1 - # via tempora + # via + # croniter + # tempora pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==25.1.2 ; sys_platform == "darwin" diff --git a/requirements/static/pkg/py3.10/freebsd.txt b/requirements/static/pkg/py3.10/freebsd.txt index ec9370b9b0f..a812499ca8f 100644 --- a/requirements/static/pkg/py3.10/freebsd.txt +++ b/requirements/static/pkg/py3.10/freebsd.txt @@ -16,6 +16,8 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/freebsd.in contextvars==2.4 # via -r requirements/base.txt +croniter==2.0.5 ; sys_platform != "win32" + # via -r requirements/base.txt cryptography==42.0.5 # via # -r requirements/static/pkg/freebsd.in @@ -74,11 +76,15 @@ pycryptodomex==3.19.1 pyopenssl==24.0.0 # via -r requirements/static/pkg/freebsd.in python-dateutil==2.8.1 - # via -r requirements/static/pkg/freebsd.in + # via + # -r requirements/static/pkg/freebsd.in + # croniter python-gnupg==0.4.8 # 
via -r requirements/static/pkg/freebsd.in pytz==2022.1 - # via tempora + # via + # croniter + # tempora pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 diff --git a/requirements/static/pkg/py3.10/linux.txt b/requirements/static/pkg/py3.10/linux.txt index d8530c627d4..cb0821f6af5 100644 --- a/requirements/static/pkg/py3.10/linux.txt +++ b/requirements/static/pkg/py3.10/linux.txt @@ -16,6 +16,8 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/linux.in contextvars==2.4 # via -r requirements/base.txt +croniter==2.0.5 ; sys_platform != "win32" + # via -r requirements/base.txt cryptography==42.0.5 # via # -r requirements/static/pkg/linux.in @@ -72,11 +74,15 @@ pycryptodomex==3.19.1 pyopenssl==24.0.0 # via -r requirements/static/pkg/linux.in python-dateutil==2.8.1 - # via -r requirements/static/pkg/linux.in + # via + # -r requirements/static/pkg/linux.in + # croniter python-gnupg==0.4.8 # via -r requirements/static/pkg/linux.in pytz==2022.1 - # via tempora + # via + # croniter + # tempora pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 diff --git a/requirements/static/pkg/py3.11/darwin.txt b/requirements/static/pkg/py3.11/darwin.txt index 6ab0e386c75..4c351dfaa6f 100644 --- a/requirements/static/pkg/py3.11/darwin.txt +++ b/requirements/static/pkg/py3.11/darwin.txt @@ -18,6 +18,8 @@ cherrypy==18.6.1 # via -r requirements/darwin.txt contextvars==2.4 # via -r requirements/base.txt +croniter==2.0.5 ; sys_platform != "win32" + # via -r requirements/base.txt cryptography==42.0.5 # via # -r requirements/darwin.txt @@ -84,11 +86,15 @@ pycryptodomex==3.19.1 pyopenssl==24.0.0 # via -r requirements/darwin.txt python-dateutil==2.8.0 - # via -r requirements/darwin.txt + # via + # -r requirements/darwin.txt + # croniter python-gnupg==0.4.8 # via -r requirements/darwin.txt pytz==2022.1 - # via tempora + # via + # croniter + # tempora pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==25.1.2 ; sys_platform == "darwin" diff --git 
a/requirements/static/pkg/py3.11/freebsd.txt b/requirements/static/pkg/py3.11/freebsd.txt index 37f28be3baf..0c00120e572 100644 --- a/requirements/static/pkg/py3.11/freebsd.txt +++ b/requirements/static/pkg/py3.11/freebsd.txt @@ -16,6 +16,8 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/freebsd.in contextvars==2.4 # via -r requirements/base.txt +croniter==2.0.5 ; sys_platform != "win32" + # via -r requirements/base.txt cryptography==42.0.5 # via # -r requirements/static/pkg/freebsd.in @@ -74,11 +76,15 @@ pycryptodomex==3.19.1 pyopenssl==24.0.0 # via -r requirements/static/pkg/freebsd.in python-dateutil==2.8.1 - # via -r requirements/static/pkg/freebsd.in + # via + # -r requirements/static/pkg/freebsd.in + # croniter python-gnupg==0.4.8 # via -r requirements/static/pkg/freebsd.in pytz==2022.1 - # via tempora + # via + # croniter + # tempora pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 diff --git a/requirements/static/pkg/py3.11/linux.txt b/requirements/static/pkg/py3.11/linux.txt index 45a0e650191..f4a6d28b930 100644 --- a/requirements/static/pkg/py3.11/linux.txt +++ b/requirements/static/pkg/py3.11/linux.txt @@ -16,6 +16,8 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/linux.in contextvars==2.4 # via -r requirements/base.txt +croniter==2.0.5 ; sys_platform != "win32" + # via -r requirements/base.txt cryptography==42.0.5 # via # -r requirements/static/pkg/linux.in @@ -72,11 +74,15 @@ pycryptodomex==3.19.1 pyopenssl==24.0.0 # via -r requirements/static/pkg/linux.in python-dateutil==2.8.1 - # via -r requirements/static/pkg/linux.in + # via + # -r requirements/static/pkg/linux.in + # croniter python-gnupg==0.4.8 # via -r requirements/static/pkg/linux.in pytz==2022.1 - # via tempora + # via + # croniter + # tempora pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 diff --git a/requirements/static/pkg/py3.12/darwin.txt b/requirements/static/pkg/py3.12/darwin.txt index cc835236248..baf8c97f33d 100644 --- 
a/requirements/static/pkg/py3.12/darwin.txt +++ b/requirements/static/pkg/py3.12/darwin.txt @@ -18,6 +18,8 @@ cherrypy==18.6.1 # via -r requirements/darwin.txt contextvars==2.4 # via -r requirements/base.txt +croniter==2.0.5 ; sys_platform != "win32" + # via -r requirements/base.txt cryptography==42.0.5 # via # -r requirements/darwin.txt @@ -84,11 +86,15 @@ pycryptodomex==3.19.1 pyopenssl==24.0.0 # via -r requirements/darwin.txt python-dateutil==2.8.0 - # via -r requirements/darwin.txt + # via + # -r requirements/darwin.txt + # croniter python-gnupg==0.4.8 # via -r requirements/darwin.txt pytz==2022.1 - # via tempora + # via + # croniter + # tempora pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==25.1.2 ; sys_platform == "darwin" diff --git a/requirements/static/pkg/py3.12/freebsd.txt b/requirements/static/pkg/py3.12/freebsd.txt index 32bc2af556a..4e6d7ca14ca 100644 --- a/requirements/static/pkg/py3.12/freebsd.txt +++ b/requirements/static/pkg/py3.12/freebsd.txt @@ -16,6 +16,8 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/freebsd.in contextvars==2.4 # via -r requirements/base.txt +croniter==2.0.5 ; sys_platform != "win32" + # via -r requirements/base.txt cryptography==42.0.5 # via # -r requirements/static/pkg/freebsd.in @@ -74,11 +76,15 @@ pycryptodomex==3.19.1 pyopenssl==24.0.0 # via -r requirements/static/pkg/freebsd.in python-dateutil==2.8.1 - # via -r requirements/static/pkg/freebsd.in + # via + # -r requirements/static/pkg/freebsd.in + # croniter python-gnupg==0.4.8 # via -r requirements/static/pkg/freebsd.in pytz==2022.1 - # via tempora + # via + # croniter + # tempora pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 diff --git a/requirements/static/pkg/py3.12/linux.txt b/requirements/static/pkg/py3.12/linux.txt index 757e57f378a..fcfd7bc2d20 100644 --- a/requirements/static/pkg/py3.12/linux.txt +++ b/requirements/static/pkg/py3.12/linux.txt @@ -16,6 +16,8 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/linux.in contextvars==2.4 # 
via -r requirements/base.txt +croniter==2.0.5 ; sys_platform != "win32" + # via -r requirements/base.txt cryptography==42.0.5 # via # -r requirements/static/pkg/linux.in @@ -72,11 +74,15 @@ pycryptodomex==3.19.1 pyopenssl==24.0.0 # via -r requirements/static/pkg/linux.in python-dateutil==2.8.1 - # via -r requirements/static/pkg/linux.in + # via + # -r requirements/static/pkg/linux.in + # croniter python-gnupg==0.4.8 # via -r requirements/static/pkg/linux.in pytz==2022.1 - # via tempora + # via + # croniter + # tempora pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 diff --git a/requirements/static/pkg/py3.7/freebsd.txt b/requirements/static/pkg/py3.7/freebsd.txt index 0bda53f2c9a..511abe0cad7 100644 --- a/requirements/static/pkg/py3.7/freebsd.txt +++ b/requirements/static/pkg/py3.7/freebsd.txt @@ -16,6 +16,8 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/freebsd.in contextvars==2.4 # via -r requirements/base.txt +croniter==2.0.5 ; sys_platform != "win32" + # via -r requirements/base.txt cryptography==42.0.5 # via # -r requirements/static/pkg/freebsd.in @@ -72,11 +74,15 @@ pycryptodomex==3.19.1 pyopenssl==24.0.0 # via -r requirements/static/pkg/freebsd.in python-dateutil==2.8.1 - # via -r requirements/static/pkg/freebsd.in + # via + # -r requirements/static/pkg/freebsd.in + # croniter python-gnupg==0.4.8 # via -r requirements/static/pkg/freebsd.in pytz==2022.1 - # via tempora + # via + # croniter + # tempora pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 diff --git a/requirements/static/pkg/py3.7/linux.txt b/requirements/static/pkg/py3.7/linux.txt index fd3bfc55945..1fbd1d13569 100644 --- a/requirements/static/pkg/py3.7/linux.txt +++ b/requirements/static/pkg/py3.7/linux.txt @@ -16,6 +16,8 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/linux.in contextvars==2.4 # via -r requirements/base.txt +croniter==2.0.5 ; sys_platform != "win32" + # via -r requirements/base.txt cryptography==42.0.5 # via # -r requirements/static/pkg/linux.in 
@@ -70,11 +72,15 @@ pycryptodomex==3.19.1 pyopenssl==24.0.0 # via -r requirements/static/pkg/linux.in python-dateutil==2.8.1 - # via -r requirements/static/pkg/linux.in + # via + # -r requirements/static/pkg/linux.in + # croniter python-gnupg==0.4.8 # via -r requirements/static/pkg/linux.in pytz==2022.1 - # via tempora + # via + # croniter + # tempora pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 diff --git a/requirements/static/pkg/py3.8/freebsd.txt b/requirements/static/pkg/py3.8/freebsd.txt index 3a9828a3ed1..ec75accaad4 100644 --- a/requirements/static/pkg/py3.8/freebsd.txt +++ b/requirements/static/pkg/py3.8/freebsd.txt @@ -16,6 +16,8 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/freebsd.in contextvars==2.4 # via -r requirements/base.txt +croniter==2.0.5 ; sys_platform != "win32" + # via -r requirements/base.txt cryptography==42.0.5 # via # -r requirements/static/pkg/freebsd.in @@ -72,11 +74,15 @@ pycryptodomex==3.19.1 pyopenssl==24.0.0 # via -r requirements/static/pkg/freebsd.in python-dateutil==2.8.1 - # via -r requirements/static/pkg/freebsd.in + # via + # -r requirements/static/pkg/freebsd.in + # croniter python-gnupg==0.4.8 # via -r requirements/static/pkg/freebsd.in pytz==2022.1 - # via tempora + # via + # croniter + # tempora pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 diff --git a/requirements/static/pkg/py3.8/linux.txt b/requirements/static/pkg/py3.8/linux.txt index 71c2efa5d9e..b5151d758a9 100644 --- a/requirements/static/pkg/py3.8/linux.txt +++ b/requirements/static/pkg/py3.8/linux.txt @@ -16,6 +16,8 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/linux.in contextvars==2.4 # via -r requirements/base.txt +croniter==2.0.5 ; sys_platform != "win32" + # via -r requirements/base.txt cryptography==42.0.5 # via # -r requirements/static/pkg/linux.in @@ -70,11 +72,15 @@ pycryptodomex==3.19.1 pyopenssl==24.0.0 # via -r requirements/static/pkg/linux.in python-dateutil==2.8.1 - # via -r requirements/static/pkg/linux.in + 
# via + # -r requirements/static/pkg/linux.in + # croniter python-gnupg==0.4.8 # via -r requirements/static/pkg/linux.in pytz==2022.1 - # via tempora + # via + # croniter + # tempora pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 diff --git a/requirements/static/pkg/py3.9/darwin.txt b/requirements/static/pkg/py3.9/darwin.txt index 225a43f2cff..6200b39d76e 100644 --- a/requirements/static/pkg/py3.9/darwin.txt +++ b/requirements/static/pkg/py3.9/darwin.txt @@ -18,6 +18,8 @@ cherrypy==18.6.1 # via -r requirements/darwin.txt contextvars==2.4 # via -r requirements/base.txt +croniter==2.0.5 ; sys_platform != "win32" + # via -r requirements/base.txt cryptography==42.0.5 # via # -r requirements/darwin.txt @@ -84,11 +86,15 @@ pycryptodomex==3.19.1 pyopenssl==24.0.0 # via -r requirements/darwin.txt python-dateutil==2.8.0 - # via -r requirements/darwin.txt + # via + # -r requirements/darwin.txt + # croniter python-gnupg==0.4.8 # via -r requirements/darwin.txt pytz==2022.1 - # via tempora + # via + # croniter + # tempora pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==25.1.2 ; sys_platform == "darwin" diff --git a/requirements/static/pkg/py3.9/freebsd.txt b/requirements/static/pkg/py3.9/freebsd.txt index 1554d549788..2ad4b2529de 100644 --- a/requirements/static/pkg/py3.9/freebsd.txt +++ b/requirements/static/pkg/py3.9/freebsd.txt @@ -16,6 +16,8 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/freebsd.in contextvars==2.4 # via -r requirements/base.txt +croniter==2.0.5 ; sys_platform != "win32" + # via -r requirements/base.txt cryptography==42.0.5 # via # -r requirements/static/pkg/freebsd.in @@ -74,11 +76,15 @@ pycryptodomex==3.19.1 pyopenssl==24.0.0 # via -r requirements/static/pkg/freebsd.in python-dateutil==2.8.1 - # via -r requirements/static/pkg/freebsd.in + # via + # -r requirements/static/pkg/freebsd.in + # croniter python-gnupg==0.4.8 # via -r requirements/static/pkg/freebsd.in pytz==2022.1 - # via tempora + # via + # croniter + # tempora 
pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 diff --git a/requirements/static/pkg/py3.9/linux.txt b/requirements/static/pkg/py3.9/linux.txt index be8abe8afde..5902b17a3dc 100644 --- a/requirements/static/pkg/py3.9/linux.txt +++ b/requirements/static/pkg/py3.9/linux.txt @@ -16,6 +16,8 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/linux.in contextvars==2.4 # via -r requirements/base.txt +croniter==2.0.5 ; sys_platform != "win32" + # via -r requirements/base.txt cryptography==42.0.5 # via # -r requirements/static/pkg/linux.in @@ -72,11 +74,15 @@ pycryptodomex==3.19.1 pyopenssl==24.0.0 # via -r requirements/static/pkg/linux.in python-dateutil==2.8.1 - # via -r requirements/static/pkg/linux.in + # via + # -r requirements/static/pkg/linux.in + # croniter python-gnupg==0.4.8 # via -r requirements/static/pkg/linux.in pytz==2022.1 - # via tempora + # via + # croniter + # tempora pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 From 8a9c92a72ddc9d0d9babfe49ae15b91c20f40ab1 Mon Sep 17 00:00:00 2001 From: David Murphy Date: Fri, 14 Jun 2024 14:54:41 -0600 Subject: [PATCH 107/160] Updated changelog --- changelog/57649.fixed.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog/57649.fixed.md diff --git a/changelog/57649.fixed.md b/changelog/57649.fixed.md new file mode 100644 index 00000000000..12d22a0531c --- /dev/null +++ b/changelog/57649.fixed.md @@ -0,0 +1 @@ + Update to include croniter in pkg requirements From d1d84e87b354011cb386b0991e32be226bf83f85 Mon Sep 17 00:00:00 2001 From: dmurphy18 Date: Tue, 18 Jun 2024 15:37:06 -0600 Subject: [PATCH 108/160] Removed comment line and removed typo --- requirements/base.txt | 2 +- requirements/static/ci/common.in | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 32a8b0f6221..31d4f29be11 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -10,6 +10,6 @@ distro>=1.0.1 psutil>=5.0.0 packaging>=21.3 
looseversion -croniter>=0.3.0,!=0.3.22"; sys_platform != 'win32' +croniter>=0.3.0,!=0.3.22; sys_platform != 'win32' # We need contextvars for salt-ssh contextvars diff --git a/requirements/static/ci/common.in b/requirements/static/ci/common.in index 761e34a08a8..76983495f42 100644 --- a/requirements/static/ci/common.in +++ b/requirements/static/ci/common.in @@ -11,7 +11,6 @@ certifi>=2022.12.07 cffi>=1.14.6 cherrypy>=17.4.1 clustershell -# croniter>=0.3.0,!=0.3.22"; sys_platform != 'win32' dnspython etcd3-py==0.1.6 gitpython>=3.1.37 From c174570e5362c760ffa54b0d2ffbf2e99204efa1 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Tue, 18 Jun 2024 23:39:16 -0700 Subject: [PATCH 109/160] Allow NamedLoaderContexts to be returned from loader It is useful in some cases to return NamedLoaderContexts from loaded functions. Instead of choking or requiring implementers to call the context's value() method before being de-scoped, detect when a NamedLoaderContext has been returned and return the value from the current context. 
--- salt/loader/lazy.py | 5 ++++- tests/pytests/integration/modules/test_config.py | 8 ++++++++ tests/pytests/unit/loader/test_loader.py | 13 +++++++++++++ 3 files changed, 25 insertions(+), 1 deletion(-) create mode 100644 tests/pytests/integration/modules/test_config.py diff --git a/salt/loader/lazy.py b/salt/loader/lazy.py index a3b795a3757..cb4424455cd 100644 --- a/salt/loader/lazy.py +++ b/salt/loader/lazy.py @@ -1257,7 +1257,10 @@ class LazyLoader(salt.utils.lazy.LazyDict): self.parent_loader = current_loader token = salt.loader.context.loader_ctxvar.set(self) try: - return _func_or_method(*args, **kwargs) + ret = _func_or_method(*args, **kwargs) + if isinstance(ret, salt.loader.context.NamedLoaderContext): + ret = ret.value() + return ret finally: self.parent_loader = None salt.loader.context.loader_ctxvar.reset(token) diff --git a/tests/pytests/integration/modules/test_config.py b/tests/pytests/integration/modules/test_config.py new file mode 100644 index 00000000000..afdf4706050 --- /dev/null +++ b/tests/pytests/integration/modules/test_config.py @@ -0,0 +1,8 @@ +import pytest + + +@pytest.mark.slow_test +def test_config_items(salt_cli, salt_minion): + ret = salt_cli.run("config.items", minion_tgt=salt_minion.id) + assert ret.returncode == 0 + assert isinstance(ret.data, dict) diff --git a/tests/pytests/unit/loader/test_loader.py b/tests/pytests/unit/loader/test_loader.py index e7359c6a74a..ea0883b9388 100644 --- a/tests/pytests/unit/loader/test_loader.py +++ b/tests/pytests/unit/loader/test_loader.py @@ -83,3 +83,16 @@ def test_named_loader_context_name_not_packed(tmp_path): match="LazyLoader does not have a packed value for: __not_packed__", ): loader["mymod.foobar"]() + + +def test_return_named_context_from_loaded_func(tmp_path): + opts = { + "optimization_order": [0], + } + contents = """ + def foobar(): + return __test__ + """ + with pytest.helpers.temp_file("mymod.py", contents, directory=tmp_path): + loader = salt.loader.LazyLoader([tmp_path], opts, 
pack={"__test__": "meh"}) + assert loader["mymod.foobar"]() == "meh" From 2f84693dc33b360c8176fb0fc2862ca6cccf20cc Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Tue, 18 Jun 2024 23:44:22 -0700 Subject: [PATCH 110/160] Add changelog for #65251 --- changelog/65251.fixed.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog/65251.fixed.md diff --git a/changelog/65251.fixed.md b/changelog/65251.fixed.md new file mode 100644 index 00000000000..e8abd5af327 --- /dev/null +++ b/changelog/65251.fixed.md @@ -0,0 +1 @@ +Fix config.items when called on minion From 56234c13f17ce93447616f87e9128f911f7066dc Mon Sep 17 00:00:00 2001 From: jeanluc Date: Mon, 22 Apr 2024 17:26:18 +0200 Subject: [PATCH 111/160] Add test for issue #66414 --- .../pytests/functional/states/test_x509_v2.py | 66 ++++++++++++++++++- 1 file changed, 63 insertions(+), 3 deletions(-) diff --git a/tests/pytests/functional/states/test_x509_v2.py b/tests/pytests/functional/states/test_x509_v2.py index 01c877fceda..7f4952dd073 100644 --- a/tests/pytests/functional/states/test_x509_v2.py +++ b/tests/pytests/functional/states/test_x509_v2.py @@ -1,5 +1,6 @@ import base64 import pathlib +import shutil import pytest @@ -31,7 +32,28 @@ pytestmark = [ @pytest.fixture(scope="module") -def minion_config_overrides(): +def ca_dir(tmp_path_factory): + ca_dir = tmp_path_factory.mktemp("ca") + try: + yield ca_dir + finally: + shutil.rmtree(str(ca_dir), ignore_errors=True) + + +@pytest.fixture(scope="module") +def ca_key_file(ca_dir, ca_key): + with pytest.helpers.temp_file("ca.key", ca_key, ca_dir) as key: + yield key + + +@pytest.fixture(scope="module") +def ca_cert_file(ca_dir, ca_cert): + with pytest.helpers.temp_file("ca.crt", ca_cert, ca_dir) as crt: + yield crt + + +@pytest.fixture(scope="module") +def minion_config_overrides(ca_key_file, ca_cert_file): return { "x509_signing_policies": { "testpolicy": { @@ -47,6 +69,11 @@ def minion_config_overrides(): "testnosubjectpolicy": { "CN": 
"from_signing_policy", }, + "test_fixed_signing_private_key": { + "subject": "CN=from_signing_policy", + "signing_cert": str(ca_cert_file), + "signing_private_key": str(ca_key_file), + }, }, "features": { "x509_v2": True, @@ -59,7 +86,7 @@ def x509(loaders, states, tmp_path): yield states.x509 -@pytest.fixture +@pytest.fixture(scope="module") def ca_cert(): return """\ -----BEGIN CERTIFICATE----- @@ -85,7 +112,7 @@ LN1w5sybsYwIw6QN """ -@pytest.fixture +@pytest.fixture(scope="module") def ca_key(): return """\ -----BEGIN RSA PRIVATE KEY----- @@ -838,6 +865,20 @@ def test_certificate_managed_with_signing_policy(x509, cert_args, rsa_privkey, c assert _signed_by(cert, ca_key) +def test_certificate_managed_with_fixed_signing_key_in_signing_policy( + x509, rsa_privkey, ca_key, cert_args +): + cert_args["signing_policy"] = "test_fixed_signing_private_key" + cert_args["private_key"] = rsa_privkey + ret = x509.certificate_managed(**cert_args) + assert ret.result is True + assert ret.changes + assert ret.changes.get("created") + cert = _get_cert(cert_args["name"]) + assert _belongs_to(cert, rsa_privkey) + assert _signed_by(cert, ca_key) + + def test_certificate_managed_with_distinguished_name_kwargs( x509, cert_args, rsa_privkey, ca_key ): @@ -920,6 +961,25 @@ def test_certificate_managed_existing_with_signing_policy(x509, cert_args): _assert_not_changed(ret) +@pytest.mark.usefixtures("existing_cert") +@pytest.mark.parametrize( + "existing_cert", + [{"signing_policy": "test_fixed_signing_private_key"}], + indirect=True, +) +def test_certificate_managed_existing_with_fixed_signing_key_in_signing_policy( + x509, rsa_privkey, ca_key, cert_args +): + """ + If the policy defines a fixed signing_private_key and a certificate + is managed locally (without ca_server), the state module should not crash + when checking for changes. 
+ Issue #66414 + """ + ret = x509.certificate_managed(**cert_args) + _assert_not_changed(ret) + + @pytest.mark.usefixtures("existing_cert") @pytest.mark.parametrize( "existing_cert", From c1642c5b5018378b647c0ef43f39daed408a909d Mon Sep 17 00:00:00 2001 From: jeanluc Date: Mon, 22 Apr 2024 17:27:07 +0200 Subject: [PATCH 112/160] Make local signing with policy containing signing key work --- changelog/66414.fixed.md | 1 + salt/states/x509_v2.py | 2 ++ 2 files changed, 3 insertions(+) create mode 100644 changelog/66414.fixed.md diff --git a/changelog/66414.fixed.md b/changelog/66414.fixed.md new file mode 100644 index 00000000000..e777d18226d --- /dev/null +++ b/changelog/66414.fixed.md @@ -0,0 +1 @@ +Fixed x509_v2 certificate.managed crash for locally signed certificates if the signing policy defines signing_private_key diff --git a/salt/states/x509_v2.py b/salt/states/x509_v2.py index af1cb05e75b..93c80003e22 100644 --- a/salt/states/x509_v2.py +++ b/salt/states/x509_v2.py @@ -1606,10 +1606,12 @@ def _build_cert( ca_server=None, signing_policy=None, signing_private_key=None, **kwargs ): final_kwargs = copy.deepcopy(kwargs) + final_kwargs["signing_private_key"] = signing_private_key x509util.merge_signing_policy( __salt__["x509.get_signing_policy"](signing_policy, ca_server=ca_server), final_kwargs, ) + signing_private_key = final_kwargs.pop("signing_private_key") builder, _, private_key_loaded, signing_cert = x509util.build_crt( signing_private_key, From a37e5c704bedaef588f98df0ef0379878e6aa87f Mon Sep 17 00:00:00 2001 From: jeanluc Date: Mon, 22 Apr 2024 18:00:39 +0200 Subject: [PATCH 113/160] Use file paths for x509_v2 functional state tests ... they are more realistic. 
--- tests/pytests/functional/states/test_x509_v2.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/pytests/functional/states/test_x509_v2.py b/tests/pytests/functional/states/test_x509_v2.py index 7f4952dd073..2d5d07f4db8 100644 --- a/tests/pytests/functional/states/test_x509_v2.py +++ b/tests/pytests/functional/states/test_x509_v2.py @@ -412,11 +412,11 @@ O68= @pytest.fixture -def cert_args(tmp_path, ca_cert, ca_key): +def cert_args(tmp_path, ca_cert_file, ca_key_file): return { "name": f"{tmp_path}/cert", - "signing_private_key": ca_key, - "signing_cert": ca_cert, + "signing_private_key": str(ca_key_file), + "signing_cert": str(ca_cert_file), "CN": "success", } @@ -443,11 +443,11 @@ def cert_args_exts(): @pytest.fixture -def crl_args(tmp_path, ca_cert, ca_key): +def crl_args(tmp_path, ca_cert_file, ca_key_file): return { "name": f"{tmp_path}/crl", - "signing_private_key": ca_key, - "signing_cert": ca_cert, + "signing_private_key": str(ca_key_file), + "signing_cert": str(ca_cert_file), "revoked": [], } From da375bb682b26c590ce67c762010d59f37902aa4 Mon Sep 17 00:00:00 2001 From: Daniel Mach Date: Thu, 18 May 2023 09:26:21 +0200 Subject: [PATCH 114/160] Migrate string formatting in 'pass' renderer to a f-string --- salt/renderers/pass.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/salt/renderers/pass.py b/salt/renderers/pass.py index 12f7d47e833..16d793e99b2 100644 --- a/salt/renderers/pass.py +++ b/salt/renderers/pass.py @@ -158,9 +158,7 @@ def _fetch_secret(pass_path): pass_error = pass_error.decode("utf-8") except (AttributeError, ValueError): pass - msg = "Could not fetch secret '{}' from the password store: {}".format( - pass_path, pass_error - ) + msg = f"Could not fetch secret '{pass_path}' from the password store: {pass_error}" if pass_strict_fetch: raise SaltRenderError(msg) else: From 3bc6e3a01d0258fba8a764da2a139a2458b3f55e Mon Sep 17 00:00:00 2001 From: Daniel Mach Date: Thu, 18 May 2023 
10:15:03 +0200 Subject: [PATCH 115/160] Fix utf8 handling in 'pass' renderer and make it more robust (cherry picked from commit 8dfc923876e4a9b6e88efb0a5598c93dbbf967da) --- changelog/64300.fixed.md | 1 + salt/renderers/pass.py | 8 +- tests/pytests/unit/renderers/test_pass.py | 99 +++++++++++++++++++++++ 3 files changed, 102 insertions(+), 6 deletions(-) create mode 100644 changelog/64300.fixed.md diff --git a/changelog/64300.fixed.md b/changelog/64300.fixed.md new file mode 100644 index 00000000000..4418db1d04c --- /dev/null +++ b/changelog/64300.fixed.md @@ -0,0 +1 @@ +Fix utf8 handling in 'pass' renderer diff --git a/salt/renderers/pass.py b/salt/renderers/pass.py index 16d793e99b2..825810ce68f 100644 --- a/salt/renderers/pass.py +++ b/salt/renderers/pass.py @@ -144,20 +144,16 @@ def _fetch_secret(pass_path): env["GNUPGHOME"] = pass_gnupghome try: - proc = Popen(cmd, stdout=PIPE, stderr=PIPE, env=env) + proc = Popen(cmd, stdout=PIPE, stderr=PIPE, env=env, encoding="utf-8") pass_data, pass_error = proc.communicate() pass_returncode = proc.returncode - except OSError as e: + except (OSError, UnicodeDecodeError) as e: pass_data, pass_error = "", str(e) pass_returncode = 1 # The version of pass used during development sent output to # stdout instead of stderr even though its returncode was non zero. 
if pass_returncode or not pass_data: - try: - pass_error = pass_error.decode("utf-8") - except (AttributeError, ValueError): - pass msg = f"Could not fetch secret '{pass_path}' from the password store: {pass_error}" if pass_strict_fetch: raise SaltRenderError(msg) diff --git a/tests/pytests/unit/renderers/test_pass.py b/tests/pytests/unit/renderers/test_pass.py index 1e2ebb7ea8b..f7c79e1fe17 100644 --- a/tests/pytests/unit/renderers/test_pass.py +++ b/tests/pytests/unit/renderers/test_pass.py @@ -1,8 +1,12 @@ import importlib +import os +import shutil +import tempfile import pytest import salt.exceptions +import salt.utils.files from tests.support.mock import MagicMock, patch # "pass" is a reserved keyword, we need to import it differently @@ -19,6 +23,47 @@ def configure_loader_modules(master_opts): } +@pytest.fixture() +def pass_executable(request): + tmp_dir = tempfile.mkdtemp(prefix="salt_pass_") + pass_path = os.path.join(tmp_dir, "pass") + with salt.utils.files.fopen(pass_path, "w") as f: + f.write("#!/bin/sh\n") + # return path path wrapped into unicode characters + # pass args ($1, $2) are ("show", ) + f.write('echo "α>>> $2 <<<β"\n') + os.chmod(pass_path, 0o755) + yield pass_path + shutil.rmtree(tmp_dir) + + +@pytest.fixture() +def pass_executable_error(request): + tmp_dir = tempfile.mkdtemp(prefix="salt_pass_") + pass_path = os.path.join(tmp_dir, "pass") + with salt.utils.files.fopen(pass_path, "w") as f: + f.write("#!/bin/sh\n") + # return error message with unicode characters + f.write('echo "ERROR: αβγ" >&2\n') + f.write("exit 1\n") + os.chmod(pass_path, 0o755) + yield pass_path + shutil.rmtree(tmp_dir) + + +@pytest.fixture() +def pass_executable_invalid_utf8(request): + tmp_dir = tempfile.mkdtemp(prefix="salt_pass_") + pass_path = os.path.join(tmp_dir, "pass") + with salt.utils.files.fopen(pass_path, "wb") as f: + f.write(b"#!/bin/sh\n") + # return invalid utf-8 sequence + f.write(b'echo "\x80\x81"\n') + os.chmod(pass_path, 0o755) + yield pass_path + 
shutil.rmtree(tmp_dir) + + # The default behavior is that if fetching a secret from pass fails, # the value is passed through. Even the trailing newlines are preserved. def test_passthrough(): @@ -161,3 +206,57 @@ def test_env(): call_args, call_kwargs = popen_mock.call_args_list[0] assert call_kwargs["env"]["GNUPGHOME"] == config["pass_gnupghome"] assert call_kwargs["env"]["PASSWORD_STORE_DIR"] == config["pass_dir"] + + +@pytest.mark.skip_on_windows(reason="Not supported on Windows") +def test_utf8(pass_executable): + config = { + "pass_variable_prefix": "pass:", + "pass_strict_fetch": True, + } + mocks = { + "_get_pass_exec": MagicMock(return_value=pass_executable), + } + + pass_path = "pass:secret" + with patch.dict(pass_.__opts__, config), patch.dict(pass_.__dict__, mocks): + result = pass_.render(pass_path) + assert result == "α>>> secret <<<β" + + +@pytest.mark.skip_on_windows(reason="Not supported on Windows") +def test_utf8_error(pass_executable_error): + config = { + "pass_variable_prefix": "pass:", + "pass_strict_fetch": True, + } + mocks = { + "_get_pass_exec": MagicMock(return_value=pass_executable_error), + } + + pass_path = "pass:secret" + with patch.dict(pass_.__opts__, config), patch.dict(pass_.__dict__, mocks): + with pytest.raises( + salt.exceptions.SaltRenderError, + match=r"Could not fetch secret 'secret' from the password store: ERROR: αβγ", + ): + result = pass_.render(pass_path) + + +@pytest.mark.skip_on_windows(reason="Not supported on Windows") +def test_invalid_utf8(pass_executable_invalid_utf8): + config = { + "pass_variable_prefix": "pass:", + "pass_strict_fetch": True, + } + mocks = { + "_get_pass_exec": MagicMock(return_value=pass_executable_invalid_utf8), + } + + pass_path = "pass:secret" + with patch.dict(pass_.__opts__, config), patch.dict(pass_.__dict__, mocks): + with pytest.raises( + salt.exceptions.SaltRenderError, + match=r"Could not fetch secret 'secret' from the password store: 'utf-8' codec can't decode byte 0x80 in position 
0: invalid start byte", + ): + result = pass_.render(pass_path) From 0b92bfdf80ab40787072d546b6962c77dd24e0b7 Mon Sep 17 00:00:00 2001 From: Matthias Kesler Date: Thu, 2 May 2024 13:04:17 +0200 Subject: [PATCH 116/160] Fix #66194: Exchange HTTPClient by AsyncHTTPClient in salt.utils.http --- changelog/66330.fixed.md | 1 + salt/utils/http.py | 13 +++++++------ 2 files changed, 8 insertions(+), 6 deletions(-) create mode 100644 changelog/66330.fixed.md diff --git a/changelog/66330.fixed.md b/changelog/66330.fixed.md new file mode 100644 index 00000000000..25d7caf9923 --- /dev/null +++ b/changelog/66330.fixed.md @@ -0,0 +1 @@ +fix #66194: Exchange HTTPClient by AsyncHTTPClient in salt.utils.http diff --git a/salt/utils/http.py b/salt/utils/http.py index 4d6c53faf6e..67fc05ce469 100644 --- a/salt/utils/http.py +++ b/salt/utils/http.py @@ -25,7 +25,7 @@ import zlib import tornado.httpclient import tornado.httputil import tornado.simple_httpclient -from tornado.httpclient import HTTPClient +from tornado.httpclient import AsyncHTTPClient import salt.config import salt.loader @@ -43,6 +43,7 @@ import salt.utils.xmlutil as xml import salt.utils.yaml import salt.version from salt.template import compile_template +from salt.utils.asynchronous import SyncWrapper from salt.utils.decorators.jinja import jinja_filter try: @@ -598,7 +599,7 @@ def query( salt.config.DEFAULT_MINION_OPTS["http_request_timeout"], ) - tornado.httpclient.AsyncHTTPClient.configure(None) + AsyncHTTPClient.configure(None) client_argspec = salt.utils.args.get_function_argspec( tornado.simple_httpclient.SimpleAsyncHTTPClient.initialize ) @@ -629,10 +630,10 @@ def query( req_kwargs = salt.utils.data.decode(req_kwargs, to_str=True) try: - download_client = ( - HTTPClient(max_body_size=max_body) - if supports_max_body_size - else HTTPClient() + download_client = SyncWrapper( + AsyncHTTPClient, + kwargs={"max_body_size": max_body} if supports_max_body_size else {}, + async_methods=["fetch"], ) result = 
download_client.fetch(url_full, **req_kwargs) except tornado.httpclient.HTTPError as exc: From a2fac531279c7e4f7a69b8c12f1564e7bfda88c4 Mon Sep 17 00:00:00 2001 From: "Jamie (Bear) Murphy" <1613241+ITJamie@users.noreply.github.com> Date: Mon, 27 May 2024 20:34:16 +0100 Subject: [PATCH 117/160] test httpclient in pillar --- .../pillar/test_httpclient_in_pillar.py | 27 +++++++++++++++++++ 1 file changed, 27 insertions(+) create mode 100644 tests/pytests/integration/pillar/test_httpclient_in_pillar.py diff --git a/tests/pytests/integration/pillar/test_httpclient_in_pillar.py b/tests/pytests/integration/pillar/test_httpclient_in_pillar.py new file mode 100644 index 00000000000..8cbecbfcce2 --- /dev/null +++ b/tests/pytests/integration/pillar/test_httpclient_in_pillar.py @@ -0,0 +1,27 @@ + +def test_pillar_using_http_query(salt_master, salt_minion, salt_cli, tmp_path): + pillar_top = """ + base: + "*": + - http_pillar_test + """ + my_pillar = """ + {%- set something = salt['http.query']('https://raw.githubusercontent.com/saltstack/salt/master/.pre-commit-config.yaml', raise_error=False, verify_ssl=False, status=True, timeout=5).status %} + http_query_test: {{ something }} + """ + + + with salt_master.pillar_tree.base.temp_file("top.sls", pillar_top): + with salt_master.pillar_tree.base.temp_file("http_pillar_test.sls", my_pillar): + with salt_master.pillar_tree.base.temp_file("http_pillar_test.sls", my_pillar): + ret = salt_cli.run("state.apply", minion_tgt=salt_minion.id) + assert ret.returncode == 1 + assert ( + ret.data["no_|-states_|-states_|-None"]["comment"] + == "No states found for this minion" + ) + + pillar_ret = salt_cli.run("pillar.item", "http_query_test", minion_tgt=salt_minion.id) + assert pillar_ret.returncode == 0 + + assert '"http_query_test": 200' in pillar_ret.stdout From a461c4bbec6f2d4bb22d9994aec04cc9a1d050bc Mon Sep 17 00:00:00 2001 From: Jamie Murphy Date: Tue, 4 Jun 2024 12:46:52 +0100 Subject: [PATCH 118/160] precommit fixes --- 
.../integration/pillar/test_httpclient_in_pillar.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/tests/pytests/integration/pillar/test_httpclient_in_pillar.py b/tests/pytests/integration/pillar/test_httpclient_in_pillar.py index 8cbecbfcce2..905a8f51cb3 100644 --- a/tests/pytests/integration/pillar/test_httpclient_in_pillar.py +++ b/tests/pytests/integration/pillar/test_httpclient_in_pillar.py @@ -1,4 +1,3 @@ - def test_pillar_using_http_query(salt_master, salt_minion, salt_cli, tmp_path): pillar_top = """ base: @@ -10,10 +9,11 @@ def test_pillar_using_http_query(salt_master, salt_minion, salt_cli, tmp_path): http_query_test: {{ something }} """ - with salt_master.pillar_tree.base.temp_file("top.sls", pillar_top): with salt_master.pillar_tree.base.temp_file("http_pillar_test.sls", my_pillar): - with salt_master.pillar_tree.base.temp_file("http_pillar_test.sls", my_pillar): + with salt_master.pillar_tree.base.temp_file( + "http_pillar_test.sls", my_pillar + ): ret = salt_cli.run("state.apply", minion_tgt=salt_minion.id) assert ret.returncode == 1 assert ( @@ -21,7 +21,9 @@ def test_pillar_using_http_query(salt_master, salt_minion, salt_cli, tmp_path): == "No states found for this minion" ) - pillar_ret = salt_cli.run("pillar.item", "http_query_test", minion_tgt=salt_minion.id) + pillar_ret = salt_cli.run( + "pillar.item", "http_query_test", minion_tgt=salt_minion.id + ) assert pillar_ret.returncode == 0 assert '"http_query_test": 200' in pillar_ret.stdout From 561ad66dbb81414c2b5e9d847ebca7661566eb71 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Sat, 22 Jun 2024 01:33:01 -0700 Subject: [PATCH 119/160] Fix linter error --- tests/pytests/unit/utils/test_gitfs_locks.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index 241f61cefbc..56af63df064 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -12,7 +12,7 @@ import time import pytest from saltfactories.utils import random_string -import salt.ext.tornado.ioloop +import tornado.ioloop import salt.fileserver.gitfs import salt.utils.files import salt.utils.gitfs @@ -45,7 +45,7 @@ def _get_user(): def _clear_instance_map(): try: del salt.utils.gitfs.GitFS.instance_map[ - salt.ext.tornado.ioloop.IOLoop.current() + tornado.ioloop.IOLoop.current() ] except KeyError: pass From 9c12b06903ff65627f76ee85f160a599ac1e308f Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Sat, 22 Jun 2024 04:11:08 -0700 Subject: [PATCH 120/160] test fixes --- tests/pytests/unit/transport/test_zeromq.py | 41 ++++++++++---------- tests/pytests/unit/utils/test_gitfs_locks.py | 6 +-- 2 files changed, 23 insertions(+), 24 deletions(-) diff --git a/tests/pytests/unit/transport/test_zeromq.py b/tests/pytests/unit/transport/test_zeromq.py index 220b1d37d5e..cb224034124 100644 --- a/tests/pytests/unit/transport/test_zeromq.py +++ b/tests/pytests/unit/transport/test_zeromq.py @@ -9,6 +9,7 @@ import uuid import msgpack import pytest +import tornado.gen import zmq.eventloop.future import salt.config @@ -357,14 +358,14 @@ def run_loop_in_thread(loop, evt): """ loop.make_current() - @salt.ext.tornado.gen.coroutine + @tornado.gen.coroutine def stopper(): - yield salt.ext.tornado.gen.sleep(0.1) + yield tornado.gen.sleep(0.1) while True: if not evt.is_set(): loop.stop() break - yield salt.ext.tornado.gen.sleep(0.3) + yield tornado.gen.sleep(0.3) loop.add_callback(evt.set) loop.add_callback(stopper) @@ -396,7 
+397,7 @@ class MockSaltMinionMaster: self.server_channel = salt.channel.server.ReqServerChannel.factory(master_opts) self.server_channel.pre_fork(self.process_manager) - self.io_loop = salt.ext.tornado.ioloop.IOLoop() + self.io_loop = tornado.ioloop.IOLoop() self.evt = threading.Event() self.server_channel.post_fork(self._handle_payload, io_loop=self.io_loop) self.server_thread = threading.Thread( @@ -439,13 +440,13 @@ class MockSaltMinionMaster: # pylint: enable=W1701 @classmethod - @salt.ext.tornado.gen.coroutine + @tornado.gen.coroutine def _handle_payload(cls, payload): """ TODO: something besides echo """ cls.mock._handle_payload_hook() - raise salt.ext.tornado.gen.Return((payload, {"fun": "send_clear"})) + raise tornado.gen.Return((payload, {"fun": "send_clear"})) @pytest.mark.parametrize("message", ["", [], ()]) @@ -475,7 +476,7 @@ def test_serverside_exception(temp_salt_minion, temp_salt_master): """ with MockSaltMinionMaster(temp_salt_minion, temp_salt_master) as minion_master: with patch.object(minion_master.mock, "_handle_payload_hook") as _mock: - _mock.side_effect = salt.ext.tornado.gen.Return(({}, {"fun": "madeup-fun"})) + _mock.side_effect = tornado.gen.Return(({}, {"fun": "madeup-fun"})) ret = minion_master.channel.send({}, timeout=5, tries=1) assert ret == "Server-side exception handling payload" @@ -498,7 +499,7 @@ def test_zeromq_async_pub_channel_publish_port(temp_salt_master): sign_pub_messages=False, ) opts["master_uri"] = "tcp://{interface}:{publish_port}".format(**opts) - ioloop = salt.ext.tornado.ioloop.IOLoop() + ioloop = tornado.ioloop.IOLoop() transport = salt.transport.zeromq.PublishClient(opts, ioloop) with transport: patch_socket = MagicMock(return_value=True) @@ -540,7 +541,7 @@ def test_zeromq_async_pub_channel_filtering_decode_message_no_match( ) opts["master_uri"] = "tcp://{interface}:{publish_port}".format(**opts) - ioloop = salt.ext.tornado.ioloop.IOLoop() + ioloop = tornado.ioloop.IOLoop() channel = 
salt.transport.zeromq.PublishClient(opts, ioloop) with channel: with patch( @@ -587,7 +588,7 @@ def test_zeromq_async_pub_channel_filtering_decode_message( ) opts["master_uri"] = "tcp://{interface}:{publish_port}".format(**opts) - ioloop = salt.ext.tornado.ioloop.IOLoop() + ioloop = tornado.ioloop.IOLoop() channel = salt.transport.zeromq.PublishClient(opts, ioloop) with channel: with patch( @@ -602,7 +603,7 @@ def test_zeromq_async_pub_channel_filtering_decode_message( def test_req_server_chan_encrypt_v2( pki_dir, encryption_algorithm, signing_algorithm, master_opts ): - loop = salt.ext.tornado.ioloop.IOLoop.current() + loop = tornado.ioloop.IOLoop.current() master_opts.update( { "worker_threads": 1, @@ -651,7 +652,7 @@ def test_req_server_chan_encrypt_v2( def test_req_server_chan_encrypt_v1(pki_dir, encryption_algorithm, master_opts): - loop = salt.ext.tornado.ioloop.IOLoop.current() + loop = tornado.ioloop.IOLoop.current() master_opts.update( { "worker_threads": 1, @@ -767,7 +768,7 @@ async def test_req_chan_decode_data_dict_entry_v2(minion_opts, master_opts, pki_ print(minion_opts["encryption_algorithm"]) - @salt.ext.tornado.gen.coroutine + @tornado.gen.coroutine def mocksend(msg, timeout=60, tries=3): client.transport.msg = msg load = client.auth.crypticle.loads(msg["load"]) @@ -780,7 +781,7 @@ async def test_req_chan_decode_data_dict_entry_v2(minion_opts, master_opts, pki_ encryption_algorithm=minion_opts["encryption_algorithm"], signing_algorithm=minion_opts["signing_algorithm"], ) - raise salt.ext.tornado.gen.Return(ret) + raise tornado.gen.Return(ret) client.transport.send = mocksend @@ -853,10 +854,10 @@ async def test_req_chan_decode_data_dict_entry_v2_bad_nonce( signing_algorithm=minion_opts["signing_algorithm"], ) - @salt.ext.tornado.gen.coroutine + @tornado.gen.coroutine def mocksend(msg, timeout=60, tries=3): client.transport.msg = msg - raise salt.ext.tornado.gen.Return(ret) + raise tornado.gen.Return(ret) client.transport.send = mocksend @@ -920,7 
+921,7 @@ async def test_req_chan_decode_data_dict_entry_v2_bad_signature( client.auth.crypticle.loads = auth.crypticle.loads client.transport = MagicMock() - @salt.ext.tornado.gen.coroutine + @tornado.gen.coroutine def mocksend(msg, timeout=60, tries=3): client.transport.msg = msg load = client.auth.crypticle.loads(msg["load"]) @@ -944,7 +945,7 @@ async def test_req_chan_decode_data_dict_entry_v2_bad_signature( data["pillar"] = {"pillar1": "bar"} signed_msg["data"] = salt.payload.dumps(data) ret[dictkey] = pcrypt.dumps(signed_msg) - raise salt.ext.tornado.gen.Return(ret) + raise tornado.gen.Return(ret) client.transport.send = mocksend @@ -1008,7 +1009,7 @@ async def test_req_chan_decode_data_dict_entry_v2_bad_key( client.auth.crypticle.loads = auth.crypticle.loads client.transport = MagicMock() - @salt.ext.tornado.gen.coroutine + @tornado.gen.coroutine def mocksend(msg, timeout=60, tries=3): client.transport.msg = msg load = client.auth.crypticle.loads(msg["load"]) @@ -1035,7 +1036,7 @@ async def test_req_chan_decode_data_dict_entry_v2_bad_key( ret[dictkey] = pcrypt.dumps(signed_msg) key = salt.utils.stringutils.to_bytes(key) ret["key"] = pub.encrypt(key, minion_opts["encryption_algorithm"]) - raise salt.ext.tornado.gen.Return(ret) + raise tornado.gen.Return(ret) client.transport.send = mocksend diff --git a/tests/pytests/unit/utils/test_gitfs_locks.py b/tests/pytests/unit/utils/test_gitfs_locks.py index 56af63df064..b599a0733b0 100644 --- a/tests/pytests/unit/utils/test_gitfs_locks.py +++ b/tests/pytests/unit/utils/test_gitfs_locks.py @@ -10,9 +10,9 @@ import signal import time import pytest +import tornado.ioloop from saltfactories.utils import random_string -import tornado.ioloop import salt.fileserver.gitfs import salt.utils.files import salt.utils.gitfs @@ -44,9 +44,7 @@ def _get_user(): def _clear_instance_map(): try: - del salt.utils.gitfs.GitFS.instance_map[ - tornado.ioloop.IOLoop.current() - ] + del 
salt.utils.gitfs.GitFS.instance_map[tornado.ioloop.IOLoop.current()] except KeyError: pass From 5f1b51901c57010f32a60604bae62bbbfc3b479f Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Sat, 22 Jun 2024 07:40:28 -0700 Subject: [PATCH 121/160] Avoid circular import --- salt/channel/server.py | 7 +++-- salt/utils/jinja.py | 3 +- tests/pytests/unit/crypt/test_crypt.py | 3 +- .../unit/crypt/test_crypt_cryptodome.py | 29 ------------------- 4 files changed, 8 insertions(+), 34 deletions(-) diff --git a/salt/channel/server.py b/salt/channel/server.py index 9e7258e17c1..d54fafbf827 100644 --- a/salt/channel/server.py +++ b/salt/channel/server.py @@ -57,9 +57,10 @@ class ReqServerChannel: def __init__(self, opts, transport): self.opts = opts self.transport = transport - # The event and master_key attributes will be populated after fork. - self.event = None - self.master_key = None + self.event = salt.utils.event.get_master_event( + self.opts, self.opts["sock_dir"], listen=False + ) + self.master_key = salt.crypt.MasterKeys(self.opts) @property def aes_key(self): diff --git a/salt/utils/jinja.py b/salt/utils/jinja.py index f802156ddb8..6c65976ea3b 100644 --- a/salt/utils/jinja.py +++ b/salt/utils/jinja.py @@ -22,7 +22,6 @@ from jinja2.environment import TemplateModule from jinja2.exceptions import TemplateRuntimeError from jinja2.ext import Extension -import salt.fileclient import salt.utils.data import salt.utils.files import salt.utils.json @@ -93,6 +92,8 @@ class SaltCacheLoader(BaseLoader): or not hasattr(self._file_client, "opts") or self._file_client.opts["file_roots"] != self.opts["file_roots"] ): + import salt.fileclient + self._file_client = salt.fileclient.get_file_client( self.opts, self.pillar_rend ) diff --git a/tests/pytests/unit/crypt/test_crypt.py b/tests/pytests/unit/crypt/test_crypt.py index 5ef1613281b..349e820701c 100644 --- a/tests/pytests/unit/crypt/test_crypt.py +++ b/tests/pytests/unit/crypt/test_crypt.py @@ -155,7 +155,8 @@ def 
test_master_keys_with_cluster_id(tmp_path, master_opts): def test_pwdata_decrypt(): key_string = dedent( - """-----BEGIN RSA PRIVATE KEY----- + """ + -----BEGIN RSA PRIVATE KEY----- MIIEpQIBAAKCAQEAzhBRyyHa7b63RLE71uKMKgrpulcAJjaIaN68ltXcCvy4w9pi Kj+4I3Qp6RvUaHOEmymqyjOMjQc6iwpe0scCFqh3nUk5YYaLZ3WAW0htQVlnesgB ZiBg9PBeTQY/LzqtudL6RCng/AX+fbnCsddlIysRxnUoNVMvz0gAmCY2mnTDjcTt diff --git a/tests/pytests/unit/crypt/test_crypt_cryptodome.py b/tests/pytests/unit/crypt/test_crypt_cryptodome.py index 442cc9a50f7..6d40353d01f 100644 --- a/tests/pytests/unit/crypt/test_crypt_cryptodome.py +++ b/tests/pytests/unit/crypt/test_crypt_cryptodome.py @@ -6,8 +6,6 @@ import pytest import salt.crypt from tests.support.mock import MagicMock, MockCall, mock_open, patch -from . import MSG, PRIVKEY_DATA, PUBKEY_DATA, SIG - RSA = pytest.importorskip("Cryptodome.PublicKey.RSA") try: @@ -96,30 +94,3 @@ def test_gen_keys_with_passphrase(tmp_path): salt.crypt.gen_keys(key_path, "keyname", 2048) assert open_priv_wb in m_open.calls assert open_pub_wb in m_open.calls - - -def test_sign_message(): - key = RSA.importKey(PRIVKEY_DATA) - with patch("salt.crypt.get_rsa_key", return_value=key): - assert SIG == salt.crypt.sign_message("/keydir/keyname.pem", MSG) - - -def test_sign_message_with_passphrase(): - key = RSA.importKey(PRIVKEY_DATA) - with patch("salt.crypt.get_rsa_key", return_value=key): - assert SIG == salt.crypt.sign_message( - "/keydir/keyname.pem", MSG, passphrase="password" - ) - - -def test_verify_signature(): - with patch("salt.utils.files.fopen", mock_open(read_data=PUBKEY_DATA)): - assert salt.crypt.verify_signature("/keydir/keyname.pub", MSG, SIG) - - -def test_bad_key(key_to_test): - """ - Load public key with an invalid header and validate it without m2crypto - """ - key = salt.crypt.get_rsa_pub_key(key_to_test) - assert key.can_encrypt() From 84a9175e2c4bd2efb3734dc51b5b77e771f5238d Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Sat, 22 Jun 2024 11:42:42 -0700 Subject: [PATCH 122/160] Fix merge forward duplicate tests --- .../unit/crypt/test_crypt_cryptodome.py | 39 - tests/pytests/unit/test_request_channel.py | 469 ++++-- tests/pytests/unit/transport/test_zeromq.py | 1354 ----------------- 3 files changed, 355 insertions(+), 1507 deletions(-) diff --git a/tests/pytests/unit/crypt/test_crypt_cryptodome.py b/tests/pytests/unit/crypt/test_crypt_cryptodome.py index 6d40353d01f..8a27e36a1d8 100644 --- a/tests/pytests/unit/crypt/test_crypt_cryptodome.py +++ b/tests/pytests/unit/crypt/test_crypt_cryptodome.py @@ -55,42 +55,3 @@ def test_gen_keys(tmp_path): salt.crypt.gen_keys(key_path, "keyname", 2048) assert open_priv_wb in m_open.calls assert open_pub_wb in m_open.calls - - -@pytest.mark.slow_test -def test_gen_keys_with_passphrase(tmp_path): - - key_path = str(tmp_path / "keydir") - open_priv_wb = MockCall(os.path.join(key_path, "keyname.pem"), "wb+") - open_pub_wb = MockCall(os.path.join(key_path, "keyname.pub"), "wb+") - - real_is_file = os.path.isfile - - def is_file(path): - if path.startswith(str(tmp_path)): - return False - return real_is_file(path) - - with patch.multiple( - os, - umask=MagicMock(), - chmod=MagicMock(), - access=MagicMock(return_value=True), - ): - salt.crypt.gen_keys(key_path, "keyname", 2048) - with patch("salt.utils.files.fopen", mock_open()) as m_open, patch( - "os.path.isfile", return_value=True - ): - result = salt.crypt.gen_keys( - key_path, "keyname", 2048, passphrase="password" - ) - assert result == os.path.join(key_path, "keyname.pem") - assert open_priv_wb not in m_open.calls - assert open_pub_wb not in m_open.calls - - with patch("salt.utils.files.fopen", mock_open()) as m_open, patch( - "salt.crypt.os.path.isfile", is_file - ): - salt.crypt.gen_keys(key_path, "keyname", 2048) - assert open_priv_wb in m_open.calls - assert open_pub_wb in m_open.calls diff --git a/tests/pytests/unit/test_request_channel.py 
b/tests/pytests/unit/test_request_channel.py index f4a0f031266..624c1f4adc1 100644 --- a/tests/pytests/unit/test_request_channel.py +++ b/tests/pytests/unit/test_request_channel.py @@ -22,19 +22,9 @@ import salt.transport.zeromq import salt.utils.process import salt.utils.stringutils from salt.master import SMaster +from tests.conftest import FIPS_TESTRUN from tests.support.mock import MagicMock, patch -try: - from M2Crypto import RSA - - HAS_M2 = True -except ImportError: - HAS_M2 = False - try: - from Cryptodome.Cipher import PKCS1_OAEP - except ImportError: - from Crypto.Cipher import PKCS1_OAEP # nosec - log = logging.getLogger(__name__) @@ -218,6 +208,20 @@ oQIDAQAB AES_KEY = "8wxWlOaMMQ4d3yT74LL4+hGrGTf65w8VgrcNjLJeLRQ2Q6zMa8ItY2EQUgMKKDb7JY+RnPUxbB0=" +@pytest.fixture +def signing_algorithm(): + if FIPS_TESTRUN: + return salt.crypt.PKCS1v15_SHA224 + return salt.crypt.PKCS1v15_SHA1 + + +@pytest.fixture +def encryption_algorithm(): + if FIPS_TESTRUN: + return salt.crypt.OAEP_SHA224 + return salt.crypt.OAEP_SHA1 + + @pytest.fixture def pki_dir(tmp_path): _pki_dir = tmp_path / "pki" @@ -478,58 +482,9 @@ def test_serverside_exception(temp_salt_minion, temp_salt_master): assert ret == "Server-side exception handling payload" -def test_req_server_chan_encrypt_v2(master_opts, pki_dir): - loop = tornado.ioloop.IOLoop.current() - master_opts.update( - { - "worker_threads": 1, - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "zmq_monitor": False, - "mworker_queue_niceness": False, - "sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("master")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - } - ) - server = salt.channel.server.ReqServerChannel.factory(master_opts) - dictkey = "pillar" - nonce = "abcdefg" - pillar_data = {"pillar1": "meh"} - try: - ret = server._encrypt_private(pillar_data, dictkey, "minion", nonce) - assert "key" in ret - assert dictkey in ret - - key = salt.crypt.get_rsa_key( - 
str(pki_dir.joinpath("minion", "minion.pem")), None - ) - if HAS_M2: - aes = key.private_decrypt(ret["key"], RSA.pkcs1_oaep_padding) - else: - cipher = PKCS1_OAEP.new(key) # pylint: disable=used-before-assignment - aes = cipher.decrypt(ret["key"]) - pcrypt = salt.crypt.Crypticle(master_opts, aes) - signed_msg = pcrypt.loads(ret[dictkey]) - - assert "sig" in signed_msg - assert "data" in signed_msg - data = salt.payload.loads(signed_msg["data"]) - assert "key" in data - assert data["key"] == ret["key"] - assert "key" in data - assert data["nonce"] == nonce - assert "pillar" in data - assert data["pillar"] == pillar_data - finally: - server.close() - - -def test_req_server_chan_encrypt_v1(master_opts, pki_dir): +def test_req_server_chan_encrypt_v2( + pki_dir, encryption_algorithm, signing_algorithm, master_opts +): loop = tornado.ioloop.IOLoop.current() master_opts.update( { @@ -553,20 +508,70 @@ def test_req_server_chan_encrypt_v1(master_opts, pki_dir): pillar_data = {"pillar1": "meh"} try: ret = server._encrypt_private( - pillar_data, dictkey, "minion", sign_messages=False + pillar_data, + dictkey, + "minion", + nonce, + encryption_algorithm=encryption_algorithm, + signing_algorithm=signing_algorithm, + ) + assert "key" in ret + assert dictkey in ret + + key = salt.crypt.PrivateKey(str(pki_dir.joinpath("minion", "minion.pem"))) + aes = key.decrypt(ret["key"], encryption_algorithm) + pcrypt = salt.crypt.Crypticle(master_opts, aes) + signed_msg = pcrypt.loads(ret[dictkey]) + + assert "sig" in signed_msg + assert "data" in signed_msg + data = salt.payload.loads(signed_msg["data"]) + assert "key" in data + assert data["key"] == ret["key"] + assert "key" in data + assert data["nonce"] == nonce + assert "pillar" in data + assert data["pillar"] == pillar_data + finally: + server.close() + + +def test_req_server_chan_encrypt_v1(pki_dir, encryption_algorithm, master_opts): + loop = tornado.ioloop.IOLoop.current() + master_opts.update( + { + "worker_threads": 1, + 
"master_uri": "tcp://127.0.0.1:4506", + "interface": "127.0.0.1", + "ret_port": 4506, + "ipv6": False, + "zmq_monitor": False, + "mworker_queue_niceness": False, + "sock_dir": ".", + "pki_dir": str(pki_dir.joinpath("master")), + "id": "minion", + "__role": "minion", + "keysize": 4096, + } + ) + server = salt.channel.server.ReqServerChannel.factory(master_opts) + dictkey = "pillar" + nonce = "abcdefg" + pillar_data = {"pillar1": "meh"} + try: + ret = server._encrypt_private( + pillar_data, + dictkey, + "minion", + sign_messages=False, + encryption_algorithm=encryption_algorithm, ) assert "key" in ret assert dictkey in ret - key = salt.crypt.get_rsa_key( - str(pki_dir.joinpath("minion", "minion.pem")), None - ) - if HAS_M2: - aes = key.private_decrypt(ret["key"], RSA.pkcs1_oaep_padding) - else: - cipher = PKCS1_OAEP.new(key) - aes = cipher.decrypt(ret["key"]) + key = salt.crypt.PrivateKey(str(pki_dir.joinpath("minion", "minion.pem"))) + aes = key.decrypt(ret["key"], encryption_algorithm) pcrypt = salt.crypt.Crypticle(master_opts, aes) data = pcrypt.loads(ret[dictkey]) assert data == pillar_data @@ -574,7 +579,9 @@ def test_req_server_chan_encrypt_v1(master_opts, pki_dir): server.close() -def test_req_chan_decode_data_dict_entry_v1(minion_opts, master_opts, pki_dir): +def test_req_chan_decode_data_dict_entry_v1( + pki_dir, encryption_algorithm, minion_opts, master_opts +): mockloop = MagicMock() minion_opts.update( { @@ -591,20 +598,22 @@ def test_req_chan_decode_data_dict_entry_v1(minion_opts, master_opts, pki_dir): "acceptance_wait_time_max": 3, } ) - master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) + master_opts = dict(master_opts, pki_dir=str(pki_dir.joinpath("master"))) server = salt.channel.server.ReqServerChannel.factory(master_opts) client = salt.channel.client.ReqChannel.factory(minion_opts, io_loop=mockloop) try: dictkey = "pillar" target = "minion" pillar_data = {"pillar1": "meh"} - ret = server._encrypt_private(pillar_data, dictkey, target, 
sign_messages=False) + ret = server._encrypt_private( + pillar_data, + dictkey, + target, + sign_messages=False, + encryption_algorithm=encryption_algorithm, + ) key = client.auth.get_keys() - if HAS_M2: - aes = key.private_decrypt(ret["key"], RSA.pkcs1_oaep_padding) - else: - cipher = PKCS1_OAEP.new(key) - aes = cipher.decrypt(ret["key"]) + aes = key.decrypt(ret["key"], encryption_algorithm) pcrypt = salt.crypt.Crypticle(client.opts, aes) ret_pillar_data = pcrypt.loads(ret[dictkey]) assert ret_pillar_data == pillar_data @@ -647,16 +656,22 @@ async def test_req_chan_decode_data_dict_entry_v2(minion_opts, master_opts, pki_ client.auth.get_keys = auth.get_keys client.auth.crypticle.dumps = auth.crypticle.dumps client.auth.crypticle.loads = auth.crypticle.loads - real_transport = client.transport client.transport = MagicMock() - real_transport.close() + + print(minion_opts["encryption_algorithm"]) @tornado.gen.coroutine def mocksend(msg, timeout=60, tries=3): client.transport.msg = msg load = client.auth.crypticle.loads(msg["load"]) ret = server._encrypt_private( - pillar_data, dictkey, target, nonce=load["nonce"], sign_messages=True + pillar_data, + dictkey, + target, + nonce=load["nonce"], + sign_messages=True, + encryption_algorithm=minion_opts["encryption_algorithm"], + signing_algorithm=minion_opts["signing_algorithm"], ) raise tornado.gen.Return(ret) @@ -764,7 +779,7 @@ async def test_req_chan_decode_data_dict_entry_v2_bad_nonce( async def test_req_chan_decode_data_dict_entry_v2_bad_signature( - minion_opts, master_opts, pki_dir + pki_dir, minion_opts, master_opts ): mockloop = MagicMock() minion_opts.update( @@ -800,24 +815,24 @@ async def test_req_chan_decode_data_dict_entry_v2_bad_signature( client.auth.get_keys = auth.get_keys client.auth.crypticle.dumps = auth.crypticle.dumps client.auth.crypticle.loads = auth.crypticle.loads - real_transport = client.transport client.transport = MagicMock() - real_transport.close() @tornado.gen.coroutine def mocksend(msg, 
timeout=60, tries=3): client.transport.msg = msg load = client.auth.crypticle.loads(msg["load"]) ret = server._encrypt_private( - pillar_data, dictkey, target, nonce=load["nonce"], sign_messages=True + pillar_data, + dictkey, + target, + nonce=load["nonce"], + sign_messages=True, + encryption_algorithm=minion_opts["encryption_algorithm"], + signing_algorithm=minion_opts["signing_algorithm"], ) key = client.auth.get_keys() - if HAS_M2: - aes = key.private_decrypt(ret["key"], RSA.pkcs1_oaep_padding) - else: - cipher = PKCS1_OAEP.new(key) - aes = cipher.decrypt(ret["key"]) + aes = key.decrypt(ret["key"], minion_opts["encryption_algorithm"]) pcrypt = salt.crypt.Crypticle(client.opts, aes) signed_msg = pcrypt.loads(ret[dictkey]) # Changing the pillar data will cause the signature verification to @@ -856,7 +871,7 @@ async def test_req_chan_decode_data_dict_entry_v2_bad_signature( async def test_req_chan_decode_data_dict_entry_v2_bad_key( - minion_opts, master_opts, pki_dir + pki_dir, minion_opts, master_opts ): mockloop = MagicMock() minion_opts.update( @@ -885,46 +900,42 @@ async def test_req_chan_decode_data_dict_entry_v2_bad_key( # Mock auth and message client. 
auth = client.auth - auth._crypticle = salt.crypt.Crypticle(minion_opts, AES_KEY) + auth._crypticle = salt.crypt.Crypticle(master_opts, AES_KEY) client.auth = MagicMock() client.auth.mpub = auth.mpub client.auth.authenticated = True client.auth.get_keys = auth.get_keys client.auth.crypticle.dumps = auth.crypticle.dumps client.auth.crypticle.loads = auth.crypticle.loads - real_transport = client.transport client.transport = MagicMock() - real_transport.close() @tornado.gen.coroutine def mocksend(msg, timeout=60, tries=3): client.transport.msg = msg load = client.auth.crypticle.loads(msg["load"]) ret = server._encrypt_private( - pillar_data, dictkey, target, nonce=load["nonce"], sign_messages=True + pillar_data, + dictkey, + target, + nonce=load["nonce"], + sign_messages=True, + encryption_algorithm=minion_opts["encryption_algorithm"], + signing_algorithm=minion_opts["signing_algorithm"], ) - key = client.auth.get_keys() - if HAS_M2: - aes = key.private_decrypt(ret["key"], RSA.pkcs1_oaep_padding) - else: - cipher = PKCS1_OAEP.new(key) - aes = cipher.decrypt(ret["key"]) + mkey = client.auth.get_keys() + aes = mkey.decrypt(ret["key"], minion_opts["encryption_algorithm"]) pcrypt = salt.crypt.Crypticle(client.opts, aes) signed_msg = pcrypt.loads(ret[dictkey]) # Now encrypt with a different key key = salt.crypt.Crypticle.generate_key_string() - pcrypt = salt.crypt.Crypticle(minion_opts, key) + pcrypt = salt.crypt.Crypticle(master_opts, key) pubfn = os.path.join(master_opts["pki_dir"], "minions", "minion") - pub = salt.crypt.get_rsa_pub_key(pubfn) + pub = salt.crypt.PublicKey(pubfn) ret[dictkey] = pcrypt.dumps(signed_msg) key = salt.utils.stringutils.to_bytes(key) - if HAS_M2: - ret["key"] = pub.public_encrypt(key, RSA.pkcs1_oaep_padding) - else: - cipher = PKCS1_OAEP.new(pub) - ret["key"] = cipher.encrypt(key) + ret["key"] = pub.encrypt(key, minion_opts["encryption_algorithm"]) raise tornado.gen.Return(ret) client.transport.send = mocksend @@ -941,7 +952,6 @@ async def 
test_req_chan_decode_data_dict_entry_v2_bad_key( "ver": "2", "cmd": "_pillar", } - try: with pytest.raises(salt.crypt.AuthenticationError) as excinfo: await client.crypted_transfer_decode_dictentry( @@ -1128,7 +1138,7 @@ async def test_req_chan_auth_v2(minion_opts, master_opts, pki_dir, io_loop): async def test_req_chan_auth_v2_with_master_signing( - minion_opts, master_opts, pki_dir, io_loop + pki_dir, io_loop, minion_opts, master_opts ): minion_opts.update( { @@ -1158,14 +1168,17 @@ async def test_req_chan_auth_v2_with_master_signing( ), "reload": salt.crypt.Crypticle.generate_key_string, } - master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) + master_opts = dict(master_opts, pki_dir=str(pki_dir.joinpath("master"))) master_opts["master_sign_pubkey"] = True master_opts["master_use_pubkey_signature"] = False - master_opts["signing_key_pass"] = True + master_opts["signing_key_pass"] = "" master_opts["master_sign_key_name"] = "master_sign" server = salt.channel.server.ReqServerChannel.factory(master_opts) server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) server.cache_cli = False + server.event = salt.utils.event.get_master_event( + master_opts, master_opts["sock_dir"], listen=False + ) server.master_key = salt.crypt.MasterKeys(server.opts) minion_opts["verify_master_pubkey_sign"] = True minion_opts["always_verify_signature"] = True @@ -1205,6 +1218,9 @@ async def test_req_chan_auth_v2_with_master_signing( server = salt.channel.server.ReqServerChannel.factory(master_opts) server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) server.cache_cli = False + server.event = salt.utils.event.get_master_event( + master_opts, master_opts["sock_dir"], listen=False + ) server.master_key = salt.crypt.MasterKeys(server.opts) signin_payload = client.auth.minion_sign_in_payload() @@ -1448,3 +1464,228 @@ async def test_req_chan_bad_payload_to_decode( server._decode_payload({}) with pytest.raises(salt.exceptions.SaltDeserializationError): 
server._decode_payload(12345) + + +def test_req_server_auth_garbage_sig_algo(pki_dir, minion_opts, master_opts, caplog): + minion_opts.update( + { + "master_uri": "tcp://127.0.0.1:4506", + "interface": "127.0.0.1", + "ret_port": 4506, + "ipv6": False, + "sock_dir": ".", + "pki_dir": str(pki_dir.joinpath("minion")), + "id": "minion", + "__role": "minion", + "keysize": 4096, + "max_minions": 0, + "auto_accept": False, + "open_mode": False, + "key_pass": None, + "master_sign_pubkey": False, + "publish_port": 4505, + "auth_mode": 1, + } + ) + SMaster.secrets["aes"] = { + "secret": multiprocessing.Array( + ctypes.c_char, + salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), + ), + "reload": salt.crypt.Crypticle.generate_key_string, + } + master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) + server = salt.channel.server.ReqServerChannel.factory(master_opts) + + server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) + server.cache_cli = False + server.event = salt.utils.event.get_master_event( + master_opts, master_opts["sock_dir"], listen=False + ) + server.master_key = salt.crypt.MasterKeys(server.opts) + pub = salt.crypt.PublicKey(str(pki_dir.joinpath("master", "master.pub"))) + token = pub.encrypt( + salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), + algorithm=minion_opts["encryption_algorithm"], + ) + nonce = uuid.uuid4().hex + + # We need to read the public key with fopen otherwise the newlines might + # not match on windows. 
+ with salt.utils.files.fopen( + str(pki_dir.joinpath("minion", "minion.pub")), "r" + ) as fp: + pub_key = salt.crypt.clean_key(fp.read()) + + load = { + "version": 2, + "cmd": "_auth", + "id": "minion", + "token": token, + "pub": pub_key, + "nonce": "asdfse", + "enc_algo": minion_opts["encryption_algorithm"], + "sig_algo": "IAMNOTANALGO", + } + with caplog.at_level(logging.INFO): + ret = server._auth(load, sign_messages=True) + assert ( + "Minion tried to authenticate with unsupported signing algorithm: IAMNOTANALGO" + in caplog.text + ) + assert "load" in ret + assert "ret" in ret["load"] + assert ret["load"]["ret"] == "bad sig algo" + + +@pytest.mark.skipif(not FIPS_TESTRUN, reason="Only run on fips enabled platforms") +def test_req_server_auth_unsupported_enc_algo( + pki_dir, minion_opts, master_opts, caplog +): + minion_opts.update( + { + "master_uri": "tcp://127.0.0.1:4506", + "interface": "127.0.0.1", + "ret_port": 4506, + "ipv6": False, + "sock_dir": ".", + "pki_dir": str(pki_dir.joinpath("minion")), + "id": "minion", + "__role": "minion", + "keysize": 4096, + "max_minions": 0, + "auto_accept": False, + "open_mode": False, + "key_pass": None, + "master_sign_pubkey": False, + "publish_port": 4505, + "auth_mode": 1, + } + ) + SMaster.secrets["aes"] = { + "secret": multiprocessing.Array( + ctypes.c_char, + salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), + ), + "reload": salt.crypt.Crypticle.generate_key_string, + } + master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) + server = salt.channel.server.ReqServerChannel.factory(master_opts) + + server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) + server.cache_cli = False + server.event = salt.utils.event.get_master_event( + master_opts, master_opts["sock_dir"], listen=False + ) + server.master_key = salt.crypt.MasterKeys(server.opts) + import tests.pytests.unit.crypt + + pub = tests.pytests.unit.crypt.LegacyPublicKey( + str(pki_dir.joinpath("master", "master.pub")) + 
) + token = pub.encrypt( + salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), + ) + nonce = uuid.uuid4().hex + + # We need to read the public key with fopen otherwise the newlines might + # not match on windows. + with salt.utils.files.fopen( + str(pki_dir.joinpath("minion", "minion.pub")), "r" + ) as fp: + pub_key = salt.crypt.clean_key(fp.read()) + + load = { + "version": 2, + "cmd": "_auth", + "id": "minion", + "token": token, + "pub": pub_key, + "nonce": "asdfse", + "enc_algo": "OAEP-SHA1", + "sig_algo": minion_opts["signing_algorithm"], + } + with caplog.at_level(logging.INFO): + ret = server._auth(load, sign_messages=True) + assert ( + "Minion minion tried to authenticate with unsupported encryption algorithm: OAEP-SHA1" + in caplog.text + ) + assert "load" in ret + assert "ret" in ret["load"] + assert ret["load"]["ret"] == "bad enc algo" + + +def test_req_server_auth_garbage_enc_algo(pki_dir, minion_opts, master_opts, caplog): + minion_opts.update( + { + "master_uri": "tcp://127.0.0.1:4506", + "interface": "127.0.0.1", + "ret_port": 4506, + "ipv6": False, + "sock_dir": ".", + "pki_dir": str(pki_dir.joinpath("minion")), + "id": "minion", + "__role": "minion", + "keysize": 4096, + "max_minions": 0, + "auto_accept": False, + "open_mode": False, + "key_pass": None, + "master_sign_pubkey": False, + "publish_port": 4505, + "auth_mode": 1, + } + ) + SMaster.secrets["aes"] = { + "secret": multiprocessing.Array( + ctypes.c_char, + salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), + ), + "reload": salt.crypt.Crypticle.generate_key_string, + } + master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) + server = salt.channel.server.ReqServerChannel.factory(master_opts) + + server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) + server.cache_cli = False + server.event = salt.utils.event.get_master_event( + master_opts, master_opts["sock_dir"], listen=False + ) + server.master_key = 
salt.crypt.MasterKeys(server.opts) + import tests.pytests.unit.crypt + + pub = tests.pytests.unit.crypt.LegacyPublicKey( + str(pki_dir.joinpath("master", "master.pub")) + ) + token = pub.encrypt( + salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), + ) + nonce = uuid.uuid4().hex + + # We need to read the public key with fopen otherwise the newlines might + # not match on windows. + with salt.utils.files.fopen( + str(pki_dir.joinpath("minion", "minion.pub")), "r" + ) as fp: + pub_key = salt.crypt.clean_key(fp.read()) + + load = { + "version": 2, + "cmd": "_auth", + "id": "minion", + "token": token, + "pub": pub_key, + "nonce": "asdfse", + "enc_algo": "IAMNOTAENCALGO", + "sig_algo": minion_opts["signing_algorithm"], + } + with caplog.at_level(logging.INFO): + ret = server._auth(load, sign_messages=True) + assert ( + "Minion minion tried to authenticate with unsupported encryption algorithm: IAMNOTAENCALGO" + in caplog.text + ) + assert "load" in ret + assert "ret" in ret["load"] + assert ret["load"]["ret"] == "bad enc algo" diff --git a/tests/pytests/unit/transport/test_zeromq.py b/tests/pytests/unit/transport/test_zeromq.py index cb224034124..18a98f439f3 100644 --- a/tests/pytests/unit/transport/test_zeromq.py +++ b/tests/pytests/unit/transport/test_zeromq.py @@ -2,10 +2,8 @@ import ctypes import hashlib import logging import multiprocessing -import os import threading import time -import uuid import msgpack import pytest @@ -260,98 +258,6 @@ def pki_dir(tmp_path): yield _pki_dir -def test_master_uri(): - """ - test _get_master_uri method - """ - - m_ip = "127.0.0.1" - m_port = 4505 - s_ip = "111.1.0.1" - s_port = 4058 - - m_ip6 = "1234:5678::9abc" - s_ip6 = "1234:5678::1:9abc" - - with patch("salt.transport.zeromq.LIBZMQ_VERSION_INFO", (4, 1, 6)), patch( - "salt.transport.zeromq.ZMQ_VERSION_INFO", (16, 0, 1) - ): - # pass in both source_ip and source_port - assert ( - salt.transport.zeromq._get_master_uri( - master_ip=m_ip, 
master_port=m_port, source_ip=s_ip, source_port=s_port - ) - == f"tcp://{s_ip}:{s_port};{m_ip}:{m_port}" - ) - - assert ( - salt.transport.zeromq._get_master_uri( - master_ip=m_ip6, master_port=m_port, source_ip=s_ip6, source_port=s_port - ) - == f"tcp://[{s_ip6}]:{s_port};[{m_ip6}]:{m_port}" - ) - - # source ip and source_port empty - assert ( - salt.transport.zeromq._get_master_uri(master_ip=m_ip, master_port=m_port) - == f"tcp://{m_ip}:{m_port}" - ) - - assert ( - salt.transport.zeromq._get_master_uri(master_ip=m_ip6, master_port=m_port) - == f"tcp://[{m_ip6}]:{m_port}" - ) - - # pass in only source_ip - assert ( - salt.transport.zeromq._get_master_uri( - master_ip=m_ip, master_port=m_port, source_ip=s_ip - ) - == f"tcp://{s_ip}:0;{m_ip}:{m_port}" - ) - - assert ( - salt.transport.zeromq._get_master_uri( - master_ip=m_ip6, master_port=m_port, source_ip=s_ip6 - ) - == f"tcp://[{s_ip6}]:0;[{m_ip6}]:{m_port}" - ) - - # pass in only source_port - assert ( - salt.transport.zeromq._get_master_uri( - master_ip=m_ip, master_port=m_port, source_port=s_port - ) - == f"tcp://0.0.0.0:{s_port};{m_ip}:{m_port}" - ) - - -def test_clear_req_channel_master_uri_override(temp_salt_minion, temp_salt_master): - """ - ensure master_uri kwarg is respected - """ - opts = temp_salt_minion.config.copy() - # minion_config should be 127.0.0.1, we want a different uri that still connects - opts.update( - { - "id": "root", - "transport": "zeromq", - "auth_tries": 1, - "auth_timeout": 5, - "master_ip": "127.0.0.1", - "master_port": temp_salt_master.config["ret_port"], - "master_uri": "tcp://127.0.0.1:{}".format( - temp_salt_master.config["ret_port"] - ), - } - ) - master_uri = "tcp://{master_ip}:{master_port}".format( - master_ip="localhost", master_port=opts["master_port"] - ) - with salt.channel.client.ReqChannel.factory(opts, master_uri=master_uri) as channel: - assert "127.0.0.1" in channel.transport.message_client.addr - - def run_loop_in_thread(loop, evt): """ Run the provided loop 
until an event is set @@ -449,38 +355,6 @@ class MockSaltMinionMaster: raise tornado.gen.Return((payload, {"fun": "send_clear"})) -@pytest.mark.parametrize("message", ["", [], ()]) -def test_badload(temp_salt_minion, temp_salt_master, message): - """ - Test a variety of bad requests, make sure that we get some sort of error - """ - with MockSaltMinionMaster(temp_salt_minion, temp_salt_master) as minion_master: - ret = minion_master.channel.send(message, timeout=5, tries=1) - assert ret == "payload and load must be a dict" - - -def test_payload_handling_exception(temp_salt_minion, temp_salt_master): - """ - test of getting exception on payload handling - """ - with MockSaltMinionMaster(temp_salt_minion, temp_salt_master) as minion_master: - with patch.object(minion_master.mock, "_handle_payload_hook") as _mock: - _mock.side_effect = Exception() - ret = minion_master.channel.send({}, timeout=5, tries=1) - assert ret == "Some exception handling minion payload" - - -def test_serverside_exception(temp_salt_minion, temp_salt_master): - """ - test of getting server side exception on payload handling - """ - with MockSaltMinionMaster(temp_salt_minion, temp_salt_master) as minion_master: - with patch.object(minion_master.mock, "_handle_payload_hook") as _mock: - _mock.side_effect = tornado.gen.Return(({}, {"fun": "madeup-fun"})) - ret = minion_master.channel.send({}, timeout=5, tries=1) - assert ret == "Server-side exception handling payload" - - def test_zeromq_async_pub_channel_publish_port(temp_salt_master): """ test when connecting that we use the publish_port set in opts when its not 4506 @@ -600,934 +474,6 @@ def test_zeromq_async_pub_channel_filtering_decode_message( assert res.result()["enc"] == "aes" -def test_req_server_chan_encrypt_v2( - pki_dir, encryption_algorithm, signing_algorithm, master_opts -): - loop = tornado.ioloop.IOLoop.current() - master_opts.update( - { - "worker_threads": 1, - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - 
"ret_port": 4506, - "ipv6": False, - "zmq_monitor": False, - "mworker_queue_niceness": False, - "sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("master")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - } - ) - server = salt.channel.server.ReqServerChannel.factory(master_opts) - dictkey = "pillar" - nonce = "abcdefg" - pillar_data = {"pillar1": "meh"} - ret = server._encrypt_private( - pillar_data, - dictkey, - "minion", - nonce, - encryption_algorithm=encryption_algorithm, - signing_algorithm=signing_algorithm, - ) - assert "key" in ret - assert dictkey in ret - - key = salt.crypt.PrivateKey(str(pki_dir.joinpath("minion", "minion.pem"))) - aes = key.decrypt(ret["key"], encryption_algorithm) - pcrypt = salt.crypt.Crypticle(master_opts, aes) - signed_msg = pcrypt.loads(ret[dictkey]) - - assert "sig" in signed_msg - assert "data" in signed_msg - data = salt.payload.loads(signed_msg["data"]) - assert "key" in data - assert data["key"] == ret["key"] - assert "key" in data - assert data["nonce"] == nonce - assert "pillar" in data - assert data["pillar"] == pillar_data - - -def test_req_server_chan_encrypt_v1(pki_dir, encryption_algorithm, master_opts): - loop = tornado.ioloop.IOLoop.current() - master_opts.update( - { - "worker_threads": 1, - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "zmq_monitor": False, - "mworker_queue_niceness": False, - "sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("master")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - } - ) - server = salt.channel.server.ReqServerChannel.factory(master_opts) - dictkey = "pillar" - nonce = "abcdefg" - pillar_data = {"pillar1": "meh"} - ret = server._encrypt_private( - pillar_data, - dictkey, - "minion", - sign_messages=False, - encryption_algorithm=encryption_algorithm, - ) - - assert "key" in ret - assert dictkey in ret - - key = salt.crypt.PrivateKey(str(pki_dir.joinpath("minion", "minion.pem"))) - aes = 
key.decrypt(ret["key"], encryption_algorithm) - pcrypt = salt.crypt.Crypticle(master_opts, aes) - data = pcrypt.loads(ret[dictkey]) - assert data == pillar_data - - -def test_req_chan_decode_data_dict_entry_v1( - pki_dir, encryption_algorithm, minion_opts, master_opts -): - mockloop = MagicMock() - minion_opts.update( - { - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("minion")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - "acceptance_wait_time": 3, - "acceptance_wait_time_max": 3, - } - ) - master_opts = dict(master_opts, pki_dir=str(pki_dir.joinpath("master"))) - server = salt.channel.server.ReqServerChannel.factory(master_opts) - client = salt.channel.client.ReqChannel.factory(minion_opts, io_loop=mockloop) - dictkey = "pillar" - target = "minion" - pillar_data = {"pillar1": "meh"} - ret = server._encrypt_private( - pillar_data, - dictkey, - target, - sign_messages=False, - encryption_algorithm=encryption_algorithm, - ) - key = client.auth.get_keys() - aes = key.decrypt(ret["key"], encryption_algorithm) - pcrypt = salt.crypt.Crypticle(client.opts, aes) - ret_pillar_data = pcrypt.loads(ret[dictkey]) - assert ret_pillar_data == pillar_data - - -async def test_req_chan_decode_data_dict_entry_v2(minion_opts, master_opts, pki_dir): - mockloop = MagicMock() - minion_opts.update( - { - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("minion")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - "acceptance_wait_time": 3, - "acceptance_wait_time_max": 3, - } - ) - master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) - server = salt.channel.server.ReqServerChannel.factory(master_opts) - client = salt.channel.client.AsyncReqChannel.factory(minion_opts, io_loop=mockloop) - - dictkey = "pillar" - target = "minion" - 
pillar_data = {"pillar1": "meh"} - - # Mock auth and message client. - auth = client.auth - auth._crypticle = salt.crypt.Crypticle(minion_opts, AES_KEY) - client.auth = MagicMock() - client.auth.mpub = auth.mpub - client.auth.authenticated = True - client.auth.get_keys = auth.get_keys - client.auth.crypticle.dumps = auth.crypticle.dumps - client.auth.crypticle.loads = auth.crypticle.loads - client.transport = MagicMock() - - print(minion_opts["encryption_algorithm"]) - - @tornado.gen.coroutine - def mocksend(msg, timeout=60, tries=3): - client.transport.msg = msg - load = client.auth.crypticle.loads(msg["load"]) - ret = server._encrypt_private( - pillar_data, - dictkey, - target, - nonce=load["nonce"], - sign_messages=True, - encryption_algorithm=minion_opts["encryption_algorithm"], - signing_algorithm=minion_opts["signing_algorithm"], - ) - raise tornado.gen.Return(ret) - - client.transport.send = mocksend - - # Note the 'ver' value in 'load' does not represent the the 'version' sent - # in the top level of the transport's message. 
- load = { - "id": target, - "grains": {}, - "saltenv": "base", - "pillarenv": "base", - "pillar_override": True, - "extra_minion_data": {}, - "ver": "2", - "cmd": "_pillar", - } - ret = await client.crypted_transfer_decode_dictentry( - load, - dictkey="pillar", - ) - assert "version" in client.transport.msg - assert client.transport.msg["version"] == 2 - assert ret == {"pillar1": "meh"} - - -async def test_req_chan_decode_data_dict_entry_v2_bad_nonce( - pki_dir, minion_opts, master_opts -): - mockloop = MagicMock() - minion_opts.update( - { - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("minion")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - "acceptance_wait_time": 3, - "acceptance_wait_time_max": 3, - } - ) - master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) - server = salt.channel.server.ReqServerChannel.factory(master_opts) - client = salt.channel.client.AsyncReqChannel.factory(minion_opts, io_loop=mockloop) - - dictkey = "pillar" - badnonce = "abcdefg" - target = "minion" - pillar_data = {"pillar1": "meh"} - - # Mock auth and message client. 
- auth = client.auth - auth._crypticle = salt.crypt.Crypticle(minion_opts, AES_KEY) - client.auth = MagicMock() - client.auth.mpub = auth.mpub - client.auth.authenticated = True - client.auth.get_keys = auth.get_keys - client.auth.crypticle.dumps = auth.crypticle.dumps - client.auth.crypticle.loads = auth.crypticle.loads - client.transport = MagicMock() - ret = server._encrypt_private( - pillar_data, - dictkey, - target, - nonce=badnonce, - sign_messages=True, - encryption_algorithm=minion_opts["encryption_algorithm"], - signing_algorithm=minion_opts["signing_algorithm"], - ) - - @tornado.gen.coroutine - def mocksend(msg, timeout=60, tries=3): - client.transport.msg = msg - raise tornado.gen.Return(ret) - - client.transport.send = mocksend - - # Note the 'ver' value in 'load' does not represent the the 'version' sent - # in the top level of the transport's message. - load = { - "id": target, - "grains": {}, - "saltenv": "base", - "pillarenv": "base", - "pillar_override": True, - "extra_minion_data": {}, - "ver": "2", - "cmd": "_pillar", - } - - with pytest.raises(salt.crypt.AuthenticationError) as excinfo: - ret = await client.crypted_transfer_decode_dictentry( - load, - dictkey="pillar", - ) - assert "Pillar nonce verification failed." 
== excinfo.value.message - - -async def test_req_chan_decode_data_dict_entry_v2_bad_signature( - pki_dir, minion_opts, master_opts -): - mockloop = MagicMock() - minion_opts.update( - { - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("minion")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - "acceptance_wait_time": 3, - "acceptance_wait_time_max": 3, - } - ) - master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) - server = salt.channel.server.ReqServerChannel.factory(master_opts) - client = salt.channel.client.AsyncReqChannel.factory(minion_opts, io_loop=mockloop) - - dictkey = "pillar" - badnonce = "abcdefg" - target = "minion" - pillar_data = {"pillar1": "meh"} - - # Mock auth and message client. - auth = client.auth - auth._crypticle = salt.crypt.Crypticle(minion_opts, AES_KEY) - client.auth = MagicMock() - client.auth.mpub = auth.mpub - client.auth.authenticated = True - client.auth.get_keys = auth.get_keys - client.auth.crypticle.dumps = auth.crypticle.dumps - client.auth.crypticle.loads = auth.crypticle.loads - client.transport = MagicMock() - - @tornado.gen.coroutine - def mocksend(msg, timeout=60, tries=3): - client.transport.msg = msg - load = client.auth.crypticle.loads(msg["load"]) - ret = server._encrypt_private( - pillar_data, - dictkey, - target, - nonce=load["nonce"], - sign_messages=True, - encryption_algorithm=minion_opts["encryption_algorithm"], - signing_algorithm=minion_opts["signing_algorithm"], - ) - - key = client.auth.get_keys() - aes = key.decrypt(ret["key"], minion_opts["encryption_algorithm"]) - pcrypt = salt.crypt.Crypticle(client.opts, aes) - signed_msg = pcrypt.loads(ret[dictkey]) - # Changing the pillar data will cause the signature verification to - # fail. 
- data = salt.payload.loads(signed_msg["data"]) - data["pillar"] = {"pillar1": "bar"} - signed_msg["data"] = salt.payload.dumps(data) - ret[dictkey] = pcrypt.dumps(signed_msg) - raise tornado.gen.Return(ret) - - client.transport.send = mocksend - - # Note the 'ver' value in 'load' does not represent the the 'version' sent - # in the top level of the transport's message. - load = { - "id": target, - "grains": {}, - "saltenv": "base", - "pillarenv": "base", - "pillar_override": True, - "extra_minion_data": {}, - "ver": "2", - "cmd": "_pillar", - } - - with pytest.raises(salt.crypt.AuthenticationError) as excinfo: - ret = await client.crypted_transfer_decode_dictentry( - load, - dictkey="pillar", - ) - assert "Pillar payload signature failed to validate." == excinfo.value.message - - -async def test_req_chan_decode_data_dict_entry_v2_bad_key( - pki_dir, minion_opts, master_opts -): - mockloop = MagicMock() - minion_opts.update( - { - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("minion")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - "acceptance_wait_time": 3, - "acceptance_wait_time_max": 3, - } - ) - master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) - server = salt.channel.server.ReqServerChannel.factory(master_opts) - client = salt.channel.client.AsyncReqChannel.factory(minion_opts, io_loop=mockloop) - - dictkey = "pillar" - badnonce = "abcdefg" - target = "minion" - pillar_data = {"pillar1": "meh"} - - # Mock auth and message client. 
- auth = client.auth - auth._crypticle = salt.crypt.Crypticle(master_opts, AES_KEY) - client.auth = MagicMock() - client.auth.mpub = auth.mpub - client.auth.authenticated = True - client.auth.get_keys = auth.get_keys - client.auth.crypticle.dumps = auth.crypticle.dumps - client.auth.crypticle.loads = auth.crypticle.loads - client.transport = MagicMock() - - @tornado.gen.coroutine - def mocksend(msg, timeout=60, tries=3): - client.transport.msg = msg - load = client.auth.crypticle.loads(msg["load"]) - ret = server._encrypt_private( - pillar_data, - dictkey, - target, - nonce=load["nonce"], - sign_messages=True, - encryption_algorithm=minion_opts["encryption_algorithm"], - signing_algorithm=minion_opts["signing_algorithm"], - ) - - mkey = client.auth.get_keys() - aes = mkey.decrypt(ret["key"], minion_opts["encryption_algorithm"]) - pcrypt = salt.crypt.Crypticle(client.opts, aes) - signed_msg = pcrypt.loads(ret[dictkey]) - - # Now encrypt with a different key - key = salt.crypt.Crypticle.generate_key_string() - pcrypt = salt.crypt.Crypticle(master_opts, key) - pubfn = os.path.join(master_opts["pki_dir"], "minions", "minion") - pub = salt.crypt.PublicKey(pubfn) - ret[dictkey] = pcrypt.dumps(signed_msg) - key = salt.utils.stringutils.to_bytes(key) - ret["key"] = pub.encrypt(key, minion_opts["encryption_algorithm"]) - raise tornado.gen.Return(ret) - - client.transport.send = mocksend - - # Note the 'ver' value in 'load' does not represent the the 'version' sent - # in the top level of the transport's message. - load = { - "id": target, - "grains": {}, - "saltenv": "base", - "pillarenv": "base", - "pillar_override": True, - "extra_minion_data": {}, - "ver": "2", - "cmd": "_pillar", - } - try: - with pytest.raises(salt.crypt.AuthenticationError) as excinfo: - await client.crypted_transfer_decode_dictentry( - load, - dictkey="pillar", - ) - assert "Key verification failed." 
== excinfo.value.message - finally: - client.close() - server.close() - - -async def test_req_serv_auth_v1(pki_dir, minion_opts, master_opts): - minion_opts.update( - { - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("minion")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - "max_minions": 0, - "auto_accept": False, - "open_mode": False, - "key_pass": None, - "master_sign_pubkey": False, - "publish_port": 4505, - "auth_mode": 1, - } - ) - SMaster.secrets["aes"] = { - "secret": multiprocessing.Array( - ctypes.c_char, - salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), - ), - "reload": salt.crypt.Crypticle.generate_key_string, - } - master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) - server = salt.channel.server.ReqServerChannel.factory(master_opts) - - server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) - server.cache_cli = False - server.event = salt.utils.event.get_master_event( - master_opts, master_opts["sock_dir"], listen=False - ) - server.master_key = salt.crypt.MasterKeys(server.opts) - - pub = salt.crypt.get_rsa_pub_key(str(pki_dir.joinpath("minion", "minion.pub"))) - token = salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()) - nonce = uuid.uuid4().hex - - # We need to read the public key with fopen otherwise the newlines might - # not match on windows. 
- with salt.utils.files.fopen( - str(pki_dir.joinpath("minion", "minion.pub")), "r" - ) as fp: - pub_key = salt.crypt.clean_key(fp.read()) - - load = { - "cmd": "_auth", - "id": "minion", - "token": token, - "pub": pub_key, - "enc_algo": minion_opts["encryption_algorithm"], - "sig_algo": minion_opts["signing_algorithm"], - } - ret = server._auth(load, sign_messages=False) - assert "load" not in ret - - -async def test_req_serv_auth_v2(pki_dir, minion_opts, master_opts): - minion_opts.update( - { - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("minion")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - "max_minions": 0, - "auto_accept": False, - "open_mode": False, - "key_pass": None, - "master_sign_pubkey": False, - "publish_port": 4505, - "auth_mode": 1, - } - ) - SMaster.secrets["aes"] = { - "secret": multiprocessing.Array( - ctypes.c_char, - salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), - ), - "reload": salt.crypt.Crypticle.generate_key_string, - } - master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) - server = salt.channel.server.ReqServerChannel.factory(master_opts) - server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) - server.cache_cli = False - server.event = salt.utils.event.get_master_event( - master_opts, master_opts["sock_dir"], listen=False - ) - server.master_key = salt.crypt.MasterKeys(server.opts) - - pub = salt.crypt.get_rsa_pub_key(str(pki_dir.joinpath("minion", "minion.pub"))) - token = salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()) - nonce = uuid.uuid4().hex - - # We need to read the public key with fopen otherwise the newlines might - # not match on windows. 
- with salt.utils.files.fopen( - str(pki_dir.joinpath("minion", "minion.pub")), "r" - ) as fp: - pub_key = fp.read() - - load = { - "cmd": "_auth", - "id": "minion", - "nonce": nonce, - "token": token, - "pub": pub_key, - "enc_algo": minion_opts["encryption_algorithm"], - "sig_algo": minion_opts["signing_algorithm"], - } - ret = server._auth(load, sign_messages=True) - assert "sig" in ret - assert "load" in ret - - -async def test_req_chan_auth_v2(pki_dir, io_loop, minion_opts, master_opts): - minion_opts.update( - { - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("minion")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - "max_minions": 0, - "auto_accept": False, - "open_mode": False, - "key_pass": None, - "publish_port": 4505, - "auth_mode": 1, - "acceptance_wait_time": 3, - "acceptance_wait_time_max": 3, - } - ) - SMaster.secrets["aes"] = { - "secret": multiprocessing.Array( - ctypes.c_char, - salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), - ), - "reload": salt.crypt.Crypticle.generate_key_string, - } - master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) - master_opts["master_sign_pubkey"] = False - server = salt.channel.server.ReqServerChannel.factory(master_opts) - server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) - server.cache_cli = False - server.event = salt.utils.event.get_master_event( - master_opts, master_opts["sock_dir"], listen=False - ) - server.master_key = salt.crypt.MasterKeys(server.opts) - minion_opts["verify_master_pubkey_sign"] = False - minion_opts["always_verify_signature"] = False - client = salt.channel.client.AsyncReqChannel.factory(minion_opts, io_loop=io_loop) - signin_payload = client.auth.minion_sign_in_payload() - pload = client._package_load(signin_payload) - assert "version" in pload - assert pload["version"] == 2 - - ret = server._auth(pload["load"], 
sign_messages=True) - assert "sig" in ret - ret = client.auth.handle_signin_response(signin_payload, ret) - assert "aes" in ret - assert "master_uri" in ret - assert "publish_port" in ret - - -async def test_req_chan_auth_v2_with_master_signing( - pki_dir, io_loop, minion_opts, master_opts -): - minion_opts.update( - { - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("minion")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - "max_minions": 0, - "auto_accept": False, - "open_mode": False, - "key_pass": None, - "publish_port": 4505, - "auth_mode": 1, - "acceptance_wait_time": 3, - "acceptance_wait_time_max": 3, - } - ) - SMaster.secrets["aes"] = { - "secret": multiprocessing.Array( - ctypes.c_char, - salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), - ), - "reload": salt.crypt.Crypticle.generate_key_string, - } - master_opts = dict(master_opts, pki_dir=str(pki_dir.joinpath("master"))) - master_opts["master_sign_pubkey"] = True - master_opts["master_use_pubkey_signature"] = False - master_opts["signing_key_pass"] = "" - master_opts["master_sign_key_name"] = "master_sign" - server = salt.channel.server.ReqServerChannel.factory(master_opts) - server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) - server.cache_cli = False - server.event = salt.utils.event.get_master_event( - master_opts, master_opts["sock_dir"], listen=False - ) - server.master_key = salt.crypt.MasterKeys(server.opts) - minion_opts["verify_master_pubkey_sign"] = True - minion_opts["always_verify_signature"] = True - minion_opts["master_sign_key_name"] = "master_sign" - minion_opts["master"] = "master" - - assert ( - pki_dir.joinpath("minion", "minion_master.pub").read_text() - == pki_dir.joinpath("master", "master.pub").read_text() - ) - - client = salt.channel.client.AsyncReqChannel.factory(minion_opts, io_loop=io_loop) - signin_payload = 
client.auth.minion_sign_in_payload() - pload = client._package_load(signin_payload) - assert "version" in pload - assert pload["version"] == 2 - - server_reply = server._auth(pload["load"], sign_messages=True) - # With version 2 we always get a clear signed response - assert "enc" in server_reply - assert server_reply["enc"] == "clear" - assert "sig" in server_reply - assert "load" in server_reply - ret = client.auth.handle_signin_response(signin_payload, server_reply) - assert "aes" in ret - assert "master_uri" in ret - assert "publish_port" in ret - - # Now create a new master key pair and try auth with it. - mapriv = pki_dir.joinpath("master", "master.pem") - mapriv.unlink() - mapriv.write_text(MASTER2_PRIV_KEY.strip()) - mapub = pki_dir.joinpath("master", "master.pub") - mapub.unlink() - mapub.write_text(MASTER2_PUB_KEY.strip()) - - server = salt.channel.server.ReqServerChannel.factory(master_opts) - server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) - server.cache_cli = False - server.event = salt.utils.event.get_master_event( - master_opts, master_opts["sock_dir"], listen=False - ) - server.master_key = salt.crypt.MasterKeys(server.opts) - - signin_payload = client.auth.minion_sign_in_payload() - pload = client._package_load(signin_payload) - server_reply = server._auth(pload["load"], sign_messages=True) - ret = client.auth.handle_signin_response(signin_payload, server_reply) - - assert "aes" in ret - assert "master_uri" in ret - assert "publish_port" in ret - - assert ( - pki_dir.joinpath("minion", "minion_master.pub").read_text() - == pki_dir.joinpath("master", "master.pub").read_text() - ) - - -async def test_req_chan_auth_v2_new_minion_with_master_pub( - pki_dir, io_loop, minion_opts, master_opts -): - - pki_dir.joinpath("master", "minions", "minion").unlink() - minion_opts.update( - { - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "sock_dir": ".", - "pki_dir": 
str(pki_dir.joinpath("minion")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - "max_minions": 0, - "auto_accept": False, - "open_mode": False, - "key_pass": None, - "publish_port": 4505, - "auth_mode": 1, - "acceptance_wait_time": 3, - "acceptance_wait_time_max": 3, - } - ) - SMaster.secrets["aes"] = { - "secret": multiprocessing.Array( - ctypes.c_char, - salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), - ), - "reload": salt.crypt.Crypticle.generate_key_string, - } - master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) - master_opts["master_sign_pubkey"] = False - server = salt.channel.server.ReqServerChannel.factory(master_opts) - server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) - server.cache_cli = False - server.event = salt.utils.event.get_master_event( - master_opts, master_opts["sock_dir"], listen=False - ) - server.master_key = salt.crypt.MasterKeys(server.opts) - minion_opts["verify_master_pubkey_sign"] = False - minion_opts["always_verify_signature"] = False - client = salt.channel.client.AsyncReqChannel.factory(minion_opts, io_loop=io_loop) - signin_payload = client.auth.minion_sign_in_payload() - pload = client._package_load(signin_payload) - assert "version" in pload - assert pload["version"] == 2 - - ret = server._auth(pload["load"], sign_messages=True) - assert "sig" in ret - ret = client.auth.handle_signin_response(signin_payload, ret) - assert ret == "retry" - - -async def test_req_chan_auth_v2_new_minion_with_master_pub_bad_sig( - pki_dir, io_loop, minion_opts, master_opts -): - - pki_dir.joinpath("master", "minions", "minion").unlink() - - # Give the master a different key than the minion has. 
- mapriv = pki_dir.joinpath("master", "master.pem") - mapriv.unlink() - mapriv.write_text(MASTER2_PRIV_KEY.strip()) - mapub = pki_dir.joinpath("master", "master.pub") - mapub.unlink() - mapub.write_text(MASTER2_PUB_KEY.strip()) - - minion_opts.update( - { - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("minion")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - "max_minions": 0, - "auto_accept": False, - "open_mode": False, - "key_pass": None, - "publish_port": 4505, - "auth_mode": 1, - "acceptance_wait_time": 3, - "acceptance_wait_time_max": 3, - } - ) - SMaster.secrets["aes"] = { - "secret": multiprocessing.Array( - ctypes.c_char, - salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), - ), - "reload": salt.crypt.Crypticle.generate_key_string, - } - master_opts.update( - pki_dir=str(pki_dir.joinpath("master")), master_sign_pubkey=False - ) - server = salt.channel.server.ReqServerChannel.factory(master_opts) - server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) - server.cache_cli = False - server.event = salt.utils.event.get_master_event( - master_opts, master_opts["sock_dir"], listen=False - ) - server.master_key = salt.crypt.MasterKeys(server.opts) - minion_opts["verify_master_pubkey_sign"] = False - minion_opts["always_verify_signature"] = False - client = salt.channel.client.AsyncReqChannel.factory(minion_opts, io_loop=io_loop) - signin_payload = client.auth.minion_sign_in_payload() - pload = client._package_load(signin_payload) - assert "version" in pload - assert pload["version"] == 2 - - ret = server._auth(pload["load"], sign_messages=True) - assert "sig" in ret - with pytest.raises(salt.crypt.SaltClientError, match="Invalid signature"): - ret = client.auth.handle_signin_response(signin_payload, ret) - - -async def test_req_chan_auth_v2_new_minion_without_master_pub( - minion_opts, - master_opts, - 
pki_dir, - io_loop, -): - - pki_dir.joinpath("master", "minions", "minion").unlink() - pki_dir.joinpath("minion", "minion_master.pub").unlink() - minion_opts.update( - { - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("minion")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - "max_minions": 0, - "auto_accept": False, - "open_mode": False, - "key_pass": None, - "publish_port": 4505, - "auth_mode": 1, - "acceptance_wait_time": 3, - "acceptance_wait_time_max": 3, - } - ) - SMaster.secrets["aes"] = { - "secret": multiprocessing.Array( - ctypes.c_char, - salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), - ), - "reload": salt.crypt.Crypticle.generate_key_string, - } - master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) - master_opts["master_sign_pubkey"] = False - server = salt.channel.server.ReqServerChannel.factory(master_opts) - server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) - server.cache_cli = False - server.event = salt.utils.event.get_master_event( - master_opts, master_opts["sock_dir"], listen=False - ) - server.master_key = salt.crypt.MasterKeys(server.opts) - minion_opts["verify_master_pubkey_sign"] = False - minion_opts["always_verify_signature"] = False - client = salt.channel.client.AsyncReqChannel.factory(minion_opts, io_loop=io_loop) - signin_payload = client.auth.minion_sign_in_payload() - pload = client._package_load(signin_payload) - try: - assert "version" in pload - assert pload["version"] == 2 - - ret = server._auth(pload["load"], sign_messages=True) - assert "sig" in ret - ret = client.auth.handle_signin_response(signin_payload, ret) - assert ret == "retry" - finally: - client.close() - server.close() - - async def test_req_server_garbage_request(io_loop): """ Validate invalid msgpack messages will not raise exceptions in the @@ -1639,303 +585,3 @@ async def 
test_unclosed_publish_client(minion_opts, io_loop): client.__del__() # pylint: disable=unnecessary-dunder-call finally: client.close() - - -@pytest.mark.skipif(not FIPS_TESTRUN, reason="Only run on fips enabled platforms") -def test_req_server_auth_unsupported_sig_algo( - pki_dir, minion_opts, master_opts, caplog -): - minion_opts.update( - { - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("minion")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - "max_minions": 0, - "auto_accept": False, - "open_mode": False, - "key_pass": None, - "master_sign_pubkey": False, - "publish_port": 4505, - "auth_mode": 1, - } - ) - SMaster.secrets["aes"] = { - "secret": multiprocessing.Array( - ctypes.c_char, - salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), - ), - "reload": salt.crypt.Crypticle.generate_key_string, - } - master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) - server = salt.channel.server.ReqServerChannel.factory(master_opts) - - server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) - server.cache_cli = False - server.event = salt.utils.event.get_master_event( - master_opts, master_opts["sock_dir"], listen=False - ) - server.master_key = salt.crypt.MasterKeys(server.opts) - pub = salt.crypt.PublicKey(str(pki_dir.joinpath("master", "master.pub"))) - token = pub.encrypt( - salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), - algorithm=minion_opts["encryption_algorithm"], - ) - nonce = uuid.uuid4().hex - - # We need to read the public key with fopen otherwise the newlines might - # not match on windows. 
- with salt.utils.files.fopen( - str(pki_dir.joinpath("minion", "minion.pub")), "r" - ) as fp: - pub_key = salt.crypt.clean_key(fp.read()) - - load = { - "version": 2, - "cmd": "_auth", - "id": "minion", - "token": token, - "pub": pub_key, - "nonce": "asdfse", - "enc_algo": minion_opts["encryption_algorithm"], - "sig_algo": salt.crypt.PKCS1v15_SHA1, - } - with caplog.at_level(logging.INFO): - ret = server._auth(load, sign_messages=True) - assert ( - "Minion tried to authenticate with unsupported signing algorithm: PKCS1v15-SHA1" - in caplog.text - ) - assert "load" in ret - assert "ret" in ret["load"] - assert ret["load"]["ret"] == "bad sig algo" - - -def test_req_server_auth_garbage_sig_algo(pki_dir, minion_opts, master_opts, caplog): - minion_opts.update( - { - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("minion")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - "max_minions": 0, - "auto_accept": False, - "open_mode": False, - "key_pass": None, - "master_sign_pubkey": False, - "publish_port": 4505, - "auth_mode": 1, - } - ) - SMaster.secrets["aes"] = { - "secret": multiprocessing.Array( - ctypes.c_char, - salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), - ), - "reload": salt.crypt.Crypticle.generate_key_string, - } - master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) - server = salt.channel.server.ReqServerChannel.factory(master_opts) - - server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) - server.cache_cli = False - server.event = salt.utils.event.get_master_event( - master_opts, master_opts["sock_dir"], listen=False - ) - server.master_key = salt.crypt.MasterKeys(server.opts) - pub = salt.crypt.PublicKey(str(pki_dir.joinpath("master", "master.pub"))) - token = pub.encrypt( - salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), - 
algorithm=minion_opts["encryption_algorithm"], - ) - nonce = uuid.uuid4().hex - - # We need to read the public key with fopen otherwise the newlines might - # not match on windows. - with salt.utils.files.fopen( - str(pki_dir.joinpath("minion", "minion.pub")), "r" - ) as fp: - pub_key = salt.crypt.clean_key(fp.read()) - - load = { - "version": 2, - "cmd": "_auth", - "id": "minion", - "token": token, - "pub": pub_key, - "nonce": "asdfse", - "enc_algo": minion_opts["encryption_algorithm"], - "sig_algo": "IAMNOTANALGO", - } - with caplog.at_level(logging.INFO): - ret = server._auth(load, sign_messages=True) - assert ( - "Minion tried to authenticate with unsupported signing algorithm: IAMNOTANALGO" - in caplog.text - ) - assert "load" in ret - assert "ret" in ret["load"] - assert ret["load"]["ret"] == "bad sig algo" - - -@pytest.mark.skipif(not FIPS_TESTRUN, reason="Only run on fips enabled platforms") -def test_req_server_auth_unsupported_enc_algo( - pki_dir, minion_opts, master_opts, caplog -): - minion_opts.update( - { - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("minion")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - "max_minions": 0, - "auto_accept": False, - "open_mode": False, - "key_pass": None, - "master_sign_pubkey": False, - "publish_port": 4505, - "auth_mode": 1, - } - ) - SMaster.secrets["aes"] = { - "secret": multiprocessing.Array( - ctypes.c_char, - salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), - ), - "reload": salt.crypt.Crypticle.generate_key_string, - } - master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) - server = salt.channel.server.ReqServerChannel.factory(master_opts) - - server.auto_key = salt.daemons.masterapi.AutoKey(server.opts) - server.cache_cli = False - server.event = salt.utils.event.get_master_event( - master_opts, master_opts["sock_dir"], listen=False - ) - 
server.master_key = salt.crypt.MasterKeys(server.opts) - import tests.pytests.unit.crypt - - pub = tests.pytests.unit.crypt.LegacyPublicKey( - str(pki_dir.joinpath("master", "master.pub")) - ) - token = pub.encrypt( - salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), - ) - nonce = uuid.uuid4().hex - - # We need to read the public key with fopen otherwise the newlines might - # not match on windows. - with salt.utils.files.fopen( - str(pki_dir.joinpath("minion", "minion.pub")), "r" - ) as fp: - pub_key = salt.crypt.clean_key(fp.read()) - - load = { - "version": 2, - "cmd": "_auth", - "id": "minion", - "token": token, - "pub": pub_key, - "nonce": "asdfse", - "enc_algo": "OAEP-SHA1", - "sig_algo": minion_opts["signing_algorithm"], - } - with caplog.at_level(logging.INFO): - ret = server._auth(load, sign_messages=True) - assert ( - "Minion minion tried to authenticate with unsupported encryption algorithm: OAEP-SHA1" - in caplog.text - ) - assert "load" in ret - assert "ret" in ret["load"] - assert ret["load"]["ret"] == "bad enc algo" - - -def test_req_server_auth_garbage_enc_algo(pki_dir, minion_opts, master_opts, caplog): - minion_opts.update( - { - "master_uri": "tcp://127.0.0.1:4506", - "interface": "127.0.0.1", - "ret_port": 4506, - "ipv6": False, - "sock_dir": ".", - "pki_dir": str(pki_dir.joinpath("minion")), - "id": "minion", - "__role": "minion", - "keysize": 4096, - "max_minions": 0, - "auto_accept": False, - "open_mode": False, - "key_pass": None, - "master_sign_pubkey": False, - "publish_port": 4505, - "auth_mode": 1, - } - ) - SMaster.secrets["aes"] = { - "secret": multiprocessing.Array( - ctypes.c_char, - salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), - ), - "reload": salt.crypt.Crypticle.generate_key_string, - } - master_opts.update(pki_dir=str(pki_dir.joinpath("master"))) - server = salt.channel.server.ReqServerChannel.factory(master_opts) - - server.auto_key = 
salt.daemons.masterapi.AutoKey(server.opts) - server.cache_cli = False - server.event = salt.utils.event.get_master_event( - master_opts, master_opts["sock_dir"], listen=False - ) - server.master_key = salt.crypt.MasterKeys(server.opts) - import tests.pytests.unit.crypt - - pub = tests.pytests.unit.crypt.LegacyPublicKey( - str(pki_dir.joinpath("master", "master.pub")) - ) - token = pub.encrypt( - salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), - ) - nonce = uuid.uuid4().hex - - # We need to read the public key with fopen otherwise the newlines might - # not match on windows. - with salt.utils.files.fopen( - str(pki_dir.joinpath("minion", "minion.pub")), "r" - ) as fp: - pub_key = salt.crypt.clean_key(fp.read()) - - load = { - "version": 2, - "cmd": "_auth", - "id": "minion", - "token": token, - "pub": pub_key, - "nonce": "asdfse", - "enc_algo": "IAMNOTAENCALGO", - "sig_algo": minion_opts["signing_algorithm"], - } - with caplog.at_level(logging.INFO): - ret = server._auth(load, sign_messages=True) - assert ( - "Minion minion tried to authenticate with unsupported encryption algorithm: IAMNOTAENCALGO" - in caplog.text - ) - assert "load" in ret - assert "ret" in ret["load"] - assert ret["load"]["ret"] == "bad enc algo" From 1532472dcbd34c59092074e12b9b66234feae67c Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Sat, 22 Jun 2024 12:42:24 -0700 Subject: [PATCH 123/160] salt.crypt.private_encrypt is no longer needed The class moethod salt.crypt.PrivateKey.encrypt replaces salt.crypt.private_encrypt --- salt/channel/server.py | 6 ++---- salt/crypt.py | 14 +------------- 2 files changed, 3 insertions(+), 17 deletions(-) diff --git a/salt/channel/server.py b/salt/channel/server.py index d54fafbf827..d5f6638ca5e 100644 --- a/salt/channel/server.py +++ b/salt/channel/server.py @@ -733,7 +733,7 @@ class ReqServerChannel: # Be aggressive about the signature digest = salt.utils.stringutils.to_bytes(hashlib.sha256(aes).hexdigest()) - ret["sig"] = salt.crypt.private_encrypt(self.master_key.key, digest) + ret["sig"] = self.master_key.key.encrypt(digest) eload = {"result": True, "act": "accept", "id": load["id"], "pub": load["pub"]} if self.opts.get("auth_events") is True: self.event.fire_event(eload, salt.utils.event.tagify(prefix="auth")) @@ -990,9 +990,7 @@ class MasterPubServerChannel: ) data["peers"][peer] = { "aes": pub.encrypt(aes), - "sig": salt.crypt.private_encrypt( - self.master_key.master_key, digest - ), + "sig": self.master_key.master_key.encrypt(digest), } else: log.warning("Peer key missing %r", peer_pub) diff --git a/salt/crypt.py b/salt/crypt.py index 1589a10e15d..d0a8d232a9f 100644 --- a/salt/crypt.py +++ b/salt/crypt.py @@ -426,18 +426,6 @@ def gen_signature(priv_path, pub_path, sign_path, passphrase=None): return True -def private_encrypt(key, message): - """ - Generate an M2Crypto-compatible signature - - :param Crypto.PublicKey.RSA._RSAobj key: The RSA key object - :param str message: The message to sign - :rtype: str - :return: The signature, or an empty string if the signature operation failed - """ - return key.encrypt(message) - - def pwdata_decrypt(rsa_key, pwdata): key = serialization.load_pem_private_key(rsa_key.encode(), password=None) password = key.decrypt( @@ -1110,7 +1098,7 @@ class AsyncAuth: :return: Encrypted token :rtype: str """ - 
return private_encrypt(self.get_keys(), clear_tok) + return self.get_keys().encrypt(clear_tok) def minion_sign_in_payload(self): """ From 29bb08c6620304778c06159fc68b82373b3cd2fd Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Sun, 23 Jun 2024 04:09:07 -0700 Subject: [PATCH 124/160] Remove duplicate tests that are out of date --- tests/pytests/unit/transport/test_zeromq.py | 122 +------------------- 1 file changed, 1 insertion(+), 121 deletions(-) diff --git a/tests/pytests/unit/transport/test_zeromq.py b/tests/pytests/unit/transport/test_zeromq.py index 18a98f439f3..609b70ab48a 100644 --- a/tests/pytests/unit/transport/test_zeromq.py +++ b/tests/pytests/unit/transport/test_zeromq.py @@ -1,5 +1,4 @@ import ctypes -import hashlib import logging import multiprocessing import threading @@ -18,7 +17,7 @@ import salt.utils.process import salt.utils.stringutils from salt.master import SMaster from tests.conftest import FIPS_TESTRUN -from tests.support.mock import AsyncMock, MagicMock, patch +from tests.support.mock import AsyncMock, MagicMock log = logging.getLogger(__name__) @@ -355,125 +354,6 @@ class MockSaltMinionMaster: raise tornado.gen.Return((payload, {"fun": "send_clear"})) -def test_zeromq_async_pub_channel_publish_port(temp_salt_master): - """ - test when connecting that we use the publish_port set in opts when its not 4506 - """ - opts = dict( - temp_salt_master.config.copy(), - ipc_mode="ipc", - pub_hwm=0, - recon_randomize=False, - publish_port=455505, - recon_default=1, - recon_max=2, - master_ip="127.0.0.1", - acceptance_wait_time=5, - acceptance_wait_time_max=5, - sign_pub_messages=False, - ) - opts["master_uri"] = "tcp://{interface}:{publish_port}".format(**opts) - ioloop = tornado.ioloop.IOLoop() - transport = salt.transport.zeromq.PublishClient(opts, ioloop) - with transport: - patch_socket = MagicMock(return_value=True) - patch_auth = MagicMock(return_value=True) - with patch.object(transport, "_socket", patch_socket): - 
transport.connect(455505) - assert str(opts["publish_port"]) in patch_socket.mock_calls[0][1][0] - - -def test_zeromq_async_pub_channel_filtering_decode_message_no_match( - temp_salt_master, -): - """ - test zeromq PublishClient _decode_messages when - zmq_filtering enabled and minion does not match - """ - message = [ - b"4f26aeafdb2367620a393c973eddbe8f8b846eb", - b"\x82\xa3enc\xa3aes\xa4load\xda\x00`\xeeR\xcf" - b"\x0eaI#V\x17if\xcf\xae\x05\xa7\xb3bN\xf7\xb2\xe2" - b'\xd0sF\xd1\xd4\xecB\xe8\xaf"/*ml\x80Q3\xdb\xaexg' - b"\x8e\x8a\x8c\xd3l\x03\\,J\xa7\x01i\xd1:]\xe3\x8d" - b"\xf4\x03\x88K\x84\n`\xe8\x9a\xad\xad\xc6\x8ea\x15>" - b"\x92m\x9e\xc7aM\x11?\x18;\xbd\x04c\x07\x85\x99\xa3\xea[\x00D", - ] - - opts = dict( - temp_salt_master.config.copy(), - ipc_mode="ipc", - pub_hwm=0, - zmq_filtering=True, - recon_randomize=False, - recon_default=1, - recon_max=2, - master_ip="127.0.0.1", - acceptance_wait_time=5, - acceptance_wait_time_max=5, - sign_pub_messages=False, - ) - opts["master_uri"] = "tcp://{interface}:{publish_port}".format(**opts) - - ioloop = tornado.ioloop.IOLoop() - channel = salt.transport.zeromq.PublishClient(opts, ioloop) - with channel: - with patch( - "salt.crypt.AsyncAuth.crypticle", - MagicMock(return_value={"tgt_type": "glob", "tgt": "*", "jid": 1}), - ): - res = channel._decode_messages(message) - assert res.result() is None - - -def test_zeromq_async_pub_channel_filtering_decode_message( - temp_salt_master, temp_salt_minion -): - """ - test AsyncZeroMQPublishClient _decode_messages when zmq_filtered enabled - """ - minion_hexid = salt.utils.stringutils.to_bytes( - hashlib.sha1(salt.utils.stringutils.to_bytes(temp_salt_minion.id)).hexdigest() - ) - - message = [ - minion_hexid, - b"\x82\xa3enc\xa3aes\xa4load\xda\x00`\xeeR\xcf" - b"\x0eaI#V\x17if\xcf\xae\x05\xa7\xb3bN\xf7\xb2\xe2" - b'\xd0sF\xd1\xd4\xecB\xe8\xaf"/*ml\x80Q3\xdb\xaexg' - b"\x8e\x8a\x8c\xd3l\x03\\,J\xa7\x01i\xd1:]\xe3\x8d" - b"\xf4\x03\x88K\x84\n`\xe8\x9a\xad\xad\xc6\x8ea\x15>" - 
b"\x92m\x9e\xc7aM\x11?\x18;\xbd\x04c\x07\x85\x99\xa3\xea[\x00D", - ] - - opts = dict( - temp_salt_master.config.copy(), - id=temp_salt_minion.id, - ipc_mode="ipc", - pub_hwm=0, - zmq_filtering=True, - recon_randomize=False, - recon_default=1, - recon_max=2, - master_ip="127.0.0.1", - acceptance_wait_time=5, - acceptance_wait_time_max=5, - sign_pub_messages=False, - ) - opts["master_uri"] = "tcp://{interface}:{publish_port}".format(**opts) - - ioloop = tornado.ioloop.IOLoop() - channel = salt.transport.zeromq.PublishClient(opts, ioloop) - with channel: - with patch( - "salt.crypt.AsyncAuth.crypticle", - MagicMock(return_value={"tgt_type": "glob", "tgt": "*", "jid": 1}), - ) as mock_test: - res = channel._decode_messages(message) - - assert res.result()["enc"] == "aes" - - async def test_req_server_garbage_request(io_loop): """ Validate invalid msgpack messages will not raise exceptions in the From 57779dea8a99469b3d7a0f2ee513d74fa541ec32 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Sun, 23 Jun 2024 06:13:22 -0700 Subject: [PATCH 125/160] Fim master key decrypt --- salt/channel/server.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/salt/channel/server.py b/salt/channel/server.py index d5f6638ca5e..11f62a9de09 100644 --- a/salt/channel/server.py +++ b/salt/channel/server.py @@ -1085,7 +1085,7 @@ class MasterPubServerChannel: peer = data["peer_id"] aes = data["peers"][self.opts["id"]]["aes"] sig = data["peers"][self.opts["id"]]["sig"] - key_str = self.master_key.master_private_decrypt(aes) + key_str = self.master_key.master_key.decrypt(aes) digest = salt.utils.stringutils.to_bytes( hashlib.sha256(key_str).hexdigest() ) From 1082ce3c5f9ea2ffdda2ab0198380dff36e73299 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Mon, 24 Jun 2024 01:18:53 -0700 Subject: [PATCH 126/160] Mulitple fips test run fixes --- tests/pytests/functional/channel/conftest.py | 8 +++++ tests/pytests/integration/master/test_peer.py | 23 +++++++++++++++ .../pytests/integration/runners/test_match.py | 29 ++++++++++++++++++- tests/pytests/unit/test_request_channel.py | 12 +++++++- 4 files changed, 70 insertions(+), 2 deletions(-) diff --git a/tests/pytests/functional/channel/conftest.py b/tests/pytests/functional/channel/conftest.py index 387e3bcf4e5..e7ec5a6e32e 100644 --- a/tests/pytests/functional/channel/conftest.py +++ b/tests/pytests/functional/channel/conftest.py @@ -7,6 +7,7 @@ from saltfactories.utils import random_string import salt.crypt import salt.master import salt.utils.stringutils +from tests.conftest import FIPS_TESTRUN @pytest.fixture(autouse=True) @@ -42,6 +43,10 @@ def salt_master(salt_factories, transport): "transport": transport, "auto_accept": True, "sign_pub_messages": False, + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), } factory = salt_factories.salt_master_daemon( random_string(f"server-{transport}-master-"), @@ -59,6 +64,9 @@ def salt_minion(salt_master, transport): "auth_timeout": 5, "auth_tries": 1, "master_uri": f"tcp://127.0.0.1:{salt_master.config['ret_port']}", + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", } factory = salt_master.salt_minion_daemon( random_string("server-{transport}-minion-"), diff --git a/tests/pytests/integration/master/test_peer.py b/tests/pytests/integration/master/test_peer.py index 38f44c2abf0..39baefb98ca 100644 --- a/tests/pytests/integration/master/test_peer.py +++ b/tests/pytests/integration/master/test_peer.py @@ -3,6 +3,8 @@ import shutil import pytest from saltfactories.utils import random_string +from tests.conftest 
import FIPS_TESTRUN + @pytest.fixture(scope="module") def pillar_state_tree(tmp_path_factory): @@ -36,6 +38,12 @@ def peer_salt_master( factory = salt_factories.salt_master_daemon( random_string("peer-comm-master", uppercase=False), defaults=peer_salt_master_config, + overrides={ + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), + }, ) with factory.started(): yield factory @@ -47,6 +55,11 @@ def peer_salt_minion_1(peer_salt_master): factory = peer_salt_master.salt_minion_daemon( random_string("peer-comm-minion-1", uppercase=False), defaults={"open_mode": True, "grains": {"hello_peer": "beer"}}, + overrides={ + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + }, ) with factory.started(): # Sync All @@ -62,6 +75,11 @@ def peer_salt_minion_2(peer_salt_master): factory = peer_salt_master.salt_minion_daemon( random_string("peer-comm-minion-2", uppercase=False), defaults={"open_mode": True}, + overrides={ + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + }, ) with factory.started(): # Sync All @@ -77,6 +95,11 @@ def peer_salt_minion_3(peer_salt_master): factory = peer_salt_master.salt_minion_daemon( random_string("peer-comm-minion-3", uppercase=False), defaults={"open_mode": True}, + overrides={ + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", + }, ) with factory.started(): # Sync All diff --git a/tests/pytests/integration/runners/test_match.py b/tests/pytests/integration/runners/test_match.py index 7a76ff59efe..96f6c6bc70a 100644 --- a/tests/pytests/integration/runners/test_match.py +++ 
b/tests/pytests/integration/runners/test_match.py @@ -6,6 +6,8 @@ import logging import pytest +from tests.conftest import FIPS_TESTRUN + log = logging.getLogger(__name__) pytestmark = [ @@ -70,7 +72,14 @@ def pillar_tree(match_salt_master, match_salt_minion_alice, match_salt_minion_ev @pytest.fixture(scope="class") def match_salt_master(salt_factories, match_master_config): factory = salt_factories.salt_master_daemon( - "match-master", defaults=match_master_config + "match-master", + defaults=match_master_config, + overrides={ + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), + }, ) with factory.started(): yield factory @@ -82,6 +91,12 @@ def match_salt_minion_alice(match_salt_master): factory = match_salt_master.salt_minion_daemon( "match-minion-alice", defaults={"open_mode": True, "grains": {"role": "alice"}}, + overrides={ + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), + }, ) with factory.started(): # Sync All @@ -97,6 +112,12 @@ def match_salt_minion_eve(match_salt_master): factory = match_salt_master.salt_minion_daemon( "match-minion-eve", defaults={"open_mode": True, "grains": {"role": "eve"}}, + overrides={ + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), + }, ) with factory.started(): # Sync All @@ -112,6 +133,12 @@ def match_salt_minion_bob(match_salt_master): factory = match_salt_master.salt_minion_daemon( "match-minion-bob", defaults={"open_mode": True}, + overrides={ + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), + }, ) with factory.started(): # Sync All diff --git a/tests/pytests/unit/test_request_channel.py b/tests/pytests/unit/test_request_channel.py index 624c1f4adc1..6a753676494 100644 --- a/tests/pytests/unit/test_request_channel.py +++ 
b/tests/pytests/unit/test_request_channel.py @@ -743,7 +743,13 @@ async def test_req_chan_decode_data_dict_entry_v2_bad_nonce( client.transport = MagicMock() real_transport.close() ret = server._encrypt_private( - pillar_data, dictkey, target, nonce=badnonce, sign_messages=True + pillar_data, + dictkey, + target, + nonce=badnonce, + sign_messages=True, + encryption_algorithm=minion_opts["encryption_algorithm"], + signing_algorithm=minion_opts["signing_algorithm"], ) @tornado.gen.coroutine @@ -1014,6 +1020,8 @@ async def test_req_serv_auth_v1(minion_opts, master_opts, pki_dir): "id": "minion", "token": token, "pub": pub_key, + "enc_algo": minion_opts["encryption_algorithm"], + "sig_algo": minion_opts["signing_algorithm"], } try: ret = server._auth(load, sign_messages=False) @@ -1073,6 +1081,8 @@ async def test_req_serv_auth_v2(minion_opts, master_opts, pki_dir): "nonce": nonce, "token": token, "pub": pub_key, + "enc_algo": minion_opts["encryption_algorithm"], + "sig_algo": minion_opts["signing_algorithm"], } try: ret = server._auth(load, sign_messages=True) From 0559f97be36af415829ff82121916a3b81d83fa0 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Mon, 24 Jun 2024 02:53:31 -0700 Subject: [PATCH 127/160] Fix deltaproxy and cluster tests in fips mode. 
--- salt/channel/server.py | 8 +++--- .../integration/cli/test_salt_deltaproxy.py | 27 +++++++++++++++++-- 2 files changed, 30 insertions(+), 5 deletions(-) diff --git a/salt/channel/server.py b/salt/channel/server.py index 11f62a9de09..5fcca7fd59f 100644 --- a/salt/channel/server.py +++ b/salt/channel/server.py @@ -989,7 +989,7 @@ class MasterPubServerChannel: hashlib.sha256(aes).hexdigest() ) data["peers"][peer] = { - "aes": pub.encrypt(aes), + "aes": pub.encrypt(aes, algorithm="OAEP-SHA224"), "sig": self.master_key.master_key.encrypt(digest), } else: @@ -1047,7 +1047,7 @@ class MasterPubServerChannel: self.pushers = [] self.auth_errors = {} for peer in self.opts.get("cluster_peers", []): - pusher = salt.transport.tcp.TCPPublishServer( + pusher = salt.transport.tcp.PublishServer( self.opts, pull_host=peer, pull_port=tcp_master_pool_port, @@ -1085,7 +1085,9 @@ class MasterPubServerChannel: peer = data["peer_id"] aes = data["peers"][self.opts["id"]]["aes"] sig = data["peers"][self.opts["id"]]["sig"] - key_str = self.master_key.master_key.decrypt(aes) + key_str = self.master_key.master_key.decrypt( + aes, algorithm="OAEP-SHA224" + ) digest = salt.utils.stringutils.to_bytes( hashlib.sha256(key_str).hexdigest() ) diff --git a/tests/pytests/integration/cli/test_salt_deltaproxy.py b/tests/pytests/integration/cli/test_salt_deltaproxy.py index 8c04fc2f61a..e9f68d7c9b8 100644 --- a/tests/pytests/integration/cli/test_salt_deltaproxy.py +++ b/tests/pytests/integration/cli/test_salt_deltaproxy.py @@ -52,7 +52,15 @@ def test_exit_status_no_proxyid(salt_master, proxy_minion_id): with pytest.raises(FactoryNotStarted) as exc: factory = salt_master.salt_proxy_minion_daemon( - proxy_minion_id, include_proxyid_cli_flag=False, defaults=config_defaults + proxy_minion_id, + include_proxyid_cli_flag=False, + defaults=config_defaults, + overrides={ + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), + }, ) 
factory.start(start_timeout=10, max_start_attempts=1) @@ -76,6 +84,13 @@ def test_exit_status_unknown_user(salt_master, proxy_minion_id): proxy_minion_id, overrides={"user": "unknown-user"}, defaults=config_defaults, + overrides={ + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), + }, ) factory.start(start_timeout=10, max_start_attempts=1) @@ -94,7 +109,15 @@ def test_exit_status_unknown_argument(salt_master, proxy_minion_id): with pytest.raises(FactoryNotStarted) as exc: factory = salt_master.salt_proxy_minion_daemon( - proxy_minion_id, defaults=config_defaults + proxy_minion_id, + defaults=config_defaults, + overrides={ + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), + }, ) factory.start("--unknown-argument", start_timeout=10, max_start_attempts=1) From 2a04013aae07b18fd7d2bd35afff84c6db75a1a3 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Mon, 24 Jun 2024 04:36:31 -0700 Subject: [PATCH 128/160] Fix duplicate kwarg --- tests/pytests/integration/cli/test_salt_deltaproxy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/pytests/integration/cli/test_salt_deltaproxy.py b/tests/pytests/integration/cli/test_salt_deltaproxy.py index e9f68d7c9b8..84d64dacaec 100644 --- a/tests/pytests/integration/cli/test_salt_deltaproxy.py +++ b/tests/pytests/integration/cli/test_salt_deltaproxy.py @@ -82,9 +82,9 @@ def test_exit_status_unknown_user(salt_master, proxy_minion_id): with pytest.raises(FactoryNotStarted) as exc: factory = salt_master.salt_proxy_minion_daemon( proxy_minion_id, - overrides={"user": "unknown-user"}, defaults=config_defaults, overrides={ + "user": "unknown-user", "fips_mode": FIPS_TESTRUN, "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", "signing_algorithm": ( From 378b1061e820141667ef6d8f7384b8fa4e9c8b9a Mon Sep 17 00:00:00 2001 From: bdrx312 Date: Mon, 29 Apr 2024 03:33:37 -0400 Subject: [PATCH 129/160] Fix incorrect "version argument will be ignored" warning --- changelog/64563.fixed.md | 1 + salt/modules/yumpkg.py | 4 ++-- tests/pytests/functional/states/test_pkg.py | 3 ++- 3 files changed, 5 insertions(+), 3 deletions(-) create mode 100644 changelog/64563.fixed.md diff --git a/changelog/64563.fixed.md b/changelog/64563.fixed.md new file mode 100644 index 00000000000..fadd9721fed --- /dev/null +++ b/changelog/64563.fixed.md @@ -0,0 +1 @@ +Fixed an incorrect "version argument will be ignored for multiple package targets" warning when using the pkgs argument to the yumpkg module. 
diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py index b4ffdc72ce5..a301d93eac3 100644 --- a/salt/modules/yumpkg.py +++ b/salt/modules/yumpkg.py @@ -1424,8 +1424,8 @@ def install( 'version': '', 'arch': ''}}} """ - if "version" in kwargs: - kwargs["version"] = str(kwargs["version"]) + if (version := kwargs.get("version")) is not None: + kwargs["version"] = str(version) options = _get_options(**kwargs) if salt.utils.data.is_true(refresh): diff --git a/tests/pytests/functional/states/test_pkg.py b/tests/pytests/functional/states/test_pkg.py index 67ca2500fe3..01d6d5c7f5f 100644 --- a/tests/pytests/functional/states/test_pkg.py +++ b/tests/pytests/functional/states/test_pkg.py @@ -238,7 +238,7 @@ def test_pkg_002_installed_with_version(PKG_TARGETS, states, latest_version): @pytest.mark.requires_salt_states("pkg.installed", "pkg.removed") @pytest.mark.slow_test -def test_pkg_003_installed_multipkg(PKG_TARGETS, modules, states): +def test_pkg_003_installed_multipkg(caplog, PKG_TARGETS, modules, states): """ This is a destructive test as it installs and then removes two packages """ @@ -254,6 +254,7 @@ def test_pkg_003_installed_multipkg(PKG_TARGETS, modules, states): try: ret = states.pkg.installed(name=None, pkgs=PKG_TARGETS, refresh=False) assert ret.result is True + assert "WARNING" not in caplog.text finally: ret = states.pkg.removed(name=None, pkgs=PKG_TARGETS) assert ret.result is True From 43565e2210d3b76ed496c9517639754e6cb7f3fe Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Mon, 24 Jun 2024 09:20:28 -0700 Subject: [PATCH 130/160] Fix syndic test on fips platforms --- tests/pytests/scenarios/syndic/conftest.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/tests/pytests/scenarios/syndic/conftest.py b/tests/pytests/scenarios/syndic/conftest.py index b014aa2bf0e..5492bc23946 100644 --- a/tests/pytests/scenarios/syndic/conftest.py +++ b/tests/pytests/scenarios/syndic/conftest.py @@ -2,6 +2,8 @@ import logging import pytest +from tests.conftest import FIPS_TESTRUN + log = logging.getLogger(__name__) @@ -14,6 +16,10 @@ def master(request, salt_factories): "interface": "127.0.0.1", "auto_accept": True, "order_masters": True, + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), } factory = salt_factories.salt_master_daemon( "master", @@ -49,10 +55,17 @@ def syndic(master, salt_factories): "auto_accept": True, "syndic_master": f"{addr}", "syndic_master_port": f"{ret_port}", + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), } minion_overrides = { "master": "127.0.0.2", "publish_port": f"{port}", + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", } factory = master.salt_syndic_daemon( "syndic", @@ -74,6 +87,9 @@ def minion(syndic, salt_factories): addr = syndic.master.config["interface"] config_overrides = { "master": f"{addr}:{port}", + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", } factory = syndic.master.salt_minion_daemon( "minion", From 1e2e79db754852a44afa1b3f2a193dbc75fbaf13 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Sat, 22 Jun 2024 04:03:02 -0700 Subject: [PATCH 131/160] Use FullArgSpec as ArgSpec is deprecated --- tests/unit/states/test_module.py | 22 ++++++++++++++++++---- 1 file changed, 18 insertions(+), 4 deletions(-) diff --git a/tests/unit/states/test_module.py b/tests/unit/states/test_module.py index 169106fcfd1..4fd0dcad0bc 100644 --- a/tests/unit/states/test_module.py +++ b/tests/unit/states/test_module.py @@ -3,7 +3,7 @@ """ import logging -from inspect import ArgSpec +from inspect import FullArgSpec import salt.states.module as module from tests.support.mixins import LoaderModuleMockMixin @@ -116,11 +116,25 @@ class ModuleStateTest(TestCase, LoaderModuleMockMixin): @classmethod def setUpClass(cls): - cls.aspec = ArgSpec( - args=["hello", "world"], varargs=None, keywords=None, defaults=False + cls.aspec = FullArgSpec( + args=["hello", "world"], + varargs=None, + varkw=None, + defaults=False, + kwonlyargs=None, + kwonlydefaults=None, + annotations=None, ) - cls.bspec = ArgSpec(args=[], varargs="names", keywords="kwargs", defaults=None) + cls.bspec = FullArgSpec( + args=[], + varargs="names", + varkw=None, + defaults=None, + kwonlyargs="kwargs", + kwonlydefaults=None, + annotations=None, + ) @classmethod def tearDownClass(cls): From 3735415fbcc8611af320f4986831c8a873919a30 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Mon, 24 Jun 2024 03:22:49 -0700 Subject: [PATCH 132/160] Bump requests and docker requirements --- requirements/base.txt | 3 ++- requirements/pytest.txt | 3 ++- requirements/static/ci/lint.in | 3 ++- requirements/static/ci/py3.10/cloud.txt | 6 ++---- requirements/static/ci/py3.10/darwin.txt | 9 +++------ requirements/static/ci/py3.10/docs.txt | 2 +- requirements/static/ci/py3.10/freebsd.txt | 9 +++------ requirements/static/ci/py3.10/lint.txt | 6 ++---- requirements/static/ci/py3.10/linux.txt | 9 +++------ requirements/static/ci/py3.10/windows.txt | 9 +++------ requirements/static/ci/py3.11/cloud.txt | 6 ++---- requirements/static/ci/py3.11/darwin.txt | 9 +++------ requirements/static/ci/py3.11/docs.txt | 2 +- requirements/static/ci/py3.11/freebsd.txt | 9 +++------ requirements/static/ci/py3.11/lint.txt | 6 ++---- requirements/static/ci/py3.11/linux.txt | 9 +++------ requirements/static/ci/py3.11/windows.txt | 9 +++------ requirements/static/ci/py3.12/cloud.txt | 6 ++---- requirements/static/ci/py3.12/darwin.txt | 9 +++------ requirements/static/ci/py3.12/docs.txt | 2 +- requirements/static/ci/py3.12/freebsd.txt | 9 +++------ requirements/static/ci/py3.12/lint.txt | 6 ++---- requirements/static/ci/py3.12/linux.txt | 9 +++------ requirements/static/ci/py3.12/windows.txt | 9 +++------ requirements/static/ci/py3.7/cloud.txt | 4 ++-- requirements/static/ci/py3.7/docs.txt | 2 +- requirements/static/ci/py3.7/freebsd.txt | 4 ++-- requirements/static/ci/py3.7/linux.txt | 4 ++-- requirements/static/ci/py3.7/windows.txt | 4 ++-- requirements/static/ci/py3.8/cloud.txt | 6 ++---- requirements/static/ci/py3.8/docs.txt | 2 +- requirements/static/ci/py3.8/freebsd.txt | 9 +++------ requirements/static/ci/py3.8/lint.txt | 6 ++---- requirements/static/ci/py3.8/linux.txt | 9 +++------ requirements/static/ci/py3.8/windows.txt | 9 +++------ requirements/static/ci/py3.9/cloud.txt | 6 ++---- requirements/static/ci/py3.9/darwin.txt | 9 +++------ 
requirements/static/ci/py3.9/docs.txt | 2 +- requirements/static/ci/py3.9/freebsd.txt | 9 +++------ requirements/static/ci/py3.9/lint.txt | 6 ++---- requirements/static/ci/py3.9/linux.txt | 9 +++------ requirements/static/ci/py3.9/windows.txt | 9 +++------ requirements/static/pkg/py3.10/darwin.txt | 2 +- requirements/static/pkg/py3.10/freebsd.txt | 2 +- requirements/static/pkg/py3.10/linux.txt | 2 +- requirements/static/pkg/py3.10/windows.txt | 2 +- requirements/static/pkg/py3.11/darwin.txt | 2 +- requirements/static/pkg/py3.11/freebsd.txt | 2 +- requirements/static/pkg/py3.11/linux.txt | 2 +- requirements/static/pkg/py3.11/windows.txt | 2 +- requirements/static/pkg/py3.12/darwin.txt | 2 +- requirements/static/pkg/py3.12/freebsd.txt | 2 +- requirements/static/pkg/py3.12/linux.txt | 2 +- requirements/static/pkg/py3.12/windows.txt | 2 +- requirements/static/pkg/py3.7/freebsd.txt | 2 +- requirements/static/pkg/py3.7/linux.txt | 2 +- requirements/static/pkg/py3.7/windows.txt | 2 +- requirements/static/pkg/py3.8/freebsd.txt | 2 +- requirements/static/pkg/py3.8/linux.txt | 2 +- requirements/static/pkg/py3.8/windows.txt | 2 +- requirements/static/pkg/py3.9/darwin.txt | 2 +- requirements/static/pkg/py3.9/freebsd.txt | 2 +- requirements/static/pkg/py3.9/linux.txt | 2 +- requirements/static/pkg/py3.9/windows.txt | 2 +- 64 files changed, 119 insertions(+), 193 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 31d4f29be11..73010291794 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -5,7 +5,8 @@ jmespath msgpack>=1.0.0 PyYAML MarkupSafe -requests>=1.0.0 +requests>=2.31.0 ; python_version < '3.8' +requests>=2.32.0 ; python_version >= '3.8' distro>=1.0.1 psutil>=5.0.0 packaging>=21.3 diff --git a/requirements/pytest.txt b/requirements/pytest.txt index d53137d6601..ce8b9569125 100644 --- a/requirements/pytest.txt +++ b/requirements/pytest.txt @@ -1,6 +1,7 @@ mock >= 3.0.0 # PyTest -docker +docker >= 7.1.0; python_version >= '3.8' 
+docker < 7.1.0; python_version < '3.8' pytest >= 7.2.0 pytest-salt-factories >= 1.0.0 pytest-helpers-namespace >= 2019.1.8 diff --git a/requirements/static/ci/lint.in b/requirements/static/ci/lint.in index 977c91fcd47..e715c2fabe5 100644 --- a/requirements/static/ci/lint.in +++ b/requirements/static/ci/lint.in @@ -1,7 +1,8 @@ # Lint requirements --constraint=./py{py_version}/{platform}.txt -docker +docker >= 7.1.0; python_version >= '3.8' +docker < 7.1.0; python_version < '3.8' pylint~=3.1.0 SaltPyLint>=2024.2.2 toml diff --git a/requirements/static/ci/py3.10/cloud.txt b/requirements/static/ci/py3.10/cloud.txt index 0eb2d5177d6..a997fa33723 100644 --- a/requirements/static/ci/py3.10/cloud.txt +++ b/requirements/static/ci/py3.10/cloud.txt @@ -152,7 +152,7 @@ dnspython==2.6.1 # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/ci/common.in # python-etcd -docker==6.1.3 +docker==7.1.0 ; python_version >= "3.8" # via # -c requirements/static/ci/py3.10/linux.txt # -r requirements/pytest.txt @@ -363,7 +363,6 @@ packaging==22.0 # -c requirements/static/ci/../pkg/py3.10/linux.txt # -c requirements/static/ci/py3.10/linux.txt # -r requirements/base.txt - # docker # pytest paramiko==3.4.0 ; sys_platform != "win32" and sys_platform != "darwin" # via @@ -563,7 +562,7 @@ pyzmq==23.2.0 # pytest-salt-factories requests-ntlm==1.1.0 # via pywinrm -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/../pkg/py3.10/linux.txt # -c requirements/static/ci/py3.10/linux.txt @@ -716,7 +715,6 @@ watchdog==0.10.3 websocket-client==0.40.0 # via # -c requirements/static/ci/py3.10/linux.txt - # docker # kubernetes wempy==0.2.1 # via diff --git a/requirements/static/ci/py3.10/darwin.txt b/requirements/static/ci/py3.10/darwin.txt index 129898b242a..c830a95799b 100644 --- a/requirements/static/ci/py3.10/darwin.txt +++ b/requirements/static/ci/py3.10/darwin.txt @@ -111,7 +111,7 @@ dnspython==2.6.1 # via # -r requirements/static/ci/common.in 
# python-etcd -docker==6.1.3 +docker==7.1.0 ; python_version >= "3.8" # via -r requirements/pytest.txt etcd3-py==0.1.6 # via -r requirements/static/ci/common.in @@ -260,7 +260,6 @@ packaging==22.0 # via # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/base.txt - # docker # pytest paramiko==3.4.0 # via @@ -398,7 +397,7 @@ pyzmq==25.1.2 ; sys_platform == "darwin" # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/zeromq.txt # pytest-salt-factories -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/base.txt @@ -499,9 +498,7 @@ vultr==1.0.1 watchdog==0.10.3 # via -r requirements/static/ci/common.in websocket-client==0.40.0 - # via - # docker - # kubernetes + # via kubernetes wempy==0.2.1 # via -r requirements/static/ci/common.in werkzeug==3.0.3 diff --git a/requirements/static/ci/py3.10/docs.txt b/requirements/static/ci/py3.10/docs.txt index 8f04442954a..e5f2fa06df0 100644 --- a/requirements/static/ci/py3.10/docs.txt +++ b/requirements/static/ci/py3.10/docs.txt @@ -148,7 +148,7 @@ pyzmq==23.2.0 # via # -c requirements/static/ci/py3.10/linux.txt # -r requirements/zeromq.txt -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/py3.10/linux.txt # -r requirements/base.txt diff --git a/requirements/static/ci/py3.10/freebsd.txt b/requirements/static/ci/py3.10/freebsd.txt index bea5ec1ca29..316c7659cd4 100644 --- a/requirements/static/ci/py3.10/freebsd.txt +++ b/requirements/static/ci/py3.10/freebsd.txt @@ -109,7 +109,7 @@ dnspython==2.6.1 # via # -r requirements/static/ci/common.in # python-etcd -docker==6.1.3 +docker==7.1.0 ; python_version >= "3.8" # via -r requirements/pytest.txt etcd3-py==0.1.6 # via -r requirements/static/ci/common.in @@ -252,7 +252,6 @@ packaging==22.0 # via # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r requirements/base.txt - # docker # pytest paramiko==3.4.0 ; 
sys_platform != "win32" and sys_platform != "darwin" # via @@ -391,7 +390,7 @@ pyzmq==23.2.0 # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r requirements/zeromq.txt # pytest-salt-factories -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r requirements/base.txt @@ -487,9 +486,7 @@ virtualenv==20.7.2 watchdog==0.10.3 # via -r requirements/static/ci/common.in websocket-client==0.40.0 - # via - # docker - # kubernetes + # via kubernetes wempy==0.2.1 # via -r requirements/static/ci/common.in werkzeug==3.0.3 diff --git a/requirements/static/ci/py3.10/lint.txt b/requirements/static/ci/py3.10/lint.txt index b9c4b66f8d1..415b717d338 100644 --- a/requirements/static/ci/py3.10/lint.txt +++ b/requirements/static/ci/py3.10/lint.txt @@ -162,7 +162,7 @@ dnspython==2.6.1 # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/ci/common.in # python-etcd -docker==6.1.3 +docker==7.1.0 ; python_version >= "3.8" # via # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/ci/lint.in @@ -365,7 +365,6 @@ packaging==22.0 # -c requirements/static/ci/py3.10/linux.txt # -r requirements/base.txt # ansible-core - # docker paramiko==3.4.0 ; sys_platform != "win32" and sys_platform != "darwin" # via # -c requirements/static/ci/py3.10/linux.txt @@ -533,7 +532,7 @@ redis==3.5.3 # via # -c requirements/static/ci/py3.10/linux.txt # redis-py-cluster -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/../pkg/py3.10/linux.txt # -c requirements/static/ci/py3.10/linux.txt @@ -703,7 +702,6 @@ watchdog==0.10.3 websocket-client==0.40.0 # via # -c requirements/static/ci/py3.10/linux.txt - # docker # kubernetes wempy==0.2.1 # via diff --git a/requirements/static/ci/py3.10/linux.txt b/requirements/static/ci/py3.10/linux.txt index 2641bd1d25f..c77f42ab986 100644 --- a/requirements/static/ci/py3.10/linux.txt +++ b/requirements/static/ci/py3.10/linux.txt 
@@ -118,7 +118,7 @@ dnspython==2.6.1 # via # -r requirements/static/ci/common.in # python-etcd -docker==6.1.3 +docker==7.1.0 ; python_version >= "3.8" # via -r requirements/pytest.txt etcd3-py==0.1.6 # via -r requirements/static/ci/common.in @@ -263,7 +263,6 @@ packaging==22.0 # -c requirements/static/ci/../pkg/py3.10/linux.txt # -r requirements/base.txt # ansible-core - # docker # pytest paramiko==3.4.0 ; sys_platform != "win32" and sys_platform != "darwin" # via @@ -420,7 +419,7 @@ redis-py-cluster==2.1.3 # via -r requirements/static/ci/linux.in redis==3.5.3 # via redis-py-cluster -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/../pkg/py3.10/linux.txt # -r requirements/base.txt @@ -536,9 +535,7 @@ virtualenv==20.7.2 watchdog==0.10.3 # via -r requirements/static/ci/common.in websocket-client==0.40.0 - # via - # docker - # kubernetes + # via kubernetes wempy==0.2.1 # via -r requirements/static/ci/common.in werkzeug==3.0.3 diff --git a/requirements/static/ci/py3.10/windows.txt b/requirements/static/ci/py3.10/windows.txt index cca88510582..5b9624a13bf 100644 --- a/requirements/static/ci/py3.10/windows.txt +++ b/requirements/static/ci/py3.10/windows.txt @@ -102,7 +102,7 @@ dnspython==2.6.1 # via # -r requirements/static/ci/common.in # python-etcd -docker==6.1.3 +docker==7.1.0 ; python_version >= "3.8" # via -r requirements/pytest.txt etcd3-py==0.1.6 # via -r requirements/static/ci/common.in @@ -237,7 +237,6 @@ packaging==22.0 # via # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/base.txt - # docker # pytest passlib==1.7.4 # via -r requirements/static/ci/common.in @@ -384,7 +383,7 @@ pyzmq==25.0.2 ; sys_platform == "win32" # pytest-salt-factories requests-ntlm==1.1.0 # via pywinrm -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/base.txt @@ -474,9 +473,7 @@ virtualenv==20.17.0 watchdog==0.10.3 # via -r 
requirements/static/ci/common.in websocket-client==0.40.0 - # via - # docker - # kubernetes + # via kubernetes wempy==0.2.1 # via -r requirements/static/ci/common.in werkzeug==3.0.3 diff --git a/requirements/static/ci/py3.11/cloud.txt b/requirements/static/ci/py3.11/cloud.txt index 4a745cd4f31..eb7685c6fe3 100644 --- a/requirements/static/ci/py3.11/cloud.txt +++ b/requirements/static/ci/py3.11/cloud.txt @@ -148,7 +148,7 @@ dnspython==2.6.1 # -c requirements/static/ci/py3.11/linux.txt # -r requirements/static/ci/common.in # python-etcd -docker==6.1.3 +docker==7.1.0 ; python_version >= "3.8" # via # -c requirements/static/ci/py3.11/linux.txt # -r requirements/pytest.txt @@ -338,7 +338,6 @@ packaging==22.0 # -c requirements/static/ci/../pkg/py3.11/linux.txt # -c requirements/static/ci/py3.11/linux.txt # -r requirements/base.txt - # docker # pytest paramiko==3.4.0 ; sys_platform != "win32" and sys_platform != "darwin" # via @@ -525,7 +524,7 @@ pyzmq==23.2.0 # pytest-salt-factories requests-ntlm==1.1.0 # via pywinrm -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/../pkg/py3.11/linux.txt # -c requirements/static/ci/py3.11/linux.txt @@ -663,7 +662,6 @@ watchdog==0.10.3 websocket-client==0.40.0 # via # -c requirements/static/ci/py3.11/linux.txt - # docker # kubernetes wempy==0.2.1 # via diff --git a/requirements/static/ci/py3.11/darwin.txt b/requirements/static/ci/py3.11/darwin.txt index b32c62f48eb..0474319bf16 100644 --- a/requirements/static/ci/py3.11/darwin.txt +++ b/requirements/static/ci/py3.11/darwin.txt @@ -106,7 +106,7 @@ dnspython==2.6.1 # via # -r requirements/static/ci/common.in # python-etcd -docker==6.1.3 +docker==7.1.0 ; python_version >= "3.8" # via -r requirements/pytest.txt etcd3-py==0.1.6 # via -r requirements/static/ci/common.in @@ -242,7 +242,6 @@ packaging==22.0 # via # -c requirements/static/ci/../pkg/py3.11/darwin.txt # -r requirements/base.txt - # docker # pytest passlib==1.7.4 # via -r 
requirements/static/ci/common.in @@ -368,7 +367,7 @@ pyzmq==25.1.2 ; sys_platform == "darwin" # -c requirements/static/ci/../pkg/py3.11/darwin.txt # -r requirements/zeromq.txt # pytest-salt-factories -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/../pkg/py3.11/darwin.txt # -r requirements/base.txt @@ -460,9 +459,7 @@ vultr==1.0.1 watchdog==0.10.3 # via -r requirements/static/ci/common.in websocket-client==0.40.0 - # via - # docker - # kubernetes + # via kubernetes wempy==0.2.1 # via -r requirements/static/ci/common.in werkzeug==3.0.3 diff --git a/requirements/static/ci/py3.11/docs.txt b/requirements/static/ci/py3.11/docs.txt index 8a55c4b94f7..67257cbc16a 100644 --- a/requirements/static/ci/py3.11/docs.txt +++ b/requirements/static/ci/py3.11/docs.txt @@ -148,7 +148,7 @@ pyzmq==23.2.0 # via # -c requirements/static/ci/py3.11/linux.txt # -r requirements/zeromq.txt -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/py3.11/linux.txt # -r requirements/base.txt diff --git a/requirements/static/ci/py3.11/freebsd.txt b/requirements/static/ci/py3.11/freebsd.txt index c0649b96f54..a343e3b7825 100644 --- a/requirements/static/ci/py3.11/freebsd.txt +++ b/requirements/static/ci/py3.11/freebsd.txt @@ -107,7 +107,7 @@ dnspython==2.6.1 # via # -r requirements/static/ci/common.in # python-etcd -docker==6.1.3 +docker==7.1.0 ; python_version >= "3.8" # via -r requirements/pytest.txt etcd3-py==0.1.6 # via -r requirements/static/ci/common.in @@ -237,7 +237,6 @@ packaging==22.0 # via # -c requirements/static/ci/../pkg/py3.11/freebsd.txt # -r requirements/base.txt - # docker # pytest paramiko==3.4.0 ; sys_platform != "win32" and sys_platform != "darwin" # via -r requirements/static/ci/common.in @@ -366,7 +365,7 @@ pyzmq==23.2.0 # -c requirements/static/ci/../pkg/py3.11/freebsd.txt # -r requirements/zeromq.txt # pytest-salt-factories -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # 
via # -c requirements/static/ci/../pkg/py3.11/freebsd.txt # -r requirements/base.txt @@ -453,9 +452,7 @@ virtualenv==20.7.2 watchdog==0.10.3 # via -r requirements/static/ci/common.in websocket-client==0.40.0 - # via - # docker - # kubernetes + # via kubernetes wempy==0.2.1 # via -r requirements/static/ci/common.in werkzeug==3.0.3 diff --git a/requirements/static/ci/py3.11/lint.txt b/requirements/static/ci/py3.11/lint.txt index 65aef229e1e..120555d68e4 100644 --- a/requirements/static/ci/py3.11/lint.txt +++ b/requirements/static/ci/py3.11/lint.txt @@ -158,7 +158,7 @@ dnspython==2.6.1 # -c requirements/static/ci/py3.11/linux.txt # -r requirements/static/ci/common.in # python-etcd -docker==6.1.3 +docker==7.1.0 ; python_version >= "3.8" # via # -c requirements/static/ci/py3.11/linux.txt # -r requirements/static/ci/lint.in @@ -343,7 +343,6 @@ packaging==22.0 # -c requirements/static/ci/py3.11/linux.txt # -r requirements/base.txt # ansible-core - # docker paramiko==3.4.0 ; sys_platform != "win32" and sys_platform != "darwin" # via # -c requirements/static/ci/py3.11/linux.txt @@ -498,7 +497,7 @@ redis==3.5.3 # via # -c requirements/static/ci/py3.11/linux.txt # redis-py-cluster -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/../pkg/py3.11/linux.txt # -c requirements/static/ci/py3.11/linux.txt @@ -649,7 +648,6 @@ watchdog==0.10.3 websocket-client==0.40.0 # via # -c requirements/static/ci/py3.11/linux.txt - # docker # kubernetes wempy==0.2.1 # via diff --git a/requirements/static/ci/py3.11/linux.txt b/requirements/static/ci/py3.11/linux.txt index a386c6d95b0..485b10fe04d 100644 --- a/requirements/static/ci/py3.11/linux.txt +++ b/requirements/static/ci/py3.11/linux.txt @@ -116,7 +116,7 @@ dnspython==2.6.1 # via # -r requirements/static/ci/common.in # python-etcd -docker==6.1.3 +docker==7.1.0 ; python_version >= "3.8" # via -r requirements/pytest.txt etcd3-py==0.1.6 # via -r requirements/static/ci/common.in @@ -248,7 +248,6 @@ 
packaging==22.0 # -c requirements/static/ci/../pkg/py3.11/linux.txt # -r requirements/base.txt # ansible-core - # docker # pytest paramiko==3.4.0 ; sys_platform != "win32" and sys_platform != "darwin" # via -r requirements/static/ci/common.in @@ -395,7 +394,7 @@ redis-py-cluster==2.1.3 # via -r requirements/static/ci/linux.in redis==3.5.3 # via redis-py-cluster -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/../pkg/py3.11/linux.txt # -r requirements/base.txt @@ -502,9 +501,7 @@ virtualenv==20.7.2 watchdog==0.10.3 # via -r requirements/static/ci/common.in websocket-client==0.40.0 - # via - # docker - # kubernetes + # via kubernetes wempy==0.2.1 # via -r requirements/static/ci/common.in werkzeug==3.0.3 diff --git a/requirements/static/ci/py3.11/windows.txt b/requirements/static/ci/py3.11/windows.txt index ea9de77cf1b..10d280d88e3 100644 --- a/requirements/static/ci/py3.11/windows.txt +++ b/requirements/static/ci/py3.11/windows.txt @@ -100,7 +100,7 @@ dnspython==2.6.1 # via # -r requirements/static/ci/common.in # python-etcd -docker==6.1.3 +docker==7.1.0 ; python_version >= "3.8" # via -r requirements/pytest.txt etcd3-py==0.1.6 # via -r requirements/static/ci/common.in @@ -233,7 +233,6 @@ packaging==22.0 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt # -r requirements/base.txt - # docker # pytest passlib==1.7.4 # via -r requirements/static/ci/common.in @@ -380,7 +379,7 @@ pyzmq==25.0.2 ; sys_platform == "win32" # pytest-salt-factories requests-ntlm==1.1.0 # via pywinrm -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/../pkg/py3.11/windows.txt # -r requirements/base.txt @@ -468,9 +467,7 @@ virtualenv==20.17.0 watchdog==0.10.3 # via -r requirements/static/ci/common.in websocket-client==0.40.0 - # via - # docker - # kubernetes + # via kubernetes wempy==0.2.1 # via -r requirements/static/ci/common.in werkzeug==3.0.3 diff --git 
a/requirements/static/ci/py3.12/cloud.txt b/requirements/static/ci/py3.12/cloud.txt index 09f3d25a844..b8ef4534c2d 100644 --- a/requirements/static/ci/py3.12/cloud.txt +++ b/requirements/static/ci/py3.12/cloud.txt @@ -148,7 +148,7 @@ dnspython==2.6.1 # -c requirements/static/ci/py3.12/linux.txt # -r requirements/static/ci/common.in # python-etcd -docker==6.1.3 +docker==7.1.0 ; python_version >= "3.8" # via # -c requirements/static/ci/py3.12/linux.txt # -r requirements/pytest.txt @@ -338,7 +338,6 @@ packaging==22.0 # -c requirements/static/ci/../pkg/py3.12/linux.txt # -c requirements/static/ci/py3.12/linux.txt # -r requirements/base.txt - # docker # pytest paramiko==3.4.0 ; sys_platform != "win32" and sys_platform != "darwin" # via @@ -525,7 +524,7 @@ pyzmq==23.2.0 # pytest-salt-factories requests-ntlm==1.1.0 # via pywinrm -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/../pkg/py3.12/linux.txt # -c requirements/static/ci/py3.12/linux.txt @@ -663,7 +662,6 @@ watchdog==0.10.3 websocket-client==0.40.0 # via # -c requirements/static/ci/py3.12/linux.txt - # docker # kubernetes wempy==0.2.1 # via diff --git a/requirements/static/ci/py3.12/darwin.txt b/requirements/static/ci/py3.12/darwin.txt index c8227a620ca..ffbbd194ec6 100644 --- a/requirements/static/ci/py3.12/darwin.txt +++ b/requirements/static/ci/py3.12/darwin.txt @@ -106,7 +106,7 @@ dnspython==2.6.1 # via # -r requirements/static/ci/common.in # python-etcd -docker==6.1.3 +docker==7.1.0 ; python_version >= "3.8" # via -r requirements/pytest.txt etcd3-py==0.1.6 # via -r requirements/static/ci/common.in @@ -242,7 +242,6 @@ packaging==22.0 # via # -c requirements/static/ci/../pkg/py3.12/darwin.txt # -r requirements/base.txt - # docker # pytest passlib==1.7.4 # via -r requirements/static/ci/common.in @@ -368,7 +367,7 @@ pyzmq==25.1.2 ; sys_platform == "darwin" # -c requirements/static/ci/../pkg/py3.12/darwin.txt # -r requirements/zeromq.txt # pytest-salt-factories 
-requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/../pkg/py3.12/darwin.txt # -r requirements/base.txt @@ -460,9 +459,7 @@ vultr==1.0.1 watchdog==0.10.3 # via -r requirements/static/ci/common.in websocket-client==0.40.0 - # via - # docker - # kubernetes + # via kubernetes wempy==0.2.1 # via -r requirements/static/ci/common.in werkzeug==3.0.3 diff --git a/requirements/static/ci/py3.12/docs.txt b/requirements/static/ci/py3.12/docs.txt index ef54a0f5fec..b32b4b18a24 100644 --- a/requirements/static/ci/py3.12/docs.txt +++ b/requirements/static/ci/py3.12/docs.txt @@ -148,7 +148,7 @@ pyzmq==23.2.0 # via # -c requirements/static/ci/py3.12/linux.txt # -r requirements/zeromq.txt -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/py3.12/linux.txt # -r requirements/base.txt diff --git a/requirements/static/ci/py3.12/freebsd.txt b/requirements/static/ci/py3.12/freebsd.txt index 776d62349ba..3ec1479d319 100644 --- a/requirements/static/ci/py3.12/freebsd.txt +++ b/requirements/static/ci/py3.12/freebsd.txt @@ -107,7 +107,7 @@ dnspython==2.6.1 # via # -r requirements/static/ci/common.in # python-etcd -docker==6.1.3 +docker==7.1.0 ; python_version >= "3.8" # via -r requirements/pytest.txt etcd3-py==0.1.6 # via -r requirements/static/ci/common.in @@ -237,7 +237,6 @@ packaging==22.0 # via # -c requirements/static/ci/../pkg/py3.12/freebsd.txt # -r requirements/base.txt - # docker # pytest paramiko==3.4.0 ; sys_platform != "win32" and sys_platform != "darwin" # via -r requirements/static/ci/common.in @@ -366,7 +365,7 @@ pyzmq==23.2.0 # -c requirements/static/ci/../pkg/py3.12/freebsd.txt # -r requirements/zeromq.txt # pytest-salt-factories -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/../pkg/py3.12/freebsd.txt # -r requirements/base.txt @@ -453,9 +452,7 @@ virtualenv==20.7.2 watchdog==0.10.3 # via -r requirements/static/ci/common.in 
websocket-client==0.40.0 - # via - # docker - # kubernetes + # via kubernetes wempy==0.2.1 # via -r requirements/static/ci/common.in werkzeug==3.0.3 diff --git a/requirements/static/ci/py3.12/lint.txt b/requirements/static/ci/py3.12/lint.txt index 18a938f6c3a..c0cb40aea24 100644 --- a/requirements/static/ci/py3.12/lint.txt +++ b/requirements/static/ci/py3.12/lint.txt @@ -158,7 +158,7 @@ dnspython==2.6.1 # -c requirements/static/ci/py3.12/linux.txt # -r requirements/static/ci/common.in # python-etcd -docker==6.1.3 +docker==7.1.0 ; python_version >= "3.8" # via # -c requirements/static/ci/py3.12/linux.txt # -r requirements/static/ci/lint.in @@ -343,7 +343,6 @@ packaging==22.0 # -c requirements/static/ci/py3.12/linux.txt # -r requirements/base.txt # ansible-core - # docker paramiko==3.4.0 ; sys_platform != "win32" and sys_platform != "darwin" # via # -c requirements/static/ci/py3.12/linux.txt @@ -498,7 +497,7 @@ redis==3.5.3 # via # -c requirements/static/ci/py3.12/linux.txt # redis-py-cluster -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/../pkg/py3.12/linux.txt # -c requirements/static/ci/py3.12/linux.txt @@ -649,7 +648,6 @@ watchdog==0.10.3 websocket-client==0.40.0 # via # -c requirements/static/ci/py3.12/linux.txt - # docker # kubernetes wempy==0.2.1 # via diff --git a/requirements/static/ci/py3.12/linux.txt b/requirements/static/ci/py3.12/linux.txt index 2ebfab472b2..a647e737fb1 100644 --- a/requirements/static/ci/py3.12/linux.txt +++ b/requirements/static/ci/py3.12/linux.txt @@ -116,7 +116,7 @@ dnspython==2.6.1 # via # -r requirements/static/ci/common.in # python-etcd -docker==6.1.3 +docker==7.1.0 ; python_version >= "3.8" # via -r requirements/pytest.txt etcd3-py==0.1.6 # via -r requirements/static/ci/common.in @@ -248,7 +248,6 @@ packaging==22.0 # -c requirements/static/ci/../pkg/py3.12/linux.txt # -r requirements/base.txt # ansible-core - # docker # pytest paramiko==3.4.0 ; sys_platform != "win32" and 
sys_platform != "darwin" # via -r requirements/static/ci/common.in @@ -395,7 +394,7 @@ redis-py-cluster==2.1.3 # via -r requirements/static/ci/linux.in redis==3.5.3 # via redis-py-cluster -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/../pkg/py3.12/linux.txt # -r requirements/base.txt @@ -502,9 +501,7 @@ virtualenv==20.7.2 watchdog==0.10.3 # via -r requirements/static/ci/common.in websocket-client==0.40.0 - # via - # docker - # kubernetes + # via kubernetes wempy==0.2.1 # via -r requirements/static/ci/common.in werkzeug==3.0.3 diff --git a/requirements/static/ci/py3.12/windows.txt b/requirements/static/ci/py3.12/windows.txt index 8a0457d2741..c104716e476 100644 --- a/requirements/static/ci/py3.12/windows.txt +++ b/requirements/static/ci/py3.12/windows.txt @@ -100,7 +100,7 @@ dnspython==2.6.1 # via # -r requirements/static/ci/common.in # python-etcd -docker==6.1.3 +docker==7.1.0 ; python_version >= "3.8" # via -r requirements/pytest.txt etcd3-py==0.1.6 # via -r requirements/static/ci/common.in @@ -233,7 +233,6 @@ packaging==22.0 # via # -c requirements/static/ci/../pkg/py3.12/windows.txt # -r requirements/base.txt - # docker # pytest passlib==1.7.4 # via -r requirements/static/ci/common.in @@ -380,7 +379,7 @@ pyzmq==25.0.2 ; sys_platform == "win32" # pytest-salt-factories requests-ntlm==1.1.0 # via pywinrm -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/../pkg/py3.12/windows.txt # -r requirements/base.txt @@ -468,9 +467,7 @@ virtualenv==20.17.0 watchdog==0.10.3 # via -r requirements/static/ci/common.in websocket-client==0.40.0 - # via - # docker - # kubernetes + # via kubernetes wempy==0.2.1 # via -r requirements/static/ci/common.in werkzeug==3.0.3 diff --git a/requirements/static/ci/py3.7/cloud.txt b/requirements/static/ci/py3.7/cloud.txt index 57c077481ab..d2da47f844b 100644 --- a/requirements/static/ci/py3.7/cloud.txt +++ b/requirements/static/ci/py3.7/cloud.txt @@ 
-167,7 +167,7 @@ dnspython==1.16.0 # -r requirements/static/ci/common.in # ciscoconfparse # python-etcd -docker==6.1.3 +docker==6.1.3 ; python_version < "3.8" # via # -c requirements/static/ci/py3.7/linux.txt # -r requirements/pytest.txt @@ -611,7 +611,7 @@ pyzmq==23.2.0 # pytest-salt-factories requests-ntlm==1.1.0 # via pywinrm -requests==2.31.0 +requests==2.31.0 ; python_version < "3.8" # via # -c requirements/static/ci/../pkg/py3.7/linux.txt # -c requirements/static/ci/py3.7/linux.txt diff --git a/requirements/static/ci/py3.7/docs.txt b/requirements/static/ci/py3.7/docs.txt index 512e5ca8f36..f47c5ecee2e 100644 --- a/requirements/static/ci/py3.7/docs.txt +++ b/requirements/static/ci/py3.7/docs.txt @@ -152,7 +152,7 @@ pyzmq==23.2.0 # via # -c requirements/static/ci/py3.7/linux.txt # -r requirements/zeromq.txt -requests==2.31.0 +requests==2.31.0 ; python_version < "3.8" # via # -c requirements/static/ci/py3.7/linux.txt # -r requirements/base.txt diff --git a/requirements/static/ci/py3.7/freebsd.txt b/requirements/static/ci/py3.7/freebsd.txt index 37a74ed3f86..486489b96fd 100644 --- a/requirements/static/ci/py3.7/freebsd.txt +++ b/requirements/static/ci/py3.7/freebsd.txt @@ -120,7 +120,7 @@ dnspython==1.16.0 # -r requirements/static/ci/common.in # ciscoconfparse # python-etcd -docker==6.1.3 +docker==6.1.3 ; python_version < "3.8" # via -r requirements/pytest.txt etcd3-py==0.1.6 # via -r requirements/static/ci/common.in @@ -432,7 +432,7 @@ pyzmq==23.2.0 # -c requirements/static/ci/../pkg/py3.7/freebsd.txt # -r requirements/zeromq.txt # pytest-salt-factories -requests==2.31.0 +requests==2.31.0 ; python_version < "3.8" # via # -c requirements/static/ci/../pkg/py3.7/freebsd.txt # -r requirements/base.txt diff --git a/requirements/static/ci/py3.7/linux.txt b/requirements/static/ci/py3.7/linux.txt index 594ffb2e9de..45d302e07ef 100644 --- a/requirements/static/ci/py3.7/linux.txt +++ b/requirements/static/ci/py3.7/linux.txt @@ -126,7 +126,7 @@ dnspython==1.16.0 # -r 
requirements/static/ci/common.in # ciscoconfparse # python-etcd -docker==6.1.3 +docker==6.1.3 ; python_version < "3.8" # via -r requirements/pytest.txt etcd3-py==0.1.6 # via -r requirements/static/ci/common.in @@ -455,7 +455,7 @@ redis-py-cluster==2.1.3 # via -r requirements/static/ci/linux.in redis==3.5.3 # via redis-py-cluster -requests==2.31.0 +requests==2.31.0 ; python_version < "3.8" # via # -c requirements/static/ci/../pkg/py3.7/linux.txt # -r requirements/base.txt diff --git a/requirements/static/ci/py3.7/windows.txt b/requirements/static/ci/py3.7/windows.txt index f17c5f42902..a4876a1bb85 100644 --- a/requirements/static/ci/py3.7/windows.txt +++ b/requirements/static/ci/py3.7/windows.txt @@ -109,7 +109,7 @@ dnspython==1.16.0 # via # -r requirements/static/ci/common.in # python-etcd -docker==6.1.3 +docker==6.1.3 ; python_version < "3.8" # via -r requirements/pytest.txt etcd3-py==0.1.6 # via -r requirements/static/ci/common.in @@ -399,7 +399,7 @@ pyzmq==25.0.2 ; sys_platform == "win32" # pytest-salt-factories requests-ntlm==1.1.0 # via pywinrm -requests==2.31.0 +requests==2.31.0 ; python_version < "3.8" # via # -c requirements/static/ci/../pkg/py3.7/windows.txt # -r requirements/base.txt diff --git a/requirements/static/ci/py3.8/cloud.txt b/requirements/static/ci/py3.8/cloud.txt index e1a3e034ca2..dad71be79ec 100644 --- a/requirements/static/ci/py3.8/cloud.txt +++ b/requirements/static/ci/py3.8/cloud.txt @@ -162,7 +162,7 @@ dnspython==2.6.1 # -r requirements/static/ci/common.in # ciscoconfparse # python-etcd -docker==6.1.3 +docker==7.1.0 ; python_version >= "3.8" # via # -c requirements/static/ci/py3.8/linux.txt # -r requirements/pytest.txt @@ -391,7 +391,6 @@ packaging==22.0 # -c requirements/static/ci/../pkg/py3.8/linux.txt # -c requirements/static/ci/py3.8/linux.txt # -r requirements/base.txt - # docker # pytest paramiko==3.4.0 ; sys_platform != "win32" and sys_platform != "darwin" # via @@ -598,7 +597,7 @@ pyzmq==23.2.0 # pytest-salt-factories 
requests-ntlm==1.1.0 # via pywinrm -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/../pkg/py3.8/linux.txt # -c requirements/static/ci/py3.8/linux.txt @@ -761,7 +760,6 @@ watchdog==0.10.3 websocket-client==0.40.0 # via # -c requirements/static/ci/py3.8/linux.txt - # docker # kubernetes wempy==0.2.1 # via diff --git a/requirements/static/ci/py3.8/docs.txt b/requirements/static/ci/py3.8/docs.txt index e4a7834692a..88f29939285 100644 --- a/requirements/static/ci/py3.8/docs.txt +++ b/requirements/static/ci/py3.8/docs.txt @@ -148,7 +148,7 @@ pyzmq==23.2.0 # via # -c requirements/static/ci/py3.8/linux.txt # -r requirements/zeromq.txt -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/py3.8/linux.txt # -r requirements/base.txt diff --git a/requirements/static/ci/py3.8/freebsd.txt b/requirements/static/ci/py3.8/freebsd.txt index 13b9753e92d..fb29adecf8b 100644 --- a/requirements/static/ci/py3.8/freebsd.txt +++ b/requirements/static/ci/py3.8/freebsd.txt @@ -115,7 +115,7 @@ dnspython==2.6.1 # -r requirements/static/ci/common.in # ciscoconfparse # python-etcd -docker==6.1.3 +docker==7.1.0 ; python_version >= "3.8" # via -r requirements/pytest.txt etcd3-py==0.1.6 # via -r requirements/static/ci/common.in @@ -273,7 +273,6 @@ packaging==22.0 # via # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # -r requirements/base.txt - # docker # pytest paramiko==3.4.0 ; sys_platform != "win32" and sys_platform != "darwin" # via @@ -419,7 +418,7 @@ pyzmq==23.2.0 # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # -r requirements/zeromq.txt # pytest-salt-factories -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # -r requirements/base.txt @@ -525,9 +524,7 @@ virtualenv==20.7.2 watchdog==0.10.3 # via -r requirements/static/ci/common.in websocket-client==0.40.0 - # via - # docker - # kubernetes + # via kubernetes wempy==0.2.1 
# via -r requirements/static/ci/common.in werkzeug==3.0.3 diff --git a/requirements/static/ci/py3.8/lint.txt b/requirements/static/ci/py3.8/lint.txt index e5c14f3986c..0f29de9ca21 100644 --- a/requirements/static/ci/py3.8/lint.txt +++ b/requirements/static/ci/py3.8/lint.txt @@ -167,7 +167,7 @@ dnspython==2.6.1 # -r requirements/static/ci/common.in # ciscoconfparse # python-etcd -docker==6.1.3 +docker==7.1.0 ; python_version >= "3.8" # via # -c requirements/static/ci/py3.8/linux.txt # -r requirements/static/ci/lint.in @@ -386,7 +386,6 @@ packaging==22.0 # -c requirements/static/ci/../pkg/py3.8/linux.txt # -c requirements/static/ci/py3.8/linux.txt # -r requirements/base.txt - # docker paramiko==3.4.0 ; sys_platform != "win32" and sys_platform != "darwin" # via # -c requirements/static/ci/py3.8/linux.txt @@ -560,7 +559,7 @@ redis==3.5.3 # via # -c requirements/static/ci/py3.8/linux.txt # redis-py-cluster -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/../pkg/py3.8/linux.txt # -c requirements/static/ci/py3.8/linux.txt @@ -737,7 +736,6 @@ watchdog==0.10.3 websocket-client==0.40.0 # via # -c requirements/static/ci/py3.8/linux.txt - # docker # kubernetes wempy==0.2.1 # via diff --git a/requirements/static/ci/py3.8/linux.txt b/requirements/static/ci/py3.8/linux.txt index c0773244b1b..a941172a1b4 100644 --- a/requirements/static/ci/py3.8/linux.txt +++ b/requirements/static/ci/py3.8/linux.txt @@ -121,7 +121,7 @@ dnspython==2.6.1 # -r requirements/static/ci/common.in # ciscoconfparse # python-etcd -docker==6.1.3 +docker==7.1.0 ; python_version >= "3.8" # via -r requirements/pytest.txt etcd3-py==0.1.6 # via -r requirements/static/ci/common.in @@ -279,7 +279,6 @@ packaging==22.0 # via # -c requirements/static/ci/../pkg/py3.8/linux.txt # -r requirements/base.txt - # docker # pytest paramiko==3.4.0 ; sys_platform != "win32" and sys_platform != "darwin" # via @@ -442,7 +441,7 @@ redis-py-cluster==2.1.3 # via -r 
requirements/static/ci/linux.in redis==3.5.3 # via redis-py-cluster -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/../pkg/py3.8/linux.txt # -r requirements/base.txt @@ -566,9 +565,7 @@ virtualenv==20.7.2 watchdog==0.10.3 # via -r requirements/static/ci/common.in websocket-client==0.40.0 - # via - # docker - # kubernetes + # via kubernetes wempy==0.2.1 # via -r requirements/static/ci/common.in werkzeug==3.0.3 diff --git a/requirements/static/ci/py3.8/windows.txt b/requirements/static/ci/py3.8/windows.txt index f3ca9898d02..b4b7b370aae 100644 --- a/requirements/static/ci/py3.8/windows.txt +++ b/requirements/static/ci/py3.8/windows.txt @@ -104,7 +104,7 @@ dnspython==2.6.1 # via # -r requirements/static/ci/common.in # python-etcd -docker==6.1.3 +docker==7.1.0 ; python_version >= "3.8" # via -r requirements/pytest.txt etcd3-py==0.1.6 # via -r requirements/static/ci/common.in @@ -239,7 +239,6 @@ packaging==22.0 # via # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/base.txt - # docker # pytest passlib==1.7.4 # via -r requirements/static/ci/common.in @@ -386,7 +385,7 @@ pyzmq==25.0.2 ; sys_platform == "win32" # pytest-salt-factories requests-ntlm==1.1.0 # via pywinrm -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/base.txt @@ -477,9 +476,7 @@ virtualenv==20.7.2 watchdog==0.10.3 # via -r requirements/static/ci/common.in websocket-client==0.40.0 - # via - # docker - # kubernetes + # via kubernetes wempy==0.2.1 # via -r requirements/static/ci/common.in werkzeug==3.0.3 diff --git a/requirements/static/ci/py3.9/cloud.txt b/requirements/static/ci/py3.9/cloud.txt index c4175c31613..0fa77246a8f 100644 --- a/requirements/static/ci/py3.9/cloud.txt +++ b/requirements/static/ci/py3.9/cloud.txt @@ -162,7 +162,7 @@ dnspython==2.6.1 # -r requirements/static/ci/common.in # ciscoconfparse # python-etcd -docker==6.1.3 
+docker==7.1.0 ; python_version >= "3.8" # via # -c requirements/static/ci/py3.9/linux.txt # -r requirements/pytest.txt @@ -391,7 +391,6 @@ packaging==22.0 # -c requirements/static/ci/../pkg/py3.9/linux.txt # -c requirements/static/ci/py3.9/linux.txt # -r requirements/base.txt - # docker # pytest paramiko==3.4.0 ; sys_platform != "win32" and sys_platform != "darwin" # via @@ -600,7 +599,7 @@ pyzmq==23.2.0 # pytest-salt-factories requests-ntlm==1.1.0 # via pywinrm -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/../pkg/py3.9/linux.txt # -c requirements/static/ci/py3.9/linux.txt @@ -763,7 +762,6 @@ watchdog==0.10.3 websocket-client==0.40.0 # via # -c requirements/static/ci/py3.9/linux.txt - # docker # kubernetes wempy==0.2.1 # via diff --git a/requirements/static/ci/py3.9/darwin.txt b/requirements/static/ci/py3.9/darwin.txt index 56b95b03d9b..c3ea1d2c1f2 100644 --- a/requirements/static/ci/py3.9/darwin.txt +++ b/requirements/static/ci/py3.9/darwin.txt @@ -117,7 +117,7 @@ dnspython==2.6.1 # -r requirements/static/ci/common.in # ciscoconfparse # python-etcd -docker==6.1.3 +docker==7.1.0 ; python_version >= "3.8" # via -r requirements/pytest.txt etcd3-py==0.1.6 # via -r requirements/static/ci/common.in @@ -281,7 +281,6 @@ packaging==22.0 # via # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/base.txt - # docker # pytest paramiko==3.4.0 # via @@ -428,7 +427,7 @@ pyzmq==25.1.2 ; sys_platform == "darwin" # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/zeromq.txt # pytest-salt-factories -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/base.txt @@ -539,9 +538,7 @@ vultr==1.0.1 watchdog==0.10.3 # via -r requirements/static/ci/common.in websocket-client==0.40.0 - # via - # docker - # kubernetes + # via kubernetes wempy==0.2.1 # via -r requirements/static/ci/common.in werkzeug==3.0.3 diff --git 
a/requirements/static/ci/py3.9/docs.txt b/requirements/static/ci/py3.9/docs.txt index 19f80df0096..f143242e05a 100644 --- a/requirements/static/ci/py3.9/docs.txt +++ b/requirements/static/ci/py3.9/docs.txt @@ -152,7 +152,7 @@ pyzmq==23.2.0 # via # -c requirements/static/ci/py3.9/linux.txt # -r requirements/zeromq.txt -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/py3.9/linux.txt # -r requirements/base.txt diff --git a/requirements/static/ci/py3.9/freebsd.txt b/requirements/static/ci/py3.9/freebsd.txt index 7a94ca7c62a..2aea4421744 100644 --- a/requirements/static/ci/py3.9/freebsd.txt +++ b/requirements/static/ci/py3.9/freebsd.txt @@ -115,7 +115,7 @@ dnspython==2.6.1 # -r requirements/static/ci/common.in # ciscoconfparse # python-etcd -docker==6.1.3 +docker==7.1.0 ; python_version >= "3.8" # via -r requirements/pytest.txt etcd3-py==0.1.6 # via -r requirements/static/ci/common.in @@ -273,7 +273,6 @@ packaging==22.0 # via # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/base.txt - # docker # pytest paramiko==3.4.0 ; sys_platform != "win32" and sys_platform != "darwin" # via @@ -421,7 +420,7 @@ pyzmq==23.2.0 # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/zeromq.txt # pytest-salt-factories -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/base.txt @@ -527,9 +526,7 @@ virtualenv==20.7.2 watchdog==0.10.3 # via -r requirements/static/ci/common.in websocket-client==0.40.0 - # via - # docker - # kubernetes + # via kubernetes wempy==0.2.1 # via -r requirements/static/ci/common.in werkzeug==3.0.3 diff --git a/requirements/static/ci/py3.9/lint.txt b/requirements/static/ci/py3.9/lint.txt index 2a5da53eb93..528d6b19a6d 100644 --- a/requirements/static/ci/py3.9/lint.txt +++ b/requirements/static/ci/py3.9/lint.txt @@ -163,7 +163,7 @@ dnspython==2.6.1 # -r requirements/static/ci/common.in # 
ciscoconfparse # python-etcd -docker==6.1.3 +docker==7.1.0 ; python_version >= "3.8" # via # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/ci/lint.in @@ -382,7 +382,6 @@ packaging==22.0 # -c requirements/static/ci/../pkg/py3.9/linux.txt # -c requirements/static/ci/py3.9/linux.txt # -r requirements/base.txt - # docker paramiko==3.4.0 ; sys_platform != "win32" and sys_platform != "darwin" # via # -c requirements/static/ci/py3.9/linux.txt @@ -558,7 +557,7 @@ redis==3.5.3 # via # -c requirements/static/ci/py3.9/linux.txt # redis-py-cluster -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/../pkg/py3.9/linux.txt # -c requirements/static/ci/py3.9/linux.txt @@ -735,7 +734,6 @@ watchdog==0.10.3 websocket-client==0.40.0 # via # -c requirements/static/ci/py3.9/linux.txt - # docker # kubernetes wempy==0.2.1 # via diff --git a/requirements/static/ci/py3.9/linux.txt b/requirements/static/ci/py3.9/linux.txt index 563bd28600d..7ceba7b213f 100644 --- a/requirements/static/ci/py3.9/linux.txt +++ b/requirements/static/ci/py3.9/linux.txt @@ -119,7 +119,7 @@ dnspython==2.6.1 # -r requirements/static/ci/common.in # ciscoconfparse # python-etcd -docker==6.1.3 +docker==7.1.0 ; python_version >= "3.8" # via -r requirements/pytest.txt etcd3-py==0.1.6 # via -r requirements/static/ci/common.in @@ -277,7 +277,6 @@ packaging==22.0 # via # -c requirements/static/ci/../pkg/py3.9/linux.txt # -r requirements/base.txt - # docker # pytest paramiko==3.4.0 ; sys_platform != "win32" and sys_platform != "darwin" # via @@ -442,7 +441,7 @@ redis-py-cluster==2.1.3 # via -r requirements/static/ci/linux.in redis==3.5.3 # via redis-py-cluster -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/../pkg/py3.9/linux.txt # -r requirements/base.txt @@ -566,9 +565,7 @@ virtualenv==20.7.2 watchdog==0.10.3 # via -r requirements/static/ci/common.in websocket-client==0.40.0 - # via - # docker - # kubernetes + # 
via kubernetes wempy==0.2.1 # via -r requirements/static/ci/common.in werkzeug==3.0.3 diff --git a/requirements/static/ci/py3.9/windows.txt b/requirements/static/ci/py3.9/windows.txt index 71e17a79cd8..f286fbcccc3 100644 --- a/requirements/static/ci/py3.9/windows.txt +++ b/requirements/static/ci/py3.9/windows.txt @@ -104,7 +104,7 @@ dnspython==2.6.1 # via # -r requirements/static/ci/common.in # python-etcd -docker==6.1.3 +docker==7.1.0 ; python_version >= "3.8" # via -r requirements/pytest.txt etcd3-py==0.1.6 # via -r requirements/static/ci/common.in @@ -239,7 +239,6 @@ packaging==22.0 # via # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/base.txt - # docker # pytest passlib==1.7.4 # via -r requirements/static/ci/common.in @@ -387,7 +386,7 @@ pyzmq==25.0.2 ; sys_platform == "win32" # pytest-salt-factories requests-ntlm==1.1.0 # via pywinrm -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/base.txt @@ -478,9 +477,7 @@ virtualenv==20.7.2 watchdog==0.10.3 # via -r requirements/static/ci/common.in websocket-client==0.40.0 - # via - # docker - # kubernetes + # via kubernetes wempy==0.2.1 # via -r requirements/static/ci/common.in werkzeug==3.0.3 diff --git a/requirements/static/pkg/py3.10/darwin.txt b/requirements/static/pkg/py3.10/darwin.txt index b860eda060a..352e85425db 100644 --- a/requirements/static/pkg/py3.10/darwin.txt +++ b/requirements/static/pkg/py3.10/darwin.txt @@ -99,7 +99,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==25.1.2 ; sys_platform == "darwin" # via -r requirements/zeromq.txt -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -r requirements/base.txt # apache-libcloud diff --git a/requirements/static/pkg/py3.10/freebsd.txt b/requirements/static/pkg/py3.10/freebsd.txt index a812499ca8f..5b70eee0931 100644 --- a/requirements/static/pkg/py3.10/freebsd.txt +++ b/requirements/static/pkg/py3.10/freebsd.txt @@ -89,7 
+89,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via -r requirements/base.txt setproctitle==1.3.2 # via -r requirements/static/pkg/freebsd.in diff --git a/requirements/static/pkg/py3.10/linux.txt b/requirements/static/pkg/py3.10/linux.txt index cb0821f6af5..9837af9d246 100644 --- a/requirements/static/pkg/py3.10/linux.txt +++ b/requirements/static/pkg/py3.10/linux.txt @@ -87,7 +87,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via -r requirements/base.txt rpm-vercmp==0.1.2 # via -r requirements/static/pkg/linux.in diff --git a/requirements/static/pkg/py3.10/windows.txt b/requirements/static/pkg/py3.10/windows.txt index 8b47f4e3851..16140376647 100644 --- a/requirements/static/pkg/py3.10/windows.txt +++ b/requirements/static/pkg/py3.10/windows.txt @@ -108,7 +108,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==25.0.2 ; sys_platform == "win32" # via -r requirements/zeromq.txt -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -r requirements/base.txt # -r requirements/windows.txt diff --git a/requirements/static/pkg/py3.11/darwin.txt b/requirements/static/pkg/py3.11/darwin.txt index 4c351dfaa6f..8aa87b6d47e 100644 --- a/requirements/static/pkg/py3.11/darwin.txt +++ b/requirements/static/pkg/py3.11/darwin.txt @@ -99,7 +99,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==25.1.2 ; sys_platform == "darwin" # via -r requirements/zeromq.txt -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -r requirements/base.txt # apache-libcloud diff --git a/requirements/static/pkg/py3.11/freebsd.txt b/requirements/static/pkg/py3.11/freebsd.txt index 0c00120e572..34217f8d845 100644 --- a/requirements/static/pkg/py3.11/freebsd.txt +++ b/requirements/static/pkg/py3.11/freebsd.txt @@ -89,7 +89,7 @@ 
pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via -r requirements/base.txt setproctitle==1.3.2 # via -r requirements/static/pkg/freebsd.in diff --git a/requirements/static/pkg/py3.11/linux.txt b/requirements/static/pkg/py3.11/linux.txt index f4a6d28b930..fdda37052ce 100644 --- a/requirements/static/pkg/py3.11/linux.txt +++ b/requirements/static/pkg/py3.11/linux.txt @@ -87,7 +87,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via -r requirements/base.txt rpm-vercmp==0.1.2 # via -r requirements/static/pkg/linux.in diff --git a/requirements/static/pkg/py3.11/windows.txt b/requirements/static/pkg/py3.11/windows.txt index 2a3ab618788..8648d4d0a94 100644 --- a/requirements/static/pkg/py3.11/windows.txt +++ b/requirements/static/pkg/py3.11/windows.txt @@ -108,7 +108,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==25.0.2 ; sys_platform == "win32" # via -r requirements/zeromq.txt -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -r requirements/base.txt # -r requirements/windows.txt diff --git a/requirements/static/pkg/py3.12/darwin.txt b/requirements/static/pkg/py3.12/darwin.txt index baf8c97f33d..aa0a2da85cd 100644 --- a/requirements/static/pkg/py3.12/darwin.txt +++ b/requirements/static/pkg/py3.12/darwin.txt @@ -99,7 +99,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==25.1.2 ; sys_platform == "darwin" # via -r requirements/zeromq.txt -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -r requirements/base.txt # apache-libcloud diff --git a/requirements/static/pkg/py3.12/freebsd.txt b/requirements/static/pkg/py3.12/freebsd.txt index 4e6d7ca14ca..5f0118af88c 100644 --- a/requirements/static/pkg/py3.12/freebsd.txt +++ b/requirements/static/pkg/py3.12/freebsd.txt @@ -89,7 +89,7 @@ pyyaml==6.0.1 # via 
-r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via -r requirements/base.txt setproctitle==1.3.2 # via -r requirements/static/pkg/freebsd.in diff --git a/requirements/static/pkg/py3.12/linux.txt b/requirements/static/pkg/py3.12/linux.txt index fcfd7bc2d20..c7acdd062a7 100644 --- a/requirements/static/pkg/py3.12/linux.txt +++ b/requirements/static/pkg/py3.12/linux.txt @@ -87,7 +87,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via -r requirements/base.txt rpm-vercmp==0.1.2 # via -r requirements/static/pkg/linux.in diff --git a/requirements/static/pkg/py3.12/windows.txt b/requirements/static/pkg/py3.12/windows.txt index cd75b2a8d18..57a8aae61de 100644 --- a/requirements/static/pkg/py3.12/windows.txt +++ b/requirements/static/pkg/py3.12/windows.txt @@ -108,7 +108,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==25.0.2 ; sys_platform == "win32" # via -r requirements/zeromq.txt -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -r requirements/base.txt # -r requirements/windows.txt diff --git a/requirements/static/pkg/py3.7/freebsd.txt b/requirements/static/pkg/py3.7/freebsd.txt index 511abe0cad7..db76b8d06af 100644 --- a/requirements/static/pkg/py3.7/freebsd.txt +++ b/requirements/static/pkg/py3.7/freebsd.txt @@ -87,7 +87,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.31.0 +requests==2.31.0 ; python_version < "3.8" # via -r requirements/base.txt setproctitle==1.3.2 # via -r requirements/static/pkg/freebsd.in diff --git a/requirements/static/pkg/py3.7/linux.txt b/requirements/static/pkg/py3.7/linux.txt index 1fbd1d13569..96aee564ac4 100644 --- a/requirements/static/pkg/py3.7/linux.txt +++ b/requirements/static/pkg/py3.7/linux.txt @@ -85,7 +85,7 @@ pyyaml==6.0.1 # via -r 
requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.31.0 +requests==2.31.0 ; python_version < "3.8" # via -r requirements/base.txt rpm-vercmp==0.1.2 # via -r requirements/static/pkg/linux.in diff --git a/requirements/static/pkg/py3.7/windows.txt b/requirements/static/pkg/py3.7/windows.txt index b264b0e4d0b..9328483442a 100644 --- a/requirements/static/pkg/py3.7/windows.txt +++ b/requirements/static/pkg/py3.7/windows.txt @@ -109,7 +109,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==25.0.2 ; sys_platform == "win32" # via -r requirements/zeromq.txt -requests==2.31.0 +requests==2.31.0 ; python_version < "3.8" # via # -r requirements/base.txt # -r requirements/windows.txt diff --git a/requirements/static/pkg/py3.8/freebsd.txt b/requirements/static/pkg/py3.8/freebsd.txt index ec75accaad4..bbea4709b00 100644 --- a/requirements/static/pkg/py3.8/freebsd.txt +++ b/requirements/static/pkg/py3.8/freebsd.txt @@ -87,7 +87,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via -r requirements/base.txt setproctitle==1.3.2 # via -r requirements/static/pkg/freebsd.in diff --git a/requirements/static/pkg/py3.8/linux.txt b/requirements/static/pkg/py3.8/linux.txt index b5151d758a9..6c5ed4a3008 100644 --- a/requirements/static/pkg/py3.8/linux.txt +++ b/requirements/static/pkg/py3.8/linux.txt @@ -85,7 +85,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via -r requirements/base.txt rpm-vercmp==0.1.2 # via -r requirements/static/pkg/linux.in diff --git a/requirements/static/pkg/py3.8/windows.txt b/requirements/static/pkg/py3.8/windows.txt index 299b8b4dcdb..4aad72e3e2f 100644 --- a/requirements/static/pkg/py3.8/windows.txt +++ b/requirements/static/pkg/py3.8/windows.txt @@ -109,7 +109,7 @@ pyyaml==6.0.1 # via -r 
requirements/base.txt pyzmq==25.0.2 ; sys_platform == "win32" # via -r requirements/zeromq.txt -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -r requirements/base.txt # -r requirements/windows.txt diff --git a/requirements/static/pkg/py3.9/darwin.txt b/requirements/static/pkg/py3.9/darwin.txt index 6200b39d76e..a9559e40b8b 100644 --- a/requirements/static/pkg/py3.9/darwin.txt +++ b/requirements/static/pkg/py3.9/darwin.txt @@ -99,7 +99,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==25.1.2 ; sys_platform == "darwin" # via -r requirements/zeromq.txt -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -r requirements/base.txt # apache-libcloud diff --git a/requirements/static/pkg/py3.9/freebsd.txt b/requirements/static/pkg/py3.9/freebsd.txt index 2ad4b2529de..93e263fb218 100644 --- a/requirements/static/pkg/py3.9/freebsd.txt +++ b/requirements/static/pkg/py3.9/freebsd.txt @@ -89,7 +89,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via -r requirements/base.txt setproctitle==1.3.2 # via -r requirements/static/pkg/freebsd.in diff --git a/requirements/static/pkg/py3.9/linux.txt b/requirements/static/pkg/py3.9/linux.txt index 5902b17a3dc..acc0aacd5fa 100644 --- a/requirements/static/pkg/py3.9/linux.txt +++ b/requirements/static/pkg/py3.9/linux.txt @@ -87,7 +87,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via -r requirements/base.txt rpm-vercmp==0.1.2 # via -r requirements/static/pkg/linux.in diff --git a/requirements/static/pkg/py3.9/windows.txt b/requirements/static/pkg/py3.9/windows.txt index de300d6afb0..4f1d8efd7d8 100644 --- a/requirements/static/pkg/py3.9/windows.txt +++ b/requirements/static/pkg/py3.9/windows.txt @@ -109,7 +109,7 @@ pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==25.0.2 
; sys_platform == "win32" # via -r requirements/zeromq.txt -requests==2.31.0 +requests==2.32.3 ; python_version >= "3.8" # via # -r requirements/base.txt # -r requirements/windows.txt From dfd6221cc55d0860505f32c6d947178eae1131eb Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Sat, 22 Jun 2024 03:22:17 -0700 Subject: [PATCH 133/160] Fix salt cloud's log_file location when root_dir is given in config --- changelog/64728.fixed.md | 1 + salt/config/__init__.py | 2 +- tests/pytests/functional/test_config.py | 13 +++++++++++++ 3 files changed, 15 insertions(+), 1 deletion(-) create mode 100644 changelog/64728.fixed.md diff --git a/changelog/64728.fixed.md b/changelog/64728.fixed.md new file mode 100644 index 00000000000..0d18efed6f3 --- /dev/null +++ b/changelog/64728.fixed.md @@ -0,0 +1 @@ +Cloud honors root_dir config settin when determining log file location diff --git a/salt/config/__init__.py b/salt/config/__init__.py index 0af8c0c1f46..965da432222 100644 --- a/salt/config/__init__.py +++ b/salt/config/__init__.py @@ -2765,7 +2765,7 @@ def cloud_config( # prepend root_dir prepend_root_dirs = ["cachedir"] if "log_file" in opts and urllib.parse.urlparse(opts["log_file"]).scheme == "": - prepend_root_dirs.append(opts["log_file"]) + prepend_root_dirs.append("log_file") prepend_root_dir(opts, prepend_root_dirs) salt.features.setup_features(opts) diff --git a/tests/pytests/functional/test_config.py b/tests/pytests/functional/test_config.py index 76d25118f4a..e03a3b42d19 100644 --- a/tests/pytests/functional/test_config.py +++ b/tests/pytests/functional/test_config.py @@ -25,3 +25,16 @@ def test_minion_config_type_check(caplog): assert msg not in caplog.text finally: os.remove(path) + + +def test_cloud_config_relative_logfile(tmp_path): + root_path = tmp_path + config_path = tmp_path / "conf" + config_path.mkdir() + cloud_config = config_path / "cloud" + cloud_config.write_text("") + master_config = config_path / "master" + master_config = config_path / "master" 
+ master_config.write_text(f"root_dir: {root_path}") + opts = salt.config.cloud_config(cloud_config) + assert opts["log_file"] == str(root_path / "var" / "log" / "salt" / "cloud") From 57d2becb8fdf560d0f2fb7f55ac2e5a3ddb2e933 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Sat, 22 Jun 2024 08:20:29 -0700 Subject: [PATCH 134/160] Add basic unit test for prepend_root_dir method --- tests/pytests/unit/test_config.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/tests/pytests/unit/test_config.py b/tests/pytests/unit/test_config.py index 7437c8214ed..9ffc41dc6c4 100644 --- a/tests/pytests/unit/test_config.py +++ b/tests/pytests/unit/test_config.py @@ -5,6 +5,8 @@ tests.pytests.unit.test_config Unit tests for salt's config modulet """ +import sys + import salt.config @@ -21,3 +23,13 @@ def test_call_id_function(tmp_path): } ret = salt.config.call_id_function(opts) assert ret == "meh" + + +def test_prepend_root_dir(tmp_path): + root = tmp_path / "root" + opts = { + "root_dir": root, + "foo": "c:\\var\\foo" if sys.platform == "win32" else "/var/foo", + } + salt.config.prepend_root_dir(opts, ["foo"]) + assert opts["foo"] == str(root / "var" / "foo") From 9bfe7cf89876846598b3a39ed6d03482c455acdb Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Sat, 22 Jun 2024 10:00:09 -0700 Subject: [PATCH 135/160] Fix cloud config root_dir paths on windows --- salt/config/__init__.py | 9 ++++++++- tests/pytests/functional/test_config.py | 15 ++++++++++++++- tests/pytests/unit/test_config.py | 6 ++++-- 3 files changed, 26 insertions(+), 4 deletions(-) diff --git a/salt/config/__init__.py b/salt/config/__init__.py index 965da432222..3bb8404fedd 100644 --- a/salt/config/__init__.py +++ b/salt/config/__init__.py @@ -2215,6 +2215,13 @@ def include_config(include, orig_path, verbose, exit_on_config_errors=False): return configuration +def should_prepend_root_dir(key, opts): + return ( + key in opts + and urllib.parse.urlparse(os.path.splitdrive(opts[key])[1]).scheme == "" + ) + + def prepend_root_dir(opts, path_options): """ Prepends the options that represent filesystem paths with value of the @@ -2764,7 +2771,7 @@ def cloud_config( # prepend root_dir prepend_root_dirs = ["cachedir"] - if "log_file" in opts and urllib.parse.urlparse(opts["log_file"]).scheme == "": + if should_prepend_root_dir("log_file", opts): prepend_root_dirs.append("log_file") prepend_root_dir(opts, prepend_root_dirs) diff --git a/tests/pytests/functional/test_config.py b/tests/pytests/functional/test_config.py index e03a3b42d19..f171ac580fa 100644 --- a/tests/pytests/functional/test_config.py +++ b/tests/pytests/functional/test_config.py @@ -27,7 +27,7 @@ def test_minion_config_type_check(caplog): os.remove(path) -def test_cloud_config_relative_logfile(tmp_path): +def test_cloud_config_relative_log_file(tmp_path): root_path = tmp_path config_path = tmp_path / "conf" config_path.mkdir() @@ -38,3 +38,16 @@ def test_cloud_config_relative_logfile(tmp_path): master_config.write_text(f"root_dir: {root_path}") opts = salt.config.cloud_config(cloud_config) assert opts["log_file"] == str(root_path / "var" / "log" / "salt" / "cloud") + + +def test_cloud_config_relative_cachedir(tmp_path): + root_path = tmp_path + config_path = tmp_path / "conf" + 
config_path.mkdir() + cloud_config = config_path / "cloud" + cloud_config.write_text("") + master_config = config_path / "master" + master_config = config_path / "master" + master_config.write_text(f"root_dir: {root_path}") + opts = salt.config.cloud_config(cloud_config) + assert opts["cachedir"] == str(root_path / "var" / "cache" / "salt" / "cloud") diff --git a/tests/pytests/unit/test_config.py b/tests/pytests/unit/test_config.py index 9ffc41dc6c4..60bb65f2e01 100644 --- a/tests/pytests/unit/test_config.py +++ b/tests/pytests/unit/test_config.py @@ -5,9 +5,10 @@ tests.pytests.unit.test_config Unit tests for salt's config modulet """ -import sys +import pathlib import salt.config +import salt.syspaths def test_call_id_function(tmp_path): @@ -29,7 +30,8 @@ def test_prepend_root_dir(tmp_path): root = tmp_path / "root" opts = { "root_dir": root, - "foo": "c:\\var\\foo" if sys.platform == "win32" else "/var/foo", + "foo": str(pathlib.Path(salt.syspaths.ROOT_DIR) / "var" / "foo"), } salt.config.prepend_root_dir(opts, ["foo"]) + print(f"after {opts['foo']}") assert opts["foo"] == str(root / "var" / "foo") From baee8afd3a869e84f2d03aa34cc713eb5be91f3f Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Sat, 22 Jun 2024 10:58:54 -0700 Subject: [PATCH 136/160] Additional fixes for windows root_dir logic --- changelog/64728.fixed.md | 2 +- salt/config/__init__.py | 17 ++-- tests/pytests/functional/test_config.py | 106 ++++++++++++++++++++++-- 3 files changed, 104 insertions(+), 21 deletions(-) diff --git a/changelog/64728.fixed.md b/changelog/64728.fixed.md index 0d18efed6f3..afe36f42316 100644 --- a/changelog/64728.fixed.md +++ b/changelog/64728.fixed.md @@ -1 +1 @@ -Cloud honors root_dir config settin when determining log file location +salt-cloud honors root_dir config setting for log_file location and fixes for root_dir locations on windows. 
diff --git a/salt/config/__init__.py b/salt/config/__init__.py index 3bb8404fedd..b6749868bd4 100644 --- a/salt/config/__init__.py +++ b/salt/config/__init__.py @@ -2218,6 +2218,7 @@ def include_config(include, orig_path, verbose, exit_on_config_errors=False): def should_prepend_root_dir(key, opts): return ( key in opts + and opts[key] is not None and urllib.parse.urlparse(os.path.splitdrive(opts[key])[1]).scheme == "" ) @@ -2519,7 +2520,7 @@ def syndic_config( ] for config_key in ("log_file", "key_logfile", "syndic_log_file"): # If this is not a URI and instead a local path - if urllib.parse.urlparse(opts.get(config_key, "")).scheme == "": + if should_prepend_root_dir(config_key, opts): prepend_root_dirs.append(config_key) prepend_root_dir(opts, prepend_root_dirs) salt.features.setup_features(opts) @@ -3842,7 +3843,7 @@ def apply_minion_config( # These can be set to syslog, so, not actual paths on the system for config_key in ("log_file", "key_logfile"): - if urllib.parse.urlparse(opts.get(config_key, "")).scheme == "": + if should_prepend_root_dir(config_key, opts): prepend_root_dirs.append(config_key) prepend_root_dir(opts, prepend_root_dirs) @@ -4078,11 +4079,7 @@ def apply_master_config(overrides=None, defaults=None): # These can be set to syslog, so, not actual paths on the system for config_key in ("log_file", "key_logfile", "ssh_log_file"): - log_setting = opts.get(config_key, "") - if log_setting is None: - continue - - if urllib.parse.urlparse(log_setting).scheme == "": + if should_prepend_root_dir(config_key, opts): prepend_root_dirs.append(config_key) prepend_root_dir(opts, prepend_root_dirs) @@ -4289,11 +4286,7 @@ def apply_spm_config(overrides, defaults): # These can be set to syslog, so, not actual paths on the system for config_key in ("spm_logfile",): - log_setting = opts.get(config_key, "") - if log_setting is None: - continue - - if urllib.parse.urlparse(log_setting).scheme == "": + if should_prepend_root_dir(config_key, opts): 
prepend_root_dirs.append(config_key) prepend_root_dir(opts, prepend_root_dirs) diff --git a/tests/pytests/functional/test_config.py b/tests/pytests/functional/test_config.py index f171ac580fa..5a3d77be90d 100644 --- a/tests/pytests/functional/test_config.py +++ b/tests/pytests/functional/test_config.py @@ -27,27 +27,117 @@ def test_minion_config_type_check(caplog): os.remove(path) -def test_cloud_config_relative_log_file(tmp_path): +def test_cloud_config_relative_to_root_dir(tmp_path): root_path = tmp_path config_path = tmp_path / "conf" config_path.mkdir() cloud_config = config_path / "cloud" cloud_config.write_text("") master_config = config_path / "master" - master_config = config_path / "master" master_config.write_text(f"root_dir: {root_path}") opts = salt.config.cloud_config(cloud_config) assert opts["log_file"] == str(root_path / "var" / "log" / "salt" / "cloud") + assert opts["cachedir"] == str(root_path / "var" / "cache" / "salt" / "cloud") -def test_cloud_config_relative_cachedir(tmp_path): +def test_master_config_relative_to_root_dir(tmp_path): root_path = tmp_path config_path = tmp_path / "conf" config_path.mkdir() - cloud_config = config_path / "cloud" - cloud_config.write_text("") - master_config = config_path / "master" master_config = config_path / "master" master_config.write_text(f"root_dir: {root_path}") - opts = salt.config.cloud_config(cloud_config) - assert opts["cachedir"] == str(root_path / "var" / "cache" / "salt" / "cloud") + opts = salt.config.master_config(master_config) + assert opts["pki_dir"] == str(root_path / "etc" / "salt" / "pki" / "master") + assert opts["cachedir"] == str(root_path / "var" / "cache" / "salt" / "master") + assert opts["pidfile"] == str(root_path / "var" / "run" / "salt-master.pid") + assert opts["sock_dir"] == str(root_path / "var" / "run" / "salt" / "master") + assert opts["extension_modules"] == str( + root_path / "var" / "cache" / "salt" / "master" / "extmods" + ) + assert opts["token_dir"] == str( + root_path 
/ "var" / "cache" / "salt" / "master" / "tokens" + ) + assert opts["syndic_dir"] == str( + root_path / "var" / "cache" / "salt" / "master" / "syndics" + ) + assert opts["sqlite_queue_dir"] == str( + root_path / "var" / "cache" / "salt" / "master" / "queues" + ) + assert opts["log_file"] == str(root_path / "var" / "log" / "salt" / "master") + assert opts["key_logfile"] == str(root_path / "var" / "log" / "salt" / "key") + assert opts["ssh_log_file"] == str(root_path / "var" / "log" / "salt" / "ssh") + + # These are not tested because we didn't define them in the master config. + # assert opts["autosign_file"] == str(root_path / "var" / "run" / "salt"/ "master") + # assert opts["autoreject_file"] == str(root_path / "var" / "run" / "salt"/ "master") + # assert opts["autosign_grains_dir"] == str(root_path / "var" / "run" / "salt"/ "master") + + +def test_minion_config_relative_to_root_dir(tmp_path): + root_path = tmp_path + config_path = tmp_path / "conf" + config_path.mkdir() + minion_config = config_path / "minion" + minion_config.write_text(f"root_dir: {root_path}") + opts = salt.config.minion_config(minion_config) + assert opts["pki_dir"] == str(root_path / "etc" / "salt" / "pki" / "minion") + assert opts["cachedir"] == str(root_path / "var" / "cache" / "salt" / "minion") + assert opts["pidfile"] == str(root_path / "var" / "run" / "salt-minion.pid") + assert opts["sock_dir"] == str(root_path / "var" / "run" / "salt" / "minion") + assert opts["extension_modules"] == str( + root_path / "var" / "cache" / "salt" / "minion" / "extmods" + ) + assert opts["log_file"] == str(root_path / "var" / "log" / "salt" / "minion") + + +def test_api_config_relative_to_root_dir(tmp_path): + root_path = tmp_path + config_path = tmp_path / "conf" + config_path.mkdir() + master_config = config_path / "master" + master_config.write_text(f"root_dir: {root_path}") + opts = salt.config.api_config(master_config) + assert opts["pidfile"] == str(root_path / "var" / "run" / "salt-api.pid") + 
assert opts["log_file"] == str(root_path / "var" / "log" / "salt" / "api") + assert opts["api_pidfile"] == str(root_path / "var" / "run" / "salt-api.pid") + assert opts["api_logfile"] == str(root_path / "var" / "log" / "salt" / "api") + + +def test_spm_config_relative_to_root_dir(tmp_path): + root_path = tmp_path + config_path = tmp_path / "conf" + config_path.mkdir() + spm_config = config_path / "spm" + spm_config.write_text(f"root_dir: {root_path}") + opts = salt.config.spm_config(spm_config) + + assert opts["formula_path"] == str(root_path / "srv" / "spm" / "salt") + assert opts["pillar_path"] == str(root_path / "srv" / "spm" / "pillar") + assert opts["reactor_path"] == str(root_path / "srv" / "spm" / "reactor") + assert opts["spm_cache_dir"] == str(root_path / "var" / "cache" / "salt" / "spm") + assert opts["spm_build_dir"] == str(root_path / "srv" / "spm_build") + assert opts["spm_logfile"] == str(root_path / "var" / "log" / "salt" / "spm") + + +def test_syndic_config_relative_to_root_dir(tmp_path): + root_path = tmp_path + config_path = tmp_path / "conf" + config_path.mkdir() + master_config = config_path / "master" + master_config.write_text(f"root_dir: {root_path}") + minion_config = config_path / "master" + minion_config.write_text(f"root_dir: {root_path}") + opts = salt.config.syndic_config(master_config, minion_config) + assert opts["pki_dir"] == str(root_path / "etc" / "salt" / "pki" / "minion") + assert opts["cachedir"] == str(root_path / "var" / "cache" / "salt" / "master") + assert opts["pidfile"] == str(root_path / "var" / "run" / "salt-syndic.pid") + assert opts["sock_dir"] == str(root_path / "var" / "run" / "salt" / "minion") + assert opts["extension_modules"] == str( + root_path / "var" / "cache" / "salt" / "minion" / "extmods" + ) + assert opts["token_dir"] == str( + root_path / "var" / "cache" / "salt" / "master" / "tokens" + ) + assert opts["log_file"] == str(root_path / "var" / "log" / "salt" / "syndic") + assert opts["key_logfile"] == 
str(root_path / "var" / "log" / "salt" / "key") + assert opts["syndic_log_file"] == str(root_path / "var" / "log" / "salt" / "syndic") From 5f6c06ed56c88ee7d2b81418e09ef1e3ab775314 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Sat, 22 Jun 2024 11:55:02 -0700 Subject: [PATCH 137/160] Remove unwanted print statement --- tests/pytests/unit/test_config.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/pytests/unit/test_config.py b/tests/pytests/unit/test_config.py index 60bb65f2e01..313c3cb0b0e 100644 --- a/tests/pytests/unit/test_config.py +++ b/tests/pytests/unit/test_config.py @@ -33,5 +33,4 @@ def test_prepend_root_dir(tmp_path): "foo": str(pathlib.Path(salt.syspaths.ROOT_DIR) / "var" / "foo"), } salt.config.prepend_root_dir(opts, ["foo"]) - print(f"after {opts['foo']}") assert opts["foo"] == str(root / "var" / "foo") From 6c6f0f41f0f65d8b39c93bfd4c8af82858538f2c Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Sun, 23 Jun 2024 03:55:32 -0700 Subject: [PATCH 138/160] fix configuration path test on windows --- tests/pytests/functional/test_config.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/tests/pytests/functional/test_config.py b/tests/pytests/functional/test_config.py index 5a3d77be90d..9417c1b6751 100644 --- a/tests/pytests/functional/test_config.py +++ b/tests/pytests/functional/test_config.py @@ -5,6 +5,7 @@ import tempfile import pytest import salt.config +import salt.utils.platform pytestmark = [ pytest.mark.windows_whitelisted, @@ -47,7 +48,10 @@ def test_master_config_relative_to_root_dir(tmp_path): master_config = config_path / "master" master_config.write_text(f"root_dir: {root_path}") opts = salt.config.master_config(master_config) - assert opts["pki_dir"] == str(root_path / "etc" / "salt" / "pki" / "master") + if salt.utils.platform.is_windows(): + assert opts["pki_dir"] == str(root_path / "conf" / "pki" / "master") + else: + assert opts["pki_dir"] == str(root_path / "etc" / "salt" / 
"pki" / "master") assert opts["cachedir"] == str(root_path / "var" / "cache" / "salt" / "master") assert opts["pidfile"] == str(root_path / "var" / "run" / "salt-master.pid") assert opts["sock_dir"] == str(root_path / "var" / "run" / "salt" / "master") @@ -80,7 +84,10 @@ def test_minion_config_relative_to_root_dir(tmp_path): minion_config = config_path / "minion" minion_config.write_text(f"root_dir: {root_path}") opts = salt.config.minion_config(minion_config) - assert opts["pki_dir"] == str(root_path / "etc" / "salt" / "pki" / "minion") + if salt.utils.platform.is_windows(): + assert opts["pki_dir"] == str(root_path / "conf" / "pki" / "minion") + else: + assert opts["pki_dir"] == str(root_path / "etc" / "salt" / "pki" / "minion") assert opts["cachedir"] == str(root_path / "var" / "cache" / "salt" / "minion") assert opts["pidfile"] == str(root_path / "var" / "run" / "salt-minion.pid") assert opts["sock_dir"] == str(root_path / "var" / "run" / "salt" / "minion") @@ -128,7 +135,10 @@ def test_syndic_config_relative_to_root_dir(tmp_path): minion_config = config_path / "master" minion_config.write_text(f"root_dir: {root_path}") opts = salt.config.syndic_config(master_config, minion_config) - assert opts["pki_dir"] == str(root_path / "etc" / "salt" / "pki" / "minion") + if salt.utils.platform.is_windows(): + assert opts["pki_dir"] == str(root_path / "conf" / "pki" / "minion") + else: + assert opts["pki_dir"] == str(root_path / "etc" / "salt" / "pki" / "minion") assert opts["cachedir"] == str(root_path / "var" / "cache" / "salt" / "master") assert opts["pidfile"] == str(root_path / "var" / "run" / "salt-syndic.pid") assert opts["sock_dir"] == str(root_path / "var" / "run" / "salt" / "minion") From 766f8cf988abc9a8dc8cfeb41e2c7d6e21daa6b3 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Sat, 22 Jun 2024 01:01:43 -0700 Subject: [PATCH 139/160] Add note to virtualenv state docs about onedir --- salt/states/virtualenv_mod.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/salt/states/virtualenv_mod.py b/salt/states/virtualenv_mod.py index 957f44265bc..7981ec08ac5 100644 --- a/salt/states/virtualenv_mod.py +++ b/salt/states/virtualenv_mod.py @@ -77,7 +77,9 @@ def managed( Prefer wheel archives (requires pip >= 1.4). python: None - Python executable used to build the virtualenv + Python executable used to build the virtualenv. When Salt is installed + from a onedir package. You will likely want to specify which python + interpertet should be used. user: None The user under which to run virtualenv and pip. @@ -131,6 +133,12 @@ def managed( - requirements: salt://REQUIREMENTS.txt - env_vars: PATH_VAR: '/usr/local/bin/' + + Current versions of Salt use onedir packages and will use onedir python + interpreter by default. If you've installed Salt via out package + repository. You will likely want to provide the path to the interpreter + with wich you would like to be used to create the virtual envrionment. The + interperter can be sepcified by providing the `python` option. """ ret = {"name": name, "result": True, "comment": "", "changes": {}} From 809a22598bb38987132ab700e7fd3539e5f97b8c Mon Sep 17 00:00:00 2001 From: Akmod Date: Mon, 24 Jun 2024 10:56:43 -0600 Subject: [PATCH 140/160] Update grammar in docstring of virtualenv_mod.py --- salt/states/virtualenv_mod.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/salt/states/virtualenv_mod.py b/salt/states/virtualenv_mod.py index 7981ec08ac5..7dadfa23fd5 100644 --- a/salt/states/virtualenv_mod.py +++ b/salt/states/virtualenv_mod.py @@ -79,7 +79,7 @@ def managed( python: None Python executable used to build the virtualenv. When Salt is installed from a onedir package. You will likely want to specify which python - interpertet should be used.
+ interperter should be used. user: None The user under which to run virtualenv and pip. @@ -138,7 +138,7 @@ def managed( interpreter by default. If you've installed Salt via out package repository. You will likely want to provide the path to the interpreter with wich you would like to be used to create the virtual envrionment. The - interperter can be sepcified by providing the `python` option. + interperter can be specified by providing the `python` option. """ ret = {"name": name, "result": True, "comment": "", "changes": {}} From 94eaf943453cab3367e521c7691b5feda0b8e675 Mon Sep 17 00:00:00 2001 From: jeanluc Date: Mon, 6 Nov 2023 12:53:50 +0100 Subject: [PATCH 141/160] Add `slsutil` SSH wrapper (cherry picked from commit ddc119764707a355069ff93350ab8e06f2a688ea) --- changelog/50196.fixed.md | 1 + changelog/61143.fixed.md | 1 + changelog/65067.fixed.md | 1 + salt/client/ssh/wrapper/slsutil.py | 450 ++++++++++++++++++ tests/pytests/integration/ssh/test_slsutil.py | 94 ++++ .../unit/client/ssh/wrapper/test_slsutil.py | 166 +++++++ 6 files changed, 713 insertions(+) create mode 100644 changelog/50196.fixed.md create mode 100644 changelog/61143.fixed.md create mode 100644 changelog/65067.fixed.md create mode 100644 salt/client/ssh/wrapper/slsutil.py create mode 100644 tests/pytests/integration/ssh/test_slsutil.py create mode 100644 tests/pytests/unit/client/ssh/wrapper/test_slsutil.py diff --git a/changelog/50196.fixed.md b/changelog/50196.fixed.md new file mode 100644 index 00000000000..979411a640d --- /dev/null +++ b/changelog/50196.fixed.md @@ -0,0 +1 @@ +Made slsutil.renderer work with salt-ssh diff --git a/changelog/61143.fixed.md b/changelog/61143.fixed.md new file mode 100644 index 00000000000..08a62c9d8b1 --- /dev/null +++ b/changelog/61143.fixed.md @@ -0,0 +1 @@ +Made slsutil.findup work with salt-ssh diff --git a/changelog/65067.fixed.md b/changelog/65067.fixed.md new file mode 100644 index 00000000000..d6de87b5bc1 --- /dev/null +++ 
b/changelog/65067.fixed.md @@ -0,0 +1 @@ +Fixed slsutil.update with salt-ssh during template rendering diff --git a/salt/client/ssh/wrapper/slsutil.py b/salt/client/ssh/wrapper/slsutil.py new file mode 100644 index 00000000000..e09ca1c2984 --- /dev/null +++ b/salt/client/ssh/wrapper/slsutil.py @@ -0,0 +1,450 @@ +import os.path +import posixpath + +import salt.exceptions +import salt.loader +import salt.template +import salt.utils.args +import salt.utils.dictupdate +import salt.utils.stringio + +CONTEXT_BASE = "slsutil" + + +def update(dest, upd, recursive_update=True, merge_lists=False): + """ + Merge ``upd`` recursively into ``dest`` + + If ``merge_lists=True``, will aggregate list object types instead of + replacing. This behavior is only activated when ``recursive_update=True``. + + CLI Example: + + .. code-block:: shell + + salt '*' slsutil.update '{foo: Foo}' '{bar: Bar}' + + """ + return salt.utils.dictupdate.update(dest, upd, recursive_update, merge_lists) + + +def merge(obj_a, obj_b, strategy="smart", renderer="yaml", merge_lists=False): + """ + Merge a data structure into another by choosing a merge strategy + + Strategies: + + * aggregate + * list + * overwrite + * recurse + * smart + + CLI Example: + + .. code-block:: shell + + salt '*' slsutil.merge '{foo: Foo}' '{bar: Bar}' + """ + return salt.utils.dictupdate.merge(obj_a, obj_b, strategy, renderer, merge_lists) + + +def merge_all(lst, strategy="smart", renderer="yaml", merge_lists=False): + """ + .. versionadded:: 2019.2.0 + + Merge a list of objects into each other in order + + :type lst: Iterable + :param lst: List of objects to be merged. + + :type strategy: String + :param strategy: Merge strategy. See utils.dictupdate. + + :type renderer: String + :param renderer: + Renderer type. Used to determine strategy when strategy is 'smart'. + + :type merge_lists: Bool + :param merge_lists: Defines whether to merge embedded object lists. + + CLI Example: + + .. 
code-block:: shell + + $ salt-call --output=txt slsutil.merge_all '[{foo: Foo}, {foo: Bar}]' + local: {u'foo': u'Bar'} + """ + + ret = {} + for obj in lst: + ret = salt.utils.dictupdate.merge(ret, obj, strategy, renderer, merge_lists) + + return ret + + +def renderer(path=None, string=None, default_renderer="jinja|yaml", **kwargs): + """ + Parse a string or file through Salt's renderer system + + .. versionchanged:: 2018.3.0 + Add support for Salt fileserver URIs. + + This is an open-ended function and can be used for a variety of tasks. It + makes use of Salt's "renderer pipes" system to run a string or file through + a pipe of any of the loaded renderer modules. + + :param path: The path to a file on Salt's fileserver (any URIs supported by + :py:func:`cp.get_url `) or on the local file + system. + :param string: An inline string to be used as the file to send through the + renderer system. Note, not all renderer modules can work with strings; + the 'py' renderer requires a file, for example. + :param default_renderer: The renderer pipe to send the file through; this + is overridden by a "she-bang" at the top of the file. + :param kwargs: Keyword args to pass to Salt's compile_template() function. + + Keep in mind the goal of each renderer when choosing a render-pipe; for + example, the Jinja renderer processes a text file and produces a string, + however the YAML renderer processes a text file and produces a data + structure. + + One possible use is to allow writing "map files", as are commonly seen in + Salt formulas, but without tying the renderer of the map file to the + renderer used in the other sls files. In other words, a map file could use + the Python renderer and still be included and used by an sls file that uses + the default 'jinja|yaml' renderer. + + For example, the two following map files produce identical results but one + is written using the normal 'jinja|yaml' and the other is using 'py': + + .. 
code-block:: jinja + + #!jinja|yaml + {% set apache = salt.grains.filter_by({ + ...normal jinja map file here... + }, merge=salt.pillar.get('apache:lookup')) %} + {{ apache | yaml() }} + + .. code-block:: python + + #!py + def run(): + apache = __salt__.grains.filter_by({ + ...normal map here but as a python dict... + }, merge=__salt__.pillar.get('apache:lookup')) + return apache + + Regardless of which of the above map files is used, it can be accessed from + any other sls file by calling this function. The following is a usage + example in Jinja: + + .. code-block:: jinja + + {% set apache = salt.slsutil.renderer('map.sls') %} + + CLI Example: + + .. code-block:: bash + + salt '*' slsutil.renderer salt://path/to/file + salt '*' slsutil.renderer /path/to/file + salt '*' slsutil.renderer /path/to/file.jinja default_renderer='jinja' + salt '*' slsutil.renderer /path/to/file.sls default_renderer='jinja|yaml' + salt '*' slsutil.renderer string='Inline template! {{ saltenv }}' + salt '*' slsutil.renderer string='Hello, {{ name }}.' 
name='world' + """ + if not path and not string: + raise salt.exceptions.SaltInvocationError("Must pass either path or string") + + renderers = salt.loader.render(__opts__, __salt__) + + if path: + path_or_string = __context__["fileclient"].get_url( + path, "", saltenv=kwargs.get("saltenv", "base") + ) + elif string: + path_or_string = ":string:" + kwargs["input_data"] = string + + ret = salt.template.compile_template( + path_or_string, + renderers, + default_renderer, + __opts__["renderer_blacklist"], + __opts__["renderer_whitelist"], + **kwargs + ) + return ret.read() if salt.utils.stringio.is_readable(ret) else ret + + +def _get_serialize_fn(serializer, fn_name): + serializers = salt.loader.serializers(__opts__) + fns = getattr(serializers, serializer, None) + fn = getattr(fns, fn_name, None) + + if not fns: + raise salt.exceptions.CommandExecutionError( + "Serializer '{}' not found.".format(serializer) + ) + + if not fn: + raise salt.exceptions.CommandExecutionError( + "Serializer '{}' does not implement {}.".format(serializer, fn_name) + ) + + return fn + + +def serialize(serializer, obj, **mod_kwargs): + """ + Serialize a Python object using one of the available + :ref:`all-salt.serializers`. + + CLI Example: + + .. code-block:: bash + + salt '*' --no-parse=obj slsutil.serialize 'json' obj="{'foo': 'Foo!'} + + Jinja Example: + + .. code-block:: jinja + + {% set json_string = salt.slsutil.serialize('json', + {'foo': 'Foo!'}) %} + """ + kwargs = salt.utils.args.clean_kwargs(**mod_kwargs) + return _get_serialize_fn(serializer, "serialize")(obj, **kwargs) + + +def deserialize(serializer, stream_or_string, **mod_kwargs): + """ + Deserialize a Python object using one of the available + :ref:`all-salt.serializers`. + + CLI Example: + + .. code-block:: bash + + salt '*' slsutil.deserialize 'json' '{"foo": "Foo!"}' + salt '*' --no-parse=stream_or_string slsutil.deserialize 'json' \\ + stream_or_string='{"foo": "Foo!"}' + + Jinja Example: + + .. 
code-block:: jinja + + {% set python_object = salt.slsutil.deserialize('json', + '{"foo": "Foo!"}') %} + """ + kwargs = salt.utils.args.clean_kwargs(**mod_kwargs) + return _get_serialize_fn(serializer, "deserialize")(stream_or_string, **kwargs) + + +def boolstr(value, true="true", false="false"): + """ + Convert a boolean value into a string. This function is + intended to be used from within file templates to provide + an easy way to take boolean values stored in Pillars or + Grains, and write them out in the appropriate syntax for + a particular file template. + + :param value: The boolean value to be converted + :param true: The value to return if ``value`` is ``True`` + :param false: The value to return if ``value`` is ``False`` + + In this example, a pillar named ``smtp:encrypted`` stores a boolean + value, but the template that uses that value needs ``yes`` or ``no`` + to be written, based on the boolean value. + + *Note: this is written on two lines for clarity. The same result + could be achieved in one line.* + + .. code-block:: jinja + + {% set encrypted = salt[pillar.get]('smtp:encrypted', false) %} + use_tls: {{ salt['slsutil.boolstr'](encrypted, 'yes', 'no') }} + + Result (assuming the value is ``True``): + + .. code-block:: none + + use_tls: yes + + """ + + if value: + return true + + return false + + +def _set_context(keys, function, fun_args=None, fun_kwargs=None, force=False): + """ + Convenience function to set a value in the ``__context__`` dictionary. + + .. versionadded:: 3004 + + :param keys: The list of keys specifying the dictionary path to set. This + list can be of arbitrary length and the path will be created + in the dictionary if it does not exist. + + :param function: A python function to be called if the specified path does + not exist, if the force parameter is ``True``. + + :param fun_args: A list of positional arguments to the function. + + :param fun_kwargs: A dictionary of keyword arguments to the function. 
+ + :param force: If ``True``, force the ```__context__`` path to be updated. + Otherwise, only create it if it does not exist. + """ + + target = __context__ + + # Build each level of the dictionary as needed + for key in keys[:-1]: + if key not in target: + target[key] = {} + target = target[key] + + # Call the supplied function to populate the dictionary + if force or keys[-1] not in target: + if not fun_args: + fun_args = [] + + if not fun_kwargs: + fun_kwargs = {} + + target[keys[-1]] = function(*fun_args, **fun_kwargs) + + +def file_exists(path, saltenv="base"): + """ + Return ``True`` if a file exists in the state tree, ``False`` otherwise. + + .. versionadded:: 3004 + + :param str path: The fully qualified path to a file in the state tree. + :param str saltenv: The fileserver environment to search. Default: ``base`` + + CLI Example: + + .. code-block:: bash + + salt '*' slsutil.file_exists nginx/defaults.yaml + """ + + _set_context( + [CONTEXT_BASE, saltenv, "file_list"], __salt__["cp.list_master"], [saltenv] + ) + return path in __context__[CONTEXT_BASE][saltenv]["file_list"] + + +def dir_exists(path, saltenv="base"): + """ + Return ``True`` if a directory exists in the state tree, ``False`` otherwise. + + :param str path: The fully qualified path to a directory in the state tree. + :param str saltenv: The fileserver environment to search. Default: ``base`` + + .. versionadded:: 3004 + + CLI Example: + + .. code-block:: bash + + salt '*' slsutil.dir_exists nginx/files + """ + + _set_context( + [CONTEXT_BASE, saltenv, "dir_list"], __salt__["cp.list_master_dirs"], [saltenv] + ) + return path in __context__[CONTEXT_BASE][saltenv]["dir_list"] + + +def path_exists(path, saltenv="base"): + """ + Return ``True`` if a path exists in the state tree, ``False`` otherwise. The path + could refer to a file or directory. + + .. versionadded:: 3004 + + :param str path: The fully qualified path to a file or directory in the state tree.
+ :param str saltenv: The fileserver environment to search. Default: ``base`` + + CLI Example: + + .. code-block:: bash + + salt '*' slsutil.path_exists nginx/defaults.yaml + """ + + return file_exists(path, saltenv) or dir_exists(path, saltenv) + + +def findup(startpath, filenames, saltenv="base"): + """ + Find the first path matching a filename or list of filenames in a specified + directory or the nearest ancestor directory. Returns the full path to the + first file found. + + .. versionadded:: 3004 + + :param str startpath: The fileserver path from which to begin the search. + An empty string refers to the state tree root. + :param filenames: A filename or list of filenames to search for. Searching for + directory names is also supported. + :param str saltenv: The fileserver environment to search. Default: ``base`` + + Example: return the path to ``defaults.yaml``, walking up the tree from the + state file currently being processed. + + .. code-block:: jinja + + {{ salt["slsutil.findup"](tplfile, "defaults.yaml") }} + + CLI Example: + + .. 
code-block:: bash + + salt '*' slsutil.findup formulas/shared/nginx map.jinja + """ + + # Normalize the path + if startpath: + startpath = posixpath.normpath(startpath) + + # Verify the cwd is a valid path in the state tree + if startpath and not path_exists(startpath, saltenv): + raise salt.exceptions.SaltInvocationError( + "Starting path not found in the state tree: {}".format(startpath) + ) + + # Ensure that patterns is a string or list of strings + if isinstance(filenames, str): + filenames = [filenames] + if not isinstance(filenames, list): + raise salt.exceptions.SaltInvocationError( + "Filenames argument must be a string or list of strings" + ) + + while True: + + # Loop over filenames, looking for one at the current path level + for filename in filenames: + fullname = salt.utils.path.join( + startpath or "", filename, use_posixpath=True + ) + if path_exists(fullname, saltenv): + return fullname + + # If the root path was just checked, raise an error + if not startpath: + raise salt.exceptions.CommandExecutionError( + "File pattern(s) not found in path ancestry" + ) + + # Move up one level in the ancestry + startpath = os.path.dirname(startpath) diff --git a/tests/pytests/integration/ssh/test_slsutil.py b/tests/pytests/integration/ssh/test_slsutil.py new file mode 100644 index 00000000000..4ac9ed59a82 --- /dev/null +++ b/tests/pytests/integration/ssh/test_slsutil.py @@ -0,0 +1,94 @@ +import json + +import pytest + + +@pytest.mark.usefixtures("state_tree") +def test_renderer_file(salt_ssh_cli): + ret = salt_ssh_cli.run("slsutil.renderer", "salt://test.sls") + assert ret.returncode == 0 + assert isinstance(ret.data, dict) + assert "Ok with def" in ret.data + + +def test_renderer_string(salt_ssh_cli): + rend = "{{ salt['test.echo']('foo') }}: {{ pillar['ext_spam'] }}" + ret = salt_ssh_cli.run("slsutil.renderer", string=rend) + assert ret.returncode == 0 + assert isinstance(ret.data, dict) + assert ret.data == {"foo": "eggs"} + + +def 
test_serialize(salt_ssh_cli): + obj = {"foo": "bar"} + ret = salt_ssh_cli.run("slsutil.serialize", "json", obj) + assert ret.returncode == 0 + assert isinstance(ret.data, str) + assert ret.data == json.dumps(obj) + + +def test_deserialize(salt_ssh_cli): + obj = {"foo": "bar"} + data = json.dumps(obj) + # Need to quote it, otherwise it's deserialized by the + # test wrapper + ret = salt_ssh_cli.run("slsutil.deserialize", "json", f"'{data}'") + assert ret.returncode == 0 + assert isinstance(ret.data, type(obj)) + assert ret.data == obj + + +@pytest.mark.parametrize( + "path,expected", + [ + ("test_deep", True), + ("test_deep/test.sls", False), + ("test_deep/b/2", True), + ("does_not/ex/ist", False), + ], +) +def test_dir_exists(salt_ssh_cli, path, expected): + ret = salt_ssh_cli.run("slsutil.dir_exists", path) + assert ret.returncode == 0 + assert isinstance(ret.data, bool) + assert ret.data is expected + + +@pytest.mark.parametrize( + "path,expected", [("test_deep", False), ("test_deep/test.sls", True)] +) +def test_file_exists(salt_ssh_cli, path, expected): + ret = salt_ssh_cli.run("slsutil.file_exists", path) + assert ret.returncode == 0 + assert isinstance(ret.data, bool) + assert ret.data is expected + + +@pytest.mark.parametrize( + "start,name,expected", + [ + ("test_deep/b/2", "test.sls", "test_deep/b/2/test.sls"), + ("test_deep/b/2", "cheese", "cheese"), + ], +) +def test_findup(salt_ssh_cli, start, name, expected): + ret = salt_ssh_cli.run("slsutil.findup", start, name) + assert ret.returncode == 0 + assert isinstance(ret.data, str) + assert ret.data == expected + + +@pytest.mark.parametrize( + "path,expected", + [ + ("test_deep", True), + ("test_deep/test.sls", True), + ("test_deep/b/2", True), + ("does_not/ex/ist", False), + ], +) +def test_path_exists(salt_ssh_cli, path, expected): + ret = salt_ssh_cli.run("slsutil.path_exists", path) + assert ret.returncode == 0 + assert isinstance(ret.data, bool) + assert ret.data is expected diff --git 
a/tests/pytests/unit/client/ssh/wrapper/test_slsutil.py b/tests/pytests/unit/client/ssh/wrapper/test_slsutil.py new file mode 100644 index 00000000000..558d5ee74dc --- /dev/null +++ b/tests/pytests/unit/client/ssh/wrapper/test_slsutil.py @@ -0,0 +1,166 @@ +import contextlib +import logging + +import pytest + +import salt.client.ssh.wrapper.slsutil as slsutil +from salt.exceptions import CommandExecutionError, SaltInvocationError +from tests.support.mock import MagicMock + +log = logging.getLogger(__name__) + + +# --- These tests are adapted from tests.pytests.unit.utils.slsutil + + +@pytest.fixture +def configure_loader_modules(master_dirs, master_files): + return { + slsutil: { + "__salt__": { + "cp.list_master": MagicMock(return_value=master_files), + "cp.list_master_dirs": MagicMock(return_value=master_dirs), + }, + "__opts__": { + "renderer": "jinja|yaml", + "renderer_blacklist": [], + "renderer_whitelist": [], + }, + } + } + + +@pytest.fixture +def master_dirs(): + return ["red", "red/files", "blue", "blue/files"] + + +@pytest.fixture +def master_files(): + return [ + "top.sls", + "red/init.sls", + "red/files/default.conf", + "blue/init.sls", + "blue/files/default.conf", + ] + + +@pytest.mark.parametrize("inpt,expected", ((True, "yes"), (False, "no"))) +def test_boolstr(inpt, expected): + assert slsutil.boolstr(inpt, true="yes", false="no") == expected + + +@pytest.mark.parametrize( + "inpt,expected", (("red/init.sls", True), ("green/init.sls", False)) +) +def test_file_exists(inpt, expected): + assert slsutil.file_exists(inpt) is expected + + +@pytest.mark.parametrize("inpt,expected", (("red", True), ("green", False))) +def test_dir_exists(inpt, expected): + assert slsutil.dir_exists(inpt) is expected + + +@pytest.mark.parametrize( + "inpt,expected", + ( + ("red", True), + ("green", False), + ("red/init.sls", True), + ("green/init.sls", False), + ), +) +def test_path_exists(inpt, expected): + assert slsutil.path_exists(inpt) is expected + + 
+@pytest.mark.parametrize( + "inpt,expected,raises", + [ + (("red/files", "init.sls"), "red/init.sls", None), + (("red/files", ["top.sls"]), "top.sls", None), + (("", "top.sls"), "top.sls", None), + ((None, "top.sls"), "top.sls", None), + (("red/files", ["top.sls", "init.sls"]), "red/init.sls", None), + ( + ("red/files", "notfound"), + None, + pytest.raises( + CommandExecutionError, match=r"File pattern\(s\) not found.*" + ), + ), + ( + ("red", "default.conf"), + None, + pytest.raises( + CommandExecutionError, match=r"File pattern\(s\) not found.*" + ), + ), + ( + ("green", "notfound"), + None, + pytest.raises(SaltInvocationError, match="Starting path not found.*"), + ), + ( + ("red", 1234), + None, + pytest.raises( + SaltInvocationError, match=".*must be a string or list of strings.*" + ), + ), + ], +) +def test_findup(inpt, expected, raises): + if raises is None: + raises = contextlib.nullcontext() + with raises: + res = slsutil.findup(*inpt) + assert res == expected + + +@pytest.mark.parametrize( + "a,b,merge_lists,expected", + [ + ( + {"foo": {"bar": "baz", "hi": "there", "some": ["list"]}}, + {"foo": {"baz": "quux", "bar": "hi", "some": ["other_list"]}}, + False, + { + "foo": { + "baz": "quux", + "bar": "hi", + "hi": "there", + "some": ["other_list"], + } + }, + ), + ( + {"foo": {"bar": "baz", "hi": "there", "some": ["list"]}}, + {"foo": {"baz": "quux", "bar": "hi", "some": ["other_list"]}}, + True, + { + "foo": { + "baz": "quux", + "bar": "hi", + "hi": "there", + "some": ["list", "other_list"], + } + }, + ), + ], +) +@pytest.mark.parametrize("func", ("update", "merge", "merge_all")) +def test_update_merge(a, b, merge_lists, expected, func): + arg = (a, b) + if func == "merge_all": + arg = ([a, b],) + res = getattr(slsutil, func)(*arg, merge_lists=merge_lists) + assert res == expected + assert (a is res) is (func == "update") + + +def test_renderer_requires_either_path_or_string(): + with pytest.raises(SaltInvocationError, match=".*either path or string.*"): + 
slsutil.renderer() From e9ecf30500ffd3c1d0da961fe355601adea309b7 Mon Sep 17 00:00:00 2001 From: jeanluc Date: Tue, 7 Nov 2023 00:02:30 +0100 Subject: [PATCH 142/160] Add `defaults` SSH wrapper module This is a 1:1 copy of the execution module, incl. tests... (cherry picked from commit 47a609fab058105109159536d94b577d452155a0) --- changelog/51605.fixed.md | 1 + salt/client/ssh/wrapper/defaults.py | 240 ++++++++++++++++++ .../unit/client/ssh/wrapper/test_defaults.py | 215 ++++++++++++++++ 3 files changed, 456 insertions(+) create mode 100644 changelog/51605.fixed.md create mode 100644 salt/client/ssh/wrapper/defaults.py create mode 100644 tests/pytests/unit/client/ssh/wrapper/test_defaults.py diff --git a/changelog/51605.fixed.md b/changelog/51605.fixed.md new file mode 100644 index 00000000000..990b34413d9 --- /dev/null +++ b/changelog/51605.fixed.md @@ -0,0 +1 @@ +Fixed defaults.merge is not available when using salt-ssh diff --git a/salt/client/ssh/wrapper/defaults.py b/salt/client/ssh/wrapper/defaults.py new file mode 100644 index 00000000000..d03990b8798 --- /dev/null +++ b/salt/client/ssh/wrapper/defaults.py @@ -0,0 +1,240 @@ +""" +SSH wrapper module to work with salt formula defaults files + +""" + +import copy +import logging +import os + +import salt.fileclient +import salt.utils.data +import salt.utils.dictupdate as dictupdate +import salt.utils.files +import salt.utils.json +import salt.utils.url +import salt.utils.yaml + +__virtualname__ = "defaults" + +log = logging.getLogger(__name__) + + +def _mk_client(): + """ + Create a file client and add it to the context + """ + return salt.fileclient.get_file_client(__opts__) + + +def _load(formula): + """ + Generates a list of salt:///defaults.(json|yaml) files + and fetches them from the Salt master. + + Returns first defaults file as python dict. + """ + + # Compute possibilities + paths = [] + for ext in ("yaml", "json"): + source_url = salt.utils.url.create(formula + "/defaults." 
+ ext) + paths.append(source_url) + # Fetch files from master + with _mk_client() as client: + defaults_files = client.cache_files(paths) + + for file_ in defaults_files: + if not file_: + # Skip empty string returned by cp.fileclient.cache_files. + continue + + suffix = file_.rsplit(".", 1)[-1] + if suffix == "yaml": + loader = salt.utils.yaml.safe_load + elif suffix == "json": + loader = salt.utils.json.load + else: + log.debug("Failed to determine loader for %r", file_) + continue + + if os.path.exists(file_): + log.debug("Reading defaults from %r", file_) + with salt.utils.files.fopen(file_) as fhr: + defaults = loader(fhr) + log.debug("Read defaults %r", defaults) + + return defaults or {} + + +def get(key, default=""): + """ + defaults.get is used much like pillar.get except that it will read + a default value for a pillar from defaults.json or defaults.yaml + files that are stored in the root of a salt formula. + + CLI Example: + + .. code-block:: bash + + salt '*' defaults.get core:users:root + + The defaults is computed from pillar key. The first entry is considered as + the formula namespace. + + For example, querying ``core:users:root`` will try to load + ``salt://core/defaults.yaml`` and ``salt://core/defaults.json``. + """ + + # Determine formula namespace from query + if ":" in key: + namespace, key = key.split(":", 1) + else: + namespace, key = key, None + + # Fetch and load defaults formula files from states. + defaults = _load(namespace) + + # Fetch value + if key: + return salt.utils.data.traverse_dict_and_list(defaults, key, default) + else: + return defaults + + +def merge(dest, src, merge_lists=False, in_place=True, convert_none=True): + """ + defaults.merge + Allows deep merging of dicts in formulas. + + merge_lists : False + If True, it will also merge lists instead of replace their items. + + in_place : True + If True, it will merge into dest dict, + if not it will make a new copy from that dict and return it. 
+ + convert_none : True + If True, it will convert src and dest to empty dicts if they are None. + If True and dest is None but in_place is True, raises TypeError. + If False it will make a new copy from that dict and return it. + + .. versionadded:: 3005 + + CLI Example: + + .. code-block:: bash + + salt '*' defaults.merge '{a: b}' '{d: e}' + + It is more typical to use this in a templating language in formulas, + instead of directly on the command-line. + """ + # Force empty dicts if applicable (useful for cleaner templating) + src = {} if (src is None and convert_none) else src + if dest is None and convert_none: + if in_place: + raise TypeError("Can't perform in-place merge into NoneType") + else: + dest = {} + + if in_place: + merged = dest + else: + merged = copy.deepcopy(dest) + return dictupdate.update(merged, src, merge_lists=merge_lists) + + +def deepcopy(source): + """ + defaults.deepcopy + Allows deep copy of objects in formulas. + + By default, Python does not copy objects, + it creates bindings between a target and an object. + + It is more typical to use this in a templating language in formulas, + instead of directly on the command-line. + """ + return copy.deepcopy(source) + + +def update(dest, defaults, merge_lists=True, in_place=True, convert_none=True): + """ + defaults.update + Allows setting defaults for group of data set e.g. group for nodes. + + This function is a combination of defaults.merge + and defaults.deepcopy to avoid redundant in jinja. + + Example: + + .. code-block:: yaml + + group01: + defaults: + enabled: True + extra: + - test + - stage + nodes: + host01: + index: foo + upstream: bar + host02: + index: foo2 + upstream: bar2 + + .. code-block:: jinja + + {% do salt['defaults.update'](group01.nodes, group01.defaults) %} + + Each node will look like the following: + + .. 
code-block:: yaml + + host01: + enabled: True + index: foo + upstream: bar + extra: + - test + - stage + + merge_lists : True + If True, it will also merge lists instead of replace their items. + + in_place : True + If True, it will merge into dest dict. + if not it will make a new copy from that dict and return it. + + convert_none : True + If True, it will convert src and dest to empty dicts if they are None. + If True and dest is None but in_place is True, raises TypeError. + If False it will make a new copy from that dict and return it. + + .. versionadded:: 3005 + + It is more typical to use this in a templating language in formulas, + instead of directly on the command-line. + """ + # Force empty dicts if applicable here + if in_place: + if dest is None: + raise TypeError("Can't perform in-place update into NoneType") + else: + nodes = dest + else: + dest = {} if (dest is None and convert_none) else dest + nodes = deepcopy(dest) + + defaults = {} if (defaults is None and convert_none) else defaults + + for node_name, node_vars in nodes.items(): + defaults_vars = deepcopy(defaults) + node_vars = merge( + defaults_vars, node_vars, merge_lists=merge_lists, convert_none=convert_none + ) + nodes[node_name] = node_vars + + return nodes diff --git a/tests/pytests/unit/client/ssh/wrapper/test_defaults.py b/tests/pytests/unit/client/ssh/wrapper/test_defaults.py new file mode 100644 index 00000000000..12d07bc2a85 --- /dev/null +++ b/tests/pytests/unit/client/ssh/wrapper/test_defaults.py @@ -0,0 +1,215 @@ +""" +Test cases for salt.client.ssh.wrapper.defaults + +This has been copied 1:1 from tests.pytests.unit.modules.test_defaults +""" + +import inspect + +import pytest + +import salt.client.ssh.wrapper.defaults as defaults +from tests.support.mock import MagicMock, patch + + +@pytest.fixture() +def configure_loader_modules(): + return {defaults: {}} + + +def test_get_mock(): + """ + Test if it execute a defaults client run and return a dict + """ + with 
patch.object(inspect, "stack", MagicMock(return_value=[])), patch( + "salt.client.ssh.wrapper.defaults.get", + MagicMock(return_value={"users": {"root": [0]}}), + ): + assert defaults.get("core:users:root") == {"users": {"root": [0]}} + + +def test_merge_with_list_merging(): + """ + Test deep merging of dicts with merge_lists enabled. + """ + + src_dict = { + "string_key": "string_val_src", + "list_key": ["list_val_src"], + "dict_key": {"dict_key_src": "dict_val_src"}, + } + + dest_dict = { + "string_key": "string_val_dest", + "list_key": ["list_val_dest"], + "dict_key": {"dict_key_dest": "dict_val_dest"}, + } + + merged_dict = { + "string_key": "string_val_src", + "list_key": ["list_val_dest", "list_val_src"], + "dict_key": { + "dict_key_dest": "dict_val_dest", + "dict_key_src": "dict_val_src", + }, + } + + defaults.merge(dest_dict, src_dict, merge_lists=True) + assert dest_dict == merged_dict + + +def test_merge_without_list_merging(): + """ + Test deep merging of dicts with merge_lists disabled. + """ + + src = { + "string_key": "string_val_src", + "list_key": ["list_val_src"], + "dict_key": {"dict_key_src": "dict_val_src"}, + } + + dest = { + "string_key": "string_val_dest", + "list_key": ["list_val_dest"], + "dict_key": {"dict_key_dest": "dict_val_dest"}, + } + + merged = { + "string_key": "string_val_src", + "list_key": ["list_val_src"], + "dict_key": { + "dict_key_dest": "dict_val_dest", + "dict_key_src": "dict_val_src", + }, + } + + defaults.merge(dest, src, merge_lists=False) + assert dest == merged + + +def test_merge_not_in_place(): + """ + Test deep merging of dicts not in place. + """ + + src = {"nested_dict": {"A": "A"}} + + dest = {"nested_dict": {"B": "B"}} + + dest_orig = {"nested_dict": {"B": "B"}} + + merged = {"nested_dict": {"A": "A", "B": "B"}} + + final = defaults.merge(dest, src, in_place=False) + assert dest == dest_orig + assert final == merged + + +def test_merge_src_is_none(): + """ + Test deep merging of dicts not in place. 
+ """ + + dest = {"nested_dict": {"B": "B"}} + + dest_orig = {"nested_dict": {"B": "B"}} + + final = defaults.merge(dest, None, in_place=False) + assert dest == dest_orig + assert final == dest_orig + + +def test_merge_dest_is_none(): + """ + Test deep merging of dicts not in place. + """ + + src = {"nested_dict": {"B": "B"}} + + src_orig = {"nested_dict": {"B": "B"}} + + final = defaults.merge(None, src, in_place=False) + assert src == src_orig + assert final == src_orig + + +def test_merge_in_place_dest_is_none(): + """ + Test deep merging of dicts not in place. + """ + + src = {"nested_dict": {"B": "B"}} + + pytest.raises(TypeError, defaults.merge, None, src) + + +def test_deepcopy(): + """ + Test a deep copy of object. + """ + + src = {"A": "A", "B": "B"} + + dist = defaults.deepcopy(src) + dist.update({"C": "C"}) + + result = {"A": "A", "B": "B", "C": "C"} + + assert src != dist + assert dist == result + + +def test_update_in_place(): + """ + Test update with defaults values in place. 
+ """ + + group01 = { + "defaults": {"enabled": True, "extra": ["test", "stage"]}, + "nodes": {"host01": {"index": "foo", "upstream": "bar"}}, + } + + host01 = { + "enabled": True, + "index": "foo", + "upstream": "bar", + "extra": ["test", "stage"], + } + + defaults.update(group01["nodes"], group01["defaults"]) + assert group01["nodes"]["host01"] == host01 + + +def test_update_with_defaults_none(): + group01 = { + "defaults": {"enabled": True, "extra": ["test", "stage"]}, + "nodes": {"host01": {"index": "foo", "upstream": "bar"}}, + } + + host01 = { + "index": "foo", + "upstream": "bar", + } + + defaults.update(group01["nodes"], None) + assert group01["nodes"]["host01"] == host01 + + +def test_update_with_dest_none(): + group01 = { + "defaults": {"enabled": True, "extra": ["test", "stage"]}, + "nodes": {"host01": {"index": "foo", "upstream": "bar"}}, + } + + ret = defaults.update(None, group01["defaults"], in_place=False) + assert ret == {} + + +def test_update_in_place_with_dest_none(): + group01 = { + "defaults": {"enabled": True, "extra": ["test", "stage"]}, + "nodes": {"host01": {"index": "foo", "upstream": "bar"}}, + } + + pytest.raises(TypeError, defaults.update, None, group01["defaults"]) From 6c62792c73d1779dee1d5808730b2ed314199ec4 Mon Sep 17 00:00:00 2001 From: jeanluc Date: Mon, 6 Nov 2023 12:42:55 +0100 Subject: [PATCH 143/160] Sync config SSH wrapper with execution module The wrapper has diverged significantly from the module. * `option` did not check grains * `option` did not have `omit_all` and `wildcard` parameters * `get` missed several parameters: `delimiter`, `merge` and all `omit_*` * There was no wrapping function for `items`. 
(cherry picked from commit 8356be888bf32e2f4b081c54a6a56b21d5cf833c) --- changelog/56441.fixed.md | 1 + salt/client/ssh/wrapper/config.py | 375 +++++++++++++++--- tests/pytests/integration/ssh/test_config.py | 66 +++ .../unit/client/ssh/wrapper/test_config.py | 219 ++++++++++ 4 files changed, 613 insertions(+), 48 deletions(-) create mode 100644 changelog/56441.fixed.md create mode 100644 tests/pytests/integration/ssh/test_config.py create mode 100644 tests/pytests/unit/client/ssh/wrapper/test_config.py diff --git a/changelog/56441.fixed.md b/changelog/56441.fixed.md new file mode 100644 index 00000000000..489ad80f770 --- /dev/null +++ b/changelog/56441.fixed.md @@ -0,0 +1 @@ +Fixed config.get does not support merge option with salt-ssh diff --git a/salt/client/ssh/wrapper/config.py b/salt/client/ssh/wrapper/config.py index dcc00ceb2c3..a6db176453c 100644 --- a/salt/client/ssh/wrapper/config.py +++ b/salt/client/ssh/wrapper/config.py @@ -2,17 +2,22 @@ Return config information """ +import copy +import fnmatch +import logging import os -import re +import urllib.parse import salt.syspaths as syspaths import salt.utils.data import salt.utils.files +import salt.utils.sdb as sdb + +log = logging.getLogger(__name__) # Set up the default values for all systems DEFAULTS = { "mongo.db": "salt", - "mongo.host": "salt", "mongo.password": "", "mongo.port": 27017, "mongo.user": "", @@ -38,9 +43,12 @@ DEFAULTS = { "solr.num_backups": 1, "poudriere.config": "/usr/local/etc/poudriere.conf", "poudriere.config_dir": "/usr/local/etc/poudriere.d", + "ldap.uri": "", "ldap.server": "localhost", "ldap.port": "389", "ldap.tls": False, + "ldap.no_verify": False, + "ldap.anonymous": True, "ldap.scope": 2, "ldap.attrs": None, "ldap.binddn": "", @@ -51,6 +59,11 @@ DEFAULTS = { "tunnel": False, "images": os.path.join(syspaths.SRV_ROOT_DIR, "salt-images"), }, + "docker.exec_driver": "docker-exec", + "docker.compare_container_networks": { + "static": ["Aliases", "Links", "IPAMConfig"], + 
"automatic": ["IPAddress", "Gateway", "GlobalIPv6Address", "IPv6Gateway"], + }, } @@ -96,15 +109,66 @@ def valid_fileproto(uri): salt '*' config.valid_fileproto salt://path/to/file """ - try: - return bool(re.match("^(?:salt|https?|ftp)://", uri)) - except Exception: # pylint: disable=broad-except - return False + return urllib.parse.urlparse(uri).scheme in salt.utils.files.VALID_PROTOS -def option(value, default="", omit_opts=False, omit_master=False, omit_pillar=False): +def option( + value, + default=None, + omit_opts=False, + omit_grains=False, + omit_pillar=False, + omit_master=False, + omit_all=False, + wildcard=False, +): """ - Pass in a generic option and receive the value that will be assigned + Returns the setting for the specified config value. The priority for + matches is the same as in :py:func:`config.get `, + only this function does not recurse into nested data structures. Another + difference between this function and :py:func:`config.get + ` is that it comes with a set of "sane defaults". + To view these, you can run the following command: + + .. code-block:: bash + + salt '*' config.option '*' omit_all=True wildcard=True + + default + The default value if no match is found. If not specified, then the + fallback default will be an empty string, unless ``wildcard=True``, in + which case the return will be an empty dictionary. + + omit_opts : False + Pass as ``True`` to exclude matches from the minion configuration file + + omit_grains : False + Pass as ``True`` to exclude matches from the grains + + omit_pillar : False + Pass as ``True`` to exclude matches from the pillar data + + omit_master : False + Pass as ``True`` to exclude matches from the master configuration file + + omit_all : True + Shorthand to omit all of the above and return matches only from the + "sane defaults". + + .. versionadded:: 3000 + + wildcard : False + If used, this will perform pattern matching on keys. Note that this + will also significantly change the return data. 
Instead of only a value + being returned, a dictionary mapping the matched keys to their values + is returned. For example, using ``wildcard=True`` with a ``key`` of + ``'foo.ba*`` could return a dictionary like so: + + .. code-block:: python + + {'foo.bar': True, 'foo.baz': False} + + .. versionadded:: 3000 CLI Example: @@ -112,18 +176,48 @@ def option(value, default="", omit_opts=False, omit_master=False, omit_pillar=Fa salt '*' config.option redis.host """ - if not omit_opts: - if value in __opts__: - return __opts__[value] - if not omit_master: - if value in __pillar__.get("master", {}): - return __pillar__["master"][value] - if not omit_pillar: - if value in __pillar__: - return __pillar__[value] - if value in DEFAULTS: - return DEFAULTS[value] - return default + if omit_all: + omit_opts = omit_grains = omit_pillar = omit_master = True + + if default is None: + default = "" if not wildcard else {} + + if not wildcard: + if not omit_opts: + if value in __opts__: + return __opts__[value] + if not omit_grains: + if value in __grains__: + return __grains__[value] + if not omit_pillar: + if value in __pillar__: + return __pillar__[value] + if not omit_master: + if value in __pillar__.get("master", {}): + return __pillar__["master"][value] + if value in DEFAULTS: + return DEFAULTS[value] + + # No match + return default + else: + # We need to do the checks in the reverse order so that minion opts + # takes precedence + ret = {} + for omit, data in ( + (omit_master, __pillar__.get("master", {})), + (omit_pillar, __pillar__), + (omit_grains, __grains__), + (omit_opts, __opts__), + ): + if not omit: + ret.update({x: data[x] for x in fnmatch.filter(data, value)}) + # Check the DEFAULTS as well to see if the pattern matches it + for item in (x for x in fnmatch.filter(DEFAULTS, value) if x not in ret): + ret[item] = DEFAULTS[item] + + # If no matches, return the default + return ret or default def merge(value, default="", omit_opts=False, omit_master=False, 
omit_pillar=False): @@ -171,54 +265,223 @@ def merge(value, default="", omit_opts=False, omit_master=False, omit_pillar=Fal ret = list(ret) + list(tmp) if ret is None and value in DEFAULTS: return DEFAULTS[value] - return ret or default + if ret is None: + return default + return ret -def get(key, default=""): +def get( + key, + default="", + delimiter=":", + merge=None, + omit_opts=False, + omit_pillar=False, + omit_master=False, + omit_grains=False, +): """ .. versionadded:: 0.14.0 - Attempt to retrieve the named value from opts, pillar, grains of the master - config, if the named value is not available return the passed default. - The default return is an empty string. + Attempt to retrieve the named value from the minion config file, pillar, + grains or the master config. If the named value is not available, return + the value specified by the ``default`` argument. If this argument is not + specified, ``default`` falls back to an empty string. - The value can also represent a value in a nested dict using a ":" delimiter - for the dict. This means that if a dict looks like this:: + Values can also be retrieved from nested dictionaries. Assume the below + data structure: + + .. code-block:: python {'pkg': {'apache': 'httpd'}} - To retrieve the value associated with the apache key in the pkg dict this - key can be passed:: + To retrieve the value associated with the ``apache`` key, in the + sub-dictionary corresponding to the ``pkg`` key, the following command can + be used: - pkg:apache + .. code-block:: bash - This routine traverses these data stores in this order: + salt myminion config.get pkg:apache - - Local minion config (opts) + The ``:`` (colon) is used to represent a nested dictionary level. + + .. versionchanged:: 2015.5.0 + The ``delimiter`` argument was added, to allow delimiters other than + ``:`` to be used. 
+ + This function traverses these data stores in this order, returning the + first match found: + + - Minion configuration - Minion's grains - - Minion's pillar - - Master config + - Minion's pillar data + - Master configuration (requires :conf_minion:`pillar_opts` to be set to + ``True`` in Minion config file in order to work) + + This means that if there is a value that is going to be the same for the + majority of minions, it can be configured in the Master config file, and + then overridden using the grains, pillar, or Minion config file. + + Adding config options to the Master or Minion configuration file is easy: + + .. code-block:: yaml + + my-config-option: value + cafe-menu: + - egg and bacon + - egg sausage and bacon + - egg and spam + - egg bacon and spam + - egg bacon sausage and spam + - spam bacon sausage and spam + - spam egg spam spam bacon and spam + - spam sausage spam spam bacon spam tomato and spam + + .. note:: + Minion configuration options built into Salt (like those defined + :ref:`here `) will *always* be defined in + the Minion configuration and thus *cannot be overridden by grains or + pillar data*. However, additional (user-defined) configuration options + (as in the above example) will not be in the Minion configuration by + default and thus can be overridden using grains/pillar data by leaving + the option out of the minion config file. + + **Arguments** + + delimiter + .. versionadded:: 2015.5.0 + + Override the delimiter used to separate nested levels of a data + structure. + + merge + .. versionadded:: 2015.5.0 + + If passed, this parameter will change the behavior of the function so + that, instead of traversing each data store above in order and + returning the first match, the data stores are first merged together + and then searched. The pillar data is merged into the master config + data, then the grains are merged, followed by the Minion config data. + The resulting data structure is then searched for a match. 
This allows + for configurations to be more flexible. + + .. note:: + + The merging described above does not mean that grain data will end + up in the Minion's pillar data, or pillar data will end up in the + master config data, etc. The data is just combined for the purposes + of searching an amalgam of the different data stores. + + The supported merge strategies are as follows: + + - **recurse** - If a key exists in both dictionaries, and the new value + is not a dictionary, it is replaced. Otherwise, the sub-dictionaries + are merged together into a single dictionary, recursively on down, + following the same criteria. For example: + + .. code-block:: python + + >>> dict1 = {'foo': {'bar': 1, 'qux': True}, + 'hosts': ['a', 'b', 'c'], + 'only_x': None} + >>> dict2 = {'foo': {'baz': 2, 'qux': False}, + 'hosts': ['d', 'e', 'f'], + 'only_y': None} + >>> merged + {'foo': {'bar': 1, 'baz': 2, 'qux': False}, + 'hosts': ['d', 'e', 'f'], + 'only_dict1': None, + 'only_dict2': None} + + - **overwrite** - If a key exists in the top level of both + dictionaries, the new value completely overwrites the old. For + example: + + .. code-block:: python + + >>> dict1 = {'foo': {'bar': 1, 'qux': True}, + 'hosts': ['a', 'b', 'c'], + 'only_x': None} + >>> dict2 = {'foo': {'baz': 2, 'qux': False}, + 'hosts': ['d', 'e', 'f'], + 'only_y': None} + >>> merged + {'foo': {'baz': 2, 'qux': False}, + 'hosts': ['d', 'e', 'f'], + 'only_dict1': None, + 'only_dict2': None} CLI Example: .. 
code-block:: bash salt '*' config.get pkg:apache + salt '*' config.get lxc.container_profile:centos merge=recurse """ - ret = salt.utils.data.traverse_dict_and_list(__opts__, key, "_|-") - if ret != "_|-": - return ret - ret = salt.utils.data.traverse_dict_and_list(__grains__, key, "_|-") - if ret != "_|-": - return ret - ret = salt.utils.data.traverse_dict_and_list(__pillar__, key, "_|-") - if ret != "_|-": - return ret - ret = salt.utils.data.traverse_dict_and_list( - __pillar__.get("master", {}), key, "_|-" - ) - if ret != "_|-": - return ret + if merge is None: + if not omit_opts: + ret = salt.utils.data.traverse_dict_and_list( + __opts__, key, "_|-", delimiter=delimiter + ) + if ret != "_|-": + return sdb.sdb_get(ret, __opts__) + + if not omit_grains: + ret = salt.utils.data.traverse_dict_and_list( + __grains__, key, "_|-", delimiter + ) + if ret != "_|-": + return sdb.sdb_get(ret, __opts__) + + if not omit_pillar: + ret = salt.utils.data.traverse_dict_and_list( + __pillar__, key, "_|-", delimiter=delimiter + ) + if ret != "_|-": + return sdb.sdb_get(ret, __opts__) + + if not omit_master: + ret = salt.utils.data.traverse_dict_and_list( + __pillar__.get("master", {}), key, "_|-", delimiter=delimiter + ) + if ret != "_|-": + return sdb.sdb_get(ret, __opts__) + + ret = salt.utils.data.traverse_dict_and_list( + DEFAULTS, key, "_|-", delimiter=delimiter + ) + if ret != "_|-": + return sdb.sdb_get(ret, __opts__) + else: + if merge not in ("recurse", "overwrite"): + log.warning( + "Unsupported merge strategy '%s'. 
Falling back to 'recurse'.", merge + ) + merge = "recurse" + + merge_lists = salt.config.master_config("/etc/salt/master").get( + "pillar_merge_lists" + ) + + data = copy.copy(DEFAULTS) + data = salt.utils.dictupdate.merge( + data, __pillar__.get("master", {}), strategy=merge, merge_lists=merge_lists + ) + data = salt.utils.dictupdate.merge( + data, __pillar__, strategy=merge, merge_lists=merge_lists + ) + data = salt.utils.dictupdate.merge( + data, __grains__, strategy=merge, merge_lists=merge_lists + ) + data = salt.utils.dictupdate.merge( + data, __opts__, strategy=merge, merge_lists=merge_lists + ) + ret = salt.utils.data.traverse_dict_and_list( + data, key, "_|-", delimiter=delimiter + ) + if ret != "_|-": + return sdb.sdb_get(ret, __opts__) + return default @@ -241,3 +504,19 @@ def dot_vals(value): if key.startswith(f"{value}."): ret[key] = val return ret + + +def items(): + """ + Return the complete config from the currently running minion process. + This includes defaults for values not set in the config file. + + CLI Example: + + .. code-block:: bash + + salt '*' config.items + """ + # This would otherwise be parsed as just the value of "local" in opts. + # In case the wfunc parsing is improved, this can be removed. 
+ return {"local": {"return": __opts__.copy()}} diff --git a/tests/pytests/integration/ssh/test_config.py b/tests/pytests/integration/ssh/test_config.py new file mode 100644 index 00000000000..d3ae2b03a3e --- /dev/null +++ b/tests/pytests/integration/ssh/test_config.py @@ -0,0 +1,66 @@ +import pytest + +pytestmark = [pytest.mark.slow_test] + + +def test_items(salt_ssh_cli): + ret = salt_ssh_cli.run("config.items") + assert ret.returncode == 0 + assert isinstance(ret.data, dict) + assert "id" in ret.data + assert "grains" in ret.data + assert "__master_opts__" in ret.data + assert "cachedir" in ret.data + + +@pytest.mark.parametrize("omit", (False, True)) +def test_option_minion_opt(salt_ssh_cli, omit): + # Minion opt + ret = salt_ssh_cli.run("config.option", "id", omit_opts=omit, omit_grains=True) + assert ret.returncode == 0 + assert (ret.data != salt_ssh_cli.get_minion_tgt()) is omit + assert (ret.data == "") is omit + + +@pytest.mark.parametrize("omit", (False, True)) +def test_option_pillar(salt_ssh_cli, omit): + ret = salt_ssh_cli.run("config.option", "ext_spam", omit_pillar=omit) + assert ret.returncode == 0 + assert (ret.data != "eggs") is omit + assert (ret.data == "") is omit + + +@pytest.mark.parametrize("omit", (False, True)) +def test_option_grain(salt_ssh_cli, omit): + ret = salt_ssh_cli.run("config.option", "kernel", omit_grains=omit) + assert ret.returncode == 0 + assert ( + ret.data not in ("Darwin", "Linux", "FreeBSD", "OpenBSD", "Windows") + ) is omit + assert (ret.data == "") is omit + + +@pytest.mark.parametrize("omit", (False, True)) +def test_get_minion_opt(salt_ssh_cli, omit): + ret = salt_ssh_cli.run("config.get", "cachedir", omit_master=True, omit_opts=omit) + assert ret.returncode == 0 + assert (ret.data == "") is omit + assert ("minion" not in ret.data) is omit + + +@pytest.mark.parametrize("omit", (False, True)) +def test_get_pillar(salt_ssh_cli, omit): + ret = salt_ssh_cli.run("config.get", "ext_spam", omit_pillar=omit) + assert 
ret.returncode == 0 + assert (ret.data != "eggs") is omit + assert (ret.data == "") is omit + + +@pytest.mark.parametrize("omit", (False, True)) +def test_get_grain(salt_ssh_cli, omit): + ret = salt_ssh_cli.run("config.get", "kernel", omit_grains=omit) + assert ret.returncode == 0 + assert ( + ret.data not in ("Darwin", "Linux", "FreeBSD", "OpenBSD", "Windows") + ) is omit + assert (ret.data == "") is omit diff --git a/tests/pytests/unit/client/ssh/wrapper/test_config.py b/tests/pytests/unit/client/ssh/wrapper/test_config.py new file mode 100644 index 00000000000..64e89c762ad --- /dev/null +++ b/tests/pytests/unit/client/ssh/wrapper/test_config.py @@ -0,0 +1,219 @@ +""" + Taken 1:1 from test cases for salt.modules.config + This tests the SSH wrapper module. +""" + + +import fnmatch + +import pytest + +import salt.client.ssh.wrapper.config as config +from tests.support.mock import patch + + +@pytest.fixture +def defaults(): + return { + "test.option.foo": "value of test.option.foo in defaults", + "test.option.bar": "value of test.option.bar in defaults", + "test.option.baz": "value of test.option.baz in defaults", + "test.option": "value of test.option in defaults", + } + + +@pytest.fixture +def no_match(): + return "test.option.nope" + + +@pytest.fixture +def opt_name(): + return "test.option.foo" + + +@pytest.fixture +def wildcard_opt_name(): + return "test.option.b*" + + +@pytest.fixture +def configure_loader_modules(): + return { + config: { + "__opts__": { + "test.option.foo": "value of test.option.foo in __opts__", + "test.option.bar": "value of test.option.bar in __opts__", + "test.option.baz": "value of test.option.baz in __opts__", + }, + "__pillar__": { + "test.option.foo": "value of test.option.foo in __pillar__", + "test.option.bar": "value of test.option.bar in __pillar__", + "test.option.baz": "value of test.option.baz in __pillar__", + "master": { + "test.option.foo": "value of test.option.foo in master", + "test.option.bar": "value of test.option.bar 
in master", + "test.option.baz": "value of test.option.baz in master", + }, + }, + "__grains__": { + "test.option.foo": "value of test.option.foo in __grains__", + "test.option.bar": "value of test.option.bar in __grains__", + "test.option.baz": "value of test.option.baz in __grains__", + }, + } + } + + +def _wildcard_match(data, wildcard_opt_name): + return {x: data[x] for x in fnmatch.filter(data, wildcard_opt_name)} + + +def test_defaults_only_name(defaults): + with patch.dict(config.DEFAULTS, defaults): + opt_name = "test.option" + opt = config.option(opt_name) + assert opt == config.DEFAULTS[opt_name] + + +def test_no_match(defaults, no_match, wildcard_opt_name): + """ + Make sure that the defa + """ + with patch.dict(config.DEFAULTS, defaults): + ret = config.option(no_match) + assert ret == "", ret + + default = "wat" + ret = config.option(no_match, default=default) + assert ret == default, ret + + ret = config.option(no_match, wildcard=True) + assert ret == {}, ret + + default = {"foo": "bar"} + ret = config.option(no_match, default=default, wildcard=True) + assert ret == default, ret + + # Should be no match since wildcard=False + ret = config.option(wildcard_opt_name) + assert ret == "", ret + + +def test_omits(defaults, opt_name, wildcard_opt_name): + with patch.dict(config.DEFAULTS, defaults): + + # ********** OMIT NOTHING ********** + + # Match should be in __opts__ dict + ret = config.option(opt_name) + assert ret == config.__opts__[opt_name], ret + + # Wildcard match + ret = config.option(wildcard_opt_name, wildcard=True) + assert ret == _wildcard_match(config.__opts__, wildcard_opt_name), ret + + # ********** OMIT __opts__ ********** + + # Match should be in __grains__ dict + ret = config.option(opt_name, omit_opts=True) + assert ret == config.__grains__[opt_name], ret + + # Wildcard match + ret = config.option(wildcard_opt_name, omit_opts=True, wildcard=True) + assert ret == _wildcard_match(config.__grains__, wildcard_opt_name), ret + + # 
********** OMIT __opts__, __grains__ ********** + + # Match should be in __pillar__ dict + ret = config.option(opt_name, omit_opts=True, omit_grains=True) + assert ret == config.__pillar__[opt_name], ret + + # Wildcard match + ret = config.option( + wildcard_opt_name, omit_opts=True, omit_grains=True, wildcard=True + ) + assert ret == _wildcard_match(config.__pillar__, wildcard_opt_name), ret + + # ********** OMIT __opts__, __grains__, __pillar__ ********** + + # Match should be in master opts + ret = config.option( + opt_name, omit_opts=True, omit_grains=True, omit_pillar=True + ) + assert ret == config.__pillar__["master"][opt_name], ret + + # Wildcard match + ret = config.option( + wildcard_opt_name, + omit_opts=True, + omit_grains=True, + omit_pillar=True, + wildcard=True, + ) + assert ret == _wildcard_match( + config.__pillar__["master"], wildcard_opt_name + ), ret + + # ********** OMIT ALL THE THINGS ********** + + # Match should be in master opts + ret = config.option( + opt_name, + omit_opts=True, + omit_grains=True, + omit_pillar=True, + omit_master=True, + ) + assert ret == config.DEFAULTS[opt_name], ret + + # Wildcard match + ret = config.option( + wildcard_opt_name, + omit_opts=True, + omit_grains=True, + omit_pillar=True, + omit_master=True, + wildcard=True, + ) + assert ret == _wildcard_match(config.DEFAULTS, wildcard_opt_name), ret + + # Match should be in master opts + ret = config.option(opt_name, omit_all=True) + assert ret == config.DEFAULTS[opt_name], ret + + # Wildcard match + ret = config.option(wildcard_opt_name, omit_all=True, wildcard=True) + assert ret == _wildcard_match(config.DEFAULTS, wildcard_opt_name), ret + + +# --- Additional tests not found in the execution module tests + + +@pytest.mark.parametrize("backup", ("", "minion", "master", "both")) +def test_backup_mode(backup): + res = config.backup_mode(backup) + assert res == backup or "minion" + + +@pytest.mark.parametrize( + "uri,expected", + (("salt://my/foo.txt", True), 
("mysql://foo:bar@foo.bar/baz", False)), +) +def test_valid_fileproto(uri, expected): + res = config.valid_fileproto(uri) + assert res is expected + + +def test_dot_vals(): + extra_master_opt = ("test.option.baah", "value of test.option.baah in master") + with patch.dict(config.__pillar__, {"master": dict((extra_master_opt,))}): + res = config.dot_vals("test") + assert isinstance(res, dict) + assert res + for var in ("foo", "bar", "baz"): + key = f"test.option.{var}" + assert key in res + assert res[key] == f"value of test.option.{var} in __opts__" + assert extra_master_opt[0] in res + assert res[extra_master_opt[0]] == extra_master_opt[1] From 512f61d573901098a7bba182943a5d5debf4ae50 Mon Sep 17 00:00:00 2001 From: jeanluc Date: Mon, 6 Nov 2023 12:37:23 +0100 Subject: [PATCH 144/160] Add state.test to SSH wrapper (cherry picked from commit 82f90e2f15cb93fd7094a04b1493f74450b39d0c) --- changelog/61100.fixed.md | 1 + salt/client/ssh/wrapper/state.py | 15 ++++ tests/pytests/integration/ssh/conftest.py | 68 +++++++++++++++++++ .../integration/ssh/state/test_state.py | 10 +++ 4 files changed, 94 insertions(+) create mode 100644 changelog/61100.fixed.md diff --git a/changelog/61100.fixed.md b/changelog/61100.fixed.md new file mode 100644 index 00000000000..d7ac2b6bc3f --- /dev/null +++ b/changelog/61100.fixed.md @@ -0,0 +1 @@ +Fixed state.test does not work with salt-ssh diff --git a/salt/client/ssh/wrapper/state.py b/salt/client/ssh/wrapper/state.py index aa61e07f81e..ece4ee92c3a 100644 --- a/salt/client/ssh/wrapper/state.py +++ b/salt/client/ssh/wrapper/state.py @@ -1317,3 +1317,18 @@ def single(fun, name, test=None, **kwargs): # If for some reason the json load fails, return the stdout return stdout + + +def test(*args, **kwargs): + """ + .. versionadded:: 3001 + + Alias for `state.apply` with the kwarg `test` forced to `True`. + + This is a nicety to avoid the need to type out `test=True` and the possibility of + a typo causing changes you do not intend. 
+ """ + kwargs["test"] = True + ret = apply_(*args, **kwargs) + + return ret diff --git a/tests/pytests/integration/ssh/conftest.py b/tests/pytests/integration/ssh/conftest.py index 7c1ceeba7ca..b0028efee17 100644 --- a/tests/pytests/integration/ssh/conftest.py +++ b/tests/pytests/integration/ssh/conftest.py @@ -25,3 +25,71 @@ def _reap_stray_processes(): with reap_stray_processes(): # Run test yield + + +@pytest.fixture(scope="module") +def state_tree(base_env_state_tree_root_dir): + top_file = """ + {%- from "map.jinja" import abc with context %} + base: + 'localhost': + - basic + '127.0.0.1': + - basic + """ + map_file = """ + {%- set abc = "def" %} + """ + state_file = """ + {%- from "map.jinja" import abc with context %} + Ok with {{ abc }}: + test.succeed_with_changes + """ + top_tempfile = pytest.helpers.temp_file( + "top.sls", top_file, base_env_state_tree_root_dir + ) + map_tempfile = pytest.helpers.temp_file( + "map.jinja", map_file, base_env_state_tree_root_dir + ) + state_tempfile = pytest.helpers.temp_file( + "test.sls", state_file, base_env_state_tree_root_dir + ) + with top_tempfile, map_tempfile, state_tempfile: + yield + + +@pytest.fixture(scope="module") +def state_tree_dir(base_env_state_tree_root_dir): + """ + State tree with files to test salt-ssh + when the map.jinja file is in another directory + """ + top_file = """ + {%- from "test/map.jinja" import abc with context %} + base: + 'localhost': + - test + '127.0.0.1': + - test + """ + map_file = """ + {%- set abc = "def" %} + """ + state_file = """ + {%- from "test/map.jinja" import abc with context %} + + Ok with {{ abc }}: + test.succeed_without_changes + """ + top_tempfile = pytest.helpers.temp_file( + "top.sls", top_file, base_env_state_tree_root_dir + ) + map_tempfile = pytest.helpers.temp_file( + "test/map.jinja", map_file, base_env_state_tree_root_dir + ) + state_tempfile = pytest.helpers.temp_file( + "test.sls", state_file, base_env_state_tree_root_dir + ) + + with top_tempfile, 
map_tempfile, state_tempfile: + yield diff --git a/tests/pytests/integration/ssh/state/test_state.py b/tests/pytests/integration/ssh/state/test_state.py index 62e8cbf513b..a7ebb22a601 100644 --- a/tests/pytests/integration/ssh/state/test_state.py +++ b/tests/pytests/integration/ssh/state/test_state.py @@ -101,3 +101,13 @@ def test_state_high(salt_ssh_cli): """ ret = salt_ssh_cli.run("state.high", '{"echo blah": {"cmd": ["run"]}}') assert ret.data["cmd_|-echo blah_|-echo blah_|-run"]["changes"]["stdout"] == "blah" + + +def test_state_test(salt_ssh_cli, state_tree): + ret = salt_ssh_cli.run("state.test", "test") + assert ret.returncode == 0 + assert ret.data + assert ( + ret.data["test_|-Ok with def_|-Ok with def_|-succeed_with_changes"]["result"] + is None + ) From fbf345e0d4ab685276fbe215e80cfd07e43310cf Mon Sep 17 00:00:00 2001 From: jeanluc Date: Tue, 28 Nov 2023 21:46:14 +0100 Subject: [PATCH 145/160] Add logmod SSH wrapper (cherry picked from commit 18bc40c77af75c491d2009ad851e86287fc3f6dd) --- changelog/65630.fixed.md | 1 + salt/client/ssh/wrapper/logmod.py | 79 +++++++++++++++++++++++++++++++ 2 files changed, 80 insertions(+) create mode 100644 changelog/65630.fixed.md create mode 100644 salt/client/ssh/wrapper/logmod.py diff --git a/changelog/65630.fixed.md b/changelog/65630.fixed.md new file mode 100644 index 00000000000..e8650abcdc1 --- /dev/null +++ b/changelog/65630.fixed.md @@ -0,0 +1 @@ +Added SSH wrapper for logmod diff --git a/salt/client/ssh/wrapper/logmod.py b/salt/client/ssh/wrapper/logmod.py new file mode 100644 index 00000000000..911fd7a1d4c --- /dev/null +++ b/salt/client/ssh/wrapper/logmod.py @@ -0,0 +1,79 @@ +""" +On-demand logging +================= + +.. versionadded:: 2017.7.0 + +The sole purpose of this module is logging messages in the minion. +It comes very handy when debugging complex Jinja templates, for example: + +.. 
code-block:: jinja + + {%- for var in range(10) %} + {%- do salt["log.info"](var) -%} + {%- endfor %} + +CLI Example: + +.. code-block:: bash + + salt '*' log.error "Please don't do that, this module is not for CLI use!" +""" + +import logging + +log = logging.getLogger(__name__) + +__virtualname__ = "log" + + +def __virtual__(): + return __virtualname__ + + +def debug(message): + """ + Log message at level DEBUG. + """ + log.debug(message) + return True + + +def info(message): + """ + Log message at level INFO. + """ + log.info(message) + return True + + +def warning(message): + """ + Log message at level WARNING. + """ + log.warning(message) + return True + + +def error(message): + """ + Log message at level ERROR. + """ + log.error(message) + return True + + +def critical(message): + """ + Log message at level CRITICAL. + """ + log.critical(message) + return True + + +def exception(message): + """ + Log message at level EXCEPTION. + """ + log.exception(message) + return True From db1dfb6784df26caccdce710a3423f99ced48626 Mon Sep 17 00:00:00 2001 From: jeanluc Date: Mon, 6 Nov 2023 15:27:17 +0100 Subject: [PATCH 146/160] Fix unused var in `grains.get` wrapper `ordered=False` would not have worked before (cherry picked from commit 5e16d8483458d4f58dee2d968a781c8e006633ee) --- salt/client/ssh/wrapper/grains.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/salt/client/ssh/wrapper/grains.py b/salt/client/ssh/wrapper/grains.py index 400131e1517..70d2c7e72da 100644 --- a/salt/client/ssh/wrapper/grains.py +++ b/salt/client/ssh/wrapper/grains.py @@ -72,9 +72,7 @@ def get(key, default="", delimiter=DEFAULT_TARGET_DELIM, ordered=True): grains = __grains__.value() else: grains = salt.utils.json.loads(salt.utils.json.dumps(__grains__.value())) - return salt.utils.data.traverse_dict_and_list( - __grains__.value(), key, default, delimiter - ) + return salt.utils.data.traverse_dict_and_list(grains, key, default, delimiter) def has_value(key): From 
8e54f2833307201f037e665726ffaf127e7d236c Mon Sep 17 00:00:00 2001 From: jeanluc Date: Mon, 24 Jun 2024 09:02:26 +0200 Subject: [PATCH 147/160] Fix state.test test, run pre-commit The test fix was part of 6a715107fa89b060175ef0c0ae0827b409411543 --- salt/client/ssh/wrapper/slsutil.py | 8 ++++---- tests/pytests/integration/ssh/state/conftest.py | 2 +- tests/pytests/integration/ssh/test_config.py | 4 +++- tests/pytests/unit/client/ssh/wrapper/test_config.py | 1 - 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/salt/client/ssh/wrapper/slsutil.py b/salt/client/ssh/wrapper/slsutil.py index e09ca1c2984..586d09ad2d6 100644 --- a/salt/client/ssh/wrapper/slsutil.py +++ b/salt/client/ssh/wrapper/slsutil.py @@ -173,7 +173,7 @@ def renderer(path=None, string=None, default_renderer="jinja|yaml", **kwargs): default_renderer, __opts__["renderer_blacklist"], __opts__["renderer_whitelist"], - **kwargs + **kwargs, ) return ret.read() if salt.utils.stringio.is_readable(ret) else ret @@ -185,12 +185,12 @@ def _get_serialize_fn(serializer, fn_name): if not fns: raise salt.exceptions.CommandExecutionError( - "Serializer '{}' not found.".format(serializer) + f"Serializer '{serializer}' not found." ) if not fn: raise salt.exceptions.CommandExecutionError( - "Serializer '{}' does not implement {}.".format(serializer, fn_name) + f"Serializer '{serializer}' does not implement {fn_name}." 
) return fn @@ -419,7 +419,7 @@ def findup(startpath, filenames, saltenv="base"): # Verify the cwd is a valid path in the state tree if startpath and not path_exists(startpath, saltenv): raise salt.exceptions.SaltInvocationError( - "Starting path not found in the state tree: {}".format(startpath) + f"Starting path not found in the state tree: {startpath}" ) # Ensure that patterns is a string or list of strings diff --git a/tests/pytests/integration/ssh/state/conftest.py b/tests/pytests/integration/ssh/state/conftest.py index 14d645ae8e8..9de0d6bcad8 100644 --- a/tests/pytests/integration/ssh/state/conftest.py +++ b/tests/pytests/integration/ssh/state/conftest.py @@ -17,7 +17,7 @@ def state_tree(base_env_state_tree_root_dir): state_file = """ {%- from "map.jinja" import abc with context %} Ok with {{ abc }}: - test.succeed_without_changes + test.succeed_with_changes """ top_tempfile = pytest.helpers.temp_file( "top.sls", top_file, base_env_state_tree_root_dir diff --git a/tests/pytests/integration/ssh/test_config.py b/tests/pytests/integration/ssh/test_config.py index d3ae2b03a3e..7f38ec5a0a8 100644 --- a/tests/pytests/integration/ssh/test_config.py +++ b/tests/pytests/integration/ssh/test_config.py @@ -16,7 +16,9 @@ def test_items(salt_ssh_cli): @pytest.mark.parametrize("omit", (False, True)) def test_option_minion_opt(salt_ssh_cli, omit): # Minion opt - ret = salt_ssh_cli.run("config.option", "id", omit_opts=omit, omit_grains=True) + ret = salt_ssh_cli.run( + "config.option", "id", omit_opts=omit, omit_grains=True, omit_master=True + ) assert ret.returncode == 0 assert (ret.data != salt_ssh_cli.get_minion_tgt()) is omit assert (ret.data == "") is omit diff --git a/tests/pytests/unit/client/ssh/wrapper/test_config.py b/tests/pytests/unit/client/ssh/wrapper/test_config.py index 64e89c762ad..a708b925fdf 100644 --- a/tests/pytests/unit/client/ssh/wrapper/test_config.py +++ b/tests/pytests/unit/client/ssh/wrapper/test_config.py @@ -3,7 +3,6 @@ This tests the SSH 
wrapper module. """ - import fnmatch import pytest From ec663ec3a7c4b771669c724829bb65efc0763cbb Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Tue, 25 Jun 2024 00:28:19 -0700 Subject: [PATCH 148/160] move comment to helpful docstring --- salt/config/__init__.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/salt/config/__init__.py b/salt/config/__init__.py index b6749868bd4..6d0bc947fc2 100644 --- a/salt/config/__init__.py +++ b/salt/config/__init__.py @@ -2216,6 +2216,10 @@ def include_config(include, orig_path, verbose, exit_on_config_errors=False): def should_prepend_root_dir(key, opts): + """ + Prepend root dir only when the key exists, has a value, and that value is + not a URI. + """ return ( key in opts and opts[key] is not None @@ -2519,7 +2523,6 @@ def syndic_config( "autosign_grains_dir", ] for config_key in ("log_file", "key_logfile", "syndic_log_file"): - # If this is not a URI and instead a local path if should_prepend_root_dir(config_key, opts): prepend_root_dirs.append(config_key) prepend_root_dir(opts, prepend_root_dirs) From a359f9188fe4cf2ffa13d638ba4dd812e561c23a Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Tue, 25 Jun 2024 01:08:16 -0700 Subject: [PATCH 149/160] More fips test fixes --- tests/integration/loader/test_ext_grains.py | 1 + .../functional/cli/test_salt_deltaproxy.py | 19 ++++++++++++++++++- tests/pytests/integration/cluster/conftest.py | 16 ++++++++++++++++ .../pytests/integration/runners/test_match.py | 15 ++++++--------- 4 files changed, 41 insertions(+), 10 deletions(-) diff --git a/tests/integration/loader/test_ext_grains.py b/tests/integration/loader/test_ext_grains.py index 242519aa3f4..9d6d23dc781 100644 --- a/tests/integration/loader/test_ext_grains.py +++ b/tests/integration/loader/test_ext_grains.py @@ -16,6 +16,7 @@ from tests.support.case import ModuleCase from tests.support.runtests import RUNTIME_VARS +@pytest.mark.timeout(120) @pytest.mark.windows_whitelisted class LoaderGrainsTest(ModuleCase): """ diff --git a/tests/pytests/functional/cli/test_salt_deltaproxy.py b/tests/pytests/functional/cli/test_salt_deltaproxy.py index 022aa77efcf..27faf8ed3d0 100644 --- a/tests/pytests/functional/cli/test_salt_deltaproxy.py +++ b/tests/pytests/functional/cli/test_salt_deltaproxy.py @@ -9,6 +9,7 @@ import pytest from saltfactories.utils import random_string import salt.defaults.exitcodes +from tests.conftest import FIPS_TESTRUN from tests.support.helpers import PRE_PYTEST_SKIP_REASON log = logging.getLogger(__name__) @@ -29,7 +30,14 @@ def salt_master(salt_factories): "open_mode": True, } salt_master = salt_factories.salt_master_daemon( - "deltaproxy-functional-master", defaults=config_defaults + "deltaproxy-functional-master", + defaults=config_defaults, + overrides={ + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), + }, ) with salt_master.started(): yield salt_master @@ -172,6 +180,15 @@ def test_exit_status_correct_usage_large_number_of_minions( factory = salt_master.salt_proxy_minion_daemon( proxy_minion_id, defaults=config_defaults, + overrides={ + 
"fips_mode": FIPS_TESTRUN, + "encryption_algorithm": ( + "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1" + ), + "signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), + }, extra_cli_arguments_after_first_start_failure=["--log-level=info"], start_timeout=240, ) diff --git a/tests/pytests/integration/cluster/conftest.py b/tests/pytests/integration/cluster/conftest.py index c62e034426e..4520ad55403 100644 --- a/tests/pytests/integration/cluster/conftest.py +++ b/tests/pytests/integration/cluster/conftest.py @@ -4,6 +4,7 @@ import subprocess import pytest import salt.utils.platform +from tests.conftest import FIPS_TESTRUN log = logging.getLogger(__name__) @@ -51,6 +52,10 @@ def cluster_master_1(request, salt_factories, cluster_pki_path, cluster_cache_pa "salt.channel": "debug", "salt.utils.event": "debug", }, + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), } factory = salt_factories.salt_master_daemon( "127.0.0.1", @@ -86,6 +91,10 @@ def cluster_master_2(salt_factories, cluster_master_1): "salt.channel": "debug", "salt.utils.event": "debug", }, + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), } # Use the same ports for both masters, they are binding to different interfaces @@ -128,6 +137,10 @@ def cluster_master_3(salt_factories, cluster_master_1): "salt.channel": "debug", "salt.utils.event": "debug", }, + "fips_mode": FIPS_TESTRUN, + "publish_signing_algorithm": ( + "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" + ), } # Use the same ports for both masters, they are binding to different interfaces @@ -163,6 +176,9 @@ def cluster_minion_1(cluster_master_1): "salt.channel": "debug", "salt.utils.event": "debug", }, + "fips_mode": FIPS_TESTRUN, + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else 
"PKCS1v15-SHA1", } factory = cluster_master_1.salt_minion_daemon( "cluster-minion-1", diff --git a/tests/pytests/integration/runners/test_match.py b/tests/pytests/integration/runners/test_match.py index 96f6c6bc70a..2ae7409c0a6 100644 --- a/tests/pytests/integration/runners/test_match.py +++ b/tests/pytests/integration/runners/test_match.py @@ -93,9 +93,8 @@ def match_salt_minion_alice(match_salt_master): defaults={"open_mode": True, "grains": {"role": "alice"}}, overrides={ "fips_mode": FIPS_TESTRUN, - "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" - ), + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", }, ) with factory.started(): @@ -114,9 +113,8 @@ def match_salt_minion_eve(match_salt_master): defaults={"open_mode": True, "grains": {"role": "eve"}}, overrides={ "fips_mode": FIPS_TESTRUN, - "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" - ), + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", }, ) with factory.started(): @@ -135,9 +133,8 @@ def match_salt_minion_bob(match_salt_master): defaults={"open_mode": True}, overrides={ "fips_mode": FIPS_TESTRUN, - "publish_signing_algorithm": ( - "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1" - ), + "encryption_algorithm": "OAEP-SHA224" if FIPS_TESTRUN else "OAEP-SHA1", + "signing_algorithm": "PKCS1v15-SHA224" if FIPS_TESTRUN else "PKCS1v15-SHA1", }, ) with factory.started(): From 24d5a4ea4b4ffd2b70f3aebb0db98d369dd8b382 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Tue, 25 Jun 2024 02:48:14 -0700 Subject: [PATCH 150/160] Skip flawed test --- tests/pytests/integration/daemons/test_memory_leak.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/pytests/integration/daemons/test_memory_leak.py b/tests/pytests/integration/daemons/test_memory_leak.py index d61bb85b736..44111647a04 100644 --- a/tests/pytests/integration/daemons/test_memory_leak.py +++ b/tests/pytests/integration/daemons/test_memory_leak.py @@ -37,6 +37,9 @@ def file_add_delete_sls(tmp_path, salt_master): yield sls_name +# This test is fundamentally flawed. Needs to be re-factored to test the memory +# consumption of the minion process not system wide memory. +@pytest.mark.skip(reason="Flawed test") @pytest.mark.skip_on_fips_enabled_platform @pytest.mark.skip_on_windows(reason="Windows is a spawning platform, won't work") @pytest.mark.skip_on_darwin(reason="MacOS is a spawning platform, won't work") From 89442132ff687b90df91e1d51ac43ea7dfb00bc9 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Sat, 22 Jun 2024 01:18:10 -0700 Subject: [PATCH 151/160] Add documentation about Salt's master package user --- doc/ref/configuration/master.rst | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/doc/ref/configuration/master.rst b/doc/ref/configuration/master.rst index f056680cac5..b26f069251f 100644 --- a/doc/ref/configuration/master.rst +++ b/doc/ref/configuration/master.rst @@ -96,6 +96,14 @@ The user to run the Salt processes user: root +.. note:: + + Starting with version `3006.0`, Salt's official packages ship with a default + configuration which runs the Master as a non-privileged user. The Master's + configuration file has the `user` option set to `user: salt`. Unless you + are absolutely sure you want to run salt as some other user, care should be + taken to preserve this setting in your Master configuration file. + ..
conf_master:: ret_port ``enable_ssh_minions`` From 78c29e16799f79bfc7f5745a6ea91b4b287f36b1 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Tue, 25 Jun 2024 02:04:33 -0700 Subject: [PATCH 152/160] Bump pymysql to 1.1.1 --- requirements/static/ci/linux.in | 2 +- requirements/static/ci/py3.10/lint.txt | 2 +- requirements/static/ci/py3.10/linux.txt | 2 +- requirements/static/ci/py3.11/lint.txt | 2 +- requirements/static/ci/py3.11/linux.txt | 2 +- requirements/static/ci/py3.12/lint.txt | 2 +- requirements/static/ci/py3.12/linux.txt | 2 +- requirements/static/ci/py3.7/linux.txt | 2 +- requirements/static/ci/py3.8/lint.txt | 2 +- requirements/static/ci/py3.8/linux.txt | 2 +- requirements/static/ci/py3.9/lint.txt | 2 +- requirements/static/ci/py3.9/linux.txt | 2 +- requirements/static/pkg/py3.10/windows.txt | 2 +- requirements/static/pkg/py3.11/windows.txt | 2 +- requirements/static/pkg/py3.12/windows.txt | 2 +- requirements/static/pkg/py3.7/windows.txt | 2 +- requirements/static/pkg/py3.8/windows.txt | 2 +- requirements/static/pkg/py3.9/windows.txt | 2 +- requirements/windows.txt | 2 +- 19 files changed, 19 insertions(+), 19 deletions(-) diff --git a/requirements/static/ci/linux.in b/requirements/static/ci/linux.in index 8d247386d61..c52c53dc58b 100644 --- a/requirements/static/ci/linux.in +++ b/requirements/static/ci/linux.in @@ -3,7 +3,7 @@ pyiface pygit2>=1.10.1 -pymysql>=1.0.2 +pymysql>=1.1.1 ansible>=9.1.0; python_version >= '3.10' twilio python-telegram-bot>=13.7 diff --git a/requirements/static/ci/py3.10/lint.txt b/requirements/static/ci/py3.10/lint.txt index 415b717d338..486f5600846 100644 --- a/requirements/static/ci/py3.10/lint.txt +++ b/requirements/static/ci/py3.10/lint.txt @@ -440,7 +440,7 @@ pylint==3.1.0 # via # -r requirements/static/ci/lint.in # saltpylint -pymysql==1.0.2 +pymysql==1.1.1 # via # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/ci/linux.in diff --git a/requirements/static/ci/py3.10/linux.txt 
b/requirements/static/ci/py3.10/linux.txt index c77f42ab986..408bb559306 100644 --- a/requirements/static/ci/py3.10/linux.txt +++ b/requirements/static/ci/py3.10/linux.txt @@ -317,7 +317,7 @@ pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and plat # via -r requirements/static/ci/common.in pyjwt==2.4.0 # via twilio -pymysql==1.0.2 +pymysql==1.1.1 # via -r requirements/static/ci/linux.in pynacl==1.5.0 # via diff --git a/requirements/static/ci/py3.11/lint.txt b/requirements/static/ci/py3.11/lint.txt index 120555d68e4..0f82eeb9812 100644 --- a/requirements/static/ci/py3.11/lint.txt +++ b/requirements/static/ci/py3.11/lint.txt @@ -415,7 +415,7 @@ pylint==3.1.0 # via # -r requirements/static/ci/lint.in # saltpylint -pymysql==1.0.2 +pymysql==1.1.1 # via # -c requirements/static/ci/py3.11/linux.txt # -r requirements/static/ci/linux.in diff --git a/requirements/static/ci/py3.11/linux.txt b/requirements/static/ci/py3.11/linux.txt index 485b10fe04d..50f9d2e6c23 100644 --- a/requirements/static/ci/py3.11/linux.txt +++ b/requirements/static/ci/py3.11/linux.txt @@ -298,7 +298,7 @@ pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and plat # via -r requirements/static/ci/common.in pyjwt==2.4.0 # via twilio -pymysql==1.0.2 +pymysql==1.1.1 # via -r requirements/static/ci/linux.in pynacl==1.5.0 # via diff --git a/requirements/static/ci/py3.12/lint.txt b/requirements/static/ci/py3.12/lint.txt index c0cb40aea24..561121643c3 100644 --- a/requirements/static/ci/py3.12/lint.txt +++ b/requirements/static/ci/py3.12/lint.txt @@ -415,7 +415,7 @@ pylint==3.1.0 # via # -r requirements/static/ci/lint.in # saltpylint -pymysql==1.0.2 +pymysql==1.1.1 # via # -c requirements/static/ci/py3.12/linux.txt # -r requirements/static/ci/linux.in diff --git a/requirements/static/ci/py3.12/linux.txt b/requirements/static/ci/py3.12/linux.txt index a647e737fb1..b5f17364acf 100644 --- a/requirements/static/ci/py3.12/linux.txt +++ 
b/requirements/static/ci/py3.12/linux.txt @@ -298,7 +298,7 @@ pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and plat # via -r requirements/static/ci/common.in pyjwt==2.4.0 # via twilio -pymysql==1.0.2 +pymysql==1.1.1 # via -r requirements/static/ci/linux.in pynacl==1.5.0 # via diff --git a/requirements/static/ci/py3.7/linux.txt b/requirements/static/ci/py3.7/linux.txt index 45d302e07ef..4cef9ee8a92 100644 --- a/requirements/static/ci/py3.7/linux.txt +++ b/requirements/static/ci/py3.7/linux.txt @@ -351,7 +351,7 @@ pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and plat # via -r requirements/static/ci/common.in pyjwt==2.4.0 # via twilio -pymysql==1.0.2 +pymysql==1.1.1 # via -r requirements/static/ci/linux.in pynacl==1.5.0 # via diff --git a/requirements/static/ci/py3.8/lint.txt b/requirements/static/ci/py3.8/lint.txt index 0f29de9ca21..10379323d13 100644 --- a/requirements/static/ci/py3.8/lint.txt +++ b/requirements/static/ci/py3.8/lint.txt @@ -466,7 +466,7 @@ pylint==3.1.0 # via # -r requirements/static/ci/lint.in # saltpylint -pymysql==1.0.2 +pymysql==1.1.1 # via # -c requirements/static/ci/py3.8/linux.txt # -r requirements/static/ci/linux.in diff --git a/requirements/static/ci/py3.8/linux.txt b/requirements/static/ci/py3.8/linux.txt index a941172a1b4..81e4f5887dd 100644 --- a/requirements/static/ci/py3.8/linux.txt +++ b/requirements/static/ci/py3.8/linux.txt @@ -337,7 +337,7 @@ pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and plat # via -r requirements/static/ci/common.in pyjwt==2.4.0 # via twilio -pymysql==1.0.2 +pymysql==1.1.1 # via -r requirements/static/ci/linux.in pynacl==1.5.0 # via diff --git a/requirements/static/ci/py3.9/lint.txt b/requirements/static/ci/py3.9/lint.txt index 528d6b19a6d..1d9c6c700e8 100644 --- a/requirements/static/ci/py3.9/lint.txt +++ b/requirements/static/ci/py3.9/lint.txt @@ -464,7 +464,7 @@ pylint==3.1.0 # via # -r requirements/static/ci/lint.in # 
saltpylint -pymysql==1.0.2 +pymysql==1.1.1 # via # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/ci/linux.in diff --git a/requirements/static/ci/py3.9/linux.txt b/requirements/static/ci/py3.9/linux.txt index 7ceba7b213f..b51f0334b34 100644 --- a/requirements/static/ci/py3.9/linux.txt +++ b/requirements/static/ci/py3.9/linux.txt @@ -337,7 +337,7 @@ pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and plat # via -r requirements/static/ci/common.in pyjwt==2.4.0 # via twilio -pymysql==1.0.2 +pymysql==1.1.1 # via -r requirements/static/ci/linux.in pynacl==1.5.0 # via diff --git a/requirements/static/pkg/py3.10/windows.txt b/requirements/static/pkg/py3.10/windows.txt index 16140376647..f4f37456d24 100644 --- a/requirements/static/pkg/py3.10/windows.txt +++ b/requirements/static/pkg/py3.10/windows.txt @@ -88,7 +88,7 @@ pycryptodomex==3.19.1 # via -r requirements/crypto.txt pymssql==2.2.7 # via -r requirements/windows.txt -pymysql==1.0.2 +pymysql==1.1.1 # via -r requirements/windows.txt pyopenssl==24.0.0 # via -r requirements/windows.txt diff --git a/requirements/static/pkg/py3.11/windows.txt b/requirements/static/pkg/py3.11/windows.txt index 8648d4d0a94..4a320588e3d 100644 --- a/requirements/static/pkg/py3.11/windows.txt +++ b/requirements/static/pkg/py3.11/windows.txt @@ -88,7 +88,7 @@ pycryptodomex==3.19.1 # via -r requirements/crypto.txt pymssql==2.2.7 # via -r requirements/windows.txt -pymysql==1.0.2 +pymysql==1.1.1 # via -r requirements/windows.txt pyopenssl==24.0.0 # via -r requirements/windows.txt diff --git a/requirements/static/pkg/py3.12/windows.txt b/requirements/static/pkg/py3.12/windows.txt index 57a8aae61de..eedfb1bcc35 100644 --- a/requirements/static/pkg/py3.12/windows.txt +++ b/requirements/static/pkg/py3.12/windows.txt @@ -88,7 +88,7 @@ pycryptodomex==3.19.1 # via -r requirements/crypto.txt pymssql==2.2.7 # via -r requirements/windows.txt -pymysql==1.0.2 +pymysql==1.1.1 # via -r requirements/windows.txt 
pyopenssl==24.0.0 # via -r requirements/windows.txt diff --git a/requirements/static/pkg/py3.7/windows.txt b/requirements/static/pkg/py3.7/windows.txt index 9328483442a..c89d669a617 100644 --- a/requirements/static/pkg/py3.7/windows.txt +++ b/requirements/static/pkg/py3.7/windows.txt @@ -88,7 +88,7 @@ pycryptodomex==3.19.1 # via -r requirements/crypto.txt pymssql==2.2.1 # via -r requirements/windows.txt -pymysql==1.0.2 +pymysql==1.1.1 # via -r requirements/windows.txt pyopenssl==24.0.0 # via -r requirements/windows.txt diff --git a/requirements/static/pkg/py3.8/windows.txt b/requirements/static/pkg/py3.8/windows.txt index 4aad72e3e2f..8d96a3e9d88 100644 --- a/requirements/static/pkg/py3.8/windows.txt +++ b/requirements/static/pkg/py3.8/windows.txt @@ -88,7 +88,7 @@ pycryptodomex==3.19.1 # via -r requirements/crypto.txt pymssql==2.2.1 # via -r requirements/windows.txt -pymysql==1.0.2 +pymysql==1.1.1 # via -r requirements/windows.txt pyopenssl==24.0.0 # via -r requirements/windows.txt diff --git a/requirements/static/pkg/py3.9/windows.txt b/requirements/static/pkg/py3.9/windows.txt index 4f1d8efd7d8..096a32f28a0 100644 --- a/requirements/static/pkg/py3.9/windows.txt +++ b/requirements/static/pkg/py3.9/windows.txt @@ -88,7 +88,7 @@ pycryptodomex==3.19.1 # via -r requirements/crypto.txt pymssql==2.2.1 # via -r requirements/windows.txt -pymysql==1.0.2 +pymysql==1.1.1 # via -r requirements/windows.txt pyopenssl==24.0.0 # via -r requirements/windows.txt diff --git a/requirements/windows.txt b/requirements/windows.txt index 05884f31a70..ed5a71964c9 100644 --- a/requirements/windows.txt +++ b/requirements/windows.txt @@ -16,7 +16,7 @@ lxml>=4.6.3 pyasn1>=0.4.8 pycparser>=2.21 pymssql>=2.2.1 -pymysql>=1.0.2 +pymysql>=1.1.1 pyopenssl>=23.2.0 python-dateutil>=2.8.1 python-gnupg>=0.4.7 From 3821987b9dc5e3b6a1838c1732a863849a1fa6ae Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Tue, 25 Jun 2024 02:08:24 -0700 Subject: [PATCH 153/160] Add chengelog for dependency version updates --- changelog/66666.fixed.md | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 changelog/66666.fixed.md diff --git a/changelog/66666.fixed.md b/changelog/66666.fixed.md new file mode 100644 index 00000000000..076088f4d0c --- /dev/null +++ b/changelog/66666.fixed.md @@ -0,0 +1,4 @@ +Upgrade dependencies due to security issues: +- pymysql>=1.1.1 +- requests>=2.32.0 +- docker>=7.1.0 From 54ebfae38a745d316a6ee7d8bff6f282beaae4f7 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Tue, 25 Jun 2024 08:44:11 -0700 Subject: [PATCH 154/160] Remove pymysql from windows core requirements --- requirements/static/pkg/py3.10/windows.txt | 2 -- requirements/static/pkg/py3.11/windows.txt | 2 -- requirements/static/pkg/py3.12/windows.txt | 2 -- requirements/static/pkg/py3.7/windows.txt | 2 -- requirements/static/pkg/py3.8/windows.txt | 2 -- requirements/static/pkg/py3.9/windows.txt | 2 -- requirements/windows.txt | 1 - 7 files changed, 13 deletions(-) diff --git a/requirements/static/pkg/py3.10/windows.txt b/requirements/static/pkg/py3.10/windows.txt index f4f37456d24..bfe57f0e836 100644 --- a/requirements/static/pkg/py3.10/windows.txt +++ b/requirements/static/pkg/py3.10/windows.txt @@ -88,8 +88,6 @@ pycryptodomex==3.19.1 # via -r requirements/crypto.txt pymssql==2.2.7 # via -r requirements/windows.txt -pymysql==1.1.1 - # via -r requirements/windows.txt pyopenssl==24.0.0 # via -r requirements/windows.txt python-dateutil==2.8.1 diff --git a/requirements/static/pkg/py3.11/windows.txt b/requirements/static/pkg/py3.11/windows.txt index 4a320588e3d..e06961fc0b4 100644 --- a/requirements/static/pkg/py3.11/windows.txt +++ b/requirements/static/pkg/py3.11/windows.txt @@ -88,8 +88,6 @@ pycryptodomex==3.19.1 # via -r requirements/crypto.txt pymssql==2.2.7 # via -r requirements/windows.txt -pymysql==1.1.1 - # via -r requirements/windows.txt pyopenssl==24.0.0 # 
via -r requirements/windows.txt python-dateutil==2.8.1 diff --git a/requirements/static/pkg/py3.12/windows.txt b/requirements/static/pkg/py3.12/windows.txt index eedfb1bcc35..cc34a95759e 100644 --- a/requirements/static/pkg/py3.12/windows.txt +++ b/requirements/static/pkg/py3.12/windows.txt @@ -88,8 +88,6 @@ pycryptodomex==3.19.1 # via -r requirements/crypto.txt pymssql==2.2.7 # via -r requirements/windows.txt -pymysql==1.1.1 - # via -r requirements/windows.txt pyopenssl==24.0.0 # via -r requirements/windows.txt python-dateutil==2.8.1 diff --git a/requirements/static/pkg/py3.7/windows.txt b/requirements/static/pkg/py3.7/windows.txt index c89d669a617..3643bf7d4e1 100644 --- a/requirements/static/pkg/py3.7/windows.txt +++ b/requirements/static/pkg/py3.7/windows.txt @@ -88,8 +88,6 @@ pycryptodomex==3.19.1 # via -r requirements/crypto.txt pymssql==2.2.1 # via -r requirements/windows.txt -pymysql==1.1.1 - # via -r requirements/windows.txt pyopenssl==24.0.0 # via -r requirements/windows.txt python-dateutil==2.8.1 diff --git a/requirements/static/pkg/py3.8/windows.txt b/requirements/static/pkg/py3.8/windows.txt index 8d96a3e9d88..d90856f7480 100644 --- a/requirements/static/pkg/py3.8/windows.txt +++ b/requirements/static/pkg/py3.8/windows.txt @@ -88,8 +88,6 @@ pycryptodomex==3.19.1 # via -r requirements/crypto.txt pymssql==2.2.1 # via -r requirements/windows.txt -pymysql==1.1.1 - # via -r requirements/windows.txt pyopenssl==24.0.0 # via -r requirements/windows.txt python-dateutil==2.8.1 diff --git a/requirements/static/pkg/py3.9/windows.txt b/requirements/static/pkg/py3.9/windows.txt index 096a32f28a0..b41378856dd 100644 --- a/requirements/static/pkg/py3.9/windows.txt +++ b/requirements/static/pkg/py3.9/windows.txt @@ -88,8 +88,6 @@ pycryptodomex==3.19.1 # via -r requirements/crypto.txt pymssql==2.2.1 # via -r requirements/windows.txt -pymysql==1.1.1 - # via -r requirements/windows.txt pyopenssl==24.0.0 # via -r requirements/windows.txt python-dateutil==2.8.1 diff --git 
a/requirements/windows.txt b/requirements/windows.txt index ed5a71964c9..ecb512cf343 100644 --- a/requirements/windows.txt +++ b/requirements/windows.txt @@ -16,7 +16,6 @@ lxml>=4.6.3 pyasn1>=0.4.8 pycparser>=2.21 pymssql>=2.2.1 -pymysql>=1.1.1 pyopenssl>=23.2.0 python-dateutil>=2.8.1 python-gnupg>=0.4.7 From 679605891d5da1091402cc9b2fb91228028055de Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Wed, 26 Jun 2024 03:59:49 -0700 Subject: [PATCH 155/160] Skip test on photon --- tests/integration/loader/test_ext_grains.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/integration/loader/test_ext_grains.py b/tests/integration/loader/test_ext_grains.py index 9d6d23dc781..b1c36ace68e 100644 --- a/tests/integration/loader/test_ext_grains.py +++ b/tests/integration/loader/test_ext_grains.py @@ -16,7 +16,9 @@ from tests.support.case import ModuleCase from tests.support.runtests import RUNTIME_VARS -@pytest.mark.timeout(120) +@pytest.mark.skip_on_photonos( + reason="Consistent failures on photon, test needs refactoring" +) @pytest.mark.windows_whitelisted class LoaderGrainsTest(ModuleCase): """ From 83dc2acece98ebac5788ed8efbd487548513095a Mon Sep 17 00:00:00 2001 From: "Daniel A.
Wozniak" Date: Thu, 27 Jun 2024 05:44:10 -0700 Subject: [PATCH 156/160] Fix pre-commit on 3006.x --- requirements/static/ci/py3.10/windows.txt | 4 ---- requirements/static/ci/py3.11/windows.txt | 4 ---- requirements/static/ci/py3.12/windows.txt | 4 ---- requirements/static/ci/py3.7/windows.txt | 4 ---- requirements/static/ci/py3.8/windows.txt | 4 ---- requirements/static/ci/py3.9/windows.txt | 4 ---- 6 files changed, 24 deletions(-) diff --git a/requirements/static/ci/py3.10/windows.txt b/requirements/static/ci/py3.10/windows.txt index 5b9624a13bf..7b8227fe106 100644 --- a/requirements/static/ci/py3.10/windows.txt +++ b/requirements/static/ci/py3.10/windows.txt @@ -287,10 +287,6 @@ pymssql==2.2.7 # via # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/windows.txt -pymysql==1.0.2 - # via - # -c requirements/static/ci/../pkg/py3.10/windows.txt - # -r requirements/windows.txt pynacl==1.5.0 # via -r requirements/static/ci/common.in pyopenssl==24.0.0 diff --git a/requirements/static/ci/py3.11/windows.txt b/requirements/static/ci/py3.11/windows.txt index 10d280d88e3..bba963d83c4 100644 --- a/requirements/static/ci/py3.11/windows.txt +++ b/requirements/static/ci/py3.11/windows.txt @@ -283,10 +283,6 @@ pymssql==2.2.7 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt # -r requirements/windows.txt -pymysql==1.0.2 - # via - # -c requirements/static/ci/../pkg/py3.11/windows.txt - # -r requirements/windows.txt pynacl==1.5.0 # via -r requirements/static/ci/common.in pyopenssl==24.0.0 diff --git a/requirements/static/ci/py3.12/windows.txt b/requirements/static/ci/py3.12/windows.txt index c104716e476..1399fedbb34 100644 --- a/requirements/static/ci/py3.12/windows.txt +++ b/requirements/static/ci/py3.12/windows.txt @@ -279,10 +279,6 @@ pyfakefs==5.3.1 # via -r requirements/pytest.txt pygit2==1.13.1 # via -r requirements/static/ci/windows.in -pymssql==2.2.7 - # via - # -c requirements/static/ci/../pkg/py3.12/windows.txt - # -r requirements/windows.txt 
pymysql==1.0.2 # via # -c requirements/static/ci/../pkg/py3.12/windows.txt diff --git a/requirements/static/ci/py3.7/windows.txt b/requirements/static/ci/py3.7/windows.txt index a4876a1bb85..f80bc01b0fe 100644 --- a/requirements/static/ci/py3.7/windows.txt +++ b/requirements/static/ci/py3.7/windows.txt @@ -302,10 +302,6 @@ pymssql==2.2.1 # via # -c requirements/static/ci/../pkg/py3.7/windows.txt # -r requirements/windows.txt -pymysql==1.0.2 - # via - # -c requirements/static/ci/../pkg/py3.7/windows.txt - # -r requirements/windows.txt pynacl==1.5.0 # via -r requirements/static/ci/common.in pyopenssl==24.0.0 diff --git a/requirements/static/ci/py3.8/windows.txt b/requirements/static/ci/py3.8/windows.txt index b4b7b370aae..6e4fc76fa20 100644 --- a/requirements/static/ci/py3.8/windows.txt +++ b/requirements/static/ci/py3.8/windows.txt @@ -288,10 +288,6 @@ pymssql==2.2.1 # via # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/windows.txt -pymysql==1.0.2 - # via - # -c requirements/static/ci/../pkg/py3.8/windows.txt - # -r requirements/windows.txt pynacl==1.5.0 # via -r requirements/static/ci/common.in pyopenssl==24.0.0 diff --git a/requirements/static/ci/py3.9/windows.txt b/requirements/static/ci/py3.9/windows.txt index f286fbcccc3..df626b9c650 100644 --- a/requirements/static/ci/py3.9/windows.txt +++ b/requirements/static/ci/py3.9/windows.txt @@ -289,10 +289,6 @@ pymssql==2.2.1 # via # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/windows.txt -pymysql==1.0.2 - # via - # -c requirements/static/ci/../pkg/py3.9/windows.txt - # -r requirements/windows.txt pynacl==1.5.0 # via -r requirements/static/ci/common.in pyopenssl==24.0.0 From 6984ae0ca1309aa98ab68578f6be657a9301dc4f Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Thu, 27 Jun 2024 06:32:24 -0700 Subject: [PATCH 157/160] Upgrade relenv to 0.17.0 --- .github/workflows/ci.yml | 8 ++++---- .github/workflows/nightly.yml | 8 ++++---- .github/workflows/scheduled.yml | 8 ++++---- .github/workflows/staging.yml | 8 ++++---- changelog/66663.fixed.md | 1 + cicd/shared-gh-workflows-context.yml | 2 +- 6 files changed, 18 insertions(+), 17 deletions(-) create mode 100644 changelog/66663.fixed.md diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index bd3e777d3f9..20769929078 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -416,7 +416,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.16.1" + relenv-version: "0.17.0" python-version: "3.10.14" build-salt-onedir: @@ -432,7 +432,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.16.1" + relenv-version: "0.17.0" python-version: "3.10.14" build-pkgs-onedir: @@ -445,7 +445,7 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - relenv-version: "0.16.1" + relenv-version: "0.17.0" python-version: "3.10.14" source: "onedir" @@ -459,7 +459,7 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - relenv-version: "0.16.1" + relenv-version: "0.17.0" python-version: "3.10.14" source: "src" build-ci-deps: diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 
4f851c7d93c..93c4c09f55e 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -470,7 +470,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.16.1" + relenv-version: "0.17.0" python-version: "3.10.14" build-salt-onedir: @@ -486,7 +486,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.16.1" + relenv-version: "0.17.0" python-version: "3.10.14" build-pkgs-onedir: @@ -499,7 +499,7 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - relenv-version: "0.16.1" + relenv-version: "0.17.0" python-version: "3.10.14" source: "onedir" environment: nightly @@ -517,7 +517,7 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - relenv-version: "0.16.1" + relenv-version: "0.17.0" python-version: "3.10.14" source: "src" environment: nightly diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index ca2e0ab3bd7..2ab7dc11b6d 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -455,7 +455,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.16.1" + relenv-version: "0.17.0" python-version: "3.10.14" build-salt-onedir: @@ 
-471,7 +471,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.16.1" + relenv-version: "0.17.0" python-version: "3.10.14" build-pkgs-onedir: @@ -484,7 +484,7 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - relenv-version: "0.16.1" + relenv-version: "0.17.0" python-version: "3.10.14" source: "onedir" @@ -498,7 +498,7 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - relenv-version: "0.16.1" + relenv-version: "0.17.0" python-version: "3.10.14" source: "src" build-ci-deps: diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 3a1c39bd4ad..1313812f8ed 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -455,7 +455,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.16.1" + relenv-version: "0.17.0" python-version: "3.10.14" build-salt-onedir: @@ -471,7 +471,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.16.1" + relenv-version: "0.17.0" python-version: "3.10.14" build-pkgs-onedir: @@ -484,7 +484,7 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }} - relenv-version: "0.16.1" + relenv-version: "0.17.0" python-version: "3.10.14" source: "onedir" environment: staging @@ -502,7 +502,7 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - relenv-version: "0.16.1" + relenv-version: "0.17.0" python-version: "3.10.14" source: "src" environment: staging diff --git a/changelog/66663.fixed.md b/changelog/66663.fixed.md new file mode 100644 index 00000000000..14a40b4730e --- /dev/null +++ b/changelog/66663.fixed.md @@ -0,0 +1 @@ +Upgrade relenv to 0.17.0 (https://github.com/saltstack/relenv/blob/v0.17.0/CHANGELOG.md) diff --git a/cicd/shared-gh-workflows-context.yml b/cicd/shared-gh-workflows-context.yml index 9b2955f5974..9cd0641c739 100644 --- a/cicd/shared-gh-workflows-context.yml +++ b/cicd/shared-gh-workflows-context.yml @@ -1,6 +1,6 @@ nox_version: "2022.8.7" python_version: "3.10.14" -relenv_version: "0.16.1" +relenv_version: "0.17.0" mandatory_os_slugs: - rockylinux-9 - amazonlinux-2023-arm64 From 0c5022277523fadd73b1a0191cee0fd872aa0c96 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Sat, 29 Jun 2024 21:17:00 -0700 Subject: [PATCH 158/160] Fix pre-commit --- requirements/static/ci/py3.12/windows.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements/static/ci/py3.12/windows.txt b/requirements/static/ci/py3.12/windows.txt index 1399fedbb34..10db47cc116 100644 --- a/requirements/static/ci/py3.12/windows.txt +++ b/requirements/static/ci/py3.12/windows.txt @@ -279,7 +279,7 @@ pyfakefs==5.3.1 # via -r requirements/pytest.txt pygit2==1.13.1 # via -r requirements/static/ci/windows.in -pymysql==1.0.2 +pymssql==2.2.7 # via # -c requirements/static/ci/../pkg/py3.12/windows.txt # -r requirements/windows.txt From 7cbfcf027e0c405316cf45bdf09ec55489fbc374 Mon Sep 17 00:00:00 2001 From: Shane Lee Date: Tue, 4 Jun 2024 16:15:53 -0600 Subject: [PATCH 159/160] Add some more error output, use $env:TEMP --- pkg/windows/install_vs_buildtools.ps1 | 126 ++++++++++++++++---------- 1 file changed, 78 insertions(+), 48 deletions(-) diff --git a/pkg/windows/install_vs_buildtools.ps1 b/pkg/windows/install_vs_buildtools.ps1 index b92751550ce..5988ae5a1ae 100644 --- a/pkg/windows/install_vs_buildtools.ps1 +++ b/pkg/windows/install_vs_buildtools.ps1 @@ -25,6 +25,8 @@ param( [System.Net.ServicePointManager]::SecurityProtocol = [System.Net.SecurityProtocolType]::Tls12 $ProgressPreference = "SilentlyContinue" $ErrorActionPreference = "Stop" +# https://stackoverflow.com/a/67201331/4581998 +$env:PSModulePath = [Environment]::GetEnvironmentVariable('PSModulePath', 'Machine') #------------------------------------------------------------------------------- # Script Functions @@ -39,6 +41,43 @@ function Write-Result($result, $ForegroundColor="Green") { } } +function Add-Certificate { + [CmdletBinding()] + param( + + [Parameter(Mandatory=$true)] + # The path in the certstore (CERT:/LocalMachine/Root/) + [String] $Path, + + [Parameter(Mandatory=$true)] + # The path to the cert file for importing + [String] $File, + + 
[Parameter(Mandatory=$true)] + # The name of the cert file for importing + [String] $Name + + ) + + # Validation + if ( ! (Test-Path -Path $File)) { + Write-Host "Invalid path to certificate file" + exit 1 + } + + if (! (Test-Path -Path $Path) ) { + + Write-Host "Installing Certificate $Name`: " -NoNewLine + $output = Import-Certificate -FilePath $File -CertStoreLocation "Cert:\LocalMachine\Root" + if ( Test-Path -Path $Path ) { + Write-Result "Success" + } else { + Write-Result "Failed" -ForegroundColor Yellow + Write-Host $output + } + } +} + #------------------------------------------------------------------------------- # Start the Script #------------------------------------------------------------------------------- @@ -53,25 +92,32 @@ Write-Host $("-" * 80) # Dependency Variables $VS_BLD_TOOLS = "https://aka.ms/vs/15/release/vs_buildtools.exe" -$VS_CL_BIN = "${env:ProgramFiles(x86)}\Microsoft Visual Studio 14.0\VC\bin\cl.exe" -$MSBUILD_BIN = "${env:ProgramFiles(x86)}\Microsoft Visual Studio\2017\BuildTools\MSBuild\15.0\Bin\msbuild.exe" -$WIN10_SDK_RC = "${env:ProgramFiles(x86)}\Windows Kits\10\bin\10.0.17763.0\x64\rc.exe" +try { + # If VS is installed, you will be able to get the WMI Object MSFT_VSInstance + $VS_INST_LOC = $(Get-CimInstance MSFT_VSInstance -Namespace root/cimv2/vs).InstallLocation + $MSBUILD_BIN = $(Get-ChildItem "$VS_INST_LOC\MSBuild\*\Bin\msbuild.exe").FullName +} catch { + # If VS is not installed, this is the fallback for this installation + $MSBUILD_BIN = "${env:ProgramFiles(x86)}\Microsoft Visual Studio\2017\BuildTools\MSBuild\15.0\Bin\msbuild.exe" +} #------------------------------------------------------------------------------- # Visual Studio #------------------------------------------------------------------------------- -$install_build_tools = $false Write-Host "Confirming Presence of Visual Studio Build Tools: " -NoNewline -@($VS_CL_BIN, $MSBUILD_BIN, $WIN10_SDK_RC) | ForEach-Object { - if ( ! 
(Test-Path -Path $_) ) { - $install_build_tools = $true - } -} - -if ( $install_build_tools ) { +# We're only gonna look for msbuild.exe +if ( Test-Path -Path $MSBUILD_BIN ) { + Write-Result "Success" -ForegroundColor Green +} else { Write-Result "Missing" -ForegroundColor Yellow + try { + # If VS is installed, you will be able to get the WMI Object MSFT_VSInstance + Write-Host "Get VS Instance Information" + Get-CimInstance MSFT_VSInstance -Namespace root/cimv2/vs + } catch {} + Write-Host "Checking available disk space: " -NoNewLine $available = (Get-PSDrive $env:SystemDrive.Trim(":")).Free if ( $available -gt (1024 * 1024 * 1024 * 9.1) ) { @@ -101,7 +147,6 @@ if ( $install_build_tools ) { "--add Microsoft.VisualStudio.Workload.MSBuildTools", ` "--add Microsoft.VisualStudio.Workload.VCTools", ` "--add Microsoft.VisualStudio.Component.Windows81SDK", ` - "--add Microsoft.VisualStudio.Component.Windows10SDK.17763", ` "--add Microsoft.VisualStudio.Component.VC.140", ` "--lang en-US", ` "--includeRecommended", ` @@ -115,50 +160,35 @@ if ( $install_build_tools ) { exit 1 } - # Serial: 28cc3a25bfba44ac449a9b586b4339a + # Serial: 28cc3a25bfba44ac449a9b586b4339aa # Hash: 3b1efd3a66ea28b16697394703a72ca340a05bd5 - if (! 
(Test-Path -Path Cert:\LocalMachine\Root\3b1efd3a66ea28b16697394703a72ca340a05bd5) ) { - Write-Host "Installing Certificate Sign Root Certificate: " -NoNewLine - $proc = Start-Process -FilePath "certutil" ` - -ArgumentList "-addstore", ` - "Root", ` - "$($env:TEMP)\build_tools\certificates\manifestCounterSignRootCertificate.cer" ` - -PassThru -Wait -WindowStyle Hidden - if ( $proc.ExitCode -eq 0 ) { - Write-Result "Success" -ForegroundColor Green - } else { - Write-Result "Failed" -ForegroundColor Yellow - } - } + $cert_name = "Sign Root Certificate" + $cert_path = "Cert:\LocalMachine\Root\3b1efd3a66ea28b16697394703a72ca340a05bd5" + $cert_file = "$env:TEMP\build_tools\certificates\manifestCounterSignRootCertificate.cer" + Add-Certificate -Name $cert_name -Path $cert_path -File $cert_file # Serial: 3f8bc8b5fc9fb29643b569d66c42e144 # Hash: 8f43288ad272f3103b6fb1428485ea3014c0bcfe - if (! (Test-Path -Path Cert:\LocalMachine\Root\8f43288ad272f3103b6fb1428485ea3014c0bcfe) ) { - Write-Host "Installing Certificate Root Certificate: " -NoNewLine - $proc = Start-Process -FilePath "certutil" ` - -ArgumentList "-addstore", ` - "Root", ` - "$($env:TEMP)\build_tools\certificates\manifestRootCertificate.cer" ` - -PassThru -Wait -WindowStyle Hidden - if ( $proc.ExitCode -eq 0 ) { - Write-Result "Success" -ForegroundColor Green - } else { - Write-Result "Failed" -ForegroundColor Yellow - } - } + $cert_name = "Root Certificate" + $cert_path = "Cert:\LocalMachine\Root\8f43288ad272f3103b6fb1428485ea3014c0bcfe" + $cert_file = "$env:TEMP\build_tools\certificates\manifestRootCertificate.cer" + Add-Certificate -Name $cert_name -Path $cert_path -File $cert_file Write-Host "Installing Visual Studio 2017 build tools: " -NoNewline - $proc = Start-Process -FilePath "$env:TEMP\build_tools\vs_setup.exe" ` - -ArgumentList "--wait", "--noweb", "--quiet" ` - -Wait -PassThru - if ( $proc.ExitCode -eq 0 ) { - Write-Result "Success" -ForegroundColor Green - } else { - Write-Result "Failed" 
-ForegroundColor Yellow + $proc = Start-Process ` + -FilePath "$env:TEMP\build_tools\vs_setup.exe" ` + -ArgumentList "--wait", "--noweb", "--quiet" ` + -PassThru -Wait ` + -RedirectStandardOutput "$env:TEMP\stdout.txt" + if ( ! (Test-Path -Path $MSBUILD_BIN) ) { + Write-Result "Failed" -ForegroundColor Red + Write-Host "Missing: $MSBUILD_BIN" + Write-Host "ExitCode: $($proc.ExitCode)" + Write-Host "STDOUT:" + Get-Content "$env:TEMP\stdout.txt" + exit 1 } Write-Result "Success" -ForegroundColor Green -} else { - Write-Result "Success" -ForegroundColor Green } #------------------------------------------------------------------------------- From a09558bb27201e91212008243590ea6a54c6feb6 Mon Sep 17 00:00:00 2001 From: David Murphy Date: Tue, 2 Jul 2024 15:38:31 -0600 Subject: [PATCH 160/160] Corrected missed line in branch 3006.x when backporting from PR 61620 and 65044 --- changelog/66683.fixed.md | 1 + salt/modules/ipset.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog/66683.fixed.md diff --git a/changelog/66683.fixed.md b/changelog/66683.fixed.md new file mode 100644 index 00000000000..2917188fa63 --- /dev/null +++ b/changelog/66683.fixed.md @@ -0,0 +1 @@ +Corrected missed line in branch 3006.x when backporting from PR 61620 and 65044 diff --git a/salt/modules/ipset.py b/salt/modules/ipset.py index b666bd0957e..daa32dd3fb1 100644 --- a/salt/modules/ipset.py +++ b/salt/modules/ipset.py @@ -327,7 +327,7 @@ def new_set(name=None, set_type=None, family="ipv4", comment=False, **kwargs): # Family only valid for certain set types if "family" in _CREATE_OPTIONS[set_type]: - cmd.extend(["family", cmd, ipset_family]) + cmd.extend(["family", ipset_family]) if comment: cmd.append("comment")