Merge pull request #65981 from s0undt3ch/hotfix/merge-forward-into-master

[master] Merge 3007.x into master
Pedro Algarvio 2024-02-02 18:16:58 +00:00 committed by GitHub
commit 6dd2f1783d
GPG key ID: B5690EEEBB952194
98 changed files with 19584 additions and 12744 deletions


@ -172,7 +172,7 @@ jobs:
- name: Get Hash For Nox Tarball Cache
id: nox-archive-hash
run: |
echo "nox-archive-hash=${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py', 'pkg/common/env-cleanup-rules.yml') }}" | tee -a "$GITHUB_OUTPUT"
echo "nox-archive-hash=${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py', 'pkg/common/env-cleanup-rules.yml', '.github/workflows/build-deps-ci-action.yml') }}" | tee -a "$GITHUB_OUTPUT"
- name: Write Changed Files To A Local File
run:
@ -1995,6 +1995,7 @@ jobs:
nox --force-color -e create-xml-coverage-reports
- name: Upload Code Coverage To Codecov
if: ${{ ! github.event.repository.private }}
run: |
tools ci upload-coverage --commit-sha=${{ github.event.pull_request.head.sha || github.sha }} artifacts/coverage/


@ -224,7 +224,7 @@ jobs:
- name: Get Hash For Nox Tarball Cache
id: nox-archive-hash
run: |
echo "nox-archive-hash=${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py', 'pkg/common/env-cleanup-rules.yml') }}" | tee -a "$GITHUB_OUTPUT"
echo "nox-archive-hash=${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py', 'pkg/common/env-cleanup-rules.yml', '.github/workflows/build-deps-ci-action.yml') }}" | tee -a "$GITHUB_OUTPUT"
- name: Write Changed Files To A Local File
run:
@ -2055,6 +2055,7 @@ jobs:
nox --force-color -e create-xml-coverage-reports
- name: Upload Code Coverage To Codecov
if: ${{ ! github.event.repository.private }}
run: |
tools ci upload-coverage --commit-sha=${{ github.event.pull_request.head.sha || github.sha }} artifacts/coverage/
@ -2264,6 +2265,14 @@ jobs:
distro: ubuntu
version: "22.04"
arch: arm64
- pkg-type: deb
distro: ubuntu
version: "23.04"
arch: x86_64
- pkg-type: deb
distro: ubuntu
version: "23.04"
arch: arm64
steps:
- uses: actions/checkout@v4
@ -2377,54 +2386,6 @@ jobs:
distro: amazon
version: "2023"
arch: aarch64
- pkg-type: rpm
distro: redhat
version: "7"
arch: x86_64
- pkg-type: rpm
distro: redhat
version: "7"
arch: arm64
- pkg-type: rpm
distro: redhat
version: "7"
arch: aarch64
- pkg-type: rpm
distro: redhat
version: "8"
arch: x86_64
- pkg-type: rpm
distro: redhat
version: "8"
arch: arm64
- pkg-type: rpm
distro: redhat
version: "8"
arch: aarch64
- pkg-type: rpm
distro: redhat
version: "9"
arch: x86_64
- pkg-type: rpm
distro: redhat
version: "9"
arch: arm64
- pkg-type: rpm
distro: redhat
version: "9"
arch: aarch64
- pkg-type: rpm
distro: fedora
version: "36"
arch: x86_64
- pkg-type: rpm
distro: fedora
version: "36"
arch: arm64
- pkg-type: rpm
distro: fedora
version: "36"
arch: aarch64
- pkg-type: rpm
distro: fedora
version: "37"
@ -2485,6 +2446,42 @@ jobs:
distro: photon
version: "5"
arch: aarch64
- pkg-type: rpm
distro: redhat
version: "7"
arch: x86_64
- pkg-type: rpm
distro: redhat
version: "7"
arch: arm64
- pkg-type: rpm
distro: redhat
version: "7"
arch: aarch64
- pkg-type: rpm
distro: redhat
version: "8"
arch: x86_64
- pkg-type: rpm
distro: redhat
version: "8"
arch: arm64
- pkg-type: rpm
distro: redhat
version: "8"
arch: aarch64
- pkg-type: rpm
distro: redhat
version: "9"
arch: x86_64
- pkg-type: rpm
distro: redhat
version: "9"
arch: arm64
- pkg-type: rpm
distro: redhat
version: "9"
arch: aarch64
steps:
- uses: actions/checkout@v4


@ -117,7 +117,7 @@ jobs:
- name: Get Hash For Nox Tarball Cache
id: nox-archive-hash
run: |
echo "nox-archive-hash=${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py', 'pkg/common/env-cleanup-rules.yml') }}" | tee -a "$GITHUB_OUTPUT"
echo "nox-archive-hash=${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py', 'pkg/common/env-cleanup-rules.yml', '.github/workflows/build-deps-ci-action.yml') }}" | tee -a "$GITHUB_OUTPUT"
download-onedir-artifact:
name: Download Staging Onedir Artifact


@ -214,7 +214,7 @@ jobs:
- name: Get Hash For Nox Tarball Cache
id: nox-archive-hash
run: |
echo "nox-archive-hash=${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py', 'pkg/common/env-cleanup-rules.yml') }}" | tee -a "$GITHUB_OUTPUT"
echo "nox-archive-hash=${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py', 'pkg/common/env-cleanup-rules.yml', '.github/workflows/build-deps-ci-action.yml') }}" | tee -a "$GITHUB_OUTPUT"
- name: Write Changed Files To A Local File
run:
@ -2037,6 +2037,7 @@ jobs:
nox --force-color -e create-xml-coverage-reports
- name: Upload Code Coverage To Codecov
if: ${{ ! github.event.repository.private }}
run: |
tools ci upload-coverage --commit-sha=${{ github.event.pull_request.head.sha || github.sha }} artifacts/coverage/


@ -202,7 +202,7 @@ jobs:
- name: Get Hash For Nox Tarball Cache
id: nox-archive-hash
run: |
echo "nox-archive-hash=${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py', 'pkg/common/env-cleanup-rules.yml') }}" | tee -a "$GITHUB_OUTPUT"
echo "nox-archive-hash=${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py', 'pkg/common/env-cleanup-rules.yml', '.github/workflows/build-deps-ci-action.yml') }}" | tee -a "$GITHUB_OUTPUT"
- name: Check Existing Releases
env:
@ -2104,6 +2104,14 @@ jobs:
distro: ubuntu
version: "22.04"
arch: arm64
- pkg-type: deb
distro: ubuntu
version: "23.04"
arch: x86_64
- pkg-type: deb
distro: ubuntu
version: "23.04"
arch: arm64
steps:
- uses: actions/checkout@v4
@ -2217,54 +2225,6 @@ jobs:
distro: amazon
version: "2023"
arch: aarch64
- pkg-type: rpm
distro: redhat
version: "7"
arch: x86_64
- pkg-type: rpm
distro: redhat
version: "7"
arch: arm64
- pkg-type: rpm
distro: redhat
version: "7"
arch: aarch64
- pkg-type: rpm
distro: redhat
version: "8"
arch: x86_64
- pkg-type: rpm
distro: redhat
version: "8"
arch: arm64
- pkg-type: rpm
distro: redhat
version: "8"
arch: aarch64
- pkg-type: rpm
distro: redhat
version: "9"
arch: x86_64
- pkg-type: rpm
distro: redhat
version: "9"
arch: arm64
- pkg-type: rpm
distro: redhat
version: "9"
arch: aarch64
- pkg-type: rpm
distro: fedora
version: "36"
arch: x86_64
- pkg-type: rpm
distro: fedora
version: "36"
arch: arm64
- pkg-type: rpm
distro: fedora
version: "36"
arch: aarch64
- pkg-type: rpm
distro: fedora
version: "37"
@ -2325,6 +2285,42 @@ jobs:
distro: photon
version: "5"
arch: aarch64
- pkg-type: rpm
distro: redhat
version: "7"
arch: x86_64
- pkg-type: rpm
distro: redhat
version: "7"
arch: arm64
- pkg-type: rpm
distro: redhat
version: "7"
arch: aarch64
- pkg-type: rpm
distro: redhat
version: "8"
arch: x86_64
- pkg-type: rpm
distro: redhat
version: "8"
arch: arm64
- pkg-type: rpm
distro: redhat
version: "8"
arch: aarch64
- pkg-type: rpm
distro: redhat
version: "9"
arch: x86_64
- pkg-type: rpm
distro: redhat
version: "9"
arch: arm64
- pkg-type: rpm
distro: redhat
version: "9"
arch: aarch64
steps:
- uses: actions/checkout@v4


@ -376,6 +376,7 @@
nox --force-color -e create-xml-coverage-reports
- name: Upload Code Coverage To Codecov
if: ${{ ! github.event.repository.private }}
run: |
tools ci upload-coverage --commit-sha=${{ github.event.pull_request.head.sha || github.sha }} artifacts/coverage/


@ -9,7 +9,7 @@
<%- set gpg_key_id = "64CBBC8173D76B3F" %>
<%- set prepare_actual_release = prepare_actual_release | default(False) %>
<%- set gh_actions_workflows_python_version = "3.10" %>
<%- set nox_archive_hashfiles = "${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py', 'pkg/common/env-cleanup-rules.yml') }}" %>
<%- set nox_archive_hashfiles = "${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py', 'pkg/common/env-cleanup-rules.yml', '.github/workflows/build-deps-ci-action.yml') }}" %>
---
<%- block name %>
name: <{ workflow_name }>


@ -86,7 +86,7 @@ jobs:
- name: Download Onedir Tarball as an Artifact
uses: actions/download-artifact@v4
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch == 'aarch64' && 'arm64' || matrix.arch }}.tar.xz
path: artifacts/
- name: Decompress Onedir Tarball
@ -94,12 +94,12 @@ jobs:
run: |
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch == 'aarch64' && 'arm64' || matrix.arch }}.tar.xz
- name: Download nox.linux.${{ matrix.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
- name: Download nox.linux.${{ matrix.arch == 'aarch64' && 'arm64' || matrix.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
uses: actions/download-artifact@v4
with:
name: nox-linux-${{ matrix.arch }}-${{ inputs.nox-session }}
name: nox-linux-${{ matrix.arch == 'aarch64' && 'arm64' || matrix.arch }}-${{ inputs.nox-session }}
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts


@ -231,7 +231,7 @@ jobs:
- name: Download Onedir Tarball as an Artifact
uses: actions/download-artifact@v4
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch == 'aarch64' && 'arm64' || matrix.arch }}.tar.xz
path: artifacts/
- name: Decompress Onedir Tarball
@ -239,12 +239,12 @@ jobs:
run: |
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch == 'aarch64' && 'arm64' || matrix.arch }}.tar.xz
- name: Download nox.linux.${{ matrix.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
- name: Download nox.linux.${{ matrix.arch == 'aarch64' && 'arm64' || matrix.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
uses: actions/download-artifact@v4
with:
name: nox-linux-${{ matrix.arch }}-${{ inputs.nox-session }}
name: nox-linux-${{ matrix.arch == 'aarch64' && 'arm64' || matrix.arch }}-${{ inputs.nox-session }}
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts


@ -179,7 +179,48 @@ Versions are `MAJOR.PATCH`.
- Update to `gitpython>=3.1.35` due to https://github.com/advisories/GHSA-wfm5-v35h-vwf4 and https://github.com/advisories/GHSA-cwvm-v4w8-q58c [#65137](https://github.com/saltstack/salt/issues/65137)
## 3006.6 (2024-01-26)
### Changed
- Salt no longer time bombs user installations on code using `salt.utils.versions.warn_until_date` [#665924](https://github.com/saltstack/salt/issues/665924)
### Fixed
- Fix un-closed transport in tornado netapi [#65759](https://github.com/saltstack/salt/issues/65759)
### Security
- CVE-2024-22231 Prevent directory traversal when creating syndic cache directory on the master
CVE-2024-22232 Prevent directory traversal attacks in the master's serve_file method.
These vulnerabilities were discovered and reported by:
Yudi Zhao (Huawei Nebula Security Lab), Chenwei Jiang (Huawei Nebula Security Lab) [#565](https://github.com/saltstack/salt/issues/565)
- Update some requirements which had some security issues:
* Bump to `pycryptodome==3.19.1` and `pycryptodomex==3.19.1` due to https://github.com/advisories/GHSA-j225-cvw7-qrx7
* Bump to `gitpython==3.1.41` due to https://github.com/advisories/GHSA-2mqj-m65w-jghx
* Bump to `jinja2==3.1.3` due to https://github.com/advisories/GHSA-h5c8-rqwp-cp95 [#65830](https://github.com/saltstack/salt/issues/65830)
## 3006.5 (2023-12-12)
Salt 3005.5 (2024-01-19)
========================
Security
--------
- Fix CVE-2024-22231 Prevent directory traversal when creating syndic cache directory on the master.
- Fix CVE-2024-22232 Prevent directory traversal attacks in the master's serve_file method.
These vulnerabilities were discovered and reported by:
Yudi Zhao (Huawei Nebula Security Lab), Chenwei Jiang (Huawei Nebula Security Lab) (#565)
Salt v3005.4 (2023-10-16)
=========================
### Removed

changelog/52289.fixed.md Normal file

@ -0,0 +1 @@
Fixed an issue when keys didn't match because of line endings
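The underlying fix is the `ReqServerChannel._clean_key` helper added to `salt.channel.server` further down in this diff: both sides of the public-key comparison are stripped of CR/LF characters before being compared. A minimal sketch of the idea, using a shortened, hypothetical key body:

    def _clean_key(key):
        # normalize line endings so a CRLF key matches its LF counterpart
        return key.strip().replace("\r", "").replace("\n", "")

    pem_crlf = "-----BEGIN PUBLIC KEY-----\r\nMIIBIjAN...\r\n-----END PUBLIC KEY-----\r\n"
    pem_lf = "-----BEGIN PUBLIC KEY-----\nMIIBIjAN...\n-----END PUBLIC KEY-----\n"

    assert pem_crlf.strip() != pem_lf.strip()          # the old comparison tripped over line endings
    assert _clean_key(pem_crlf) == _clean_key(pem_lf)  # the cleaned keys compare equal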


@ -1 +0,0 @@
Fix un-closed transport in tornado netapi


@ -1,5 +0,0 @@
Update some requirements which had some security issues:
* Bump to `pycryptodome==3.19.1` and `pycryptodomex==3.19.1` due to https://github.com/advisories/GHSA-j225-cvw7-qrx7
* Bump to `gitpython==3.1.41` due to https://github.com/advisories/GHSA-2mqj-m65w-jghx
* Bump to `jinja2==3.1.3` due to https://github.com/advisories/GHSA-h5c8-rqwp-cp95


@ -0,0 +1 @@
Change module search path priority, so Salt extensions can be overridden by syncable modules and module_dirs. You can switch back to the old logic by setting features.enable_deprecated_module_search_path_priority to true, but it will be removed in Salt 3008.
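A condensed sketch of toggling the flag from Python, assuming a Salt 3007 checkout; the functional test added later in this merge drives `salt.loader._module_dirs` the same way:

    import salt.config
    import salt.loader

    # start from the built-in minion defaults; a real minion would load its own config file
    minion_opts = dict(salt.config.DEFAULT_MINION_OPTS)

    # new default ordering: syncable modules (extension_modules) and module_dirs
    # are searched before Salt-extension entry points
    print(salt.loader._module_dirs(minion_opts, "modules", "module"))

    # opt back into the pre-3007 ordering; this knob is removed in Salt 3008
    minion_opts["features"] = {"enable_deprecated_module_search_path_priority": True}
    print(salt.loader._module_dirs(minion_opts, "modules", "module"))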


@ -0,0 +1 @@
Deprecate and stop using ``salt.features``
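Call sites in this merge (the runner/wheel functions and the x509 state modules shown later in the diff) now read feature flags from the loaded configuration instead of importing `salt.features`. A minimal sketch of the before/after pattern, using a plain dict to stand in for the loaded opts:

    # deprecated pattern: salt.features.Features.get() now emits a
    # warn_until(3008, ...) deprecation warning
    #     from salt.features import features
    #     if features.get("x509_v2"): ...

    # new pattern: feature flags live under the "features" key of the opts dictionary
    opts = {"features": {"x509_v2": True}}
    if opts["features"].get("x509_v2"):
        print("x509_v2 feature flag is enabled")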


@ -1 +0,0 @@
Salt no longer time bombs user installations on code using `salt.utils.versions.warn_until_date`


@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.TH "SALT-API" "1" "Generated on January 02, 2024 at 09:39:27 PM UTC." "3007.0" "Salt"
.TH "SALT-API" "1" "Generated on January 26, 2024 at 11:57:28 AM UTC." "3006.6" "Salt"
.SH NAME
salt-api \- salt-api Command
.sp


@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.TH "SALT-CALL" "1" "Generated on January 02, 2024 at 09:39:27 PM UTC." "3007.0" "Salt"
.TH "SALT-CALL" "1" "Generated on January 26, 2024 at 11:57:28 AM UTC." "3006.6" "Salt"
.SH NAME
salt-call \- salt-call Documentation
.SH SYNOPSIS


@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.TH "SALT-CLOUD" "1" "Generated on January 02, 2024 at 09:39:27 PM UTC." "3007.0" "Salt"
.TH "SALT-CLOUD" "1" "Generated on January 26, 2024 at 11:57:28 AM UTC." "3006.6" "Salt"
.SH NAME
salt-cloud \- Salt Cloud Command
.sp


@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.TH "SALT-CP" "1" "Generated on January 02, 2024 at 09:39:27 PM UTC." "3007.0" "Salt"
.TH "SALT-CP" "1" "Generated on January 26, 2024 at 11:57:28 AM UTC." "3006.6" "Salt"
.SH NAME
salt-cp \- salt-cp Documentation
.sp


@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.TH "SALT-KEY" "1" "Generated on January 02, 2024 at 09:39:27 PM UTC." "3007.0" "Salt"
.TH "SALT-KEY" "1" "Generated on January 26, 2024 at 11:57:28 AM UTC." "3006.6" "Salt"
.SH NAME
salt-key \- salt-key Documentation
.SH SYNOPSIS


@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.TH "SALT-MASTER" "1" "Generated on January 02, 2024 at 09:39:27 PM UTC." "3007.0" "Salt"
.TH "SALT-MASTER" "1" "Generated on January 26, 2024 at 11:57:28 AM UTC." "3006.6" "Salt"
.SH NAME
salt-master \- salt-master Documentation
.sp


@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.TH "SALT-MINION" "1" "Generated on January 02, 2024 at 09:39:27 PM UTC." "3007.0" "Salt"
.TH "SALT-MINION" "1" "Generated on January 26, 2024 at 11:57:28 AM UTC." "3006.6" "Salt"
.SH NAME
salt-minion \- salt-minion Documentation
.sp


@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.TH "SALT-PROXY" "1" "Generated on January 02, 2024 at 09:39:27 PM UTC." "3007.0" "Salt"
.TH "SALT-PROXY" "1" "Generated on January 26, 2024 at 11:57:28 AM UTC." "3006.6" "Salt"
.SH NAME
salt-proxy \- salt-proxy Documentation
.sp


@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.TH "SALT-RUN" "1" "Generated on January 02, 2024 at 09:39:27 PM UTC." "3007.0" "Salt"
.TH "SALT-RUN" "1" "Generated on January 26, 2024 at 11:57:28 AM UTC." "3006.6" "Salt"
.SH NAME
salt-run \- salt-run Documentation
.sp


@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.TH "SALT-SSH" "1" "Generated on January 02, 2024 at 09:39:27 PM UTC." "3007.0" "Salt"
.TH "SALT-SSH" "1" "Generated on January 26, 2024 at 11:57:28 AM UTC." "3006.6" "Salt"
.SH NAME
salt-ssh \- salt-ssh Documentation
.SH SYNOPSIS


@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.TH "SALT-SYNDIC" "1" "Generated on January 02, 2024 at 09:39:27 PM UTC." "3007.0" "Salt"
.TH "SALT-SYNDIC" "1" "Generated on January 26, 2024 at 11:57:28 AM UTC." "3006.6" "Salt"
.SH NAME
salt-syndic \- salt-syndic Documentation
.sp


@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.TH "SALT" "1" "Generated on January 02, 2024 at 09:39:27 PM UTC." "3007.0" "Salt"
.TH "SALT" "1" "Generated on January 26, 2024 at 11:57:28 AM UTC." "3006.6" "Salt"
.SH NAME
salt \- salt
.SH SYNOPSIS

File diff suppressed because it is too large


@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.TH "SPM" "1" "Generated on January 02, 2024 at 09:39:27 PM UTC." "3007.0" "Salt"
.TH "SPM" "1" "Generated on January 26, 2024 at 11:57:28 AM UTC." "3006.6" "Salt"
.SH NAME
spm \- Salt Package Manager Command
.sp


@ -0,0 +1,16 @@
.. _release-3005-5:
=========================
Salt 3005.5 Release Notes
=========================
Version 3005.5 is a CVE security fix release for :ref:`3005 <release-3005>`.
Security
--------
- Fix CVE-2024-22231 by preventing directory traversal when creating syndic cache directory on the master.
- Fix CVE-2024-22232 Prevent directory traversal attacks in the master's serve_file method.
These vulnerabilities were discovered and reported by:
Yudi Zhao (Huawei Nebula Security Lab), Chenwei Jiang (Huawei Nebula Security Lab) (#565)


@ -0,0 +1,41 @@
(release-3006.6)=
# Salt 3006.6 release notes
<!---
Do not edit this file. This is auto generated.
Edit the templates in doc/topics/releases/templates/
for a given release.
-->
<!--
Add release specific details below
-->
<!--
Do not edit the changelog below.
This is auto generated.
-->
## Changelog
### Changed
- Salt no longer time bombs user installations on code using `salt.utils.versions.warn_until_date` [#665924](https://github.com/saltstack/salt/issues/665924)
### Fixed
- Fix un-closed transport in tornado netapi [#65759](https://github.com/saltstack/salt/issues/65759)
### Security
- CVE-2024-22231 Prevent directory traversal when creating syndic cache directory on the master
CVE-2024-22232 Prevent directory traversal attacks in the master's serve_file method.
These vulnerabilities were discovered and reported by:
Yudi Zhao (Huawei Nebula Security Lab), Chenwei Jiang (Huawei Nebula Security Lab) [#565](https://github.com/saltstack/salt/issues/565)
- Update some requirements which had some security issues:
* Bump to `pycryptodome==3.19.1` and `pycryptodomex==3.19.1` due to https://github.com/advisories/GHSA-j225-cvw7-qrx7
* Bump to `gitpython==3.1.41` due to https://github.com/advisories/GHSA-2mqj-m65w-jghx
* Bump to `jinja2==3.1.3` due to https://github.com/advisories/GHSA-h5c8-rqwp-cp95 [#65830](https://github.com/saltstack/salt/issues/65830)


@ -0,0 +1,14 @@
(release-3006.6)=
# Salt 3006.6 release notes{{ unreleased }}
{{ warning }}
<!--
Add release specific details below
-->
<!--
Do not edit the changelog below.
This is auto generated.
-->
## Changelog
{{ changelog }}


@ -1,3 +1,29 @@
salt (3006.6) stable; urgency=medium
# Changed
* Salt no longer time bombs user installations on code using `salt.utils.versions.warn_until_date` [#665924](https://github.com/saltstack/salt/issues/665924)
# Fixed
* Fix un-closed transport in tornado netapi [#65759](https://github.com/saltstack/salt/issues/65759)
# Security
* CVE-2024-22231 Prevent directory traversal when creating syndic cache directory on the master
CVE-2024-22232 Prevent directory traversal attacks in the master's serve_file method.
These vulnerabilities were discovered and reported by:
Yudi Zhao (Huawei Nebula Security Lab), Chenwei Jiang (Huawei Nebula Security Lab) [#565](https://github.com/saltstack/salt/issues/565)
* Update some requirements which had some security issues:
* Bump to `pycryptodome==3.19.1` and `pycryptodomex==3.19.1` due to https://github.com/advisories/GHSA-j225-cvw7-qrx7
* Bump to `gitpython==3.1.41` due to https://github.com/advisories/GHSA-2mqj-m65w-jghx
* Bump to `jinja2==3.1.3` due to https://github.com/advisories/GHSA-h5c8-rqwp-cp95 [#65830](https://github.com/saltstack/salt/issues/65830)
-- Salt Project Packaging <saltproject-packaging@vmware.com> Fri, 26 Jan 2024 11:56:46 +0000
salt (3007.0rc1) stable; urgency=medium
@ -167,6 +193,7 @@ salt (3007.0rc1) stable; urgency=medium
-- Salt Project Packaging <saltproject-packaging@vmware.com> Tue, 02 Jan 2024 21:36:56 +0000
salt (3006.5) stable; urgency=medium


@ -583,6 +583,29 @@ fi
%changelog
* Fri Jan 26 2024 Salt Project Packaging <saltproject-packaging@vmware.com> - 3006.6
# Changed
- Salt no longer time bombs user installations on code using `salt.utils.versions.warn_until_date` [#665924](https://github.com/saltstack/salt/issues/665924)
# Fixed
- Fix un-closed transport in tornado netapi [#65759](https://github.com/saltstack/salt/issues/65759)
# Security
- CVE-2024-22231 Prevent directory traversal when creating syndic cache directory on the master
CVE-2024-22232 Prevent directory traversal attacks in the master's serve_file method.
These vulnerabilities were discovered and reported by:
Yudi Zhao (Huawei Nebula Security Lab), Chenwei Jiang (Huawei Nebula Security Lab) [#565](https://github.com/saltstack/salt/issues/565)
- Update some requirements which had some security issues:
* Bump to `pycryptodome==3.19.1` and `pycryptodomex==3.19.1` due to https://github.com/advisories/GHSA-j225-cvw7-qrx7
* Bump to `gitpython==3.1.41` due to https://github.com/advisories/GHSA-2mqj-m65w-jghx
* Bump to `jinja2==3.1.3` due to https://github.com/advisories/GHSA-h5c8-rqwp-cp95 [#65830](https://github.com/saltstack/salt/issues/65830)
* Tue Jan 02 2024 Salt Project Packaging <saltproject-packaging@vmware.com> - 3007.0~rc1
# Removed
@ -749,6 +772,7 @@ fi
- Update to `gitpython>=3.1.35` due to https://github.com/advisories/GHSA-wfm5-v35h-vwf4 and https://github.com/advisories/GHSA-cwvm-v4w8-q58c [#65137](https://github.com/saltstack/salt/issues/65137)
* Tue Dec 12 2023 Salt Project Packaging <saltproject-packaging@vmware.com> - 3006.5
# Removed


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.10/darwin.txt requirements/darwin.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/darwin.in requirements/static/pkg/darwin.in
#
aiohttp==3.9.1
aiohttp==3.9.2
# via
# -c requirements/static/ci/../pkg/py3.10/darwin.txt
# -r requirements/base.txt


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.10/freebsd.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/freebsd.in requirements/static/pkg/freebsd.in requirements/zeromq.txt
#
aiohttp==3.9.1
aiohttp==3.9.2
# via
# -c requirements/static/ci/../pkg/py3.10/freebsd.txt
# -r requirements/base.txt


@ -6,7 +6,7 @@
#
aiohttp-retry==2.8.3
# via twilio
aiohttp==3.9.1
aiohttp==3.9.2
# via
# -c requirements/static/ci/../pkg/py3.10/linux.txt
# -r requirements/base.txt


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.10/windows.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/windows.in requirements/static/pkg/windows.in requirements/windows.txt
#
aiohttp==3.9.1
aiohttp==3.9.2
# via
# -c requirements/static/ci/../pkg/py3.10/windows.txt
# -r requirements/base.txt


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/darwin.txt requirements/darwin.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/darwin.in requirements/static/pkg/darwin.in
#
aiohttp==3.9.1
aiohttp==3.9.2
# via
# -c requirements/static/ci/../pkg/py3.11/darwin.txt
# -r requirements/base.txt


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/freebsd.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/freebsd.in requirements/static/pkg/freebsd.in requirements/zeromq.txt
#
aiohttp==3.9.1
aiohttp==3.9.2
# via
# -c requirements/static/ci/../pkg/py3.11/freebsd.txt
# -r requirements/base.txt


@ -6,7 +6,7 @@
#
aiohttp-retry==2.8.3
# via twilio
aiohttp==3.9.1
aiohttp==3.9.2
# via
# -c requirements/static/ci/../pkg/py3.11/linux.txt
# -r requirements/base.txt


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/windows.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/windows.in requirements/static/pkg/windows.in requirements/windows.txt
#
aiohttp==3.9.1
aiohttp==3.9.2
# via
# -c requirements/static/ci/../pkg/py3.11/windows.txt
# -r requirements/base.txt


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/cloud.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/cloud.in requirements/static/ci/common.in requirements/static/pkg/linux.in requirements/zeromq.txt
#
aiohttp==3.9.1
aiohttp==3.9.2
# via
# -c requirements/static/ci/../pkg/py3.12/linux.txt
# -c requirements/static/ci/py3.12/linux.txt


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/darwin.txt requirements/darwin.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/darwin.in requirements/static/pkg/darwin.in
#
aiohttp==3.9.1
aiohttp==3.9.2
# via
# -c requirements/static/ci/../pkg/py3.12/darwin.txt
# -r requirements/base.txt


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/docs.txt requirements/base.txt requirements/static/ci/docs.in requirements/zeromq.txt
#
aiohttp==3.9.1
aiohttp==3.9.2
# via
# -c requirements/static/ci/py3.12/linux.txt
# -r requirements/base.txt


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/freebsd.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/freebsd.in requirements/static/pkg/freebsd.in requirements/zeromq.txt
#
aiohttp==3.9.1
aiohttp==3.9.2
# via
# -c requirements/static/ci/../pkg/py3.12/freebsd.txt
# -r requirements/base.txt


@ -8,7 +8,7 @@ aiohttp-retry==2.8.3
# via
# -c requirements/static/ci/py3.12/linux.txt
# twilio
aiohttp==3.9.1
aiohttp==3.9.2
# via
# -c requirements/static/ci/../pkg/py3.12/linux.txt
# -c requirements/static/ci/py3.12/linux.txt


@ -6,7 +6,7 @@
#
aiohttp-retry==2.8.3
# via twilio
aiohttp==3.9.1
aiohttp==3.9.2
# via
# -c requirements/static/ci/../pkg/py3.12/linux.txt
# -r requirements/base.txt


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/windows.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/windows.in requirements/static/pkg/windows.in requirements/windows.txt
#
aiohttp==3.9.1
aiohttp==3.9.2
# via
# -c requirements/static/ci/../pkg/py3.12/windows.txt
# -r requirements/base.txt


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.8/freebsd.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/freebsd.in requirements/static/pkg/freebsd.in requirements/zeromq.txt
#
aiohttp==3.9.1
aiohttp==3.9.2
# via
# -c requirements/static/ci/../pkg/py3.8/freebsd.txt
# -r requirements/base.txt


@ -6,7 +6,7 @@
#
aiohttp-retry==2.8.3
# via twilio
aiohttp==3.9.1
aiohttp==3.9.2
# via
# -c requirements/static/ci/../pkg/py3.8/linux.txt
# -r requirements/base.txt


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.8/windows.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/windows.in requirements/static/pkg/windows.in requirements/windows.txt
#
aiohttp==3.9.1
aiohttp==3.9.2
# via
# -c requirements/static/ci/../pkg/py3.8/windows.txt
# -r requirements/base.txt


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.9/darwin.txt requirements/darwin.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/darwin.in requirements/static/pkg/darwin.in
#
aiohttp==3.9.1
aiohttp==3.9.2
# via
# -c requirements/static/ci/../pkg/py3.9/darwin.txt
# -r requirements/base.txt


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.9/freebsd.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/freebsd.in requirements/static/pkg/freebsd.in requirements/zeromq.txt
#
aiohttp==3.9.1
aiohttp==3.9.2
# via
# -c requirements/static/ci/../pkg/py3.9/freebsd.txt
# -r requirements/base.txt


@ -6,7 +6,7 @@
#
aiohttp-retry==2.8.3
# via twilio
aiohttp==3.9.1
aiohttp==3.9.2
# via
# -c requirements/static/ci/../pkg/py3.9/linux.txt
# -r requirements/base.txt


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.9/windows.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/windows.in requirements/static/pkg/windows.in requirements/windows.txt
#
aiohttp==3.9.1
aiohttp==3.9.2
# via
# -c requirements/static/ci/../pkg/py3.9/windows.txt
# -r requirements/base.txt


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.10/darwin.txt requirements/darwin.txt requirements/static/pkg/darwin.in
#
aiohttp==3.9.1
aiohttp==3.9.2
# via -r requirements/base.txt
aiosignal==1.3.1
# via aiohttp


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.10/freebsd.txt requirements/base.txt requirements/static/pkg/freebsd.in requirements/zeromq.txt
#
aiohttp==3.9.1
aiohttp==3.9.2
# via -r requirements/base.txt
aiosignal==1.3.1
# via aiohttp


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.10/linux.txt requirements/base.txt requirements/static/pkg/linux.in requirements/zeromq.txt
#
aiohttp==3.9.1
aiohttp==3.9.2
# via -r requirements/base.txt
aiosignal==1.3.1
# via aiohttp


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.10/windows.txt requirements/static/pkg/windows.in requirements/windows.txt
#
aiohttp==3.9.1
aiohttp==3.9.2
# via -r requirements/base.txt
aiosignal==1.3.1
# via aiohttp


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.11/darwin.txt requirements/darwin.txt requirements/static/pkg/darwin.in
#
aiohttp==3.9.1
aiohttp==3.9.2
# via -r requirements/base.txt
aiosignal==1.3.1
# via aiohttp


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.11/freebsd.txt requirements/base.txt requirements/static/pkg/freebsd.in requirements/zeromq.txt
#
aiohttp==3.9.1
aiohttp==3.9.2
# via -r requirements/base.txt
aiosignal==1.3.1
# via aiohttp


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.11/linux.txt requirements/base.txt requirements/static/pkg/linux.in requirements/zeromq.txt
#
aiohttp==3.9.1
aiohttp==3.9.2
# via -r requirements/base.txt
aiosignal==1.3.1
# via aiohttp


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.11/windows.txt requirements/static/pkg/windows.in requirements/windows.txt
#
aiohttp==3.9.1
aiohttp==3.9.2
# via -r requirements/base.txt
aiosignal==1.3.1
# via aiohttp


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.12/darwin.txt requirements/darwin.txt requirements/static/pkg/darwin.in
#
aiohttp==3.9.1
aiohttp==3.9.2
# via -r requirements/base.txt
aiosignal==1.3.1
# via aiohttp


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.12/freebsd.txt requirements/base.txt requirements/static/pkg/freebsd.in requirements/zeromq.txt
#
aiohttp==3.9.1
aiohttp==3.9.2
# via -r requirements/base.txt
aiosignal==1.3.1
# via aiohttp


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.12/linux.txt requirements/base.txt requirements/static/pkg/linux.in requirements/zeromq.txt
#
aiohttp==3.9.1
aiohttp==3.9.2
# via -r requirements/base.txt
aiosignal==1.3.1
# via aiohttp


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.12/windows.txt requirements/static/pkg/windows.in requirements/windows.txt
#
aiohttp==3.9.1
aiohttp==3.9.2
# via -r requirements/base.txt
aiosignal==1.3.1
# via aiohttp


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.8/freebsd.txt requirements/base.txt requirements/static/pkg/freebsd.in requirements/zeromq.txt
#
aiohttp==3.9.1
aiohttp==3.9.2
# via -r requirements/base.txt
aiosignal==1.3.1
# via aiohttp


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.8/linux.txt requirements/base.txt requirements/static/pkg/linux.in requirements/zeromq.txt
#
aiohttp==3.9.1
aiohttp==3.9.2
# via -r requirements/base.txt
aiosignal==1.3.1
# via aiohttp


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.8/windows.txt requirements/static/pkg/windows.in requirements/windows.txt
#
aiohttp==3.9.1
aiohttp==3.9.2
# via -r requirements/base.txt
aiosignal==1.3.1
# via aiohttp


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.9/darwin.txt requirements/darwin.txt requirements/static/pkg/darwin.in
#
aiohttp==3.9.1
aiohttp==3.9.2
# via -r requirements/base.txt
aiosignal==1.3.1
# via aiohttp


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.9/freebsd.txt requirements/base.txt requirements/static/pkg/freebsd.in requirements/zeromq.txt
#
aiohttp==3.9.1
aiohttp==3.9.2
# via -r requirements/base.txt
aiosignal==1.3.1
# via aiohttp


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.9/linux.txt requirements/base.txt requirements/static/pkg/linux.in requirements/zeromq.txt
#
aiohttp==3.9.1
aiohttp==3.9.2
# via -r requirements/base.txt
aiosignal==1.3.1
# via aiohttp


@ -4,7 +4,7 @@
#
# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.9/windows.txt requirements/static/pkg/windows.in requirements/windows.txt
#
aiohttp==3.9.1
aiohttp==3.9.2
# via -r requirements/base.txt
aiosignal==1.3.1
# via aiohttp


@ -56,6 +56,10 @@ class ReqServerChannel:
transport = salt.transport.request_server(opts, **kwargs)
return cls(opts, transport)
@staticmethod
def _clean_key(key):
return key.strip().replace("\r", "").replace("\n", "")
def __init__(self, opts, transport):
self.opts = opts
self.transport = transport
@ -381,7 +385,7 @@ class ReqServerChannel:
elif os.path.isfile(pubfn):
# The key has been accepted, check it
with salt.utils.files.fopen(pubfn, "r") as pubfn_handle:
if pubfn_handle.read().strip() != load["pub"].strip():
if self._clean_key(pubfn_handle.read()) != self._clean_key(load["pub"]):
log.error(
"Authentication attempt from %s failed, the public "
"keys did not match. This may be an attempt to compromise "
@ -490,7 +494,9 @@ class ReqServerChannel:
# case. Otherwise log the fact that the minion is still
# pending.
with salt.utils.files.fopen(pubfn_pend, "r") as pubfn_handle:
if pubfn_handle.read() != load["pub"]:
if self._clean_key(pubfn_handle.read()) != self._clean_key(
load["pub"]
):
log.error(
"Authentication attempt from %s failed, the public "
"key in pending did not match. This may be an "
@ -546,7 +552,9 @@ class ReqServerChannel:
# so, pass on doing anything here, and let it get automatically
# accepted below.
with salt.utils.files.fopen(pubfn_pend, "r") as pubfn_handle:
if pubfn_handle.read() != load["pub"]:
if self._clean_key(pubfn_handle.read()) != self._clean_key(
load["pub"]
):
log.error(
"Authentication attempt from %s failed, the public "
"keys in pending did not match. This may be an "


@ -14,6 +14,7 @@ from copy import deepcopy
import salt.defaults.exitcodes
import salt.exceptions
import salt.features
import salt.syspaths
import salt.utils.data
import salt.utils.dictupdate
@ -1310,6 +1311,7 @@ DEFAULT_MINION_OPTS = immutabletypes.freeze(
"global_state_conditions": None,
"reactor_niceness": None,
"fips_mode": False,
"features": {},
}
)
@ -1658,6 +1660,7 @@ DEFAULT_MASTER_OPTS = immutabletypes.freeze(
"cluster_id": None,
"cluster_peers": [],
"cluster_pki_dir": None,
"features": {},
}
)
@ -1692,6 +1695,7 @@ DEFAULT_PROXY_MINION_OPTS = immutabletypes.freeze(
"pki_dir": os.path.join(salt.syspaths.LIB_STATE_DIR, "pki", "proxy"),
"cachedir": os.path.join(salt.syspaths.CACHE_DIR, "proxy"),
"sock_dir": os.path.join(salt.syspaths.SOCK_DIR, "proxy"),
"features": {},
}
)
@ -1725,6 +1729,7 @@ DEFAULT_CLOUD_OPTS = immutabletypes.freeze(
"log_rotate_backup_count": 0,
"bootstrap_delay": 0,
"cache": "localfs",
"features": {},
}
)
@ -2335,6 +2340,7 @@ def minion_config(
if role != "master":
apply_sdb(opts)
_validate_opts(opts)
salt.features.setup_features(opts)
return opts
@ -2346,6 +2352,7 @@ def mminion_config(path, overrides, ignore_config_errors=True):
_validate_opts(opts)
opts["grains"] = salt.loader.grains(opts)
opts["pillar"] = {}
salt.features.setup_features(opts)
return opts
@ -2432,6 +2439,7 @@ def proxy_config(
apply_sdb(opts)
_validate_opts(opts)
salt.features.setup_features(opts)
return opts
@ -2509,6 +2517,7 @@ def syndic_config(
if urllib.parse.urlparse(opts.get(config_key, "")).scheme == "":
prepend_root_dirs.append(config_key)
prepend_root_dir(opts, prepend_root_dirs)
salt.features.setup_features(opts)
return opts
@ -2761,6 +2770,7 @@ def cloud_config(
prepend_root_dirs.append(opts["log_file"])
prepend_root_dir(opts, prepend_root_dirs)
salt.features.setup_features(opts)
# Return the final options
return opts
@ -3938,6 +3948,7 @@ def master_config(
if salt.utils.data.is_dictlist(opts["nodegroups"]):
opts["nodegroups"] = salt.utils.data.repack_dictlist(opts["nodegroups"])
apply_sdb(opts)
salt.features.setup_features(opts)
return opts
@ -4200,6 +4211,7 @@ def client_config(path, env_var="SALT_CLIENT_CONFIG", defaults=None):
# Return the client options
_validate_opts(opts)
salt.features.setup_features(opts)
return opts
@ -4223,6 +4235,7 @@ def api_config(path):
)
prepend_root_dir(opts, ["api_pidfile", "api_logfile", "log_file", "pidfile"])
salt.features.setup_features(opts)
return opts


@ -21,6 +21,14 @@ class Features:
log.warning("Features already setup")
def get(self, key, default=None):
import salt.utils.versions
salt.utils.versions.warn_until(
3008,
"Please stop checking feature flags using 'salt.features' and instead "
"check the 'features' keyword on the configuration dictionary. The "
"'salt.features' module will go away in {version}.",
)
return self.features.get(key, default)


@ -321,9 +321,9 @@ def clear_lock(clear_func, role, remote=None, lock_type="update"):
Returns the return data from ``clear_func``.
"""
msg = "Clearing {} lock for {} remotes".format(lock_type, role)
msg = f"Clearing {lock_type} lock for {role} remotes"
if remote:
msg += " matching {}".format(remote)
msg += f" matching {remote}"
log.debug(msg)
return clear_func(remote=remote, lock_type=lock_type)
@ -376,12 +376,12 @@ class Fileserver:
# Only subtracting backends from enabled ones
ret = self.opts["fileserver_backend"]
for sub in back:
if "{}.envs".format(sub[1:]) in server_funcs:
if f"{sub[1:]}.envs" in server_funcs:
ret.remove(sub[1:])
return ret
for sub in back:
if "{}.envs".format(sub) in server_funcs:
if f"{sub}.envs" in server_funcs:
ret.append(sub)
return ret
@ -409,7 +409,7 @@ class Fileserver:
cleared = []
errors = []
for fsb in back:
fstr = "{}.clear_cache".format(fsb)
fstr = f"{fsb}.clear_cache"
if fstr in self.servers:
log.debug("Clearing %s fileserver cache", fsb)
failed = self.servers[fstr]()
@ -417,7 +417,7 @@ class Fileserver:
errors.extend(failed)
else:
cleared.append(
"The {} fileserver cache was successfully cleared".format(fsb)
f"The {fsb} fileserver cache was successfully cleared"
)
return cleared, errors
@ -431,17 +431,15 @@ class Fileserver:
locked = []
errors = []
for fsb in back:
fstr = "{}.lock".format(fsb)
fstr = f"{fsb}.lock"
if fstr in self.servers:
msg = "Setting update lock for {} remotes".format(fsb)
msg = f"Setting update lock for {fsb} remotes"
if remote:
if not isinstance(remote, str):
errors.append(
"Badly formatted remote pattern '{}'".format(remote)
)
errors.append(f"Badly formatted remote pattern '{remote}'")
continue
else:
msg += " matching {}".format(remote)
msg += f" matching {remote}"
log.debug(msg)
good, bad = self.servers[fstr](remote=remote)
locked.extend(good)
@ -464,7 +462,7 @@ class Fileserver:
cleared = []
errors = []
for fsb in back:
fstr = "{}.clear_lock".format(fsb)
fstr = f"{fsb}.clear_lock"
if fstr in self.servers:
good, bad = clear_lock(self.servers[fstr], fsb, remote=remote)
cleared.extend(good)
@ -478,7 +476,7 @@ class Fileserver:
"""
back = self.backends(back)
for fsb in back:
fstr = "{}.update".format(fsb)
fstr = f"{fsb}.update"
if fstr in self.servers:
log.debug("Updating %s fileserver cache", fsb)
self.servers[fstr](**kwargs)
@ -491,7 +489,7 @@ class Fileserver:
back = self.backends(back)
ret = {}
for fsb in back:
fstr = "{}.update_intervals".format(fsb)
fstr = f"{fsb}.update_intervals"
if fstr in self.servers:
ret[fsb] = self.servers[fstr]()
return ret
@ -505,7 +503,7 @@ class Fileserver:
if sources:
ret = {}
for fsb in back:
fstr = "{}.envs".format(fsb)
fstr = f"{fsb}.envs"
kwargs = (
{"ignore_cache": True}
if "ignore_cache" in _argspec(self.servers[fstr]).args
@ -535,7 +533,7 @@ class Fileserver:
"""
back = self.backends(back)
for fsb in back:
fstr = "{}.init".format(fsb)
fstr = f"{fsb}.init"
if fstr in self.servers:
self.servers[fstr]()
@ -568,11 +566,6 @@ class Fileserver:
saltenv = salt.utils.stringutils.to_unicode(saltenv)
back = self.backends(back)
kwargs = {}
fnd = {"path": "", "rel": ""}
if os.path.isabs(path):
return fnd
if "../" in path:
return fnd
if salt.utils.url.is_escaped(path):
# don't attempt to find URL query arguments in the path
path = salt.utils.url.unescape(path)
@ -588,6 +581,10 @@ class Fileserver:
args = comp.split("=", 1)
kwargs[args[0]] = args[1]
fnd = {"path": "", "rel": ""}
if os.path.isabs(path) or "../" in path:
return fnd
if "env" in kwargs:
# "env" is not supported; Use "saltenv".
kwargs.pop("env")
@ -598,7 +595,7 @@ class Fileserver:
saltenv = str(saltenv)
for fsb in back:
fstr = "{}.find_file".format(fsb)
fstr = f"{fsb}.find_file"
if fstr in self.servers:
fnd = self.servers[fstr](path, saltenv, **kwargs)
if fnd.get("path"):
@ -768,7 +765,7 @@ class Fileserver:
load["saltenv"] = str(load["saltenv"])
for fsb in self.backends(load.pop("fsbackend", None)):
fstr = "{}.file_list".format(fsb)
fstr = f"{fsb}.file_list"
if fstr in self.servers:
ret.update(self.servers[fstr](load))
# some *fs do not handle prefix. Ensure it is filtered
@ -793,7 +790,7 @@ class Fileserver:
load["saltenv"] = str(load["saltenv"])
for fsb in self.backends(None):
fstr = "{}.file_list_emptydirs".format(fsb)
fstr = f"{fsb}.file_list_emptydirs"
if fstr in self.servers:
ret.update(self.servers[fstr](load))
# some *fs do not handle prefix. Ensure it is filtered
@ -818,7 +815,7 @@ class Fileserver:
load["saltenv"] = str(load["saltenv"])
for fsb in self.backends(load.pop("fsbackend", None)):
fstr = "{}.dir_list".format(fsb)
fstr = f"{fsb}.dir_list"
if fstr in self.servers:
ret.update(self.servers[fstr](load))
# some *fs do not handle prefix. Ensure it is filtered
@ -843,7 +840,7 @@ class Fileserver:
load["saltenv"] = str(load["saltenv"])
for fsb in self.backends(load.pop("fsbackend", None)):
symlstr = "{}.symlink_list".format(fsb)
symlstr = f"{fsb}.symlink_list"
if symlstr in self.servers:
ret = self.servers[symlstr](load)
# some *fs do not handle prefix. Ensure it is filtered


@ -27,6 +27,7 @@ import salt.utils.hashutils
import salt.utils.path
import salt.utils.platform
import salt.utils.stringutils
import salt.utils.verify
import salt.utils.versions
log = logging.getLogger(__name__)
@ -98,6 +99,11 @@ def find_file(path, saltenv="base", **kwargs):
if saltenv == "__env__":
root = root.replace("__env__", actual_saltenv)
full = os.path.join(root, path)
# Refuse to serve file that is not under the root.
if not salt.utils.verify.clean_path(root, full, subdir=True):
continue
if os.path.isfile(full) and not salt.fileserver.is_file_ignored(__opts__, full):
fnd["path"] = full
fnd["rel"] = path
@ -128,6 +134,26 @@ def serve_file(load, fnd):
ret["dest"] = fnd["rel"]
gzip = load.get("gzip", None)
fpath = os.path.normpath(fnd["path"])
actual_saltenv = saltenv = load["saltenv"]
if saltenv not in __opts__["file_roots"]:
if "__env__" in __opts__["file_roots"]:
log.debug(
"salt environment '%s' maps to __env__ file_roots directory", saltenv
)
saltenv = "__env__"
else:
return fnd
file_in_root = False
for root in __opts__["file_roots"][saltenv]:
if saltenv == "__env__":
root = root.replace("__env__", actual_saltenv)
# Refuse to serve file that is not under the root.
if salt.utils.verify.clean_path(root, fpath, subdir=True):
file_in_root = True
if not file_in_root:
return ret
with salt.utils.files.fopen(fpath, "rb") as fp_:
fp_.seek(load["loc"])
data = fp_.read(__opts__["file_buffer_size"])
@ -193,9 +219,7 @@ def update():
os.makedirs(mtime_map_path_dir)
with salt.utils.files.fopen(mtime_map_path, "wb") as fp_:
for file_path, mtime in new_mtime_map.items():
fp_.write(
salt.utils.stringutils.to_bytes("{}:{}\n".format(file_path, mtime))
)
fp_.write(salt.utils.stringutils.to_bytes(f"{file_path}:{mtime}\n"))
if __opts__.get("fileserver_events", False):
# if there is a change, fire an event
@ -326,11 +350,11 @@ def _file_lists(load, form):
return []
list_cache = os.path.join(
list_cachedir,
"{}.p".format(salt.utils.files.safe_filename_leaf(actual_saltenv)),
f"{salt.utils.files.safe_filename_leaf(actual_saltenv)}.p",
)
w_lock = os.path.join(
list_cachedir,
".{}.w".format(salt.utils.files.safe_filename_leaf(actual_saltenv)),
f".{salt.utils.files.safe_filename_leaf(actual_saltenv)}.w",
)
cache_match, refresh_cache, save_cache = salt.fileserver.check_file_list_cache(
__opts__, form, list_cache, w_lock


@ -130,17 +130,18 @@ def _module_dirs(
):
if tag is None:
tag = ext_type
sys_types = os.path.join(base_path or str(SALT_BASE_PATH), int_type or ext_type)
return_types = [sys_types]
if opts.get("extension_modules"):
ext_types = os.path.join(opts["extension_modules"], ext_type)
return_types.insert(0, ext_types)
sys_types = [os.path.join(base_path or str(SALT_BASE_PATH), int_type or ext_type)]
if not sys_types.startswith(SALT_INTERNAL_LOADERS_PATHS):
if opts.get("extension_modules"):
ext_types = [os.path.join(opts["extension_modules"], ext_type)]
else:
ext_types = []
if not sys_types[0].startswith(SALT_INTERNAL_LOADERS_PATHS):
raise RuntimeError(
"{!r} is not considered a salt internal loader path. If this "
"is a new loader being added, please also add it to "
"{}.SALT_INTERNAL_LOADERS_PATHS.".format(sys_types, __name__)
"{}.SALT_INTERNAL_LOADERS_PATHS.".format(sys_types[0], __name__)
)
ext_type_types = []
@ -250,7 +251,17 @@ def _module_dirs(
if os.path.isdir(maybe_dir):
cli_module_dirs.insert(0, maybe_dir)
return cli_module_dirs + ext_type_types + return_types
if opts.get("features", {}).get(
"enable_deprecated_module_search_path_priority", False
):
salt.utils.versions.warn_until(
3008,
"The old module search path priority will be removed in Salt 3008. "
"For more information see https://github.com/saltstack/salt/pull/65938.",
)
return cli_module_dirs + ext_type_types + ext_types + sys_types
else:
return cli_module_dirs + ext_types + ext_type_types + sys_types
def minion_mods(


@ -1901,10 +1901,16 @@ class AESFuncs(TransportMethods):
self.mminion.returners[fstr](load["jid"], load["load"])
# Register the syndic
# We are creating a path using user supplied input. Use the
# clean_path to prevent a directory traversal.
root = os.path.join(self.opts["cachedir"], "syndics")
syndic_cache_path = os.path.join(
self.opts["cachedir"], "syndics", load["id"]
)
if not os.path.exists(syndic_cache_path):
if salt.utils.verify.clean_path(
root, syndic_cache_path
) and not os.path.exists(syndic_cache_path):
path_name = os.path.split(syndic_cache_path)[0]
if not os.path.exists(path_name):
os.makedirs(path_name)
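Both CVE fixes in this merge follow the same pattern: a path built from user-supplied input is only used if it still resolves to somewhere under the expected root (`salt.utils.verify.clean_path` here, and with `subdir=True` in the fileserver changes above). A standalone illustration of that check, using only the standard library rather than the real helper:

    import os

    root = "/var/cache/salt/master/syndics"

    def stays_under(root, path):
        # the resolved path must remain inside the syndic cache root
        root = os.path.realpath(root)
        path = os.path.realpath(path)
        return os.path.commonpath([root, path]) == root

    good = os.path.join(root, "syndic01")
    evil = os.path.join(root, "../../../../etc/cron.d/backdoor")

    assert stays_under(root, good)      # a normal syndic id stays in the cache root
    assert not stays_under(root, evil)  # a traversal attempt via the id is rejected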


@ -33,7 +33,6 @@ import salt.syspaths
import salt.utils.data
import salt.utils.event
import salt.utils.versions
from salt.features import features
log = logging.getLogger(__name__)
@ -812,11 +811,11 @@ def runner(name, **kwargs):
"executed" if success else "failed",
)
if features.get("enable_deprecated_orchestration_flag", False):
if __opts__["features"].get("enable_deprecated_orchestration_flag", False):
ret["__orchestration__"] = True
salt.utils.versions.warn_until(
3008,
"The __orchestration__ return flag will be removed in Salt Argon. "
"The __orchestration__ return flag will be removed in {version}. "
"For more information see https://github.com/saltstack/salt/pull/59917.",
)
@ -1061,7 +1060,7 @@ def wheel(name, **kwargs):
"executed" if success else "failed",
)
if features.get("enable_deprecated_orchestration_flag", False):
if __opts__["features"].get("enable_deprecated_orchestration_flag", False):
ret["__orchestration__"] = True
salt.utils.versions.warn_until(
3008,


@ -191,7 +191,6 @@ import re
import salt.exceptions
import salt.utils.versions
from salt.features import features
try:
from M2Crypto.RSA import RSAError
@ -205,7 +204,7 @@ def __virtual__():
"""
only load this module if the corresponding execution module is loaded
"""
if features.get("x509_v2"):
if __opts__["features"].get("x509_v2"):
return (False, "Superseded, using x509_v2")
if "x509.get_pem_entry" in __salt__:
salt.utils.versions.warn_until(


@ -188,7 +188,6 @@ import os.path
import salt.utils.files
from salt.exceptions import CommandExecutionError, SaltInvocationError
from salt.features import features
from salt.state import STATE_INTERNAL_KEYWORDS as _STATE_INTERNAL_KEYWORDS
try:
@ -211,7 +210,7 @@ __virtualname__ = "x509"
def __virtual__():
if not HAS_CRYPTOGRAPHY:
return (False, "Could not load cryptography")
if not features.get("x509_v2"):
if not __opts__["features"].get("x509_v2"):
return (
False,
"x509_v2 needs to be explicitly enabled by setting `x509_v2: true` "


@ -25,7 +25,6 @@ import salt._logging
import salt.config as config
import salt.defaults.exitcodes
import salt.exceptions
import salt.features
import salt.syspaths as syspaths
import salt.utils.args
import salt.utils.data
@ -1931,9 +1930,7 @@ class MasterOptionParser(
_default_logging_logfile_ = config.DEFAULT_MASTER_OPTS["log_file"]
def setup_config(self):
opts = config.master_config(self.get_config_file_path())
salt.features.setup_features(opts)
return opts
return config.master_config(self.get_config_file_path())
class MinionOptionParser(
@ -1948,13 +1945,11 @@ class MinionOptionParser(
_default_logging_logfile_ = config.DEFAULT_MINION_OPTS["log_file"]
def setup_config(self):
opts = config.minion_config(
return config.minion_config(
self.get_config_file_path(), # pylint: disable=no-member
cache_minion_id=True,
ignore_config_errors=False,
)
salt.features.setup_features(opts)
return opts
class ProxyMinionOptionParser(
@ -1985,11 +1980,9 @@ class ProxyMinionOptionParser(
except AttributeError:
minion_id = None
opts = config.proxy_config(
return config.proxy_config(
self.get_config_file_path(), cache_minion_id=False, minion_id=minion_id
)
salt.features.setup_features(opts)
return opts
class SyndicOptionParser(
@ -2019,11 +2012,9 @@ class SyndicOptionParser(
]
def setup_config(self):
opts = config.syndic_config(
return config.syndic_config(
self.get_config_file_path(), self.get_config_file_path("minion")
)
salt.features.setup_features(opts)
return opts
class SaltCMDOptionParser(
@ -2362,9 +2353,7 @@ class SaltCMDOptionParser(
self.exit(42, "\nIncomplete options passed.\n\n")
def setup_config(self):
opts = config.client_config(self.get_config_file_path())
salt.features.setup_features(opts)
return opts
return config.client_config(self.get_config_file_path())
class SaltCPOptionParser(
@ -2435,9 +2424,7 @@ class SaltCPOptionParser(
self.config["dest"] = self.args[-1]
def setup_config(self):
opts = config.master_config(self.get_config_file_path())
salt.features.setup_features(opts)
return opts
return config.master_config(self.get_config_file_path())
class SaltKeyOptionParser(
@ -2729,7 +2716,6 @@ class SaltKeyOptionParser(
# Since we're generating the keys, some defaults can be assumed
# or tweaked
keys_config["pki_dir"] = self.options.gen_keys_dir
salt.features.setup_features(keys_config)
return keys_config
def process_rotate_aes_key(self):
@ -2976,7 +2962,6 @@ class SaltCallOptionParser(
opts = config.minion_config(
self.get_config_file_path(), cache_minion_id=True
)
salt.features.setup_features(opts)
return opts
def process_module_dirs(self):
@ -3078,9 +3063,7 @@ class SaltRunOptionParser(
self.config["arg"] = []
def setup_config(self):
opts = config.client_config(self.get_config_file_path())
salt.features.setup_features(opts)
return opts
return config.client_config(self.get_config_file_path())
class SaltSSHOptionParser(
@ -3420,9 +3403,7 @@ class SaltSSHOptionParser(
break
def setup_config(self):
opts = config.master_config(self.get_config_file_path())
salt.features.setup_features(opts)
return opts
return config.master_config(self.get_config_file_path())
class SaltCloudParser(
@ -3483,11 +3464,9 @@ class SaltCloudParser(
def setup_config(self):
try:
opts = config.cloud_config(self.get_config_file_path())
return config.cloud_config(self.get_config_file_path())
except salt.exceptions.SaltCloudConfigError as exc:
self.error(exc)
salt.features.setup_features(opts)
return opts
class SPMParser(
@ -3543,9 +3522,7 @@ class SPMParser(
self.error("Insufficient arguments")
def setup_config(self):
opts = salt.config.spm_config(self.get_config_file_path())
salt.features.setup_features(opts)
return opts
return salt.config.spm_config(self.get_config_file_path())
class SaltAPIParser(
@ -3571,8 +3548,6 @@ class SaltAPIParser(
_default_logging_logfile_ = config.DEFAULT_API_OPTS[_logfile_config_setting_name_]
def setup_config(self):
opts = salt.config.api_config(
return salt.config.api_config(
self.get_config_file_path()
) # pylint: disable=no-member
salt.features.setup_features(opts)
return opts
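A minimal, self-contained sketch of the simplified setup_config pattern these parser changes converge on: return the parsed configuration directly instead of assigning it, running extra setup, and then returning it. The names below are illustrative stand-ins, not Salt's actual classes or config loaders.

def load_config(path):
    # Stand-in for config.master_config() / config.minion_config().
    return {"config_file": path, "log_file": "/var/log/example.log"}


class ExampleOptionParser:
    def get_config_file_path(self):
        return "/etc/example/config"

    def setup_config(self):
        # Previously: opts = load_config(...); extra per-process setup; return opts
        # Now: just return the loaded options.
        return load_config(self.get_config_file_path())


if __name__ == "__main__":
    print(ExampleOptionParser().setup_config())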

View file

@ -26,6 +26,66 @@ def venv(tmp_path):
yield _venv
@pytest.fixture
def module_dirs(tmp_path):
module_dir = tmp_path / "module-dir-base"
module_dir.joinpath("modules").mkdir(parents=True)
return [str(module_dir)]
def test_module_dirs_priority(venv, salt_extension, minion_opts, module_dirs):
# Install our extension into the virtualenv
venv.install(str(salt_extension.srcdir))
installed_packages = venv.get_installed_packages()
assert salt_extension.name in installed_packages
code = """
import sys
import json
import salt._logging
import salt.loader
minion_config = json.loads(sys.stdin.read())
salt._logging.set_logging_options_dict(minion_config)
salt._logging.setup_logging()
mod_dirs = salt.loader._module_dirs(minion_config, "modules", "module")
print(json.dumps(mod_dirs))
"""
minion_opts["module_dirs"] = module_dirs
ret = venv.run_code(code, input=json.dumps(minion_opts))
module_dirs_return = json.loads(ret.stdout)
assert len(module_dirs_return) == 5
for i, tail in enumerate(
[
"/module-dir-base/modules",
"/var/cache/salt/minion/extmods/modules",
"/module-dir-base",
"/site-packages/salt_ext_loader_test/modules",
"/site-packages/salt/modules",
]
):
assert module_dirs_return[i].endswith(
tail
), f"{module_dirs_return[i]} does not end with {tail}"
# Test the deprecated mode as well
minion_opts["features"] = {"enable_deprecated_module_search_path_priority": True}
ret = venv.run_code(code, input=json.dumps(minion_opts))
module_dirs_return = json.loads(ret.stdout)
assert len(module_dirs_return) == 5
for i, tail in enumerate(
[
"/module-dir-base/modules",
"/module-dir-base",
"/site-packages/salt_ext_loader_test/modules",
"/var/cache/salt/minion/extmods/modules",
"/site-packages/salt/modules",
]
):
assert module_dirs_return[i].endswith(
tail
), f"{module_dirs_return[i]} does not end with {tail}"
def test_new_entry_points_passing_module(venv, salt_extension, salt_minion_factory):
# Install our extension into the virtualenv
venv.install(str(salt_extension.srcdir))
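The module_dirs priority test above drives salt.loader._module_dirs through a subprocess so the freshly installed extension is importable. A hedged, standalone way to inspect the same search-path ordering on an existing installation is sketched below; it assumes Salt is installed, a readable minion config exists at the default path, and that the private _module_dirs helper keeps the signature used in the test.

import salt.config
import salt.loader

# Load minion options the normal way rather than piping JSON over stdin.
opts = salt.config.minion_config("/etc/salt/minion")
# Uncomment to exercise the deprecated ordering toggled in the test:
# opts["features"] = {"enable_deprecated_module_search_path_priority": True}
for path in salt.loader._module_dirs(opts, "modules", "module"):
    print(path)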

View file

@ -431,7 +431,7 @@ def setup_macos(
assert ret.returncode == 0, ret
else:
# We are testing the onedir download
onedir_name = f"salt-{salt_release}-onedir-darwin-{arch}.tar.xz"
onedir_name = f"salt-{salt_release}-onedir-macos-{arch}.tar.xz"
if repo_subpath == "minor":
repo_url_base = f"{root_url}/onedir/{repo_subpath}/{salt_release}"
else:

View file

@ -0,0 +1,35 @@
import pytest
import salt.channel.server as server
@pytest.fixture
def key_data():
return [
"-----BEGIN PUBLIC KEY-----",
"MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAoe5QSDYRWKyknbVyRrIj",
"rm1ht5HgKzAVUber0x54+b/UgxTd1cqI6I+eDlx53LqZSH3G8Rd5cUh8LHoGedSa",
"E62vEiLAjgXa+RdgcGiQpYS8+Z2RvQJ8oIcZgO+2AzgBRHboNWHTYRRmJXCd3dKs",
"9tcwK6wxChR06HzGqaOTixAuQlegWbOTU+X4dXIbW7AnuQBt9MCib7SxHlscrqcS",
"cBrRvq51YP6cxPm/rZJdBqZhVrlghBvIpa45NApP5PherGi4AbEGYte4l+gC+fOA",
"osEBis1V27djPpIyQS4qk3XAPQg6CYQMDltHqA4Fdo0Nt7SMScxJhfH0r6zmBFAe",
"BQIDAQAB",
"-----END PUBLIC KEY-----",
]
@pytest.mark.parametrize("linesep", ["\r\n", "\r", "\n"])
def test__clean_key(key_data, linesep):
tst_key = linesep.join(key_data)
chk_key = "\n".join(key_data)
clean_func = server.ReqServerChannel._clean_key
assert clean_func(tst_key) == clean_func(chk_key)
@pytest.mark.parametrize("linesep", ["\r\n", "\r", "\n"])
def test__clean_key_mismatch(key_data, linesep):
tst_key = linesep.join(key_data)
tst_key = tst_key.replace("5", "4")
chk_key = "\n".join(key_data)
clean_func = server.ReqServerChannel._clean_key
assert clean_func(tst_key) != clean_func(chk_key)
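The two tests above only check that _clean_key is insensitive to line-ending differences; the implementation itself is not shown in this diff. A rough sketch of that kind of normalization, offered purely as an assumption about what such a helper does and not as the actual ReqServerChannel._clean_key code:

def clean_key_sketch(key):
    # Normalize \r\n and bare \r to \n, then strip per-line trailing whitespace.
    lines = key.replace("\r\n", "\n").replace("\r", "\n").split("\n")
    return "\n".join(line.rstrip() for line in lines).strip()


pem = "-----BEGIN PUBLIC KEY-----\r\nMIIBIjAN...\r\n-----END PUBLIC KEY-----"
assert clean_key_sketch(pem) == clean_key_sketch(pem.replace("\r\n", "\n"))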

View file

@ -5,6 +5,7 @@
import copy
import pathlib
import shutil
import sys
import textwrap
import pytest
@ -28,14 +29,14 @@ def unicode_dirname():
return "соль"
@pytest.fixture(autouse=True)
@pytest.fixture
def testfile(tmp_path):
fp = tmp_path / "testfile"
fp.write_text("This is a testfile")
return fp
@pytest.fixture(autouse=True)
@pytest.fixture
def tmp_state_tree(tmp_path, testfile, unicode_filename, unicode_dirname):
dirname = tmp_path / "roots_tmp_state_tree"
dirname.mkdir(parents=True, exist_ok=True)
@ -54,11 +55,15 @@ def tmp_state_tree(tmp_path, testfile, unicode_filename, unicode_dirname):
@pytest.fixture
def configure_loader_modules(tmp_state_tree, temp_salt_master):
opts = temp_salt_master.config.copy()
def testfilepath(tmp_state_tree, testfile):
return tmp_state_tree / testfile.name
@pytest.fixture
def configure_loader_modules(tmp_state_tree, master_opts):
overrides = {"file_roots": {"base": [str(tmp_state_tree)]}}
opts.update(overrides)
return {roots: {"__opts__": opts}}
master_opts.update(overrides)
return {roots: {"__opts__": master_opts}}
def test_file_list(unicode_filename):
@ -75,17 +80,17 @@ def test_find_file(tmp_state_tree):
assert full_path_to_file == ret["path"]
def test_serve_file(testfile):
def test_serve_file(testfilepath):
with patch.dict(roots.__opts__, {"file_buffer_size": 262144}):
load = {
"saltenv": "base",
"path": str(testfile),
"path": str(testfilepath),
"loc": 0,
}
fnd = {"path": str(testfile), "rel": "testfile"}
fnd = {"path": str(testfilepath), "rel": "testfile"}
ret = roots.serve_file(load, fnd)
with salt.utils.files.fopen(str(testfile), "rb") as fp_:
with salt.utils.files.fopen(str(testfilepath), "rb") as fp_:
data = fp_.read()
assert ret == {"data": data, "dest": "testfile"}
@ -236,7 +241,7 @@ def test_update_mtime_map():
# between Python releases.
lines_written = sorted(mtime_map_mock.write_calls())
expected = sorted(
salt.utils.stringutils.to_bytes("{key}:{val}\n".format(key=key, val=val))
salt.utils.stringutils.to_bytes(f"{key}:{val}\n")
for key, val in new_mtime_map.items()
)
assert lines_written == expected, lines_written
@ -277,3 +282,36 @@ def test_update_mtime_map_unicode_error(tmp_path):
},
"backend": "roots",
}
def test_find_file_not_in_root(tmp_state_tree):
"""
Fileroots should never 'find' a file that is outside of its root.
"""
badfile = pathlib.Path(tmp_state_tree).parent / "bar"
badfile.write_text("Bad file")
badpath = f"../bar"
ret = roots.find_file(badpath)
assert ret == {"path": "", "rel": ""}
badpath = f"{tmp_state_tree / '..' / 'bar'}"
ret = roots.find_file(badpath)
assert ret == {"path": "", "rel": ""}
def test_serve_file_not_in_root(tmp_state_tree):
"""
Fileroots should never 'serve' a file that is outside of its root.
"""
badfile = pathlib.Path(tmp_state_tree).parent / "bar"
badfile.write_text("Bad file")
badpath = f"../bar"
load = {"path": "salt://|..\\bar", "saltenv": "base", "loc": 0}
fnd = {
"path": f"{tmp_state_tree / '..' / 'bar'}",
"rel": f"{pathlib.Path('..') / 'bar'}",
}
ret = roots.serve_file(load, fnd)
if "win" in sys.platform:
assert ret == {"data": "", "dest": "..\\bar"}
else:
assert ret == {"data": "", "dest": "../bar"}

View file

@ -0,0 +1,123 @@
import datetime
import os
import time
import salt.fileserver
import salt.utils.files
def test_diff_with_diffent_keys():
"""
Test that different maps are indeed reported different
"""
map1 = {"file1": 1234}
map2 = {"file2": 1234}
assert salt.fileserver.diff_mtime_map(map1, map2) is True
def test_diff_with_diffent_values():
"""
Test that different maps are indeed reported different
"""
map1 = {"file1": 12345}
map2 = {"file1": 1234}
assert salt.fileserver.diff_mtime_map(map1, map2) is True
def test_whitelist():
opts = {
"fileserver_backend": ["roots", "git", "s3fs", "hgfs", "svn"],
"extension_modules": "",
}
fs = salt.fileserver.Fileserver(opts)
assert sorted(fs.servers.whitelist) == sorted(
["git", "gitfs", "hg", "hgfs", "svn", "svnfs", "roots", "s3fs"]
), fs.servers.whitelist
def test_future_file_list_cache_file_ignored(tmp_path):
opts = {
"fileserver_backend": ["roots"],
"cachedir": tmp_path,
"extension_modules": "",
}
back_cachedir = os.path.join(tmp_path, "file_lists/roots")
os.makedirs(os.path.join(back_cachedir))
# Touch a couple files
for filename in ("base.p", "foo.txt"):
with salt.utils.files.fopen(os.path.join(back_cachedir, filename), "wb") as _f:
if filename == "base.p":
_f.write(b"\x80")
# Set modification time to file list cache file to 1 year in the future
now = datetime.datetime.utcnow()
future = now + datetime.timedelta(days=365)
mod_time = time.mktime(future.timetuple())
os.utime(os.path.join(back_cachedir, "base.p"), (mod_time, mod_time))
list_cache = os.path.join(back_cachedir, "base.p")
w_lock = os.path.join(back_cachedir, ".base.w")
ret = salt.fileserver.check_file_list_cache(opts, "files", list_cache, w_lock)
assert (
ret[1] is True
), "Cache file list cache file is not refreshed when future modification time"
def test_file_server_url_escape(tmp_path):
(tmp_path / "srv").mkdir()
(tmp_path / "srv" / "salt").mkdir()
(tmp_path / "foo").mkdir()
(tmp_path / "foo" / "bar").write_text("Bad file")
fileroot = str(tmp_path / "srv" / "salt")
badfile = str(tmp_path / "foo" / "bar")
opts = {
"fileserver_backend": ["roots"],
"extension_modules": "",
"optimization_order": [
0,
],
"file_roots": {
"base": [fileroot],
},
"file_ignore_regex": "",
"file_ignore_glob": "",
}
fs = salt.fileserver.Fileserver(opts)
ret = fs.find_file(
"salt://|..\\..\\..\\foo/bar",
"base",
)
assert ret == {"path": "", "rel": ""}
def test_file_server_serve_url_escape(tmp_path):
(tmp_path / "srv").mkdir()
(tmp_path / "srv" / "salt").mkdir()
(tmp_path / "foo").mkdir()
(tmp_path / "foo" / "bar").write_text("Bad file")
fileroot = str(tmp_path / "srv" / "salt")
badfile = str(tmp_path / "foo" / "bar")
opts = {
"fileserver_backend": ["roots"],
"extension_modules": "",
"optimization_order": [
0,
],
"file_roots": {
"base": [fileroot],
},
"file_ignore_regex": "",
"file_ignore_glob": "",
"file_buffer_size": 2048,
}
fs = salt.fileserver.Fileserver(opts)
ret = fs.serve_file(
{
"path": "salt://|..\\..\\..\\foo/bar",
"saltenv": "base",
"loc": 0,
}
)
assert ret == {"data": "", "dest": ""}
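The payload in these tests mixes backslashes and forward slashes behind the salt://| escape marker. A short illustration of why that matters: if a server ever treats "\" like "/" while normalizing the request, the path climbs out of the file root. This is only a demonstration of the payload shape, not fileserver code.

import posixpath

payload = "..\\..\\..\\foo/bar"
normalized = posixpath.normpath(payload.replace("\\", "/"))
print(normalized)  # ../../../foo/bar, which resolves above the configured root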

View file

@ -1021,3 +1021,35 @@ def test_key_dfn_wait(cluster_maintenance):
thread.join()
assert time.time() - start >= 5
assert dfn.read_text() == "othermaster"
def test_syndic_return_cache_dir_creation(encrypted_requests):
"""master's cachedir for a syndic will be created by AESFuncs._syndic_return method"""
cachedir = pathlib.Path(encrypted_requests.opts["cachedir"])
assert not (cachedir / "syndics").exists()
encrypted_requests._syndic_return(
{
"id": "mamajama",
"jid": "",
"return": {},
}
)
assert (cachedir / "syndics").exists()
assert (cachedir / "syndics" / "mamajama").exists()
def test_syndic_return_cache_dir_creation_traversal(encrypted_requests):
"""
The master's AESFuncs._syndic_return method cachedir creation is not vulnerable to directory traversal
"""
cachedir = pathlib.Path(encrypted_requests.opts["cachedir"])
assert not (cachedir / "syndics").exists()
encrypted_requests._syndic_return(
{
"id": "../mamajama",
"jid": "",
"return": {},
}
)
assert not (cachedir / "syndics").exists()
assert not (cachedir / "mamajama").exists()

View file

@ -1,79 +0,0 @@
"""
:codeauthor: Joao Mesquita <jmesquita@sangoma.com>
"""
import datetime
import os
import time
import salt.utils.files
from salt import fileserver
from tests.support.helpers import with_tempdir
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase
class MapDiffTestCase(TestCase):
def test_diff_with_diffent_keys(self):
"""
Test that different maps are indeed reported different
"""
map1 = {"file1": 1234}
map2 = {"file2": 1234}
assert fileserver.diff_mtime_map(map1, map2) is True
def test_diff_with_diffent_values(self):
"""
Test that different maps are indeed reported different
"""
map1 = {"file1": 12345}
map2 = {"file1": 1234}
assert fileserver.diff_mtime_map(map1, map2) is True
class VCSBackendWhitelistCase(TestCase, LoaderModuleMockMixin):
def setup_loader_modules(self):
return {fileserver: {}}
def test_whitelist(self):
opts = {
"fileserver_backend": ["roots", "git", "s3fs", "hgfs", "svn"],
"extension_modules": "",
}
fs = fileserver.Fileserver(opts)
assert sorted(fs.servers.whitelist) == sorted(
["git", "gitfs", "hg", "hgfs", "svn", "svnfs", "roots", "s3fs"]
), fs.servers.whitelist
@with_tempdir()
def test_future_file_list_cache_file_ignored(self, cachedir):
opts = {
"fileserver_backend": ["roots"],
"cachedir": cachedir,
"extension_modules": "",
}
back_cachedir = os.path.join(cachedir, "file_lists/roots")
os.makedirs(os.path.join(back_cachedir))
# Touch a couple files
for filename in ("base.p", "foo.txt"):
with salt.utils.files.fopen(
os.path.join(back_cachedir, filename), "wb"
) as _f:
if filename == "base.p":
_f.write(b"\x80")
# Set modification time to file list cache file to 1 year in the future
now = datetime.datetime.utcnow()
future = now + datetime.timedelta(days=365)
mod_time = time.mktime(future.timetuple())
os.utime(os.path.join(back_cachedir, "base.p"), (mod_time, mod_time))
list_cache = os.path.join(back_cachedir, "base.p")
w_lock = os.path.join(back_cachedir, ".base.w")
ret = fileserver.check_file_list_cache(opts, "files", list_cache, w_lock)
assert (
ret[1] is True
), "Cache file list cache file is not refreshed when future modification time"

View file

@ -1285,7 +1285,9 @@ def upload_coverage(ctx: Context, reports_path: pathlib.Path, commit_sha: str =
break
if current_attempt >= max_attempts:
ctx.error(f"Failed to upload {fpath} to codecov")
ctx.error(f"Failed to upload {fpath} to codecov:")
ctx.console_stdout.print(stdout)
ctx.console.print(stderr)
ctx.exit(1)
ctx.warn(f"Waiting {sleep_time} seconds until next retry...")

View file

@ -62,6 +62,10 @@ _deb_distro_info = {
"label": "salt_ubuntu2204",
"codename": "jammy",
},
"23.04": {
"label": "salt_ubuntu2304",
"codename": "lunar",
},
},
}

View file

@ -253,24 +253,43 @@ def generate_workflows(ctx: Context):
test_salt_pkg_downloads_needs_slugs.add("build-ci-deps")
build_rpms_listing = []
for distro, releases in (
("amazon", ("2", "2023")),
("redhat", ("7", "8", "9")),
("fedora", ("36", "37", "38")),
("photon", ("3", "4", "5")),
):
for release in releases:
rpm_os_versions: dict[str, list[str]] = {
"amazon": [],
"fedora": [],
"photon": [],
"redhat": [],
}
for slug in sorted(AMIS):
if slug.endswith("-arm64"):
continue
if not slug.startswith(
("amazonlinux", "almalinux", "centos", "fedora", "photonos")
):
continue
os_name, os_version = slug.split("-")
if os_name == "amazonlinux":
rpm_os_versions["amazon"].append(os_version)
elif os_name == "photonos":
rpm_os_versions["photon"].append(os_version)
elif os_name == "fedora":
rpm_os_versions["fedora"].append(os_version)
else:
rpm_os_versions["redhat"].append(os_version)
for distro, releases in sorted(rpm_os_versions.items()):
for release in sorted(set(releases)):
for arch in ("x86_64", "arm64", "aarch64"):
build_rpms_listing.append((distro, release, arch))
build_debs_listing = []
for distro, releases in (
("debian", ("10", "11", "12")),
("ubuntu", ("20.04", "22.04")),
):
for release in releases:
for arch in ("x86_64", "arm64"):
build_debs_listing.append((distro, release, arch))
for slug in sorted(AMIS):
if not slug.startswith(("debian-", "ubuntu-")):
continue
if slug.endswith("-arm64"):
continue
os_name, os_version = slug.split("-")
for arch in ("x86_64", "arm64"):
build_debs_listing.append((os_name, os_version, arch))
env = Environment(
block_start_string="<%",

View file

@ -135,13 +135,13 @@ def download_onedir_artifact(
s3 = boto3.client("s3")
if platform == "darwin":
platform = "macos"
if arch == "arm64":
arch = "aarch64"
if arch == "aarch64":
arch = "arm64"
arch = arch.lower()
platform = platform.lower()
if platform in ("linux", "macos") and arch not in ("x86_64", "aarch64"):
if platform in ("linux", "macos") and arch not in ("x86_64", "arm64"):
ctx.error(
f"The 'arch' value for {platform} must be one of: 'x86_64', 'aarch64', 'arm64'"
f"The 'arch' value for {platform} must be one of: 'x86_64', 'aarch64', 'aarch64'"
)
ctx.exit(1)
if platform == "windows" and arch not in ("x86", "amd64"):