Merge remote-tracking branch 'saltstack/3006.x' into merge/3007.x/3006.x

Daniel A. Wozniak 2024-09-20 14:20:13 -07:00
commit 6b3418fe0b
100 changed files with 1638 additions and 506 deletions


@ -1,7 +1,7 @@
blank_issues_enabled: true
contact_links:
- name: Salt Community Slack
url: https://saltstackcommunity.slack.com/
- name: Salt Community Discord
url: https://discord.com/invite/J7b7EscrAs
about: Please ask and answer questions here.
- name: Salt-Users Forum
url: https://groups.google.com/forum/#!forum/salt-users


@ -8,7 +8,7 @@ assignees: ''
---
### Description of the tech debt to be addressed, include links and screenshots
<!-- Note: Please direct questions to the salt-users google group, IRC or Community Slack. -->
<!-- Note: Please direct questions to the salt-users google group, IRC or Community Discord. -->
### Versions Report
(Provided by running `salt --versions-report`. Please also mention any differences in master/minion versions.)


@ -11,7 +11,9 @@ Remove this section if not relevant
### Merge requirements satisfied?
**[NOTICE] Bug fixes or features added to Salt require tests.**
<!-- Please review the [test documentation](https://docs.saltproject.io/en/master/topics/tutorials/writing_tests.html) for details on how to implement tests into Salt's test suite. -->
<!-- Please review the test documentation for details on how to implement tests
into Salt's test suite:
https://docs.saltproject.io/en/master/topics/tutorials/writing_tests.html -->
- [ ] Docs
- [ ] Changelog - https://docs.saltproject.io/en/master/topics/development/changelog.html
- [ ] Tests written/updated
@ -19,7 +21,13 @@ Remove this section if not relevant
### Commits signed with GPG?
Yes/No
Please review [Salt's Contributing Guide](https://docs.saltproject.io/en/master/topics/development/contributing.html) for best practices, including the
[PR Guidelines](https://docs.saltproject.io/en/master/topics/development/pull_requests.html).
<!-- Please review Salt's Contributing Guide for best practices and guidance in
choosing the right branch:
https://docs.saltproject.io/en/master/topics/development/contributing.html -->
See GitHub's [page on GPG signing](https://help.github.com/articles/signing-commits-using-gpg/) for more information about signing commits with GPG.
<!-- Additional guidance for pull requests can be found here:
https://docs.saltproject.io/en/master/topics/development/pull_requests.html -->
<!-- See GitHub's page on GPG signing for more information about signing commits
with GPG:
https://help.github.com/articles/signing-commits-using-gpg/ -->

.github/config.yml vendored (4 changed lines)

@ -13,7 +13,7 @@ newIssueWelcomeComment: >
- [Community Wiki](https://github.com/saltstack/community/wiki)
- [Salts Contributor Guide](https://docs.saltproject.io/en/master/topics/development/contributing.html)
- [Join our Community Slack](https://via.vmw.com/salt-slack)
- [Join our Community Discord](https://discord.com/invite/J7b7EscrAs)
- [IRC on LiberaChat](https://web.libera.chat/#salt)
- [Salt Project YouTube channel](https://www.youtube.com/channel/UCpveTIucFx9ljGelW63-BWg)
- [Salt Project Twitch channel](https://www.twitch.tv/saltprojectoss)
@ -39,7 +39,7 @@ newPRWelcomeComment: >
- [Community Wiki](https://github.com/saltstack/community/wiki)
- [Salts Contributor Guide](https://docs.saltproject.io/en/master/topics/development/contributing.html)
- [Join our Community Slack](https://via.vmw.com/salt-slack)
- [Join our Community Discord](https://discord.com/invite/J7b7EscrAs)
- [IRC on LiberaChat](https://web.libera.chat/#salt)
- [Salt Project YouTube channel](https://www.youtube.com/channel/UCpveTIucFx9ljGelW63-BWg)
- [Salt Project Twitch channel](https://www.twitch.tv/saltprojectoss)


@ -128,6 +128,9 @@ jobs:
- pkg/**
- *pkg_requirements
- *salt_added_modified
nsis_tests:
- added|modified: &nsis_tests
- pkg/windows/nsis/**
testrun:
- added|modified:
- *pkg_requirements
@ -260,6 +263,14 @@ jobs:
- prepare-workflow
with:
changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}
nsis-tests:
name: NSIS Tests
if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
uses: ./.github/workflows/nsis-tests.yml
needs:
- prepare-workflow
with:
changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}
prepare-release:
name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}"
@ -1537,27 +1548,6 @@ jobs:
workflow-slug: ci
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
archlinux-lts:
name: Arch Linux LTS Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: archlinux-lts
nox-session: ci-test-onedir
platform: linux
arch: x86_64
nox-version: 2022.8.7
gh-actions-python-version: "3.10"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}
workflow-slug: ci
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
debian-11:
name: Debian 11 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-11') }}
@ -2005,7 +1995,6 @@ jobs:
- amazonlinux-2-arm64
- amazonlinux-2023
- amazonlinux-2023-arm64
- archlinux-lts
- debian-11
- debian-11-arm64
- debian-12
@ -2059,8 +2048,9 @@ jobs:
id: get-coverage-reports
uses: actions/download-artifact@v4
with:
name: all-testrun-coverage-artifacts
path: artifacts/coverage/
pattern: all-testrun-coverage-artifacts*
merge-multiple: true
- name: Display structure of downloaded files
run: tree -a artifacts/
@ -2152,6 +2142,7 @@ jobs:
- prepare-workflow
- pre-commit
- lint
- nsis-tests
- build-docs
- build-deps-onedir
- build-salt-onedir
@ -2172,7 +2163,6 @@ jobs:
- amazonlinux-2-arm64
- amazonlinux-2023
- amazonlinux-2023-arm64
- archlinux-lts
- debian-11
- debian-11-arm64
- debian-12


@ -185,6 +185,9 @@ jobs:
- pkg/**
- *pkg_requirements
- *salt_added_modified
nsis_tests:
- added|modified: &nsis_tests
- pkg/windows/nsis/**
testrun:
- added|modified:
- *pkg_requirements
@ -317,6 +320,14 @@ jobs:
- prepare-workflow
with:
changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}
nsis-tests:
name: NSIS Tests
if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
uses: ./.github/workflows/nsis-tests.yml
needs:
- prepare-workflow
with:
changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}
prepare-release:
name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}"
@ -1602,27 +1613,6 @@ jobs:
workflow-slug: nightly
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
archlinux-lts:
name: Arch Linux LTS Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: archlinux-lts
nox-session: ci-test-onedir
platform: linux
arch: x86_64
nox-version: 2022.8.7
gh-actions-python-version: "3.10"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
skip-code-coverage: false
workflow-slug: nightly
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
debian-11:
name: Debian 11 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -2070,7 +2060,6 @@ jobs:
- amazonlinux-2-arm64
- amazonlinux-2023
- amazonlinux-2023-arm64
- archlinux-lts
- debian-11
- debian-11-arm64
- debian-12
@ -2124,8 +2113,9 @@ jobs:
id: get-coverage-reports
uses: actions/download-artifact@v4
with:
name: all-testrun-coverage-artifacts
path: artifacts/coverage/
pattern: all-testrun-coverage-artifacts*
merge-multiple: true
- name: Display structure of downloaded files
run: tree -a artifacts/
@ -2977,7 +2967,6 @@ jobs:
- amazonlinux-2-arm64
- amazonlinux-2023
- amazonlinux-2023-arm64
- archlinux-lts
- debian-11
- debian-11-arm64
- debian-12
@ -3050,6 +3039,7 @@ jobs:
- prepare-workflow
- pre-commit
- lint
- nsis-tests
- build-docs
- build-deps-onedir
- build-salt-onedir

.github/workflows/nsis-tests.yml vendored, new file (67 lines)

@ -0,0 +1,67 @@
---
name: Test NSIS Installer
on:
workflow_call:
inputs:
changed-files:
required: true
type: string
description: JSON string containing information about changed files
jobs:
Test-NSIS-Logic:
name: Logic Tests
runs-on:
- windows-latest
if: ${{ contains(fromJSON('["push", "schedule", "workflow_dispatch"]'), github.event_name) || fromJSON(inputs.changed-files)['nsis_tests'] }}
steps:
- name: Checkout Salt
uses: actions/checkout@v4
- name: Set Up Python 3.10
uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Install NSIS
run: .\pkg\windows\install_nsis.cmd -CICD
shell: cmd
- name: Build Test Installer
run: .\pkg\windows\nsis\tests\setup.cmd -CICD
shell: cmd
- name: Run Config Tests
run: .\pkg\windows\nsis\tests\test.cmd -CICD .\config_tests
shell: cmd
Test-NSIS-Stress:
name: Stress Tests
runs-on:
- windows-latest
if: ${{ contains(fromJSON('["push", "schedule", "workflow_dispatch"]'), github.event_name) || fromJSON(inputs.changed-files)['nsis_tests'] }}
steps:
- name: Checkout Salt
uses: actions/checkout@v4
- name: Set Up Python 3.10
uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Install NSIS
run: .\pkg\windows\install_nsis.cmd -CICD
shell: cmd
- name: Build Test Installer
run: .\pkg\windows\nsis\tests\setup.cmd -CICD
shell: cmd
- name: Run Stress Test
run: .\pkg\windows\nsis\tests\test.cmd -CICD .\stress_tests
shell: cmd


@ -175,6 +175,9 @@ jobs:
- pkg/**
- *pkg_requirements
- *salt_added_modified
nsis_tests:
- added|modified: &nsis_tests
- pkg/windows/nsis/**
testrun:
- added|modified:
- *pkg_requirements
@ -307,6 +310,14 @@ jobs:
- prepare-workflow
with:
changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}
nsis-tests:
name: NSIS Tests
if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
uses: ./.github/workflows/nsis-tests.yml
needs:
- prepare-workflow
with:
changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}
prepare-release:
name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}"
@ -1584,27 +1595,6 @@ jobs:
workflow-slug: scheduled
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
archlinux-lts:
name: Arch Linux LTS Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: archlinux-lts
nox-session: ci-test-onedir
platform: linux
arch: x86_64
nox-version: 2022.8.7
gh-actions-python-version: "3.10"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
skip-code-coverage: false
workflow-slug: scheduled
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
debian-11:
name: Debian 11 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -2052,7 +2042,6 @@ jobs:
- amazonlinux-2-arm64
- amazonlinux-2023
- amazonlinux-2023-arm64
- archlinux-lts
- debian-11
- debian-11-arm64
- debian-12
@ -2106,8 +2095,9 @@ jobs:
id: get-coverage-reports
uses: actions/download-artifact@v4
with:
name: all-testrun-coverage-artifacts
path: artifacts/coverage/
pattern: all-testrun-coverage-artifacts*
merge-multiple: true
- name: Display structure of downloaded files
run: tree -a artifacts/
@ -2201,6 +2191,7 @@ jobs:
- prepare-workflow
- pre-commit
- lint
- nsis-tests
- build-docs
- build-deps-onedir
- build-salt-onedir
@ -2221,7 +2212,6 @@ jobs:
- amazonlinux-2-arm64
- amazonlinux-2023
- amazonlinux-2023-arm64
- archlinux-lts
- debian-11
- debian-11-arm64
- debian-12


@ -158,6 +158,9 @@ jobs:
- pkg/**
- *pkg_requirements
- *salt_added_modified
nsis_tests:
- added|modified: &nsis_tests
- pkg/windows/nsis/**
testrun:
- added|modified:
- *pkg_requirements
@ -299,6 +302,14 @@ jobs:
- prepare-workflow
with:
changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}
nsis-tests:
name: NSIS Tests
if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
uses: ./.github/workflows/nsis-tests.yml
needs:
- prepare-workflow
with:
changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}
prepare-release:
name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}"
@ -1584,27 +1595,6 @@ jobs:
workflow-slug: staging
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
archlinux-lts:
name: Arch Linux LTS Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: archlinux-lts
nox-session: ci-test-onedir
platform: linux
arch: x86_64
nox-version: 2022.8.7
gh-actions-python-version: "3.10"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.14
skip-code-coverage: true
workflow-slug: staging
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && 180 || 360 }}
debian-11:
name: Debian 11 Test
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
@ -2927,7 +2917,6 @@ jobs:
- amazonlinux-2-arm64
- amazonlinux-2023
- amazonlinux-2023-arm64
- archlinux-lts
- debian-11
- debian-11-arm64
- debian-12
@ -3044,6 +3033,7 @@ jobs:
- prepare-workflow
- pre-commit
- lint
- nsis-tests
- build-docs
- build-deps-onedir
- build-salt-onedir


@ -39,6 +39,19 @@
<%- endif %>
<%- set job_name = "nsis-tests" %>
<%- if includes.get(job_name, True) %>
<{ job_name }>:
<%- do conclusion_needs.append(job_name) %>
name: NSIS Tests
if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
uses: ./.github/workflows/nsis-tests.yml
needs:
- prepare-workflow
with:
changed-files: ${{ needs.prepare-workflow.outputs.changed-files }}
<%- endif %>
<%- set job_name = "prepare-release" %>
<%- if includes.get(job_name, True) %>
@ -362,8 +375,9 @@
id: get-coverage-reports
uses: actions/download-artifact@v4
with:
name: all-testrun-coverage-artifacts
path: artifacts/coverage/
pattern: all-testrun-coverage-artifacts*
merge-multiple: true
- name: Display structure of downloaded files
run: tree -a artifacts/


@ -176,6 +176,9 @@ jobs:
- pkg/**
- *pkg_requirements
- *salt_added_modified
nsis_tests:
- added|modified: &nsis_tests
- pkg/windows/nsis/**
testrun:
- added|modified:
- *pkg_requirements


@ -286,6 +286,7 @@ jobs:
name: testrun-coverage-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-grp${{ matrix.test-group || '1' }}-${{ env.TIMESTAMP }}
path: |
artifacts/coverage/
include-hidden-files: true
- name: Upload JUnit XML Test Run Artifacts
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
@ -357,8 +358,9 @@ jobs:
if: ${{ inputs.skip-code-coverage == false }}
id: download-coverage-artifacts
with:
name: testrun-coverage-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}
path: artifacts/coverage/
pattern: testrun-coverage-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}*
merge-multiple: true
- name: Show Downloaded Test Run Artifacts
if: ${{ inputs.skip-code-coverage == false }}
@ -400,3 +402,4 @@ jobs:
with:
name: all-testrun-coverage-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}
path: artifacts/coverage
include-hidden-files: true


@ -316,6 +316,7 @@ jobs:
name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-${{ env.TIMESTAMP }}
path: |
artifacts/coverage/
include-hidden-files: true
- name: Upload JUnit XML Test Run Artifacts
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
@ -387,8 +388,9 @@ jobs:
if: ${{ inputs.skip-code-coverage == false }}
id: download-coverage-artifacts
with:
name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
path: artifacts/coverage/
pattern: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}*
merge-multiple: true
- name: Show Downloaded Test Run Artifacts
if: ${{ inputs.skip-code-coverage == false }}
@ -435,3 +437,4 @@ jobs:
with:
name: all-testrun-coverage-artifacts-${{ inputs.distro-slug }}.${{ inputs.nox-session }}
path: artifacts/coverage
include-hidden-files: true


@ -286,6 +286,7 @@ jobs:
name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-grp${{ matrix.test-group || '1' }}-${{ env.TIMESTAMP }}
path: |
artifacts/coverage/
include-hidden-files: true
- name: Upload JUnit XML Test Run Artifacts
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
@ -358,8 +359,9 @@ jobs:
if: ${{ inputs.skip-code-coverage == false }}
id: download-coverage-artifacts
with:
name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
path: artifacts/coverage/
pattern: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}*
merge-multiple: true
- name: Show Downloaded Test Run Artifacts
if: ${{ inputs.skip-code-coverage == false }}
@ -401,3 +403,4 @@ jobs:
with:
name: all-testrun-coverage-artifacts-${{ inputs.distro-slug }}.${{ inputs.nox-session }}
path: artifacts/coverage
include-hidden-files: true


@ -1,38 +0,0 @@
---
name: Test Windows Installer
on: pull_request
permissions:
contents: read
jobs:
Test-Windows-Installer:
runs-on:
- windows-latest
steps:
- name: Checkout Salt
uses: actions/checkout@v4
- name: Set Up Python 3.10
uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Install NSIS
run: .\pkg\windows\install_nsis.cmd -CICD
shell: cmd
- name: Build Test Installer
run: .\pkg\windows\nsis\tests\setup.cmd -CICD
shell: cmd
- name: Run Stress Test
run: .\pkg\windows\nsis\tests\test.cmd -CICD .\stress_tests
shell: cmd
- name: Run Config Tests
run: .\pkg\windows\nsis\tests\test.cmd -CICD .\config_tests
shell: cmd

.gitignore vendored (1 changed line)

@ -91,6 +91,7 @@ tests/unit/templates/roots
# Pycharm
.idea
venv/
.venv/
# VS Code
.vscode


@ -1,30 +1,56 @@
============
Contributing
============
==============================================
Contributing to Salt: A Guide for Contributors
==============================================
So you want to contribute to the Salt project? Excellent! You can help
in a number of ways:
So, you want to contribute to the Salt project? That's fantastic! There are many
ways you can help improve Salt:
- Use Salt and open well-written bug reports.
- Join a `working group <https://github.com/saltstack/community>`__.
- Answer questions on `irc <https://web.libera.chat/#salt>`__,
the `community Slack <https://via.vmw.com/salt-slack>`__,
the `salt-users mailing
list <https://groups.google.com/forum/#!forum/salt-users>`__,
`Server Fault <https://serverfault.com/questions/tagged/saltstack>`__,
or `r/saltstack on Reddit <https://www.reddit.com/r/saltstack/>`__.
- Fix bugs.
- `Improve the documentation <https://saltstack.gitlab.io/open/docs/docs-hub/topics/contributing.html>`__.
- Provide workarounds, patches, or other code without tests.
- Tell other people about problems you solved using Salt.
- Use Salt and report bugs with clear, detailed descriptions.
- Join a `working group <https://github.com/saltstack/community>`__ to
collaborate with other contributors.
- Answer questions on platforms like `IRC <https://web.libera.chat/#salt>`__,
the `community Discord <https://discord.com/invite/J7b7EscrAs>`__,
the `salt-users mailing list <https://groups.google.com/forum/#!forum/salt-users>`__,
`Server Fault <https://serverfault.com/questions/tagged/saltstack>`__,
or `r/saltstack on Reddit <https://www.reddit.com/r/saltstack/>`__.
- Fix bugs or contribute to the `documentation <https://saltstack.gitlab.io/open/docs/docs-hub/topics/contributing.html>`__.
- Submit workarounds, patches, or code (even without tests).
- Share your experiences and solutions to problems you've solved using Salt.
If you'd like to update docs or fix an issue, you're going to need the
Salt repo. The best way to contribute is using
`Git <https://git-scm.com/>`__.
Choosing the Right Branch for Your Pull Request
===============================================
We appreciate your contributions to the project! To ensure a smooth and
efficient workflow, please follow these guidelines when submitting a Pull
Request. Each type of contribution—whether it's fixing a bug, adding a feature,
updating documentation, or fixing tests—should be targeted at the appropriate
branch. This helps us manage changes effectively and maintain stability across
versions.
- **Bug Fixes:**
Create your Pull Request against the oldest supported branch where the bug
exists. This ensures that the fix can be applied to all relevant versions.
- **New Features**:
For new features or enhancements, create your Pull Request against the master
branch.
- **Documentation Updates:**
Documentation changes should be made against the master branch, unless they
are related to a bug fix, in which case they should follow the same branch as
the bug fix.
- **Test Fixes:**
Pull Requests that fix broken or failing tests should be created against the
oldest supported branch where the issue occurs.
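For example, preparing a bug-fix Pull Request might look like the following (a
minimal sketch: ``upstream`` as the name of the Salt remote, ``origin`` as your
fork, and ``3006.x`` standing in for the oldest supported branch are
assumptions, not project requirements)::

    # Fetch the branch the fix should target
    git fetch upstream

    # Start the fix from the oldest supported branch where the bug exists
    git checkout -b my-bugfix upstream/3006.x

    # Commit the fix with a GPG signature, then push it to your fork and open
    # the Pull Request against that same branch
    git commit -S -am "Fix the bug"
    git push origin my-bugfix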
Setting Up Your Salt Development Environment
============================================
Environment setup
=================
To hack on Salt or the docs you're going to need to set up your
development environment. If you already have a workflow that you're
comfortable with, you can use that, but otherwise this is an opinionated
@ -109,7 +135,7 @@ Then activate it:
Sweet! Now you're ready to clone Salt so you can start hacking away! If
you get stuck at any point, check out the resources at the beginning of
this guide. IRC and Slack are particularly helpful places to go.
this guide. IRC and Discord are particularly helpful places to go.
Get the source!
@ -605,7 +631,7 @@ your PR is submitted during the week you should be able to expect some
kind of communication within that business day. If your tests are
passing and we're not in a code freeze, ideally your code will be merged
that week or month. If you haven't heard from your assigned reviewer, ping them
on GitHub, `irc <https://web.libera.chat/#salt>`__, or Community Slack.
on GitHub, `irc <https://web.libera.chat/#salt>`__, or Community Discord.
It's likely that your reviewer will leave some comments that need
addressing - it may be a style change, or you forgot a changelog entry,


@ -10,9 +10,9 @@
:alt: PyPi Package Downloads
:target: https://lgtm.com/projects/g/saltstack/salt/context:python
.. image:: https://img.shields.io/badge/slack-SaltProject-blue.svg?logo=slack
:alt: Salt Project Slack Community
:target: https://via.vmw.com/salt-slack
.. image:: https://img.shields.io/badge/discord-SaltProject-blue.svg?logo=discord
:alt: Salt Project Discord Community
:target: https://discord.com/invite/J7b7EscrAs
.. image:: https://img.shields.io/twitch/status/saltprojectoss
:alt: Salt Project Twitch Channel
@ -103,7 +103,8 @@ Report bugs or problems using Salt by opening an issue: `<https://github.com/sal
To join our community forum where you can exchange ideas, best practices,
discuss technical support questions, and talk to project maintainers, join our
Slack workspace: `Salt Project Community Slack`_
Discord server: `Salt Project Community Discord`_
Salt Project documentation
@ -127,7 +128,7 @@ announcements.
Other channels to receive security announcements include the
`Salt Community mailing list <https://groups.google.com/forum/#!forum/salt-users>`_
and the `Salt Project Community Slack`_.
and the `Salt Project Community Discord`_.
Responsibly reporting security vulnerabilities
@ -153,7 +154,7 @@ Please be sure to review our
Also, check out some of our community resources including:
* `Salt Project Community Wiki <https://github.com/saltstack/community/wiki>`_
* `Salt Project Community Slack`_
* `Salt Project Community Discord`_
* `Salt Project: IRC on LiberaChat <https://web.libera.chat/#salt>`_
* `Salt Project YouTube channel <https://www.youtube.com/channel/UCpveTIucFx9ljGelW63-BWg>`_
* `Salt Project Twitch channel <https://www.twitch.tv/saltprojectoss>`_
@ -165,8 +166,7 @@ to the **Salt Project Community Events Calendar** on the main
`<https://saltproject.io>`_ website.
If you have additional questions, email us at saltproject@vmware.com or reach out
directly to the Community Manager, Jimmy Chunga via Slack. We'd be glad to
have you join our community!
directly to the Community Discord. We'd be glad to have you join our community!
License
@ -180,7 +180,7 @@ used by external modules.
A complete list of attributions and dependencies can be found here:
`salt/DEPENDENCIES.md <https://github.com/saltstack/salt/blob/master/DEPENDENCIES.md>`_
.. _Salt Project Community Slack: https://via.vmw.com/salt-slack
.. _Salt Project Community Discord: https://discord.com/invite/J7b7EscrAs
.. _VMware Aria Automation Config: https://www.vmware.com/products/vrealize-automation/saltstack-config.html
.. _Latest Salt Documentation: https://docs.saltproject.io/en/latest/
.. _Open an issue: https://github.com/saltstack/salt/issues/new/choose


@ -8,10 +8,10 @@ it may take a few moments for someone to reply.
`<https://web.libera.chat/#salt>`_
**SaltStack Slack** - Alongside IRC is our SaltStack Community Slack for the
**SaltStack Slack** - Alongside IRC is our SaltStack Community Discord for the
SaltStack Working groups. Use the following link to request an invitation.
`<https://via.vmw.com/salt-slack>`_
`<https://discord.com/invite/J7b7EscrAs>`_
**Mailing List** - The SaltStack community users mailing list is hosted by
Google groups. Anyone can post to ask questions about SaltStack products and

changelog/61001.fixed.md, new file (2 lines)

@ -0,0 +1,2 @@
Fixed an issue uninstalling packages on Windows using pkg.removed where there
are multiple versions of the same software installed

changelog/62501.fixed.md, new file (1 line)

@ -0,0 +1 @@
Convert stdin string to bytes regardless of stdin_raw_newlines

changelog/64630.fixed.md, new file (3 lines)

@ -0,0 +1,3 @@
Fixed an intermittent issue with file.recurse where the state would
report failure even on success. Makes sure symlinks are created
after the target file is created

changelog/66560.fixed.md, new file (1 line)

@ -0,0 +1 @@
Correct bash-completion for Debian / Ubuntu

changelog/66596.fixed.md, new file (2 lines)

@ -0,0 +1,2 @@
Fixed an issue with cmd.run with requirements when the shell is not the
default

changelog/66772.fixed.md, new file (1 line)

@ -0,0 +1 @@
Fixed nacl.keygen for not yet existing sk_file or pk_file
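For context, the call this fix touches is typically invoked like this (a
minimal sketch; the key path is illustrative and, per the fix, no longer has to
exist beforehand):

    # Generate a NaCl keypair; sk_file (and the matching pk_file) need not exist yet
    salt-call nacl.keygen sk_file=/etc/salt/pki/master/nacl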

changelog/66783.fixed.md, new file (1 line)

@ -0,0 +1 @@
fix yaml output

changelog/66784.fixed.md, new file (2 lines)

@ -0,0 +1,2 @@
Fixed an issue where enabling `grain_opts` in the minion config would cause
some core grains to be overwritten.

changelog/66835.fixed.md, new file (2 lines)

@ -0,0 +1,2 @@
Removed ``salt.utils.data.decode`` usage from the fileserver. This function was
necessary to support Python 2. This speeds up loading the list cache by 80-90x.

changelog/66837.fixed.md, new file (3 lines)

@ -0,0 +1,3 @@
Issue 66837: Fixes an issue with the `network.local_port_tcp` function
where it was not parsing the IPv4 mapped IPv6 address correctly. The
``::ffff:`` is now removed and only the IP address is returned.
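For reference, the function is normally queried like this (a minimal sketch;
the port number and the address in the comment are only examples):

    # List remote addresses connected to local TCP port 4505 on targeted minions;
    # an IPv4-mapped peer such as ::ffff:10.0.0.5 is now returned as 10.0.0.5
    salt '*' network.local_port_tcp 4505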


@ -0,0 +1 @@
Drop Arch Linux support


@ -1,8 +1,8 @@
{
"amazonlinux-2-arm64": {
"ami": "ami-0c98c023fba59d522",
"ami": "ami-0aab00f54b6cddde6",
"ami_description": "CI Image of AmazonLinux 2 arm64",
"ami_name": "salt-project/ci/amazonlinux/2/arm64/20240509.1530",
"ami_name": "salt-project/ci/amazonlinux/2/arm64/20240912.2135",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -10,9 +10,9 @@
"ssh_username": "ec2-user"
},
"amazonlinux-2": {
"ami": "ami-02cba95cfd7074794",
"ami": "ami-0fd6cec7bbcf52d36",
"ami_description": "CI Image of AmazonLinux 2 x86_64",
"ami_name": "salt-project/ci/amazonlinux/2/x86_64/20240509.1530",
"ami_name": "salt-project/ci/amazonlinux/2/x86_64/20240912.2135",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -20,9 +20,9 @@
"ssh_username": "ec2-user"
},
"amazonlinux-2023-arm64": {
"ami": "ami-0609f0e98f5a6b73d",
"ami": "ami-095e9e4757b5fca1a",
"ami_description": "CI Image of AmazonLinux 2023 arm64",
"ami_name": "salt-project/ci/amazonlinux/2023/arm64/20240509.1529",
"ami_name": "salt-project/ci/amazonlinux/2023/arm64/20240912.2136",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -30,29 +30,19 @@
"ssh_username": "ec2-user"
},
"amazonlinux-2023": {
"ami": "ami-0554a801eb6dcc42c",
"ami": "ami-002d043f1a36bf06e",
"ami_description": "CI Image of AmazonLinux 2023 x86_64",
"ami_name": "salt-project/ci/amazonlinux/2023/x86_64/20240509.1529",
"ami_name": "salt-project/ci/amazonlinux/2023/x86_64/20240912.2136",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
"is_windows": "false",
"ssh_username": "ec2-user"
},
"archlinux-lts": {
"ami": "ami-01ad78f19930b9747",
"ami_description": "CI Image of ArchLinux lts x86_64",
"ami_name": "salt-project/ci/archlinux/lts/x86_64/20240509.1530",
"arch": "x86_64",
"cloudwatch-agent-available": "false",
"instance_type": "t3a.large",
"is_windows": "false",
"ssh_username": "arch"
},
"debian-11-arm64": {
"ami": "ami-0eff227d9a94d8692",
"ami": "ami-0ff63235fce7bea1d",
"ami_description": "CI Image of Debian 11 arm64",
"ami_name": "salt-project/ci/debian/11/arm64/20240509.1529",
"ami_name": "salt-project/ci/debian/11/arm64/20240912.2135",
"arch": "arm64",
"cloudwatch-agent-available": "false",
"instance_type": "m6g.large",
@ -60,9 +50,9 @@
"ssh_username": "admin"
},
"debian-11": {
"ami": "ami-099b2a5a1fb995166",
"ami": "ami-08685bfca48beeb67",
"ami_description": "CI Image of Debian 11 x86_64",
"ami_name": "salt-project/ci/debian/11/x86_64/20240509.1529",
"ami_name": "salt-project/ci/debian/11/x86_64/20240912.2135",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -70,9 +60,9 @@
"ssh_username": "admin"
},
"debian-12-arm64": {
"ami": "ami-0ab6b0cc8488f8880",
"ami": "ami-07d383138f04b32ba",
"ami_description": "CI Image of Debian 12 arm64",
"ami_name": "salt-project/ci/debian/12/arm64/20240509.1529",
"ami_name": "salt-project/ci/debian/12/arm64/20240912.2135",
"arch": "arm64",
"cloudwatch-agent-available": "false",
"instance_type": "m6g.large",
@ -80,9 +70,9 @@
"ssh_username": "admin"
},
"debian-12": {
"ami": "ami-0e1f5b55325249c4e",
"ami": "ami-0867ec74072fd97a0",
"ami_description": "CI Image of Debian 12 x86_64",
"ami_name": "salt-project/ci/debian/12/x86_64/20240509.1530",
"ami_name": "salt-project/ci/debian/12/x86_64/20240912.2135",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -90,9 +80,9 @@
"ssh_username": "admin"
},
"fedora-40-arm64": {
"ami": "ami-064df327a55f83953",
"ami": "ami-03be8e03c17f1abeb",
"ami_description": "CI Image of Fedora 40 arm64",
"ami_name": "salt-project/ci/fedora/40/arm64/20240509.1530",
"ami_name": "salt-project/ci/fedora/40/arm64/20240912.2136",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -100,9 +90,9 @@
"ssh_username": "fedora"
},
"fedora-40": {
"ami": "ami-08d8dbd4f063788de",
"ami": "ami-060a59b30809758b2",
"ami_description": "CI Image of Fedora 40 x86_64",
"ami_name": "salt-project/ci/fedora/40/x86_64/20240509.1530",
"ami_name": "salt-project/ci/fedora/40/x86_64/20240912.2136",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -110,9 +100,9 @@
"ssh_username": "fedora"
},
"opensuse-15": {
"ami": "ami-0f82d5ab3015af6ad",
"ami": "ami-0aaf63315ada5365b",
"ami_description": "CI Image of Opensuse 15 x86_64",
"ami_name": "salt-project/ci/opensuse/15/x86_64/20240509.1529",
"ami_name": "salt-project/ci/opensuse/15/x86_64/20240912.2135",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -120,9 +110,9 @@
"ssh_username": "ec2-user"
},
"photonos-4-arm64": {
"ami": "ami-0ea152c346cb8e13b",
"ami": "ami-0d425acec9d0d78a5",
"ami_description": "CI Image of PhotonOS 4 arm64",
"ami_name": "salt-project/ci/photonos/4/arm64/20240509.1530",
"ami_name": "salt-project/ci/photonos/4/arm64/20240912.2136",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -130,9 +120,9 @@
"ssh_username": "root"
},
"photonos-4": {
"ami": "ami-09b55d0bf3a1aa7e5",
"ami": "ami-056d988807f8b586d",
"ami_description": "CI Image of PhotonOS 4 x86_64",
"ami_name": "salt-project/ci/photonos/4/x86_64/20240509.1530",
"ami_name": "salt-project/ci/photonos/4/x86_64/20240912.2136",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -140,9 +130,9 @@
"ssh_username": "root"
},
"photonos-5-arm64": {
"ami": "ami-09de4952bc9fc068a",
"ami": "ami-059f47b459d04544a",
"ami_description": "CI Image of PhotonOS 5 arm64",
"ami_name": "salt-project/ci/photonos/5/arm64/20240509.1530",
"ami_name": "salt-project/ci/photonos/5/arm64/20240912.2136",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -150,9 +140,9 @@
"ssh_username": "root"
},
"photonos-5": {
"ami": "ami-0c3375a583643fc77",
"ami": "ami-06424daf7c85ffff0",
"ami_description": "CI Image of PhotonOS 5 x86_64",
"ami_name": "salt-project/ci/photonos/5/x86_64/20240509.1530",
"ami_name": "salt-project/ci/photonos/5/x86_64/20240912.2136",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -160,9 +150,9 @@
"ssh_username": "root"
},
"rockylinux-8-arm64": {
"ami": "ami-0662cc201cada14b8",
"ami": "ami-0a21b175629f1a793",
"ami_description": "CI Image of RockyLinux 8 arm64",
"ami_name": "salt-project/ci/rockylinux/8/arm64/20240509.1530",
"ami_name": "salt-project/ci/rockylinux/8/arm64/20240912.2136",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -170,9 +160,9 @@
"ssh_username": "rocky"
},
"rockylinux-8": {
"ami": "ami-071ca70a907d79e05",
"ami": "ami-01032695e18f0fe85",
"ami_description": "CI Image of RockyLinux 8 x86_64",
"ami_name": "salt-project/ci/rockylinux/8/x86_64/20240509.1530",
"ami_name": "salt-project/ci/rockylinux/8/x86_64/20240912.2136",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -180,9 +170,9 @@
"ssh_username": "rocky"
},
"rockylinux-9-arm64": {
"ami": "ami-065842dfdf03a1a03",
"ami": "ami-0c9147ca5f07effc6",
"ami_description": "CI Image of RockyLinux 9 arm64",
"ami_name": "salt-project/ci/rockylinux/9/arm64/20240509.1530",
"ami_name": "salt-project/ci/rockylinux/9/arm64/20240912.2136",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -190,9 +180,9 @@
"ssh_username": "rocky"
},
"rockylinux-9": {
"ami": "ami-09f5d6df00e99ba16",
"ami": "ami-01a72f34d198efc4a",
"ami_description": "CI Image of RockyLinux 9 x86_64",
"ami_name": "salt-project/ci/rockylinux/9/x86_64/20240509.1530",
"ami_name": "salt-project/ci/rockylinux/9/x86_64/20240912.2136",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -200,9 +190,9 @@
"ssh_username": "rocky"
},
"ubuntu-20.04-arm64": {
"ami": "ami-00171fa604b826054",
"ami": "ami-0bf8ea4c07a88d6c5",
"ami_description": "CI Image of Ubuntu 20.04 arm64",
"ami_name": "salt-project/ci/ubuntu/20.04/arm64/20240509.1530",
"ami_name": "salt-project/ci/ubuntu/20.04/arm64/20240912.2136",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -210,9 +200,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-20.04": {
"ami": "ami-07ddfbdc489064022",
"ami": "ami-08a84f7455622c3d5",
"ami_description": "CI Image of Ubuntu 20.04 x86_64",
"ami_name": "salt-project/ci/ubuntu/20.04/x86_64/20240509.1530",
"ami_name": "salt-project/ci/ubuntu/20.04/x86_64/20240912.2136",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -220,9 +210,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-22.04-arm64": {
"ami": "ami-0e6b6fc1dd298e055",
"ami": "ami-0415a2d2279277d61",
"ami_description": "CI Image of Ubuntu 22.04 arm64",
"ami_name": "salt-project/ci/ubuntu/22.04/arm64/20240509.1530",
"ami_name": "salt-project/ci/ubuntu/22.04/arm64/20240912.2136",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -230,9 +220,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-22.04": {
"ami": "ami-0736289579c0d01ba",
"ami": "ami-055513129ce06397c",
"ami_description": "CI Image of Ubuntu 22.04 x86_64",
"ami_name": "salt-project/ci/ubuntu/22.04/x86_64/20240509.1530",
"ami_name": "salt-project/ci/ubuntu/22.04/x86_64/20240912.2136",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -240,9 +230,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-24.04-arm64": {
"ami": "ami-015058823f69446b3",
"ami": "ami-035ef6d54ec25b0fa",
"ami_description": "CI Image of Ubuntu 24.04 arm64",
"ami_name": "salt-project/ci/ubuntu/24.04/arm64/20240509.1530",
"ami_name": "salt-project/ci/ubuntu/24.04/arm64/20240912.2136",
"arch": "arm64",
"cloudwatch-agent-available": "true",
"instance_type": "m6g.large",
@ -250,9 +240,9 @@
"ssh_username": "ubuntu"
},
"ubuntu-24.04": {
"ami": "ami-0eb04152e7cafaaf9",
"ami": "ami-0a287b781a487ec65",
"ami_description": "CI Image of Ubuntu 24.04 x86_64",
"ami_name": "salt-project/ci/ubuntu/24.04/x86_64/20240509.1530",
"ami_name": "salt-project/ci/ubuntu/24.04/x86_64/20240912.2136",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.large",
@ -260,9 +250,9 @@
"ssh_username": "ubuntu"
},
"windows-2016": {
"ami": "ami-06026cb4d83072df5",
"ami": "ami-030cdb60764141f56",
"ami_description": "CI Image of Windows 2016 x86_64",
"ami_name": "salt-project/ci/windows/2016/x86_64/20240509.1530",
"ami_name": "salt-project/ci/windows/2016/x86_64/20240913.1756",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.xlarge",
@ -270,9 +260,9 @@
"ssh_username": "Administrator"
},
"windows-2019": {
"ami": "ami-095a9256ec0e8261c",
"ami": "ami-08f10b0d4914572de",
"ami_description": "CI Image of Windows 2019 x86_64",
"ami_name": "salt-project/ci/windows/2019/x86_64/20240509.1530",
"ami_name": "salt-project/ci/windows/2019/x86_64/20240913.1756",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.xlarge",
@ -280,9 +270,9 @@
"ssh_username": "Administrator"
},
"windows-2022": {
"ami": "ami-0d295c0711e513c05",
"ami": "ami-07eda52ffbd76a4c6",
"ami_description": "CI Image of Windows 2022 x86_64",
"ami_name": "salt-project/ci/windows/2022/x86_64/20240509.1530",
"ami_name": "salt-project/ci/windows/2022/x86_64/20240913.1756",
"arch": "x86_64",
"cloudwatch-agent-available": "true",
"instance_type": "t3a.xlarge",


@ -7,7 +7,6 @@ release_branches:
mandatory_os_slugs:
- rockylinux-9
- amazonlinux-2023-arm64
- archlinux-lts
- photonos-5-arm64
- macos-12
- ubuntu-24.04-arm64


@ -7,4 +7,4 @@ following the instructions in the
The Salt Project community can help offer advice and help troubleshoot
technical issues as you're learning about Salt. One of the best places to
talk to the community is on the
`Salt Project Slack workspace <https://saltstackcommunity.slack.com/>`_.
`Salt Project Discord Community <https://discord.com/invite/J7b7EscrAs>`_.


@ -181,7 +181,7 @@ rst_prolog = """\
.. _`salt-users`: https://groups.google.com/forum/#!forum/salt-users
.. _`salt-announce`: https://groups.google.com/forum/#!forum/salt-announce
.. _`salt-packagers`: https://groups.google.com/forum/#!forum/salt-packagers
.. _`salt-slack`: https://via.vmw.com/salt-slack
.. _`salt-discord`: https://discord.com/invite/J7b7EscrAs
.. |windownload| raw:: html
<p>Python3 x86: <a


@ -123,4 +123,4 @@ In addition to the mailing lists, SaltStack also provides the following resource
* `SaltStack Security Announcements <https://www.saltstack.com/security-announcements/>`__ landing page
* `SaltStack Security RSS Feed <http://www.saltstack.com/feed/?post_type=security>`__
* `SaltStack Community Slack Workspace <http://saltstackcommunity.slack.com/>`__
* `Salt Project Discord Community <https://discord.com/invite/J7b7EscrAs>`__


@ -222,7 +222,7 @@ repository in your own account on GitHub and notify a SaltStack employee when
it is ready. We will add you to the Contributors team on the
`saltstack-formulas`_ organization and help you transfer the repository over.
Ping a SaltStack employee on IRC (`#salt`_ on LiberaChat), join the
``#formulas`` channel on the `salt-slack`_ (bridged to ``#saltstack-formulas``
``#formulas`` channel on the `salt-discord`_ (bridged to ``#saltstack-formulas``
on LiberaChat) or send an email to the `salt-users`_ mailing list. Note that
IRC logs are available at http://ngxbot.nginx.org/logs/%23salt/ and archives
for FreeNode (up to mid-June 2021) https://logbot-archive.s3.amazonaws.com/freenode/salt.gz


@ -46,7 +46,7 @@ example):
#. Publish the docs.
#. Create release at `github`_
#. Update win-repo-ng with new salt versions.
#. Announce release is live to irc, salt-users, salt-announce and release slack
#. Announce release is live to irc, salt-users, salt-announce and release discord
community channel.
@ -79,7 +79,7 @@ for a bugfix release.
#. Publish the docs.
#. Create release at `github`_
#. Update win-repo-ng with new salt versions.
#. Announce release is live to irc, salt-users, salt-announce and release slack channel.
#. Announce release is live to irc, salt-users, salt-announce and release discord channel.
.. _`github`: https://github.com/saltstack/salt/releases
.. _`repo.saltproject.io`: https://repo.saltproject.io


@ -193,12 +193,21 @@ By default, PRs run a limited subset of the test suite against the following
operating systems:
* Linux:
- Latest ``Amazon Linux Arm64``
- Latest ``Amazon Linux x86_64``
- Latest ``Debian Linux Arm64``
- Latest ``Debian Linux x86_64``
- Latest ``Photon OS Arm64``
- Latest ``Photon OS x86_64``
- Latest ``Rocky Linux Arm64``
- Latest ``Rocky Linux x86_64``
- Latest ``Amazon Linux aarch64``
- Latest ``Ubuntu LTS arm64``
- Latest ``Arch Linux x86_64``
* Latest ``Windows Server x86_64``
* Latest ``MacOS arm64``
- Latest ``Ubuntu LTS Arm64``
- Latest ``Ubuntu LTS x86_64``
* Windows Server:
- Latest ``Windows Server x86_64``
* macOS:
- Latest ``MacOS Arm64``
- Latest ``MacOS x86_64``
Optional OS additions
---------------------


@ -211,7 +211,7 @@ can be found on GitHub in the `saltstack-formulas`_ collection of repositories.
If you have any questions, suggestions, or just want to chat with other people
who are using Salt, we have a very active community and we'd love to hear from
you. One of the best places to talk to the community is on the
`Salt Project Slack workspace <https://saltstackcommunity.slack.com/>`_.
`Salt Project Discord Community <https://discord.com/invite/J7b7EscrAs>`_.
In addition, by continuing to the :ref:`Orchestrate Runner <orchestrate-runner>` docs,
you can learn about the powerful orchestration of which Salt is capable.


@ -4,7 +4,7 @@ DH_VERBOSE = 1
.PHONY: override_dh_strip
%:
dh $@
dh $@ --with bash-completion,systemd
# dh_auto_clean tries to invoke distutils causing failures.
override_dh_auto_clean:


@ -22,7 +22,6 @@ case "$1" in
else
db_set salt-api/enabled enabled
db_set salt-api/active active
fi
;;
esac


@ -5,86 +5,47 @@
# TODO: solve somehow completion for salt -G pythonversion:[tab]
# (not sure what to do with lists)
# TODO: --range[tab] -- how?
# TODO: -E --exsel[tab] -- how?
# TODO: --compound[tab] -- how?
# TODO: use history to extract some words, esp. if ${cur} is empty
# TODO: TEST EVERYTHING a lot
# TODO: TEST EVERYTING a lot
# TODO: cache results of some functions? where? how long?
# TODO: is it ok to use '--timeout 2' ?
_salt_get_grains(){
if [ "$1" = 'local' ] ; then
salt-call --log-level=error --out=txt -- grains.ls | sed 's/^.*\[//' | tr -d ",']" |sed 's:\([a-z0-9]\) :\1\: :g'
salt-call --out=txt -- grains.ls | sed 's/^.*\[//' | tr -d ",']" |sed 's:\([a-z0-9]\) :\1\: :g'
else
salt '*' --timeout 2 --hide-timeout --log-level=error --out=txt -- grains.ls | sed 's/^.*\[//' | tr -d ",']" |sed 's:\([a-z0-9]\) :\1\: :g'
salt '*' --timeout 2 --out=txt -- grains.ls | sed 's/^.*\[//' | tr -d ",']" |sed 's:\([a-z0-9]\) :\1\: :g'
fi
}
_salt_get_grain_values(){
if [ "$1" = 'local' ] ; then
salt-call --log-level=error --out=txt -- grains.item $1 |sed 's/^\S*:\s//' |grep -v '^\s*$'
salt-call --out=txt -- grains.item $1 |sed 's/^\S*:\s//' |grep -v '^\s*$'
else
salt '*' --timeout 2 --hide-timeout --log-level=error --out=txt -- grains.item $1 |sed 's/^\S*:\s//' |grep -v '^\s*$'
salt '*' --timeout 2 --out=txt -- grains.item $1 |sed 's/^\S*:\s//' |grep -v '^\s*$'
fi
}
_salt_get_keys(){
for type in $*; do
# remove header from data:
salt-key --no-color -l $type | tail -n+2
done
}
_salt_list_functions(){
# salt-call: get all functions on this minion
# salt: get all functions on all minions
# sed: remove all array overhead and convert to newline separated list
# sort: chop out doubled entries, so overhead is minimal later during actual completion
if [ "$1" = 'local' ] ; then
salt-call --log-level=quiet --out=txt -- sys.list_functions \
| sed "s/^.*\[//;s/[],']//g;s/ /\n/g" \
| sort -u
else
salt '*' --timeout 2 --hide-timeout --log-level=quiet --out=txt -- sys.list_functions \
| sed "s/^.*\[//;s/[],']//g;s/ /\n/g" \
| sort -u
fi
}
_salt_get_coms() {
CACHE_DIR="$HOME/.cache/salt-${1}-comp-cache_functions"
local _salt_cache_functions=${SALT_COMP_CACHE_FUNCTIONS:=$CACHE_DIR}
local _salt_cache_timeout=${SALT_COMP_CACHE_TIMEOUT:='last hour'}
if [ ! -d "$(dirname ${_salt_cache_functions})" ]; then
mkdir -p "$(dirname ${_salt_cache_functions})"
fi
# Regenerate cache if timed out
if [[ "$(stat --format=%Z ${_salt_cache_functions} 2>/dev/null)" -lt "$(date --date="${_salt_cache_timeout}" +%s)" ]]; then
_salt_list_functions $1 > "${_salt_cache_functions}"
fi
# filter results, to only print the part to next dot (or end of function)
sed 's/^\('${cur}'\(\.\|[^.]*\)\)\?.*/\1/' "${_salt_cache_functions}" | sort -u
}
_salt(){
local cur prev opts _salt_grains _salt_coms pprev ppprev
COMPREPLY=()
cur="${COMP_WORDS[COMP_CWORD]}"
prev="${COMP_WORDS[COMP_CWORD-1]}"
if [ ${COMP_CWORD} -gt 2 ]; then
pprev="${COMP_WORDS[COMP_CWORD-2]}"
pprev="${COMP_WORDS[COMP_CWORD-2]}"
fi
if [ ${COMP_CWORD} -gt 3 ]; then
ppprev="${COMP_WORDS[COMP_CWORD-3]}"
ppprev="${COMP_WORDS[COMP_CWORD-3]}"
fi
opts="-h --help -d --doc --documentation --version --versions-report -c \
--config-dir= -v --verbose -t --timeout= -s --static -b --batch= \
--batch-size= -E --pcre -L --list -G --grain --grain-pcre -N \
--nodegroup -R --range -C --compound -I --pillar \
--nodegroup -R --range -C --compound -X --exsel -I --pillar \
--return= -a --auth= --eauth= --extended-auth= -T --make-token -S \
--ipcidr --out=pprint --out=yaml --out=overstatestage --out=json \
--out=raw --out=highstate --out=key --out=txt --no-color --out-indent= "
@ -98,7 +59,7 @@ _salt(){
case "${pprev}" in
-G|--grain|--grain-pcre)
if [ "${cur}" = ":" ]; then
COMPREPLY=($(compgen -W "`_salt_get_grain_values ${prev}`"))
COMPREPLY=($(compgen -W "`_salt_get_grain_values ${prev}`" ))
return 0
fi
;;
@ -126,17 +87,17 @@ _salt(){
return 0
;;
salt)
COMPREPLY=($(compgen -W "\'*\' ${opts} $(_salt_get_keys acc)" -- ${cur}))
COMPREPLY=($(compgen -W "\'*\' ${opts} `salt-key --no-color -l acc`" -- ${cur}))
return 0
;;
-E|--pcre)
COMPREPLY=($(compgen -W "$(_salt_get_keys acc)" -- ${cur}))
COMPREPLY=($(compgen -W "`salt-key --no-color -l acc`" -- ${cur}))
return 0
;;
-G|--grain|--grain-pcre)
COMPREPLY=($(compgen -W "$(_salt_get_grains)" -- ${cur}))
return 0
;;
;;
-C|--compound)
COMPREPLY=() # TODO: finish this one? how?
return 0
@ -149,18 +110,17 @@ _salt(){
COMPREPLY=($(compgen -W "1 2 3 4 5 6 7 8 9 10 15 20 30 40 50 60 70 80 90 100 120 150 200"))
return 0
;;
-X|--exsel) # TODO: finish this one? how?
return 0
;;
-N|--nodegroup)
MASTER_CONFIG='/etc/salt/master'
MASTER_CONFIG='/etc/salt/master'
COMPREPLY=($(compgen -W "`awk -F ':' 'BEGIN {print_line = 0}; /^nodegroups/ {print_line = 1;getline } print_line && /^ */ {print $1} /^[^ ]/ {print_line = 0}' <${MASTER_CONFIG}`" -- ${cur}))
return 0
;;
esac
_salt_coms=$(_salt_get_coms remote)
# If there are still dots in the suggestion, do not append space
grep "^${cur}.*\." "${_salt_coms}" &>/dev/null && compopt -o nospace
_salt_coms="$(salt '*' --timeout 2 --out=txt -- sys.list_functions | sed 's/^.*\[//' | tr -d ",']" )"
all="${opts} ${_salt_coms}"
COMPREPLY=( $(compgen -W "${all}" -- ${cur}) )
@ -202,15 +162,15 @@ _saltkey(){
case "${prev}" in
-a|--accept)
COMPREPLY=($(compgen -W "$(_salt_get_keys un rej)" -- ${cur}))
COMPREPLY=($(compgen -W "$(salt-key -l un --no-color; salt-key -l rej --no-color)" -- ${cur}))
return 0
;;
-r|--reject)
COMPREPLY=($(compgen -W "$(_salt_get_keys acc)" -- ${cur}))
COMPREPLY=($(compgen -W "$(salt-key -l acc --no-color)" -- ${cur}))
return 0
;;
-d|--delete)
COMPREPLY=($(compgen -W "$(_salt_get_keys acc un rej)" -- ${cur}))
COMPREPLY=($(compgen -W "$(salt-key -l acc --no-color; salt-key -l un --no-color; salt-key -l rej --no-color)" -- ${cur}))
return 0
;;
-c|--config)
@ -229,7 +189,7 @@ _saltkey(){
return 0
;;
-p|--print)
COMPREPLY=($(compgen -W "$(_salt_get_keys acc un rej)" -- ${cur}))
COMPREPLY=($(compgen -W "$(salt-key -l acc --no-color; salt-key -l un --no-color; salt-key -l rej --no-color)" -- ${cur}))
return 0
;;
-l|--list)
@ -237,7 +197,7 @@ _saltkey(){
return 0
;;
--accept-all)
return 0
return 0
;;
esac
COMPREPLY=($(compgen -W "${opts} " -- ${cur}))
@ -276,26 +236,22 @@ _saltcall(){
case ${prev} in
-m|--module-dirs)
COMPREPLY=( $(compgen -d ${cur} ))
return 0
;;
-l|--log-level)
COMPREPLY=( $(compgen -W "info none garbage trace warning error debug" -- ${cur}))
return 0
;;
-g|grains)
return 0
;;
-l|--log-level)
COMPREPLY=( $(compgen -W "info none garbage trace warning error debug" -- ${cur}))
return 0
;;
-g|grains)
return 0
;;
salt-call)
;;
salt-call)
COMPREPLY=($(compgen -W "${opts}" -- ${cur}))
return 0
;;
return 0
;;
esac
_salt_coms=$(_salt_get_coms local)
# If there are still dots in the suggestion, do not append space
grep "^${cur}.*\." "${_salt_coms}" &>/dev/null && compopt -o nospace
_salt_coms="$(salt-call --out=txt -- sys.list_functions|sed 's/^.*\[//' | tr -d ",']" )"
COMPREPLY=( $(compgen -W "${opts} ${_salt_coms}" -- ${cur} ))
return 0
}
@ -311,7 +267,7 @@ _saltcp(){
opts="-t --timeout= -s --static -b --batch= --batch-size= \
-h --help --version --versions-report -c --config-dir= \
-E --pcre -L --list -G --grain --grain-pcre -N --nodegroup \
-R --range -C --compound -I --pillar \
-R --range -C --compound -X --exsel -I --pillar \
--out=pprint --out=yaml --out=overstatestage --out=json --out=raw \
--out=highstate --out=key --out=txt --no-color --out-indent= "
if [[ "${cur}" == -* ]] ; then
@ -327,45 +283,46 @@ _saltcp(){
fi
case ${prev} in
salt-cp)
COMPREPLY=($(compgen -W "${opts} $(_salt_get_keys acc)" -- ${cur}))
return 0
;;
salt-cp)
COMPREPLY=($(compgen -W "${opts} `salt-key -l acc --no-color`" -- ${cur}))
return 0
;;
-t|--timeout)
# those numbers are just a hint
# those numbers are just a hint
COMPREPLY=($(compgen -W "2 3 4 8 10 15 20 25 30 40 60 90 120 180 240 300" -- ${cur} ))
return 0
;;
-E|--pcre)
COMPREPLY=($(compgen -W "`salt-key -l acc --no-color`" -- ${cur}))
return 0
;;
-E|--pcre)
COMPREPLY=($(compgen -W "$(_salt_get_keys acc)" -- ${cur}))
return 0
;;
-L|--list)
# IMPROVEMENTS ARE WELCOME
prefpart="${cur%,*},"
postpart=${cur##*,}
filt="^\($(echo ${cur}| sed 's:,:\\|:g')\)$"
helper=($(_salt_get_keys acc | grep -v "${filt}" | sed "s/^/${prefpart}/"))
COMPREPLY=($(compgen -W "${helper[*]}" -- ${cur}))
return 0
;;
-G|--grain|--grain-pcre)
;;
-L|--list)
# IMPROVEMENTS ARE WELCOME
prefpart="${cur%,*},"
postpart=${cur##*,}
filt="^\($(echo ${cur}| sed 's:,:\\|:g')\)$"
helper=($(salt-key -l acc --no-color | grep -v "${filt}" | sed "s/^/${prefpart}/"))
COMPREPLY=($(compgen -W "${helper[*]}" -- ${cur}))
return 0
;;
-G|--grain|--grain-pcre)
COMPREPLY=($(compgen -W "$(_salt_get_grains)" -- ${cur}))
return 0
;;
# FIXME
-R|--range)
# FIXME ??
return 0
;;
-C|--compound)
# FIXME ??
return 0
;;
-c|--config)
COMPREPLY=($(compgen -f -- ${cur}))
return 0
;;
;;
# FIXME
-R|--range)
# FIXME ??
return 0
;;
-C|--compound)
# FIXME ??
return 0
;;
-c|--config)
COMPREPLY=($(compgen -f -- ${cur}))
return 0
;;
esac
# default is using opts:

View file

@ -1,9 +1,10 @@
#! /usr/bin/dh-exec
pkg/common/logrotate/salt-common /etc/logrotate.d
pkg/common/fish-completions/salt-cp.fish /usr/share/fish/vendor_completions.d
pkg/common/fish-completions/salt-call.fish /usr/share/fish/vendor_completions.d
pkg/common/fish-completions/salt-syndic.fish /usr/share/fish/vendor_completions.d
pkg/common/fish-completions/salt_common.fish /usr/share/fish/vendor_completions.d
pkg/common/salt.bash /usr/share/bash-completions/completions/salt-common.bash
pkg/common/fish-completions/salt-minion.fish /usr/share/fish/vendor_completions.d
pkg/common/fish-completions/salt-key.fish /usr/share/fish/vendor_completions.d
pkg/common/fish-completions/salt-master.fish /usr/share/fish/vendor_completions.d


@ -1,2 +1,9 @@
# permissions on /var/log/salt to permit adm group ownership
salt-common: non-standard-dir-perm
# minor formatting error in table in man page
salt-common: manpage-has-errors-from-man
opt/saltstack/salt/salt-pip /usr/bin/salt-pip
opt/saltstack/salt/salt-call /usr/bin/salt-call
usr/share/bash-completion/completions/salt-common usr/share/bash-completion/completions/salt-call


@ -4,3 +4,6 @@ opt/saltstack/salt/salt-cp /usr/bin/salt-cp
opt/saltstack/salt/salt-key /usr/bin/salt-key
opt/saltstack/salt/salt-run /usr/bin/salt-run
opt/saltstack/salt/spm /usr/bin/spm
usr/share/bash-completion/completions/salt-common usr/share/bash-completion/completions/salt
usr/share/bash-completion/completions/salt-common usr/share/bash-completion/completions/salt-cp
usr/share/bash-completion/completions/salt-common usr/share/bash-completion/completions/salt-key


@ -39,7 +39,6 @@ case "$1" in
else
db_set salt-master/enabled enabled
db_set salt-master/active active
fi
;;
esac


@ -24,7 +24,6 @@ case "$1" in
else
db_set salt-minion/enabled enabled
db_set salt-minion/active active
fi
;;
esac


@ -0,0 +1,37 @@
#!/bin/sh
. /usr/share/debconf/confmodule
case "$1" in
configure)
db_get salt-syndic/user
if [ "$RET" != "root" ]; then
if [ ! -e "/var/log/salt/syndic" ]; then
touch /var/log/salt/syndic
chmod 640 /var/log/salt/syndic
fi
chown $RET:$RET /var/log/salt/syndic
fi
if command -v systemctl; then
db_get salt-syndic/active
RESLT=$(echo "$RET" | cut -d ' ' -f 1)
if [ "$RESLT" != 10 ]; then
systemctl daemon-reload
if [ "$RESLT" = "active" ]; then
systemctl restart salt-syndic
fi
db_get salt-syndic/enabled
RESLT=$(echo "$RET" | cut -d ' ' -f 1)
if [ "$RESLT" = "disabled" ]; then
systemctl disable salt-syndic
else
systemctl enable salt-syndic
fi
else
systemctl daemon-reload
systemctl restart salt-syndic
systemctl enable salt-syndic
fi
fi
;;
esac


@ -0,0 +1,27 @@
#!/bin/sh
. /usr/share/debconf/confmodule
case "$1" in
upgrade)
[ -z "$SALT_HOME" ] && SALT_HOME=/opt/saltstack/salt
[ -z "$SALT_USER" ] && SALT_USER=salt
[ -z "$SALT_NAME" ] && SALT_NAME="Salt"
[ -z "$SALT_GROUP" ] && SALT_GROUP=salt
# Reset permissions to fix previous installs
CUR_USER=$(ls -dl /run/salt-syndic.pid | cut -d ' ' -f 3)
CUR_GROUP=$(ls -dl /run/salt-syndic.pid | cut -d ' ' -f 4)
db_set salt-syndic/user $CUR_USER
chown -R $CUR_USER:$CUR_GROUP /var/log/salt/syndic
if command -v systemctl; then
SM_ENABLED=$(systemctl show -p UnitFileState salt-syndic | cut -d '=' -f 2)
db_set salt-syndic/enabled $SM_ENABLED
SM_ACTIVE=$(systemctl is-active salt-syndic)
db_set salt-syndic/active $SM_ACTIVE
else
db_set salt-syndic/enabled enabled
db_set salt-syndic/active active
fi
;;
esac

View file

@ -0,0 +1,17 @@
Template: salt-syndic/user
Type: string
Default: salt
Description: User for salt-syndic
User to run the salt-syndic process as
Template: salt-syndic/enabled
Type: string
Default: enabled
Description: Systemd enable state for salt-syndic
default enable state for salt-syndic systemd state
Template: salt-syndic/active
Type: string
Default: active
Description: Systemd active state for salt-syndic
default active state for salt-syndic systemd state

View file

@ -1 +0,0 @@
../common/salt.bash

pkg/rpm/salt.bash Normal file
View file

@ -0,0 +1,375 @@
# written by David Pravec
# - feel free to /msg alekibango on IRC if you want to talk about this file
# TODO: check if --config|-c was used and use configured config file for queries
# TODO: solve somehow completion for salt -G pythonversion:[tab]
# (not sure what to do with lists)
# TODO: --range[tab] -- how?
# TODO: --compound[tab] -- how?
# TODO: use history to extract some words, esp. if ${cur} is empty
# TODO: TEST EVERYTHING a lot
# TODO: is it ok to use '--timeout 2' ?
_salt_get_grains(){
if [ "$1" = 'local' ] ; then
salt-call --log-level=error --out=txt -- grains.ls | sed 's/^.*\[//' | tr -d ",']" |sed 's:\([a-z0-9]\) :\1\: :g'
else
salt '*' --timeout 2 --hide-timeout --log-level=error --out=txt -- grains.ls | sed 's/^.*\[//' | tr -d ",']" |sed 's:\([a-z0-9]\) :\1\: :g'
fi
}
_salt_get_grain_values(){
if [ "$1" = 'local' ] ; then
salt-call --log-level=error --out=txt -- grains.item $1 |sed 's/^\S*:\s//' |grep -v '^\s*$'
else
salt '*' --timeout 2 --hide-timeout --log-level=error --out=txt -- grains.item $1 |sed 's/^\S*:\s//' |grep -v '^\s*$'
fi
}
_salt_get_keys(){
for type in $*; do
# remove header from data:
salt-key --no-color -l $type | tail -n+2
done
}
_salt_list_functions(){
# salt-call: get all functions on this minion
# salt: get all functions on all minions
# sed: remove all array overhead and convert to newline separated list
# sort: chop out doubled entries, so overhead is minimal later during actual completion
if [ "$1" = 'local' ] ; then
salt-call --log-level=quiet --out=txt -- sys.list_functions \
| sed "s/^.*\[//;s/[],']//g;s/ /\n/g" \
| sort -u
else
salt '*' --timeout 2 --hide-timeout --log-level=quiet --out=txt -- sys.list_functions \
| sed "s/^.*\[//;s/[],']//g;s/ /\n/g" \
| sort -u
fi
}
_salt_get_coms() {
CACHE_DIR="$HOME/.cache/salt-${1}-comp-cache_functions"
local _salt_cache_functions=${SALT_COMP_CACHE_FUNCTIONS:=$CACHE_DIR}
local _salt_cache_timeout=${SALT_COMP_CACHE_TIMEOUT:='last hour'}
if [ ! -d "$(dirname ${_salt_cache_functions})" ]; then
mkdir -p "$(dirname ${_salt_cache_functions})"
fi
# Regenerate cache if timed out
if [[ "$(stat --format=%Z ${_salt_cache_functions} 2>/dev/null)" -lt "$(date --date="${_salt_cache_timeout}" +%s)" ]]; then
_salt_list_functions $1 > "${_salt_cache_functions}"
fi
# filter results, to only print the part to next dot (or end of function)
sed 's/^\('${cur}'\(\.\|[^.]*\)\)\?.*/\1/' "${_salt_cache_functions}" | sort -u
}
_salt(){
local cur prev opts _salt_grains _salt_coms pprev ppprev
COMPREPLY=()
cur="${COMP_WORDS[COMP_CWORD]}"
prev="${COMP_WORDS[COMP_CWORD-1]}"
if [ ${COMP_CWORD} -gt 2 ]; then
pprev="${COMP_WORDS[COMP_CWORD-2]}"
fi
if [ ${COMP_CWORD} -gt 3 ]; then
ppprev="${COMP_WORDS[COMP_CWORD-3]}"
fi
opts="-h --help -d --doc --documentation --version --versions-report -c \
--config-dir= -v --verbose -t --timeout= -s --static -b --batch= \
--batch-size= -E --pcre -L --list -G --grain --grain-pcre -N \
--nodegroup -R --range -C --compound -I --pillar \
--return= -a --auth= --eauth= --extended-auth= -T --make-token -S \
--ipcidr --out=pprint --out=yaml --out=overstatestage --out=json \
--out=raw --out=highstate --out=key --out=txt --no-color --out-indent= "
if [[ "${cur}" == -* ]] ; then
COMPREPLY=($(compgen -W "${opts}" -- ${cur}))
return 0
fi
# 2 special cases for filling up grain values
case "${pprev}" in
-G|--grain|--grain-pcre)
if [ "${cur}" = ":" ]; then
COMPREPLY=($(compgen -W "`_salt_get_grain_values ${prev}`"))
return 0
fi
;;
esac
case "${ppprev}" in
-G|--grain|--grain-pcre)
if [ "${prev}" = ":" ]; then
COMPREPLY=( $(compgen -W "`_salt_get_grain_values ${pprev}`" -- ${cur}) )
return 0
fi
;;
esac
if [ "${cur}" = "=" ] && [[ "${prev}" == --* ]]; then
cur=""
fi
if [ "${prev}" = "=" ] && [[ "${pprev}" == --* ]]; then
prev="${pprev}"
fi
case "${prev}" in
-c|--config)
COMPREPLY=($(compgen -f -- ${cur}))
return 0
;;
salt)
COMPREPLY=($(compgen -W "\'*\' ${opts} $(_salt_get_keys acc)" -- ${cur}))
return 0
;;
-E|--pcre)
COMPREPLY=($(compgen -W "$(_salt_get_keys acc)" -- ${cur}))
return 0
;;
-G|--grain|--grain-pcre)
COMPREPLY=($(compgen -W "$(_salt_get_grains)" -- ${cur}))
return 0
;;
-C|--compound)
COMPREPLY=() # TODO: finish this one? how?
return 0
;;
-t|--timeout)
COMPREPLY=($( compgen -W "1 2 3 4 5 6 7 8 9 10 15 20 30 40 60 90 120 180" -- ${cur}))
return 0
;;
-b|--batch|--batch-size)
COMPREPLY=($(compgen -W "1 2 3 4 5 6 7 8 9 10 15 20 30 40 50 60 70 80 90 100 120 150 200"))
return 0
;;
-N|--nodegroup)
MASTER_CONFIG='/etc/salt/master'
COMPREPLY=($(compgen -W "`awk -F ':' 'BEGIN {print_line = 0}; /^nodegroups/ {print_line = 1;getline } print_line && /^ */ {print $1} /^[^ ]/ {print_line = 0}' <${MASTER_CONFIG}`" -- ${cur}))
return 0
;;
esac
_salt_coms=$(_salt_get_coms remote)
# If there are still dots in the suggestion, do not append space
grep "^${cur}.*\." "${_salt_coms}" &>/dev/null && compopt -o nospace
all="${opts} ${_salt_coms}"
COMPREPLY=( $(compgen -W "${all}" -- ${cur}) )
return 0
}
complete -F _salt salt
_saltkey(){
local cur prev opts prev pprev
COMPREPLY=()
cur="${COMP_WORDS[COMP_CWORD]}"
prev="${COMP_WORDS[COMP_CWORD-1]}"
opts="-c --config-dir= -h --help --version --versions-report -q --quiet \
-y --yes --gen-keys= --gen-keys-dir= --keysize= --key-logfile= \
-l --list= -L --list-all -a --accept= -A --accept-all \
-r --reject= -R --reject-all -p --print= -P --print-all \
-d --delete= -D --delete-all -f --finger= -F --finger-all \
--out=pprint --out=yaml --out=overstatestage --out=json --out=raw \
--out=highstate --out=key --out=txt --no-color --out-indent= "
if [ ${COMP_CWORD} -gt 2 ]; then
pprev="${COMP_WORDS[COMP_CWORD-2]}"
fi
if [ ${COMP_CWORD} -gt 3 ]; then
ppprev="${COMP_WORDS[COMP_CWORD-3]}"
fi
if [[ "${cur}" == -* ]] ; then
COMPREPLY=($(compgen -W "${opts}" -- ${cur}))
return 0
fi
if [ "${cur}" = "=" ] && [[ "${prev}" == --* ]]; then
cur=""
fi
if [ "${prev}" = "=" ] && [[ "${pprev}" == --* ]]; then
prev="${pprev}"
fi
case "${prev}" in
-a|--accept)
COMPREPLY=($(compgen -W "$(_salt_get_keys un rej)" -- ${cur}))
return 0
;;
-r|--reject)
COMPREPLY=($(compgen -W "$(_salt_get_keys acc)" -- ${cur}))
return 0
;;
-d|--delete)
COMPREPLY=($(compgen -W "$(_salt_get_keys acc un rej)" -- ${cur}))
return 0
;;
-c|--config)
COMPREPLY=($(compgen -f -- ${cur}))
return 0
;;
--keysize)
COMPREPLY=($(compgen -W "2048 3072 4096 5120 6144" -- ${cur}))
return 0
;;
--gen-keys)
return 0
;;
--gen-keys-dir)
COMPREPLY=($(compgen -d -- ${cur}))
return 0
;;
-p|--print)
COMPREPLY=($(compgen -W "$(_salt_get_keys acc un rej)" -- ${cur}))
return 0
;;
-l|--list)
COMPREPLY=($(compgen -W "pre un acc accepted unaccepted rej rejected all" -- ${cur}))
return 0
;;
--accept-all)
return 0
;;
esac
COMPREPLY=($(compgen -W "${opts} " -- ${cur}))
return 0
}
complete -F _saltkey salt-key
_saltcall(){
local cur prev opts _salt_coms pprev ppprev
COMPREPLY=()
cur="${COMP_WORDS[COMP_CWORD]}"
prev="${COMP_WORDS[COMP_CWORD-1]}"
opts="-h --help -d --doc --documentation --version --versions-report \
-m --module-dirs= -g --grains --return= --local -c --config-dir= -l --log-level= \
--out=pprint --out=yaml --out=overstatestage --out=json --out=raw \
--out=highstate --out=key --out=txt --no-color --out-indent= "
if [ ${COMP_CWORD} -gt 2 ]; then
pprev="${COMP_WORDS[COMP_CWORD-2]}"
fi
if [ ${COMP_CWORD} -gt 3 ]; then
ppprev="${COMP_WORDS[COMP_CWORD-3]}"
fi
if [[ "${cur}" == -* ]] ; then
COMPREPLY=($(compgen -W "${opts}" -- ${cur}))
return 0
fi
if [ "${cur}" = "=" ] && [[ ${prev} == --* ]]; then
cur=""
fi
if [ "${prev}" = "=" ] && [[ ${pprev} == --* ]]; then
prev="${pprev}"
fi
case ${prev} in
-m|--module-dirs)
COMPREPLY=( $(compgen -d ${cur} ))
return 0
;;
-l|--log-level)
COMPREPLY=( $(compgen -W "info none garbage trace warning error debug" -- ${cur}))
return 0
;;
-g|grains)
return 0
;;
salt-call)
COMPREPLY=($(compgen -W "${opts}" -- ${cur}))
return 0
;;
esac
_salt_coms=$(_salt_get_coms local)
# If there are still dots in the suggestion, do not append space
grep "^${cur}.*\." "${_salt_coms}" &>/dev/null && compopt -o nospace
COMPREPLY=( $(compgen -W "${opts} ${_salt_coms}" -- ${cur} ))
return 0
}
complete -F _saltcall salt-call
_saltcp(){
local cur prev opts target prefpart postpart helper filt pprev ppprev
COMPREPLY=()
cur="${COMP_WORDS[COMP_CWORD]}"
prev="${COMP_WORDS[COMP_CWORD-1]}"
opts="-t --timeout= -s --static -b --batch= --batch-size= \
-h --help --version --versions-report -c --config-dir= \
-E --pcre -L --list -G --grain --grain-pcre -N --nodegroup \
-R --range -C --compound -I --pillar \
--out=pprint --out=yaml --out=overstatestage --out=json --out=raw \
--out=highstate --out=key --out=txt --no-color --out-indent= "
if [[ "${cur}" == -* ]] ; then
COMPREPLY=($(compgen -W "${opts}" -- ${cur}))
return 0
fi
if [ "${cur}" = "=" ] && [[ "${prev}" == --* ]]; then
cur=""
fi
if [ "${prev}" = "=" ] && [[ "${pprev}" == --* ]]; then
prev=${pprev}
fi
case ${prev} in
salt-cp)
COMPREPLY=($(compgen -W "${opts} $(_salt_get_keys acc)" -- ${cur}))
return 0
;;
-t|--timeout)
# those numbers are just a hint
COMPREPLY=($(compgen -W "2 3 4 8 10 15 20 25 30 40 60 90 120 180 240 300" -- ${cur} ))
return 0
;;
-E|--pcre)
COMPREPLY=($(compgen -W "$(_salt_get_keys acc)" -- ${cur}))
return 0
;;
-L|--list)
# IMPROVEMENTS ARE WELCOME
prefpart="${cur%,*},"
postpart=${cur##*,}
filt="^\($(echo ${cur}| sed 's:,:\\|:g')\)$"
helper=($(_salt_get_keys acc | grep -v "${filt}" | sed "s/^/${prefpart}/"))
COMPREPLY=($(compgen -W "${helper[*]}" -- ${cur}))
return 0
;;
-G|--grain|--grain-pcre)
COMPREPLY=($(compgen -W "$(_salt_get_grains)" -- ${cur}))
return 0
;;
# FIXME
-R|--range)
# FIXME ??
return 0
;;
-C|--compound)
# FIXME ??
return 0
;;
-c|--config)
COMPREPLY=($(compgen -f -- ${cur}))
return 0
;;
esac
# default is using opts:
COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) )
}
complete -F _saltcp salt-cp

View file

@ -283,7 +283,7 @@ install -p -m 0644 %{_salt_src}/pkg/common/logrotate/salt-common %{buildroot}%{_
# Bash completion
mkdir -p %{buildroot}%{_sysconfdir}/bash_completion.d/
install -p -m 0644 %{_salt_src}/pkg/common/salt.bash %{buildroot}%{_sysconfdir}/bash_completion.d/salt.bash
install -p -m 0644 %{_salt_src}/pkg/rpm/salt.bash %{buildroot}%{_sysconfdir}/bash_completion.d/salt.bash
# Fish completion (TBD remove -v)
mkdir -p %{buildroot}%{fish_dir}
@ -441,6 +441,15 @@ if [ $1 -gt 1 ] ; then
%global _MS_CUR_GROUP %{_MS_LCUR_GROUP}
fi
%pre syndic
if [ $1 -gt 1 ] ; then
# Reset permissions to match previous installs - performing upgrade
_MS_LCUR_USER=$(ls -dl /run/salt/master | cut -d ' ' -f 3)
_MS_LCUR_GROUP=$(ls -dl /run/salt/master | cut -d ' ' -f 4)
%global _MS_CUR_USER %{_MS_LCUR_USER}
%global _MS_CUR_GROUP %{_MS_LCUR_GROUP}
fi
%pre minion
if [ $1 -gt 1 ] ; then
# Reset permissions to match previous installs - performing upgrade
@ -463,6 +472,14 @@ if [ $1 -eq 0 ] ; then
/bin/systemctl stop salt-syndic.service > /dev/null 2>&1 || :
fi
%preun syndic
# %%systemd_preun salt-syndic.service
if [ $1 -eq 0 ] ; then
# Package removal, not upgrade
/bin/systemctl --no-reload disable salt-syndic.service > /dev/null 2>&1 || :
/bin/systemctl stop salt-syndic.service > /dev/null 2>&1 || :
fi
%preun minion
# %%systemd_preun salt-minion.service
if [ $1 -eq 0 ] ; then
@ -471,7 +488,6 @@ if [ $1 -eq 0 ] ; then
/bin/systemctl stop salt-minion.service > /dev/null 2>&1 || :
fi
%preun api
# %%systemd_preun salt-api.service
if [ $1 -eq 0 ] ; then
@ -602,6 +618,19 @@ else
fi
%posttrans syndic
if [ ! -e "/var/log/salt/syndic" ]; then
touch /var/log/salt/syndic
chmod 640 /var/log/salt/syndic
fi
if [ $1 -gt 1 ] ; then
# Reset permissions to match previous installs - performing upgrade
chown -R %{_MS_CUR_USER}:%{_MS_CUR_GROUP} /var/log/salt/syndic
else
chown -R %{_SALT_USER}:%{_SALT_GROUP} /var/log/salt/syndic
fi
%posttrans api
if [ ! -e "/var/log/salt/api" ]; then
touch /var/log/salt/api

View file

@ -158,6 +158,9 @@ LOGGING_LOGGER_CLASS = logging.getLoggerClass()
class SaltLoggingClass(LOGGING_LOGGER_CLASS, metaclass=LoggingMixinMeta):
ONCECACHE = set()
def __new__(cls, *args):
"""
We override `__new__` in our logging logger class in order to provide
@ -234,7 +237,13 @@ class SaltLoggingClass(LOGGING_LOGGER_CLASS, metaclass=LoggingMixinMeta):
stack_info=False,
stacklevel=1,
exc_info_on_loglevel=None,
once=False,
):
if once:
if str(args) in self.ONCECACHE:
return
self.ONCECACHE.add(str(args))
if extra is None:
extra = {}
@ -270,6 +279,7 @@ class SaltLoggingClass(LOGGING_LOGGER_CLASS, metaclass=LoggingMixinMeta):
exc_info_on_loglevel
)
)
# XXX: extra is never None
if extra is None:
extra = {"exc_info_on_loglevel": exc_info_on_loglevel}
else:
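The `once` handling added above keeps a class-level ONCECACHE of stringified call arguments so a repeated message is emitted only a single time. A minimal standalone sketch of that deduplication pattern (log_once and _once_cache are illustrative names, not Salt's API):

    import logging

    _once_cache = set()

    def log_once(logger, level, msg, *args):
        # Key on the message and its arguments, mirroring ONCECACHE above
        key = str((msg, args))
        if key in _once_cache:
            return
        _once_cache.add(key)
        logger.log(level, msg, *args)

    logging.basicConfig(level=logging.WARNING)
    log = logging.getLogger(__name__)
    log_once(log, logging.WARNING, "binary %s not found", "virt-what")
    log_once(log, logging.WARNING, "binary %s not found", "virt-what")  # suppressed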

View file

@ -2513,7 +2513,7 @@ def syndic_config(
),
)
),
"user": opts.get("syndic_user", opts["user"]),
"user": opts.get("syndic_user", master_opts["user"]),
"sock_dir": os.path.join(
opts["cachedir"], opts.get("syndic_sock_dir", opts["sock_dir"])
),
@ -2521,6 +2521,7 @@ def syndic_config(
"cachedir": master_opts["cachedir"],
}
opts.update(syndic_opts)
# Prepend root_dir to other paths
prepend_root_dirs = [
"pki_dir",

View file

@ -11,7 +11,6 @@ import time
from collections.abc import Sequence
import salt.loader
import salt.utils.data
import salt.utils.files
import salt.utils.path
import salt.utils.url
@ -147,13 +146,7 @@ def check_file_list_cache(opts, form, list_cache, w_lock):
opts.get("fileserver_list_cache_time", 20),
list_cache,
)
return (
salt.utils.data.decode(
salt.payload.load(fp_).get(form, [])
),
False,
False,
)
return salt.payload.load(fp_).get(form, []), False, False
elif _lock_cache(w_lock):
# Set the w_lock and go
refresh_cache = True
@ -189,7 +182,7 @@ def check_env_cache(opts, env_cache):
try:
with salt.utils.files.fopen(env_cache, "rb") as fp_:
log.trace("Returning env cache data from %s", env_cache)
return salt.utils.data.decode(salt.payload.load(fp_))
return salt.payload.load(fp_)
except OSError:
pass
return None

View file

@ -325,7 +325,7 @@ def file_hash(load, fnd):
def _file_lists(load, form):
"""
Return a dict containing the file lists for files, dirs, emtydirs and symlinks
Return a dict containing the file lists for files, dirs, empty dirs and symlinks
"""
if "env" in load:
# "env" is not supported; Use "saltenv".

View file

@ -1276,6 +1276,7 @@ def _virtual(osdata):
"cannot execute it. Grains output might not be "
"accurate.",
command,
once=True,
)
return grains

View file

@ -11,5 +11,5 @@ def opts():
if __opts__.get("grain_opts", False) or (
isinstance(__pillar__, dict) and __pillar__.get("grain_opts", False)
):
return __opts__
return {"opts": __opts__}
return {}
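For context, grain module return values are merged into the minion's grains, so returning __opts__ directly could let option keys shadow core grains (issue 66784, referenced in the new unit test later in this diff). A rough illustration of why nesting under an "opts" key avoids the collision; the keys are hypothetical and chosen only for the example:

    core_grains = {"os": "Debian", "ipv6": ["::1"]}       # illustrative core grains
    opts = {"grain_opts": True, "ipv6": False}            # illustrative opts subset

    print({**core_grains, **opts}["ipv6"])        # False  - flat merge clobbers the grain
    print({**core_grains, "opts": opts}["ipv6"])  # ['::1'] - preserved with the new shape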

View file

@ -43,6 +43,9 @@ class NamedLoaderContext(collections.abc.MutableMapping):
self.loader_context = loader_context
self.default = default
def with_default(self, default):
return NamedLoaderContext(self.name, self.loader_context, default=default)
def loader(self):
"""
The LazyLoader in the current context. This will return None if there
@ -68,10 +71,12 @@ class NamedLoaderContext(collections.abc.MutableMapping):
loader = self.loader()
if loader is None:
return self.default
if self.name == "__context__":
return loader.pack[self.name]
if self.name == loader.pack_self:
return loader
elif self.name == "__context__":
return loader.pack[self.name]
elif self.name == "__opts__":
return loader.pack[self.name]
try:
return loader.pack[self.name]
except KeyError:
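The new with_default() method is what lets tests hand a plain dict to a dunder (see the __opts__.with_default(minion_opts) usages further down). A simplified sketch of the idea, not the real salt.loader.context implementation: the named handle resolves to whatever the active loader packed under that name, or to the supplied default when no loader is in play.

    class _NamedContext:
        """Toy stand-in for NamedLoaderContext (illustrative only)."""

        def __init__(self, name, default=None):
            self.name = name
            self.default = default

        def with_default(self, default):
            # Same name, different fallback - mirrors the method added above
            return _NamedContext(self.name, default)

        def value(self, loader_pack=None):
            if loader_pack is None:          # no loader active
                return self.default
            return loader_pack.get(self.name, self.default)

    opts_ctx = _NamedContext("__opts__").with_default({"timeout": 5})
    print(opts_ctx.value())                               # {'timeout': 5}
    print(opts_ctx.value({"__opts__": {"timeout": 60}}))  # {'timeout': 60}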

View file

@ -8,3 +8,7 @@ loader_context = salt.loader.context.LoaderContext()
__file_client__ = loader_context.named_context("__file_client__", default=None)
__opts__ = loader_context.named_context("__opts__")
__context__ = loader_context.named_context("__context__")
__pillar__ = loader_context.named_context("__pillar__")
__grains__ = loader_context.named_context("__grains__")

View file

@ -1108,8 +1108,8 @@ class MWorker(salt.utils.process.SignalHandlingProcess):
Create a salt master worker process
:param dict opts: The salt options
:param dict mkey: The user running the salt master and the AES key
:param dict key: The user running the salt master and the RSA key
:param dict mkey: The user running the salt master and the RSA key
:param dict key: The user running the salt master and the AES key
:rtype: MWorker
:return: Master worker

View file

@ -280,6 +280,7 @@ def get_proc_dir(cachedir, **kwargs):
made. Same applies if the directory is already owned by this
gid. Must be int. Works only on unix/unix like systems.
"""
# pylint: disable=logging-fstring-interpolation
fn_ = os.path.join(cachedir, "proc")
mode = kwargs.pop("mode", None)
@ -305,11 +306,13 @@ def get_proc_dir(cachedir, **kwargs):
uid = kwargs.pop("uid", -1)
gid = kwargs.pop("gid", -1)
# pylint: disable=logging-fstring-interpolation
# if uid and gid are both -1 then go ahead with
# no changes at all
if (d_stat.st_uid != uid or d_stat.st_gid != gid) and [
i for i in (uid, gid) if i != -1
]:
# pylint: disable=logging-fstring-interpolation
os.chown(fn_, uid, gid)
return fn_

View file

@ -21,7 +21,13 @@ import salt.utils.path
import salt.utils.templates
import salt.utils.url
from salt.exceptions import CommandExecutionError
from salt.loader.dunder import __file_client__
from salt.loader.dunder import (
__context__,
__file_client__,
__grains__,
__opts__,
__pillar__,
)
log = logging.getLogger(__name__)
@ -168,7 +174,7 @@ def _client():
"""
if __file_client__:
return __file_client__.value()
return salt.fileclient.get_file_client(__opts__)
return salt.fileclient.get_file_client(__opts__.value())
def _render_filenames(path, dest, saltenv, template, **kw):

View file

@ -100,7 +100,7 @@ def import_json(path):
.. code-block:: bash
salt myminion jinja.import_JSON myformula/foo.json
salt myminion jinja.import_json myformula/foo.json
"""
tmplstr = textwrap.dedent(
"""\

View file

@ -636,7 +636,7 @@ def get_computer_name():
.. code-block:: bash
salt '*' network.get_hostname
salt '*' system.get_computer_name
"""
return __salt__["network.get_hostname"]()

View file

@ -52,7 +52,7 @@ from salt.exceptions import CommandExecutionError, SaltRenderError, SaltReqTimeo
from salt.serializers.msgpack import deserialize as msgpack_deserialize
from salt.serializers.msgpack import serialize as msgpack_serialize
from salt.template import compile_template, compile_template_str
from salt.utils.odict import DefaultOrderedDict, OrderedDict
from salt.utils.odict import DefaultOrderedDict, HashableOrderedDict
log = logging.getLogger(__name__)
@ -129,11 +129,6 @@ STATE_INTERNAL_KEYWORDS = STATE_REQUISITE_KEYWORDS.union(
).union(STATE_RUNTIME_KEYWORDS)
class HashableOrderedDict(OrderedDict):
def __hash__(self):
return id(self)
def split_low_tag(tag):
"""
Take a low tag and split it back into the low dict that it came from

View file

@ -282,6 +282,7 @@ import difflib
import itertools
import logging
import os
import pathlib
import posixpath
import re
import shutil
@ -557,7 +558,26 @@ def _gen_recurse_managed_files(
managed_directories.add(mdest)
keep.add(mdest)
return managed_files, managed_directories, managed_symlinks, keep
# Sets are randomly ordered. We need to use a list so we can make sure
# symlinks are always at the end. This is necessary because the file must
# exist before we can create a symlink to it. See issue:
# https://github.com/saltstack/salt/issues/64630
new_managed_files = list(managed_files)
# Now let's move all the symlinks to the end
for link_src_relpath, _ in managed_symlinks:
for file_dest, file_src in managed_files:
# We need to convert relpath to fullpath. We're using pathlib to
# be platform-agnostic
symlink_full_path = pathlib.Path(f"{name}{os.sep}{link_src_relpath}")
file_dest_full_path = pathlib.Path(file_dest)
if symlink_full_path == file_dest_full_path:
new_managed_files.append(
new_managed_files.pop(
new_managed_files.index((file_dest, file_src))
)
)
return new_managed_files, managed_directories, managed_symlinks, keep
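Standalone sketch of the reordering performed above (paths are made up): any managed file that is itself the source of a managed symlink is pushed to the end of the list, so the real target exists before the symlink is laid down.

    import pathlib

    name = "/srv/target"  # hypothetical file.recurse destination
    managed_files = [
        ("/srv/target/symlink", "salt://target/symlink?saltenv=base"),
        ("/srv/target/just_a_file.txt", "salt://target/just_a_file.txt?saltenv=base"),
    ]
    managed_symlinks = {("symlink", "not_a_symlink/symlink")}

    ordered = list(managed_files)
    for link_src_relpath, _ in managed_symlinks:
        for file_dest, file_src in managed_files:
            if pathlib.Path(f"{name}/{link_src_relpath}") == pathlib.Path(file_dest):
                ordered.append(ordered.pop(ordered.index((file_dest, file_src))))

    print([dest for dest, _ in ordered])
    # ['/srv/target/just_a_file.txt', '/srv/target/symlink']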
def _gen_keep_files(name, require, walk_d=None):
@ -4554,18 +4574,26 @@ def recurse(
or immediate subdirectories
keep_symlinks
Keep symlinks when copying from the source. This option will cause
the copy operation to terminate at the symlink. If desire behavior
similar to rsync, then set this to True. This option is not taken
in account if ``fileserver_followsymlinks`` is set to False.
Determines how symbolic links (symlinks) are handled during the copying
process. When set to ``True``, the copy operation will copy the symlink
itself, rather than the file or directory it points to. When set to
``False``, the operation will follow the symlink and copy the target
file or directory. If you want behavior similar to rsync, set this
option to ``True``.
However, if the ``fileserver_followsymlinks`` option is set to ``False``,
the ``keep_symlinks`` setting will be ignored, and symlinks will not be
copied at all.
force_symlinks
Force symlink creation. This option will force the symlink creation.
If a file or directory is obstructing symlink creation it will be
recursively removed so that symlink creation can proceed. This
option is usually not needed except in special circumstances. This
option is not taken in account if ``fileserver_followsymlinks`` is
set to False.
Controls the creation of symlinks when using ``keep_symlinks``. When set
to ``True``, it forces the creation of symlinks by removing any existing
files or directories that might be obstructing their creation. This
removal is done recursively if a directory is blocking the symlink. This
option is only used when ``keep_symlinks`` is passed and is ignored if
``fileserver_followsymlinks`` is set to ``False``.
win_owner
The owner of the symlink and directories if ``makedirs`` is True. If

View file

@ -2964,7 +2964,7 @@ def _uninstall(
try:
pkg_params = __salt__["pkg_resource.parse_targets"](
name, pkgs, normalize=normalize
name, pkgs, normalize=normalize, version=version, **kwargs
)[0]
except MinionError as exc:
return {
@ -3031,7 +3031,7 @@ def _uninstall(
new = __salt__["pkg.list_pkgs"](versions_as_list=True, **kwargs)
failed = []
for param in pkg_params:
if __grains__["os_family"] in ["Suse", "RedHat"]:
if __grains__["os_family"] in ["Suse", "RedHat", "Windows"]:
# Check if the package version set to be removed is actually removed:
if param in new and not pkg_params[param]:
failed.append(param)

View file

@ -182,12 +182,12 @@ def keygen(sk_file=None, pk_file=None, **kwargs):
with salt.utils.files.fopen(sk_file, "rb") as keyf:
sk = salt.utils.stringutils.to_unicode(keyf.read()).rstrip("\n")
sk = base64.b64decode(sk)
kp = nacl.public.PublicKey(sk)
kp = nacl.public.PrivateKey(sk)
with salt.utils.files.fopen(pk_file, "wb") as keyf:
keyf.write(base64.b64encode(kp.encode()))
keyf.write(base64.b64encode(kp.public_key.encode()))
return f"saved pk_file: {pk_file}"
kp = nacl.public.PublicKey.generate()
kp = nacl.public.PrivateKey.generate()
with salt.utils.files.fopen(sk_file, "wb") as keyf:
keyf.write(base64.b64encode(kp.encode()))
if salt.utils.platform.is_windows():
@ -200,7 +200,7 @@ def keygen(sk_file=None, pk_file=None, **kwargs):
# chmod 0600 file
os.chmod(sk_file, 1536)
with salt.utils.files.fopen(pk_file, "wb") as keyf:
keyf.write(base64.b64encode(kp.encode()))
keyf.write(base64.b64encode(kp.public_key.encode()))
return f"saved sk_file:{sk_file} pk_file: {pk_file}"

View file

@ -1740,7 +1740,13 @@ def _netlink_tool_remote_on(port, which_end):
continue
if which_end == "local_port" and int(local_port) != int(port):
continue
remotes.add(remote_host.strip("[]"))
# Interpret IPv4-mapped IPv6 addresses as IPv4 (strip prefix)
remote_host = remote_host.strip("[]").lower()
if remote_host.startswith("::ffff:"):
remote_host = remote_host[7:]
remotes.add(remote_host)
if valid is False:
remotes = None
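The normalization added above can be exercised on its own: an ss line reporting an IPv4-mapped IPv6 peer such as [::ffff:127.0.0.1] now lands in the result set as plain 127.0.0.1 (see the adjusted unit test near the end of this diff).

    def normalize_remote(remote_host):
        # Mirror of the stripping logic above
        remote_host = remote_host.strip("[]").lower()
        if remote_host.startswith("::ffff:"):
            remote_host = remote_host[7:]
        return remote_host

    print(normalize_remote("[::ffff:127.0.0.1]"))  # 127.0.0.1
    print(normalize_remote("192.168.122.177"))     # 192.168.122.177
    print(normalize_remote("[2001:db8::1]"))       # 2001:db8::1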

View file

@ -62,3 +62,8 @@ class DefaultOrderedDict(OrderedDict):
return "DefaultOrderedDict({}, {})".format(
self.default_factory, super().__repr__()
)
class HashableOrderedDict(OrderedDict):
def __hash__(self):
return id(self)
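HashableOrderedDict now lives in salt.utils.odict (it was previously defined inside salt/state.py, removed earlier in this diff) so the YAML dumpers can register a representer for it. A quick illustration of what the identity-based __hash__ buys, with the class re-declared locally for the example:

    from collections import OrderedDict

    class HashableOrderedDict(OrderedDict):
        def __hash__(self):
            return id(self)

    plain = OrderedDict(a=1)
    hashable = HashableOrderedDict(a=1)

    try:
        {plain: "value"}
    except TypeError as exc:
        print(exc)                               # unhashable type: 'collections.OrderedDict'

    print({hashable: "low chunk"}[hashable])     # low chunk - usable as a dict key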

View file

@ -33,9 +33,8 @@ class TimedProc:
if not self.stdin_raw_newlines:
# Translate a newline submitted as '\n' on the CLI to an actual
# newline character.
self.stdin = salt.utils.stringutils.to_bytes(
self.stdin.replace("\\n", "\n")
)
self.stdin = self.stdin.replace("\\n", "\n")
self.stdin = salt.utils.stringutils.to_bytes(self.stdin)
kwargs["stdin"] = subprocess.PIPE
if not self.with_communicate:
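The reordering above translates the CLI's literal backslash-n before encoding instead of doing both in one nested call; the observable result is the same, as the new integration tests below pin down. In plain Python terms:

    stdin = "one\\ntwo"                 # as received from the CLI: the text one\ntwo
    stdin = stdin.replace("\\n", "\n")  # becomes a real newline
    print(stdin.encode())               # b'one\ntwo' (to_bytes is essentially this for str)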

View file

@ -13,7 +13,7 @@ import collections
import yaml # pylint: disable=blacklisted-import
import salt.utils.context
from salt.utils.odict import OrderedDict
from salt.utils.odict import HashableOrderedDict, OrderedDict
try:
from yaml import CDumper as Dumper
@ -71,7 +71,9 @@ def represent_undefined(dumper, data):
OrderedDumper.add_representer(OrderedDict, represent_ordereddict)
OrderedDumper.add_representer(HashableOrderedDict, represent_ordereddict)
SafeOrderedDumper.add_representer(OrderedDict, represent_ordereddict)
SafeOrderedDumper.add_representer(HashableOrderedDict, represent_ordereddict)
SafeOrderedDumper.add_representer(None, represent_undefined)
OrderedDumper.add_representer(

View file

@ -78,6 +78,18 @@ class CMDModuleTest(ModuleCase):
),
"four\nfive",
)
self.assertEqual(
self.run_function(
"cmd.run", ["cat"], stdin="one\\ntwo", stdin_raw_newlines=False
),
"one\ntwo",
)
self.assertEqual(
self.run_function(
"cmd.run", ["cat"], stdin="one\\ntwo", stdin_raw_newlines=True
),
"one\\ntwo",
)
self.assertEqual(
self.run_function(
"cmd.run", ['echo "a=b" | sed -e s/=/:/g'], python_shell=True

View file

@ -1,5 +1,6 @@
import logging
import shutil
import sys
import pytest
from saltfactories.utils.functional import Loaders
@ -70,6 +71,17 @@ def minion_opts(
},
}
)
if sys.platform.startswith("win"):
# We need to set up winrepo on Windows
minion_config_overrides.update(
{
"winrepo_source_dir": "salt://winrepo_ng",
"winrepo_dir_ng": str(state_tree / "winrepo_ng"),
"winrepo_dir": str(state_tree / "winrepo"),
}
)
factory = salt_factories.salt_minion_daemon(
minion_id,
defaults=minion_config_defaults or None,

View file

@ -0,0 +1,50 @@
import salt.loader.context
import salt.loader.lazy
import salt.utils.files
import tests.support.helpers
def test_opts_dunder_opts_without_import(tmp_path):
"""
Test __opts__ without being imported.
When a loaded module uses __opts__ but does not import it from
salt.loader.dunder the __opts__ object will be a dictionary.
"""
opts = {"optimization_order": [0, 1, 2]}
with salt.utils.files.fopen(tmp_path / "mymod.py", "w") as fp:
fp.write(
tests.support.helpers.dedent(
"""
def mymethod():
return __opts__
"""
)
)
loader = salt.loader.lazy.LazyLoader([tmp_path], opts)
assert type(loader["mymod.mymethod"]()) == dict
def test_opts_dunder_opts_with_import(tmp_path):
"""
Test __opts__ when imported.
When a loaded module uses __opts__ by importing it from
salt.loader.dunder the __opts__ object will be a NamedLoaderContext.
"""
opts = {"optimization_order": [0, 1, 2]}
with salt.utils.files.fopen(tmp_path / "mymod.py", "w") as fp:
fp.write(
tests.support.helpers.dedent(
"""
from salt.loader.dunder import __opts__
def optstype():
return type(__opts__)
def opts():
return __opts__
"""
)
)
loader = salt.loader.lazy.LazyLoader([tmp_path], opts)
assert loader["mymod.optstype"]() == salt.loader.context.NamedLoaderContext
assert loader["mymod.opts"]() == opts

View file

@ -14,6 +14,7 @@ import salt.modules.gpg as gpg
import salt.modules.pkg_resource as pkg_resource
import salt.utils.files
import salt.utils.stringutils
from salt.loader.dunder import __opts__
from tests.support.mock import Mock, patch
pytestmark = [
@ -86,7 +87,7 @@ def configure_loader_modules(minion_opts, grains):
},
gpg: {},
cp: {
"__opts__": minion_opts,
"__opts__": __opts__.with_default(minion_opts),
},
config: {
"__opts__": minion_opts,

View file

@ -5,10 +5,12 @@ import shutil
import signal
import subprocess
import textwrap
import time
import pytest
import salt.utils.files
from salt.exceptions import CommandExecutionError
pytestmark = [
pytest.mark.skip_unless_on_linux,
@ -76,7 +78,13 @@ def setup_teardown_vars(file, service, system):
file.remove("/etc/machine-info")
if _systemd_timesyncd_available_:
res = service.start("systemd-timesyncd")
try:
res = service.start("systemd-timesyncd")
except CommandExecutionError:
# We possibly did too many restarts in too short time
# Wait 10s (default systemd timeout) and try again
time.sleep(10)
res = service.start("systemd-timesyncd")
assert res

View file

@ -29,7 +29,7 @@ def pkg(modules):
def test_refresh_db(pkg, pkg_def_contents, state_tree, minion_opts):
assert len(pkg.get_package_info("my-software")) == 0
repo_dir = state_tree / "win" / "repo-ng"
repo_dir = state_tree / "winrepo_ng"
with pytest.helpers.temp_file("my-software.sls", pkg_def_contents, repo_dir):
pkg.refresh_db()
assert len(pkg.get_package_info("my-software")) == 1

View file

@ -7,6 +7,60 @@ pytestmark = [
]
@pytest.fixture(scope="module")
def symlink_scenario_1(state_tree):
# Create directory structure
dir_name = "symlink_scenario_1"
source_dir = state_tree / dir_name
if not source_dir.is_dir():
source_dir.mkdir()
source_file = source_dir / "source_file.txt"
source_file.write_text("This is the source file...")
symlink_file = source_dir / "symlink"
symlink_file.symlink_to(source_file)
yield dir_name
@pytest.fixture(scope="module")
def symlink_scenario_2(state_tree):
# Create directory structure
dir_name = "symlink_scenario_2"
source_dir = state_tree / dir_name / "test"
if not source_dir.is_dir():
source_dir.mkdir(parents=True)
test1 = source_dir / "test1"
test2 = source_dir / "test2"
test3 = source_dir / "test3"
test_link = source_dir / "test"
test1.touch()
test2.touch()
test3.touch()
test_link.symlink_to(test3)
yield dir_name
@pytest.fixture(scope="module")
def symlink_scenario_3(state_tree):
# Create directory structure
dir_name = "symlink_scenario_3"
source_dir = state_tree / dir_name
if not source_dir.is_dir():
source_dir.mkdir(parents=True)
# Create a file with the same name but is not a symlink
source_file = source_dir / "not_a_symlink" / "symlink"
source_file.parent.mkdir(parents=True)
source_file.write_text("This is the source file...")
# Create other fluff files
just_a_file = source_dir / "just_a_file.txt"
just_a_file.touch()
dummy_file = source_dir / "notasymlink"
dummy_file.touch()
# Create symlink to source with the same name
symlink_file = source_dir / "symlink"
symlink_file.symlink_to(source_file)
yield dir_name
@pytest.mark.parametrize("test", (False, True))
def test_recurse(file, tmp_path, grail, test):
"""
@ -249,3 +303,148 @@ def test_issue_2726_mode_kwarg(modules, tmp_path, state_tree):
ret = modules.state.template_str("\n".join(good_template))
for state_run in ret:
assert state_run.result is True
def test_issue_64630_keep_symlinks_true(file, symlink_scenario_1, tmp_path):
"""
Make sure that symlinks are created and that there isn't an error when there
are no conflicting target files
"""
target_dir = tmp_path / symlink_scenario_1 # Target for the file.recurse state
target_file = target_dir / "source_file.txt"
target_symlink = target_dir / "symlink"
ret = file.recurse(
name=str(target_dir), source=f"salt://{target_dir.name}", keep_symlinks=True
)
assert ret.result is True
assert target_dir.exists()
assert target_file.is_file()
assert target_symlink.is_symlink()
def test_issue_64630_keep_symlinks_false(file, symlink_scenario_1, tmp_path):
"""
Make sure that symlinks are created as files and that there isn't an error
"""
target_dir = tmp_path / symlink_scenario_1 # Target for the file.recurse state
target_file = target_dir / "source_file.txt"
target_symlink = target_dir / "symlink"
ret = file.recurse(
name=str(target_dir), source=f"salt://{target_dir.name}", keep_symlinks=False
)
assert ret.result is True
assert target_dir.exists()
assert target_file.is_file()
assert target_symlink.is_file()
assert target_file.read_text() == target_symlink.read_text()
def test_issue_64630_keep_symlinks_conflicting_force_symlinks_false(
file, symlink_scenario_1, tmp_path
):
"""
Make sure that symlinks are not created when there is a conflict. The state
should return False
"""
target_dir = tmp_path / symlink_scenario_1 # Target for the file.recurse state
target_file = target_dir / "source_file.txt"
target_symlink = target_dir / "symlink"
# Create the conflicting file
target_symlink.parent.mkdir(parents=True)
target_symlink.touch()
assert target_symlink.is_file()
ret = file.recurse(
name=str(target_dir),
source=f"salt://{target_dir.name}",
keep_symlinks=True,
force_symlinks=False,
)
# We expect it to fail
assert ret.result is False
# And files not to be created properly
assert target_dir.exists()
assert target_file.is_file()
assert target_symlink.is_file()
def test_issue_64630_keep_symlinks_conflicting_force_symlinks_true(
file, symlink_scenario_1, tmp_path
):
"""
Make sure that symlinks are created when there is a conflict with an
existing file.
"""
target_dir = tmp_path / symlink_scenario_1 # Target for the file.recurse state
target_file = target_dir / "source_file.txt"
target_symlink = target_dir / "symlink"
# Create the conflicting file
target_symlink.parent.mkdir(parents=True)
target_symlink.touch()
assert target_symlink.is_file()
ret = file.recurse(
name=str(target_dir),
source=f"salt://{target_dir.name}",
force_symlinks=True,
keep_symlinks=True,
)
assert ret.result is True
assert target_dir.exists()
assert target_file.is_file()
assert target_symlink.is_symlink()
def test_issue_64630_keep_symlinks_similar_names(file, symlink_scenario_3, tmp_path):
"""
Make sure that symlinks are created when there is a file that shares part
of the name of the actual symlink file. I'm not sure what I'm testing here
as I couldn't really get this to fail either way
"""
target_dir = tmp_path / symlink_scenario_3 # Target for the file.recurse state
# symlink target, but has the same name as the symlink itself
target_source = target_dir / "not_a_symlink" / "symlink"
target_symlink = target_dir / "symlink"
decoy_file = target_dir / "notasymlink"
just_a_file = target_dir / "just_a_file.txt"
ret = file.recurse(
name=str(target_dir), source=f"salt://{target_dir.name}", keep_symlinks=True
)
assert ret.result is True
assert target_dir.exists()
assert target_source.is_file()
assert decoy_file.is_file()
assert just_a_file.is_file()
assert target_symlink.is_symlink()
def test_issue_62117(file, symlink_scenario_2, tmp_path):
target_dir = tmp_path / symlink_scenario_2 / "test"
target_file_1 = target_dir / "test1"
target_file_2 = target_dir / "test2"
target_file_3 = target_dir / "test3"
target_symlink = target_dir / "test"
ret = file.recurse(
name=str(target_dir),
source=f"salt://{target_dir.parent.name}/test",
clean=True,
keep_symlinks=True,
)
assert ret.result is True
assert target_dir.exists()
assert target_file_1.is_file()
assert target_file_2.is_file()
assert target_file_3.is_file()
assert target_symlink.is_symlink()

View file

@ -20,12 +20,17 @@ pytestmark = [
pytest.mark.slow_test,
pytest.mark.skip_if_not_root,
pytest.mark.destructive_test,
pytest.mark.windows_whitelisted,
pytest.mark.timeout_unless_on_windows(240),
]
@pytest.fixture(scope="module", autouse=True)
def refresh_db(grains, modules):
if salt.utils.platform.is_windows():
modules.winrepo.update_git_repos()
modules.pkg.refresh_db()
# If this is Arch Linux, check if pacman is in use by another process
@ -43,7 +48,7 @@ def refresh_db(grains, modules):
def refresh_keys(grains, modules):
if grains["os_family"] == "Arch":
# We should be running this periodically when building new test runner
# images, otherwise this could take several minuets to complete.
# images, otherwise this could take several minutes to complete.
proc = subprocess.run(["pacman-key", "--refresh-keys"], check=False)
if proc.returncode != 0:
pytest.fail("pacman-key --refresh-keys command failed.")
@ -53,7 +58,7 @@ def refresh_keys(grains, modules):
def PKG_TARGETS(grains):
_PKG_TARGETS = ["figlet", "sl"]
if grains["os"] == "Windows":
_PKG_TARGETS = ["vlc", "putty"]
_PKG_TARGETS = ["npp_x64", "winrar"]
elif grains["os"] == "Amazon":
if grains["osfinger"] == "Amazon Linux-2023":
_PKG_TARGETS = ["lynx", "gnuplot-minimal"]
@ -108,7 +113,12 @@ def PKG_32_TARGETS(grains):
_PKG_32_TARGETS = []
if grains["os_family"] == "RedHat" and grains["oscodename"] != "Photon":
if grains["os"] == "CentOS":
_PKG_32_TARGETS.append("xz-devel.i686")
if grains["osmajorrelease"] == 5:
_PKG_32_TARGETS = ["xz-devel.i386"]
else:
_PKG_32_TARGETS.append("xz-devel.i686")
elif grains["os"] == "Windows":
_PKG_32_TARGETS = ["npp", "putty"]
if not _PKG_32_TARGETS:
pytest.skip("No 32 bit packages have been specified for testing")
return _PKG_32_TARGETS
@ -198,6 +208,23 @@ def latest_version(ctx, modules):
return run_command
@pytest.fixture(scope="function")
def install_7zip(modules):
try:
modules.pkg.install(name="7zip", version="22.01.00.0")
modules.pkg.install(name="7zip", version="19.00.00.0")
versions = modules.pkg.version("7zip")
assert "19.00.00.0" in versions
assert "22.01.00.0" in versions
yield
finally:
modules.pkg.remove(name="7zip", version="19.00.00.0")
modules.pkg.remove(name="7zip", version="22.01.00.0")
versions = modules.pkg.version("7zip")
assert "19.00.00.0" not in versions
assert "22.01.00.0" not in versions
@pytest.mark.requires_salt_modules("pkg.version")
@pytest.mark.requires_salt_states("pkg.installed", "pkg.removed")
@pytest.mark.slow_test
@ -261,7 +288,8 @@ def test_pkg_003_installed_multipkg(caplog, PKG_TARGETS, modules, states, grains
try:
ret = states.pkg.installed(name=None, pkgs=PKG_TARGETS, refresh=False)
assert ret.result is True
assert "WARNING" not in caplog.text
if not salt.utils.platform.is_windows():
assert "WARNING" not in caplog.text
finally:
ret = states.pkg.removed(name=None, pkgs=PKG_TARGETS)
assert ret.result is True
@ -1084,3 +1112,17 @@ def test_pkg_purged_with_removed_pkg(grains, PKG_TARGETS, states, modules):
"installed": {},
"removed": {target: {"new": "", "old": version}},
}
@pytest.mark.skip_unless_on_windows()
def test_pkg_removed_with_version_multiple(install_7zip, modules, states):
"""
This tests removing a specific version of a package when multiple versions
are installed. This is specific to Windows. The only version I could find
that allowed multiple installs of differing versions was 7zip, so we'll use
that.
"""
ret = states.pkg.removed(name="7zip", version="19.00.00.0")
assert ret.result is True
current = modules.pkg.version("7zip")
assert "22.01.00.0" in current

View file

@ -7,7 +7,7 @@ import salt.utils.versions as versions
try:
import salt.utils.yamllint as yamllint
YAMLLINT_AVAILABLE = True
YAMLLINT_AVAILABLE = yamllint.has_yamllint()
except ImportError:
YAMLLINT_AVAILABLE = False

View file

@ -5,7 +5,7 @@ pytestmark = [
]
def test_salt_api(api_request, salt_master, install_salt):
def test_salt_api(api_request, install_salt, salt_master):
"""
Test running a command against the salt api
"""

View file

@ -9,8 +9,8 @@ pytestmark = [
@pytest.fixture
def salt_systemd_setup(
salt_call_cli,
install_salt,
salt_call_cli,
):
"""
Fixture to set systemd for salt packages to enabled and active
@ -31,7 +31,7 @@ def salt_systemd_setup(
@pytest.mark.skip_if_binaries_missing("ufw")
def test_salt_ufw(salt_systemd_setup, salt_call_cli, install_salt):
def test_salt_ufw(salt_systemd_setup, install_salt, salt_call_cli):
"""
Test salt.ufw for Debian/Ubuntu salt-master
"""

View file

@ -2,6 +2,7 @@ import os
import pathlib
import subprocess
import sys
import time
import packaging.version
import psutil
@ -15,8 +16,8 @@ pytestmark = [
@pytest.fixture
def salt_systemd_setup(
salt_call_cli,
install_salt,
salt_call_cli,
):
"""
Fixture to set systemd for salt packages to enabled and active
@ -67,8 +68,12 @@ def pkg_paths_salt_user():
"/var/log/salt/master",
"/var/log/salt/api",
"/var/log/salt/key",
"/var/log/salt/syndic",
"/var/cache/salt/master",
"/var/run/salt/master",
"/run/salt-master.pid",
"/run/salt-syndic.pid",
"/run/salt-api.pid",
]
@ -83,10 +88,16 @@ def pkg_paths_salt_user_exclusions():
return paths
def test_salt_user_master(salt_master, install_salt):
def test_salt_user_master(install_salt, salt_master):
"""
Test the correct user is running the Salt Master
"""
for count in range(0, 30):
if salt_master.is_running():
break
else:
time.sleep(2)
assert salt_master.is_running()
match = False
@ -158,6 +169,7 @@ def test_pkg_paths(
pkg_paths,
pkg_paths_salt_user,
pkg_paths_salt_user_exclusions,
salt_call_cli,
):
"""
Test package paths ownership
@ -174,6 +186,7 @@ def test_pkg_paths(
assert pkg_path.exists()
for dirpath, sub_dirs, files in os.walk(pkg_path):
path = pathlib.Path(dirpath)
# Directories owned by salt:salt or their subdirs/files
if (
str(path) in pkg_paths_salt_user or str(path) in salt_user_subdirs
@ -206,10 +219,10 @@ def test_pkg_paths(
@pytest.mark.skip_if_binaries_missing("logrotate")
def test_paths_log_rotation(
install_salt,
salt_master,
salt_minion,
salt_call_cli,
install_salt,
pkg_tests_account,
):
"""
@ -401,3 +414,7 @@ def test_paths_log_rotation(
bkup_count += 1
assert ret.returncode == 0
# ensure leave salt_master running
salt_master.start()
assert salt_master.is_running() is True

View file

@ -1,6 +1,7 @@
import os.path
import pathlib
import subprocess
import time
import pytest
from pytestskipmarkers.utils import platform
@ -35,6 +36,7 @@ def test_salt_version(version, install_salt):
@pytest.mark.skip_on_windows
@pytest.mark.skip_on_darwin
def test_salt_versions_report_master(install_salt):
"""
Test running --versions-report on master
@ -53,17 +55,33 @@ def test_salt_versions_report_master(install_salt):
@pytest.mark.skip_on_windows
def test_salt_versions_report_minion(salt_cli, salt_call_cli, salt_minion):
def test_salt_versions_report_minion(salt_cli, salt_call_cli, salt_master, salt_minion):
"""
Test running test.versions_report on minion
"""
# Make sure the minion is running
for count in range(0, 30):
if salt_minion.is_running():
break
else:
time.sleep(2)
assert salt_minion.is_running()
# Make sure the master is running
for count in range(0, 30):
if salt_master.is_running():
break
else:
time.sleep(2)
assert salt_master.is_running()
# Make sure we can ping the minion ...
ret = salt_cli.run(
"--timeout=300", "test.ping", minion_tgt=salt_minion.id, _timeout=300
"--timeout=600", "test.ping", minion_tgt=salt_minion.id, _timeout=600
)
assert ret.returncode == 0
assert ret.data is True
ret = salt_cli.run(
@ -77,6 +95,8 @@ def test_salt_versions_report_minion(salt_cli, salt_call_cli, salt_minion):
ret.stdout.matcher.fnmatch_lines(["*Salt Version:*"])
@pytest.mark.skip_on_windows
@pytest.mark.skip_on_darwin
@pytest.mark.parametrize(
"binary", ["master", "cloud", "syndic", "minion", "call", "api"]
)
@ -132,8 +152,7 @@ def test_symlinks_created(version, symlink, install_salt):
ret.stdout.matcher.fnmatch_lines([f"*{version}*"])
@pytest.mark.skip_on_windows
@pytest.mark.skip_on_darwin
@pytest.mark.skip_unless_on_linux
@pytest.mark.skip_if_binaries_missing("rpmdev-vercmp")
def test_compare_pkg_versions_redhat_rc(version, install_salt):
"""

View file

@ -0,0 +1,20 @@
"""
tests.pytests.unit.grains.test_opts
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
import salt.grains.opts as opts
from tests.support.mock import patch
def test_grain_opts_does_not_overwrite_core_grains(tmp_path):
"""
Tests that enabling grain_opts doesn't overwrite the core grains
See: https://github.com/saltstack/salt/issues/66784
"""
dunder_opts = {"grain_opts": True}
with patch.object(opts, "__opts__", dunder_opts, create=True):
with patch.object(opts, "__pillar__", {}, create=True):
assert opts.opts() == {"opts": dunder_opts}

View file

@ -10,6 +10,10 @@ import salt.utils.platform
from salt.exceptions import CommandExecutionError
from tests.support.mock import MagicMock, patch
TARGET = []
if os.environ.get("VENV_PIP_TARGET"):
TARGET = ["--target", os.environ.get("VENV_PIP_TARGET")]
class FakeFopen:
def __init__(self, filename):
@ -97,6 +101,7 @@ def test_install_frozen_app(python_binary):
expected = [
*python_binary,
"install",
*TARGET,
pkg,
]
mock.assert_called_with(
@ -118,6 +123,7 @@ def test_install_source_app(python_binary):
expected = [
*python_binary,
"install",
*TARGET,
pkg,
]
mock.assert_called_with(
@ -138,6 +144,7 @@ def test_fix4361(python_binary):
"install",
"--requirement",
"requirements.txt",
*TARGET,
]
mock.assert_called_with(
expected_cmd,
@ -164,7 +171,7 @@ def test_install_multiple_editable(python_binary):
"git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting",
]
expected = [*python_binary, "install"]
expected = [*python_binary, "install", *TARGET]
for item in editables:
expected.extend(["--editable", item])
@ -200,7 +207,7 @@ def test_install_multiple_pkgs_and_editables(python_binary):
"git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting",
]
expected = [*python_binary, "install"]
expected = [*python_binary, "install", *TARGET]
expected.extend(pkgs)
for item in editables:
expected.extend(["--editable", item])
@ -236,6 +243,7 @@ def test_install_multiple_pkgs_and_editables(python_binary):
expected = [
*python_binary,
"install",
*TARGET,
pkgs[0],
"--editable",
editables[0],
@ -263,7 +271,7 @@ def test_issue5940_install_multiple_pip_mirrors(python_binary):
expected = [*python_binary, "install", "--use-mirrors"]
for item in mirrors:
expected.extend(["--mirrors", item])
expected.append("pep8")
expected = [*expected, *TARGET, "pep8"]
# Passing mirrors as a list
mock = MagicMock(return_value={"retcode": 0, "stdout": ""})
@ -295,6 +303,7 @@ def test_issue5940_install_multiple_pip_mirrors(python_binary):
"--use-mirrors",
"--mirrors",
mirrors[0],
*TARGET,
"pep8",
]
@ -322,7 +331,7 @@ def test_install_with_multiple_find_links(python_binary):
expected = [*python_binary, "install"]
for item in find_links:
expected.extend(["--find-links", item])
expected.append(pkg)
expected = [*expected, *TARGET, pkg]
# Passing mirrors as a list
mock = MagicMock(return_value={"retcode": 0, "stdout": ""})
@ -365,6 +374,7 @@ def test_install_with_multiple_find_links(python_binary):
"install",
"--find-links",
find_links[0],
*TARGET,
pkg,
]
@ -430,6 +440,7 @@ def test_install_cached_requirements_used(python_binary):
"install",
"--requirement",
"my_cached_reqs",
*TARGET,
]
mock.assert_called_with(
expected,
@ -486,6 +497,7 @@ def test_install_log_argument_in_resulting_command(python_binary, tmp_path):
"install",
"--log",
log_path,
*TARGET,
pkg,
]
mock.assert_called_with(
@ -516,7 +528,7 @@ def test_install_timeout_argument_in_resulting_command(python_binary):
with patch.dict(pip.__salt__, {"cmd.run_all": mock}):
pip.install(pkg, timeout=10)
mock.assert_called_with(
expected + [10, pkg],
expected + [10, *TARGET, pkg],
saltenv="base",
runas=None,
use_vt=False,
@ -528,7 +540,7 @@ def test_install_timeout_argument_in_resulting_command(python_binary):
with patch.dict(pip.__salt__, {"cmd.run_all": mock}):
pip.install(pkg, timeout="10")
mock.assert_called_with(
expected + ["10", pkg],
expected + ["10", *TARGET, pkg],
saltenv="base",
runas=None,
use_vt=False,
@ -552,6 +564,7 @@ def test_install_index_url_argument_in_resulting_command(python_binary):
"install",
"--index-url",
index_url,
*TARGET,
pkg,
]
mock.assert_called_with(
@ -574,6 +587,7 @@ def test_install_extra_index_url_argument_in_resulting_command(python_binary):
"install",
"--extra-index-url",
extra_index_url,
*TARGET,
pkg,
]
mock.assert_called_with(
@ -590,7 +604,7 @@ def test_install_no_index_argument_in_resulting_command(python_binary):
mock = MagicMock(return_value={"retcode": 0, "stdout": ""})
with patch.dict(pip.__salt__, {"cmd.run_all": mock}):
pip.install(pkg, no_index=True)
expected = [*python_binary, "install", "--no-index", pkg]
expected = [*python_binary, "install", "--no-index", *TARGET, pkg]
mock.assert_called_with(
expected,
saltenv="base",
@ -606,7 +620,7 @@ def test_install_build_argument_in_resulting_command(python_binary):
mock = MagicMock(return_value={"retcode": 0, "stdout": ""})
with patch.dict(pip.__salt__, {"cmd.run_all": mock}):
pip.install(pkg, build=build)
expected = [*python_binary, "install", "--build", build, pkg]
expected = [*python_binary, "install", "--build", build, *TARGET, pkg]
mock.assert_called_with(
expected,
saltenv="base",
@ -641,6 +655,7 @@ def test_install_download_argument_in_resulting_command(python_binary):
expected = [
*python_binary,
"install",
*TARGET,
"--download",
download,
pkg,
@ -659,7 +674,7 @@ def test_install_no_download_argument_in_resulting_command(python_binary):
mock = MagicMock(return_value={"retcode": 0, "stdout": ""})
with patch.dict(pip.__salt__, {"cmd.run_all": mock}):
pip.install(pkg, no_download=True)
expected = [*python_binary, "install", "--no-download", pkg]
expected = [*python_binary, "install", *TARGET, "--no-download", pkg]
mock.assert_called_with(
expected,
saltenv="base",
@ -686,6 +701,7 @@ def test_install_download_cache_dir_arguments_in_resulting_command(python_binary
expected = [
*python_binary,
"install",
*TARGET,
cmd_arg,
download_cache,
pkg,
@ -715,7 +731,7 @@ def test_install_source_argument_in_resulting_command(python_binary):
mock = MagicMock(return_value={"retcode": 0, "stdout": ""})
with patch.dict(pip.__salt__, {"cmd.run_all": mock}):
pip.install(pkg, source=source)
expected = [*python_binary, "install", "--source", source, pkg]
expected = [*python_binary, "install", *TARGET, "--source", source, pkg]
mock.assert_called_with(
expected,
saltenv="base",
@ -734,6 +750,7 @@ def test_install_exists_action_argument_in_resulting_command(python_binary):
expected = [
*python_binary,
"install",
*TARGET,
"--exists-action",
action,
pkg,
@ -756,7 +773,7 @@ def test_install_install_options_argument_in_resulting_command(python_binary):
install_options = ["--exec-prefix=/foo/bar", "--install-scripts=/foo/bar/bin"]
pkg = "pep8"
expected = [*python_binary, "install"]
expected = [*python_binary, "install", *TARGET]
for item in install_options:
expected.extend(["--install-option", item])
expected.append(pkg)
@ -792,6 +809,7 @@ def test_install_install_options_argument_in_resulting_command(python_binary):
expected = [
*python_binary,
"install",
*TARGET,
"--install-option",
install_options[0],
pkg,
@ -809,7 +827,7 @@ def test_install_global_options_argument_in_resulting_command(python_binary):
global_options = ["--quiet", "--no-user-cfg"]
pkg = "pep8"
expected = [*python_binary, "install"]
expected = [*python_binary, "install", *TARGET]
for item in global_options:
expected.extend(["--global-option", item])
expected.append(pkg)
@ -845,6 +863,7 @@ def test_install_global_options_argument_in_resulting_command(python_binary):
expected = [
*python_binary,
"install",
*TARGET,
"--global-option",
global_options[0],
pkg,
@ -863,7 +882,7 @@ def test_install_upgrade_argument_in_resulting_command(python_binary):
mock = MagicMock(return_value={"retcode": 0, "stdout": ""})
with patch.dict(pip.__salt__, {"cmd.run_all": mock}):
pip.install(pkg, upgrade=True)
expected = [*python_binary, "install", "--upgrade", pkg]
expected = [*python_binary, "install", *TARGET, "--upgrade", pkg]
mock.assert_called_with(
expected,
saltenv="base",
@ -881,6 +900,7 @@ def test_install_force_reinstall_argument_in_resulting_command(python_binary):
expected = [
*python_binary,
"install",
*TARGET,
"--force-reinstall",
pkg,
]
@ -901,6 +921,7 @@ def test_install_ignore_installed_argument_in_resulting_command(python_binary):
expected = [
*python_binary,
"install",
*TARGET,
"--ignore-installed",
pkg,
]
@ -918,7 +939,7 @@ def test_install_no_deps_argument_in_resulting_command(python_binary):
mock = MagicMock(return_value={"retcode": 0, "stdout": ""})
with patch.dict(pip.__salt__, {"cmd.run_all": mock}):
pip.install(pkg, no_deps=True)
expected = [*python_binary, "install", "--no-deps", pkg]
expected = [*python_binary, "install", *TARGET, "--no-deps", pkg]
mock.assert_called_with(
expected,
saltenv="base",
@ -933,7 +954,7 @@ def test_install_no_install_argument_in_resulting_command(python_binary):
mock = MagicMock(return_value={"retcode": 0, "stdout": ""})
with patch.dict(pip.__salt__, {"cmd.run_all": mock}):
pip.install(pkg, no_install=True)
expected = [*python_binary, "install", "--no-install", pkg]
expected = [*python_binary, "install", *TARGET, "--no-install", pkg]
mock.assert_called_with(
expected,
saltenv="base",
@ -949,7 +970,7 @@ def test_install_proxy_argument_in_resulting_command(python_binary):
mock = MagicMock(return_value={"retcode": 0, "stdout": ""})
with patch.dict(pip.__salt__, {"cmd.run_all": mock}):
pip.install(pkg, proxy=proxy)
expected = [*python_binary, "install", "--proxy", proxy, pkg]
expected = [*python_binary, "install", "--proxy", proxy, *TARGET, pkg]
mock.assert_called_with(
expected,
saltenv="base",
@ -976,7 +997,7 @@ def test_install_proxy_false_argument_in_resulting_command(python_binary):
with patch.dict(pip.__salt__, {"cmd.run_all": mock}):
with patch.dict(pip.__opts__, config_mock):
pip.install(pkg, proxy=proxy)
expected = [*python_binary, "install", pkg]
expected = [*python_binary, "install", *TARGET, pkg]
mock.assert_called_with(
expected,
saltenv="base",
@ -1007,6 +1028,7 @@ def test_install_global_proxy_in_resulting_command(python_binary):
"install",
"--proxy",
proxy,
*TARGET,
pkg,
]
mock.assert_called_with(
@ -1027,6 +1049,7 @@ def test_install_multiple_requirements_arguments_in_resulting_command(python_bin
expected = [*python_binary, "install"]
for item in cached_reqs:
expected.extend(["--requirement", item])
expected.extend(TARGET)
# Passing option as a list
mock = MagicMock(return_value={"retcode": 0, "stdout": ""})
@ -1063,6 +1086,7 @@ def test_install_multiple_requirements_arguments_in_resulting_command(python_bin
"install",
"--requirement",
cached_reqs[0],
*TARGET,
]
mock.assert_called_with(
expected,
@ -1083,6 +1107,7 @@ def test_install_extra_args_arguments_in_resulting_command(python_binary):
expected = [
*python_binary,
"install",
*TARGET,
pkg,
"--latest-pip-kwarg",
"param",
@ -1598,7 +1623,7 @@ def test_install_pre_argument_in_resulting_command(python_binary):
with patch.dict(pip.__salt__, {"cmd.run_all": mock}):
with patch("salt.modules.pip.version", MagicMock(return_value="1.3")):
pip.install(pkg, pre_releases=True)
expected = [*python_binary, "install", pkg]
expected = [*python_binary, "install", *TARGET, pkg]
mock.assert_called_with(
expected,
saltenv="base",
@ -1614,7 +1639,7 @@ def test_install_pre_argument_in_resulting_command(python_binary):
):
with patch("salt.modules.pip._get_pip_bin", MagicMock(return_value=["pip"])):
pip.install(pkg, pre_releases=True)
expected = ["pip", "install", "--pre", pkg]
expected = ["pip", "install", *TARGET, "--pre", pkg]
mock_run_all.assert_called_with(
expected,
saltenv="base",

View file

@ -6,6 +6,7 @@ import logging
import pytest
import salt.loader.dunder
import salt.modules.config as config
import salt.modules.cp as cp
import salt.modules.pkg_resource as pkg_resource
@ -57,7 +58,7 @@ def configure_loader_modules(minion_opts):
opts = minion_opts
opts["master_uri"] = "localhost"
return {
cp: {"__opts__": opts},
cp: {"__opts__": salt.loader.dunder.__opts__.with_default(opts)},
win_pkg: {
"_get_latest_package_version": MagicMock(return_value="3.03"),
"_get_package_info": MagicMock(return_value=pkg_info),

View file

@ -13,7 +13,7 @@ try:
import salt.modules.yaml
import salt.utils.yamllint
YAMLLINT_AVAILABLE = True
YAMLLINT_AVAILABLE = salt.utils.yamllint.has_yamllint()
except ImportError:
YAMLLINT_AVAILABLE = False

View file

@ -0,0 +1,48 @@
import logging
import os
import pathlib
import pytest
import salt.states.file as filestate
from tests.support.mock import MagicMock, patch
log = logging.getLogger(__name__)
@pytest.fixture
def configure_loader_modules():
return {filestate: {"__salt__": {}, "__opts__": {}, "__env__": "base"}}
def test__gen_recurse_managed_files():
"""
Test _gen_recurse_managed_files to make sure it puts symlinks at the end of the list of files.
"""
target_dir = pathlib.Path(f"{os.sep}some{os.sep}path{os.sep}target")
cp_list_master = MagicMock(
return_value=[
"target/symlink",
"target/just_a_file.txt",
"target/not_a_symlink/symlink",
"target/notasymlink",
],
)
cp_list_master_symlinks = MagicMock(
return_value={
"target/symlink": f"{target_dir}{os.sep}not_a_symlink{os.sep}symlink"
}
)
patch_salt = {
"cp.list_master": cp_list_master,
"cp.list_master_symlinks": cp_list_master_symlinks,
}
with patch.dict(filestate.__salt__, patch_salt):
files, dirs, links, keep = filestate._gen_recurse_managed_files(
name=str(target_dir), source=f"salt://{target_dir.name}", keep_symlinks=True
)
expected = (
f"{os.sep}some{os.sep}path{os.sep}target{os.sep}symlink",
"salt://target/symlink?saltenv=base",
)
assert files[-1] == expected

View file

@@ -11,6 +11,7 @@ import salt.modules.yumpkg as yumpkg
import salt.states.beacon as beaconstate
import salt.states.pkg as pkg
import salt.utils.state as state_utils
from salt.loader.dunder import __opts__
from salt.utils.event import SaltEvent
from tests.support.mock import MagicMock, patch
@@ -21,7 +22,7 @@ log = logging.getLogger(__name__)
def configure_loader_modules(minion_opts):
return {
cp: {
"__opts__": minion_opts,
"__opts__": __opts__.with_default(minion_opts),
},
pkg: {
"__env__": "base",

View file

@@ -75,9 +75,7 @@ def test_file_server_url_escape(tmp_path):
opts = {
"fileserver_backend": ["roots"],
"extension_modules": "",
"optimization_order": [
0,
],
"optimization_order": [0, 1],
"file_roots": {
"base": [fileroot],
},
@@ -102,9 +100,7 @@ def test_file_server_serve_url_escape(tmp_path):
opts = {
"fileserver_backend": ["roots"],
"extension_modules": "",
"optimization_order": [
0,
],
"optimization_order": [0, 1],
"file_roots": {
"base": [fileroot],
},

View file

@@ -4,6 +4,10 @@ import salt.utils.msgpack
from tests.support.mock import MagicMock, patch
@pytest.mark.skipif(
salt.utils.msgpack.version < (1, 0, 0),
reason="Test requires msgpack version >= 1.0.0",
)
def test_load_encoding(tmp_path):
"""
test when using msgpack version >= 1.0.0 we

View file

@@ -73,6 +73,20 @@ def test_keygen_keyfile(test_keygen):
ret = nacl.keygen(keyfile=fpath)
assert f"saved pk_file: {fpath}.pub" == ret
with salt.utils.files.fopen(str(fpath) + ".pub", "rb") as rfh:
assert test_keygen["pk"] == rfh.read()
salt.utils.files.remove(str(fpath) + ".pub")
def test_keygen_nonexistent_sk_file():
"""
test nacl.keygen function
with nonexistent/new sk_file
"""
with pytest.helpers.temp_file("test_keygen_sk_file") as fpath:
salt.utils.files.remove(str(fpath))
ret = nacl.keygen(sk_file=str(fpath))
assert f"saved sk_file:{fpath} pk_file: {fpath}.pub" == ret
salt.utils.files.remove(str(fpath) + ".pub")

View file

@@ -7,11 +7,12 @@ import pytest
import salt.exceptions
import salt.utils.network
import salt.utils.network as network
import salt.utils.platform
from salt._compat import ipaddress
from tests.support.mock import MagicMock, create_autospec, mock_open, patch
pytestmark = [
pytest.mark.skip_on_windows,
pytest.mark.windows_whitelisted,
]
@@ -722,13 +723,13 @@ def test_netlink_tool_remote_on_a():
with patch("salt.utils.platform.is_linux", return_value=True):
with patch("subprocess.check_output", return_value=LINUX_NETLINK_SS_OUTPUT):
remotes = network._netlink_tool_remote_on("4506", "local_port")
assert remotes == {"192.168.122.177", "::ffff:127.0.0.1"}
assert remotes == {"192.168.122.177", "127.0.0.1"}
def test_netlink_tool_remote_on_b():
with patch("subprocess.check_output", return_value=LINUX_NETLINK_SS_OUTPUT):
remotes = network._netlink_tool_remote_on("4505", "remote_port")
assert remotes == {"127.0.0.1", "::ffff:1.2.3.4"}
assert remotes == {"127.0.0.1", "1.2.3.4"}
def test_openbsd_remotes_on():
@@ -1431,7 +1432,11 @@ def test_isportopen_false():
def test_isportopen():
ret = network.isportopen("127.0.0.1", "22")
if salt.utils.platform.is_windows():
port = "135"
else:
port = "22"
ret = network.isportopen("127.0.0.1", port)
assert ret == 0
@@ -1445,13 +1450,19 @@ def test_get_socket():
assert ret.type == socket.SOCK_STREAM
# @pytest.mark.skip_on_windows(reason="Do not run on Windows")
def test_ip_to_host(grains):
if salt.utils.platform.is_windows():
hostname = socket.gethostname()
else:
hostname = "localhost"
ret = network.ip_to_host("127.0.0.1")
if grains["oscodename"] == "Photon":
if grains.get("oscodename") == "Photon":
# Photon returns this for IPv4
assert ret == "ipv6-localhost"
else:
assert ret == "localhost"
assert ret == hostname
ret = network.ip_to_host("2001:a71::1")
assert ret is None
@@ -1461,22 +1472,22 @@ def test_ip_to_host(grains):
assert ret == "localhost6"
elif grains["os_family"] == "Debian":
if grains["osmajorrelease"] == 12:
assert ret == "localhost"
assert ret == hostname
else:
assert ret == "ip6-localhost"
elif grains["os_family"] == "RedHat":
if grains["oscodename"] == "Photon":
assert ret == "ipv6-localhost"
else:
assert ret == "localhost"
assert ret == hostname
elif grains["os_family"] == "Arch":
if grains.get("osmajorrelease", None) is None:
# running doesn't have osmajorrelease grains
assert ret == "localhost"
assert ret == hostname
else:
assert ret == "ip6-localhost"
else:
assert ret == "localhost"
assert ret == hostname
@pytest.mark.parametrize(
@@ -1509,7 +1520,7 @@ def test_rpad_ipv4_network(addr, expected):
def test_hw_addr(linux_interfaces_dict, freebsd_interfaces_dict):
with patch(
"salt.utils.network.linux_interfaces",
"salt.utils.network.interfaces",
MagicMock(return_value=linux_interfaces_dict),
):
hw_addrs = network.hw_addr("eth0")
@@ -1534,7 +1545,7 @@ def test_hw_addr(linux_interfaces_dict, freebsd_interfaces_dict):
def test_interface_and_ip(linux_interfaces_dict):
with patch(
"salt.utils.network.linux_interfaces",
"salt.utils.network.interfaces",
MagicMock(return_value=linux_interfaces_dict),
):
expected = [
@@ -1560,7 +1571,7 @@ def test_interface_and_ip(linux_interfaces_dict):
def test_subnets(linux_interfaces_dict):
with patch(
"salt.utils.network.linux_interfaces",
"salt.utils.network.interfaces",
MagicMock(return_value=linux_interfaces_dict),
):
ret = network.subnets()
@@ -1583,14 +1594,14 @@ def test_in_subnet(caplog):
def test_ip_addrs(linux_interfaces_dict):
with patch(
"salt.utils.network.linux_interfaces",
"salt.utils.network.interfaces",
MagicMock(return_value=linux_interfaces_dict),
):
ret = network.ip_addrs("eth0")
assert ret == ["10.10.10.56"]
with patch(
"salt.utils.network.linux_interfaces",
"salt.utils.network.interfaces",
MagicMock(return_value=linux_interfaces_dict),
):
ret = network.ip_addrs6("eth0")

View file

@@ -57,21 +57,20 @@ def test_gen_hash_crypt(algorithm, expected):
"""
Test gen_hash with crypt library
"""
with patch("salt.utils.pycrypto.methods", {}):
ret = salt.utils.pycrypto.gen_hash(
crypt_salt=expected["salt"], password=passwd, algorithm=algorithm
)
assert ret == expected["hashed"]
ret = salt.utils.pycrypto.gen_hash(
crypt_salt=expected["salt"], password=passwd, algorithm=algorithm
)
assert ret == expected["hashed"]
ret = salt.utils.pycrypto.gen_hash(
crypt_salt=expected["badsalt"], password=passwd, algorithm=algorithm
)
assert ret != expected["hashed"]
ret = salt.utils.pycrypto.gen_hash(
crypt_salt=expected["badsalt"], password=passwd, algorithm=algorithm
)
assert ret != expected["hashed"]
ret = salt.utils.pycrypto.gen_hash(
crypt_salt=None, password=passwd, algorithm=algorithm
)
assert ret != expected["hashed"]
ret = salt.utils.pycrypto.gen_hash(
crypt_salt=None, password=passwd, algorithm=algorithm
)
assert ret != expected["hashed"]
@pytest.mark.skipif(not salt.utils.pycrypto.HAS_CRYPT, reason="crypt not available")

View file

@@ -2,7 +2,11 @@
Unit tests for salt.utils.yamldumper
"""
from collections import OrderedDict, defaultdict
import salt.utils.yamldumper
from salt.utils.context import NamespacedDictWrapper
from salt.utils.odict import HashableOrderedDict
from tests.support.unit import TestCase
@@ -35,3 +39,80 @@ class YamlDumperTestCase(TestCase):
salt.utils.yamldumper.safe_dump(data, default_flow_style=False)
== "foo: bar\n"
)
def test_yaml_ordered_dump(self):
"""
Test yaml.dump with OrderedDict
"""
data = OrderedDict([("foo", "bar"), ("baz", "qux")])
exp_yaml = "{foo: bar, baz: qux}\n"
assert (
salt.utils.yamldumper.dump(data, Dumper=salt.utils.yamldumper.OrderedDumper)
== exp_yaml
)
def test_yaml_safe_ordered_dump(self):
"""
Test yaml.safe_dump with OrderedDict
"""
data = OrderedDict([("foo", "bar"), ("baz", "qux")])
exp_yaml = "{foo: bar, baz: qux}\n"
assert salt.utils.yamldumper.safe_dump(data) == exp_yaml
def test_yaml_indent_safe_ordered_dump(self):
"""
Test yaml.dump with IndentedSafeOrderedDumper
"""
data = OrderedDict([("foo", ["bar", "baz"]), ("qux", "quux")])
exp_yaml = "foo:\n- bar\n- baz\nqux: quux\n"
assert (
salt.utils.yamldumper.dump(
data,
Dumper=salt.utils.yamldumper.IndentedSafeOrderedDumper,
default_flow_style=False,
)
== exp_yaml
)
def test_yaml_defaultdict_dump(self):
"""
Test yaml.dump with defaultdict
"""
data = defaultdict(list)
data["foo"].append("bar")
exp_yaml = "foo: [bar]\n"
assert salt.utils.yamldumper.safe_dump(data) == exp_yaml
def test_yaml_namespaced_dict_wrapper_dump(self):
"""
Test yaml.dump with NamespacedDictWrapper
"""
data = NamespacedDictWrapper({"test": {"foo": "bar"}}, "test")
exp_yaml = (
"!!python/object/new:salt.utils.context.NamespacedDictWrapper\n"
"dictitems: {foo: bar}\n"
"state:\n"
" _NamespacedDictWrapper__dict:\n"
" test: {foo: bar}\n"
" pre_keys: !!python/tuple [test]\n"
)
assert salt.utils.yamldumper.dump(data) == exp_yaml
def test_yaml_undefined_dump(self):
"""
Test yaml.safe_dump with None
"""
data = {"foo": None}
exp_yaml = "{foo: null}\n"
assert salt.utils.yamldumper.safe_dump(data) == exp_yaml
def test_yaml_hashable_ordered_dict_dump(self):
"""
Test yaml.dump with HashableOrderedDict
"""
data = HashableOrderedDict([("foo", "bar"), ("baz", "qux")])
exp_yaml = "{foo: bar, baz: qux}\n"
assert (
salt.utils.yamldumper.dump(data, Dumper=salt.utils.yamldumper.OrderedDumper)
== exp_yaml
)

View file

@@ -1011,7 +1011,6 @@ def get_pkg_downloads_matrix(ctx: Context):
"photon",
)
linux_skip_pkg_download_tests = (
"archlinux-lts",
"opensuse-15",
"windows",
)

Some files were not shown because too many files have changed in this diff.