Mirror of https://github.com/saltstack/salt.git (synced 2025-04-17 10:10:20 +00:00)
Merge pull request #65239 from s0undt3ch/hotfix/merge-forward
[master] Merge 3006.x into master

Commit 27f90f7239, 72 changed files with 1528 additions and 352 deletions
.github/workflows/build-deps-onedir.yml (vendored, 6 lines changed)

@@ -19,14 +19,12 @@ on:
         type: string
         description: Seed used to invalidate caches
       relenv-version:
-        required: false
+        required: true
         type: string
-        default: 0.13.2
         description: The version of relenv to use
       python-version:
-        required: false
+        required: true
         type: string
-        default: 3.10.12
         description: The version of python to use with relenv

 env:
.github/workflows/build-macos-packages.yml (vendored, 1 line changed)

@@ -110,6 +110,7 @@ jobs:
           DEV_APP_CERT: "${{ secrets.MAC_SIGN_DEV_APP_CERT }}"
           DEV_INSTALL_CERT: "${{ secrets.MAC_SIGN_DEV_INSTALL_CERT }}"
           APPLE_ACCT: "${{ secrets.MAC_SIGN_APPLE_ACCT }}"
           APPLE_TEAM_ID: "${{ secrets.MAC_SIGN_APPLE_TEAM_ID }}"
           APP_SPEC_PWD: "${{ secrets.MAC_SIGN_APP_SPEC_PWD }}"
         run: |
+          tools pkg build macos --relenv-version=${{ inputs.relenv-version }} --python-version=${{ inputs.python-version }} ${{
.github/workflows/build-salt-onedir.yml (vendored, 6 lines changed)

@@ -19,14 +19,12 @@ on:
         type: string
         description: Seed used to invalidate caches
       relenv-version:
-        required: false
+        required: true
         type: string
-        default: 0.13.2
         description: The version of relenv to use
       python-version:
-        required: false
+        required: true
        type: string
-        default: 3.10.12
         description: The version of python to use with relenv

 env:
.github/workflows/triage.yml (vendored, 2 lines changed)

@@ -22,7 +22,7 @@ jobs:
       issues: write
       pull-requests: read  # for dawidd6/action-download-artifact to query commit hash
     name: Triage New Issue
-    runs-on: ubuntu-latest
+    runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }}
     steps:

       - uses: actions/checkout@v3
CHANGELOG.md (12 lines changed)

@@ -7,6 +7,18 @@ Versions are `MAJOR.PATCH`.

 # Changelog

+## Salt v3005.3 (2023-09-14)
+
+### Fixed
+
+- Fix __env__ and improve cache cleaning see more info at pull #65017. (#65002)
+
+### Security
+
+- Update to `gitpython>=3.1.35` due to https://github.com/advisories/GHSA-wfm5-v35h-vwf4 and https://github.com/advisories/GHSA-cwvm-v4w8-q58c (#65167)
+
 ## 3006.3 (2023-09-06)
@@ -578,6 +578,9 @@ commit message, it's usually a good idea to add other information, such as
 This will also help you out, because when you go to create the PR it
 will automatically insert the body of your commit messages.

+See the `changelog <https://docs.saltproject.io/en/latest/topics/development/changelog.html>`__
+docs for more information.
+
 Pull request time!
 ------------------
@@ -121,7 +121,7 @@ Security advisories
 Keep an eye on the Salt Project
 `Security Announcements <https://saltproject.io/security-announcements/>`_
 landing page. Salt Project recommends subscribing to the
-`Salt Project Security RSS feed <https://saltproject.io/feed/?post_type=security>`_
+`Salt Project Security RSS feed <https://saltproject.io/security-announcements/index.xml>`_
 to receive notification when new information is available regarding security
 announcements.
changelog/64888.fixed.md (new file)
+Fixed grp.getgrall() in utils/user.py causing performance issues

changelog/64953.fixed.md (new file)
+Fix user.list_groups omits remote groups via sssd, etc.

changelog/65029.removed.md (new file)
+Tech Debt - support for pysss removed due to functionality addition in Python 3.3

changelog/65093.fixed.md (new file)
+Only attempt to create a keys directory when `--gen-keys` is passed to the `salt-key` CLI

changelog/65179.fixed.md (new file)
+Ensure __kwarg__ is preserved when checking for kwargs. This change affects proxy minions when used with Deltaproxy, which had kwargs popped when targeting multiple minions id.

changelog/65210.fixed.md (new file)
+Fixes traceback when state id is an int in a reactor SLS file.
@@ -197,3 +197,12 @@ How to access python binary

 The python library is available in the install directory of the onedir package. For example
 on linux the default location would be ``/opt/saltstack/salt/bin/python3``.
+
+Testing the packages
+====================
+
+If you want to test your built packages, or any other collection of salt packages post 3006.0, follow :ref:`this guide <pkging-testing>`
+
+.. toctree::
+
+    testing
doc/topics/packaging/testing.rst (new file, 157 lines)

.. _pkging-testing:

================
Testing packages
================

The package test suite
======================

The salt repo provides a test suite for testing basic functionality of our
packages at ``<repo-root>/pkg/tests/``. You can run the install, upgrade, and
downgrade tests. These tests run automatically on most PRs that are submitted
against Salt.

.. warning::

    These tests make destructive changes to your system because they install the
    built packages onto the system. They may also install older versions in the
    case of upgrades or downgrades. To prevent destructive changes, run the
    tests in an isolated system, preferably a virtual machine.

Setup
=====
In order to run the package tests, the `relenv
<https://github.com/saltstack/relative-environment-for-python>`_ onedir and
built packages need to be placed in the correct locations.

* Place all salt packages for the applicable testing version in
  ``<repo-root>/pkg/artifacts/``.
* The onedir must be located under ``<repo-root>/artifacts/``.
* Additionally, to ensure complete parity with Salt's CI/CD suite, place the
  ``nox`` virtual environment in ``<repo-root>/.nox/test-pkgs-onedir``.

You can set this up through a few possible methods:

* Using ``tools``
* Downloading individually

Using ``tools``
---------------
Salt has preliminary support for setting up the package test suite in the
``tools`` command suite that is located under ``<repo-root>/tools/testsuite/``.
This method requires the GitHub CLI tool ``gh`` (https://cli.github.com/) to be
properly configured for interaction with the salt repo.

#. Install the dependencies using this command:

   .. code-block:: bash

      pip install -r requirements/static/ci/py{python_version}/tools.txt

#. Download and extract the artifacts with this ``tools`` command:

   .. code-block:: bash

      tools ts setup --platform {linux|darwin|windows} --slug <operating-system-slug> --pr <pr-number> --pkg

   The most common use case is to test the packages built on a CI/CD run for a
   given PR. To see the possible options for each argument, and other ways to
   utilize this command, use the following:

   .. code-block:: bash

      tools ts setup -h

.. warning::

    You can only download artifacts from finished workflow runs. This is a
    restriction imposed by the GitHub API. To download artifacts from a running
    workflow run, you either have to wait for it to finish or cancel it.

Downloading individually
------------------------
If the ``tools ts setup`` command doesn't work, you can download, unzip, and
place the artifacts in the correct locations manually. Typically, you want to
test packages built on a CI/CD run for a given PR. This guide explains how to
set up for running the package tests using those artifacts. An analogous
process can be performed for artifacts from nightly builds.

#. Find and download the artifacts:

   Under the summary page for the most recent actions run for that PR, there is
   a list of available artifacts from that run that can be downloaded. Download
   the package artifacts by finding
   ``salt-<major>.<minor>+<number>.<sha>-<arch>-<pkg-type>``. For example, the
   amd64 deb packages might look like:
   ``salt-3006.2+123.01234567890-x86_64-deb``.

   The onedir artifact will look like
   ``salt-<major>.<minor>+<number>.<sha>-onedir-<platform>-<arch>.tar.xz``. For
   instance, the macos x86_64 onedir may have the name
   ``salt-3006.2+123.01234567890-onedir-darwin-x86_64.tar.xz``.

   .. note::

       Windows onedir artifacts have ``.zip`` extensions instead of ``tar.xz``.

   While it is optional, it is recommended to download the ``nox`` session
   artifact as well. This will have the form of
   ``nox-<os-name>-test-pkgs-onedir-<arch>``. The amd64 Ubuntu 20.04 nox
   artifact may look like ``nox-ubuntu-20.04-test-pkgs-onedir-x86_64``.

#. Place the artifacts in the correct location:

   Unzip the packages and place them in ``<repo-root>/pkg/artifacts/``.

   You must unzip and untar the onedir packages and place them in
   ``<repo-root>/artifacts/``. Windows onedir requires an additional unzip
   action. If you set it up correctly, the ``<repo-root>/artifacts/salt``
   directory then contains the uncompressed onedir files.

   Additionally, decompress the ``nox`` artifact and place it under
   ``<repo-root>/.nox/``.

Running the tests
=================
You can run the test suite once all the artifacts are in the correct location.

.. note::

    You need root access to run the test artifacts. Run all nox commands at the
    root of the salt repo and as the root user.

#. Install ``nox``:

   .. code-block:: bash

      pip install nox

#. Run the install tests:

   .. code-block:: bash

      nox -e test-pkgs-onedir -- install

#. Run the upgrade or downgrade tests:

   .. code-block:: bash

      nox -e test-pkgs-onedir -- upgrade --prev-version <previous-version>

   You can run the downgrade tests in the same way, replacing ``upgrade`` with
   ``downgrade``.

   .. note::

       If you are testing upgrades or downgrades and classic packages are
       available for your system, replace ``upgrade`` or ``downgrade`` with
       ``upgrade-classic`` or ``downgrade-classic`` respectively to test
       against those versions.

doc/topics/releases/3005.3.rst (new file, 19 lines)

.. _release-3005-3:

=========================
Salt 3005.3 Release Notes
=========================

Version 3005.3 is a Bug fix release for :ref:`3005 <release-3005>`.

Changed
-------

- Fix __env__ and improve cache cleaning see more info at pull #65017. (#65002)

Security
--------

- Update to `gitpython>=3.1.35` due to https://github.com/advisories/GHSA-wfm5-v35h-vwf4 and https://github.com/advisories/GHSA-cwvm-v4w8-q58c (#65167)
@@ -53,10 +53,7 @@ def test_salt_downgrade(salt_call_cli, install_salt):
         ret.stdout.strip().split()[1]
     ) < packaging.version.parse(install_salt.artifact_version)

-    # Windows does not keep the extras directory around in the same state
-    # TODO: Fix this problem in windows installers
-    # TODO: Fix this problem in macos installers
-    if is_downgrade_to_relenv and not (platform.is_windows() or platform.is_darwin()):
+    if is_downgrade_to_relenv:
         new_py_version = install_salt.package_python_version()
         if new_py_version == original_py_version:
             # test pip install after a downgrade
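For readers unfamiliar with the comparison used just above, ``packaging.version.parse`` gives proper version ordering. A tiny illustrative check (not part of the diff; the version strings are sample values taken from this PR's changelog):

import packaging.version

# Downgrade test logic in miniature: the version reported by the installed
# salt must parse as older than the artifact version being tested against.
installed = packaging.version.parse("3005.3")
artifact = packaging.version.parse("3006.3")
assert installed < artifact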
Dependency lockfile updates: the compiled requirements under requirements/static/ci/ and requirements/static/pkg/ (Python 3.8, 3.9, 3.10 and 3.11; darwin, freebsd, linux and windows; plus the docs, pkgtests and pkgtests-windows lockfiles) were regenerated. The pins shown as diff context (netmiko, ntc-templates, sqlparse, strict-rfc3339, tornado, types-pyyaml, urllib3, six, smmap, and others) are unchanged; the actual version bumps are:

    netutils                1.4.1  -> 1.6.0    (ci lockfiles that pin napalm)
    ttp                     0.9.4  -> 0.9.5    (ci lockfiles that pin napalm)
    tempora                 5.2.2  -> 5.3.0    (all affected lockfiles, via portend)
    typing-extensions       4.6.2  -> 4.6.3    (all affected lockfiles)
    timelib                 0.2.5  -> 0.3.0    (linux lockfiles)
    pytest-shell-utilities  1.7.0  -> 1.8.0    (pkgtests and pkgtests-windows lockfiles)
PAM eauth module:

@@ -24,15 +24,6 @@ authenticated against. This defaults to `login`

 The Python interface to PAM does not support authenticating as ``root``.

-.. note:: Using PAM groups with SSSD groups on python2.
-
-    To use sssd with the PAM eauth module and groups the `pysss` module is
-    needed.  On RedHat/CentOS this is `python-sss`.
-
-    This should not be needed with python >= 3.3, because the `os` modules has the
-    `getgrouplist` function.
-
 .. note:: This module executes itself in a subprocess in order to user the system python
     and pam libraries. We do this to avoid openssl version conflicts when
     running under a salt onedir build.

@@ -113,7 +104,7 @@ class PamMessage(Structure):
     ]

     def __repr__(self):
-        return "<PamMessage {} '{}'>".format(self.msg_style, self.msg)
+        return f"<PamMessage {self.msg_style} '{self.msg}'>"


 class PamResponse(Structure):

@@ -127,7 +118,7 @@ class PamResponse(Structure):
     ]

     def __repr__(self):
-        return "<PamResponse {} '{}'>".format(self.resp_retcode, self.resp)
+        return f"<PamResponse {self.resp_retcode} '{self.resp}'>"


 CONV_FUNC = CFUNCTYPE(

@@ -245,8 +236,7 @@ def authenticate(username, password):
     ret = subprocess.run(
         [str(pyexe), str(pyfile)],
         env=env,
-        stdout=subprocess.PIPE,
-        stderr=subprocess.PIPE,
+        capture_output=True,
         check=False,
     )
     if ret.returncode == 0:
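A side note on the last hunk: on Python 3.7+ ``capture_output=True`` is shorthand for passing ``stdout=subprocess.PIPE, stderr=subprocess.PIPE``, so the behaviour is unchanged. A minimal standalone illustration (not Salt code):

import subprocess
import sys

cmd = [sys.executable, "-c", "print('ok')"]

# Old spelling: explicit pipes for stdout and stderr.
old = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=False)
# New spelling: capture_output=True sets both pipes for you.
new = subprocess.run(cmd, capture_output=True, check=False)

assert old.stdout.strip() == new.stdout.strip() == b"ok"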
@@ -328,9 +328,12 @@ def load_args_and_kwargs(func, args, data=None, ignore_invalid=False):
     invalid_kwargs = []

     for arg in args:
-        if isinstance(arg, dict) and arg.pop("__kwarg__", False) is True:
+        if isinstance(arg, dict) and arg.get("__kwarg__", False) is True:
             # if the arg is a dict with __kwarg__ == True, then its a kwarg
             for key, val in arg.items():
+                # Skip __kwarg__ when checking kwargs
+                if key == "__kwarg__":
+                    continue
                 if argspec.keywords or key in argspec.args:
                     # Function supports **kwargs or is a positional argument to
                     # the function.
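Why the ``pop`` to ``get`` swap matters (changelog #65179): the same ``args`` list can be processed once per target minion, for example under Deltaproxy, and ``dict.pop`` mutates the shared dictionary, so the ``__kwarg__`` marker disappeared after the first minion. A small standalone sketch of the effect (illustrative only, not Salt code):

args = [{"__kwarg__": True, "cheese": "spam"}]

def is_kwarg_pop(arg):
    # Old behaviour: pop() strips the marker from the shared dict.
    return isinstance(arg, dict) and arg.pop("__kwarg__", False) is True

def is_kwarg_get(arg):
    # New behaviour: get() leaves the marker in place.
    return isinstance(arg, dict) and arg.get("__kwarg__", False) is True

print(is_kwarg_pop(args[0]), is_kwarg_pop(args[0]))  # True False: later minions lose the kwarg

args = [{"__kwarg__": True, "cheese": "spam"}]
print(is_kwarg_get(args[0]), is_kwarg_get(args[0]))  # True True: marker preserved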
salt/state.py (278 lines changed)

@@ -449,145 +449,157 @@ class Compiler:
         if not isinstance(high, dict):
             errors.append("High data is not a dictionary and is invalid")
         reqs = OrderedDict()
-        for name, body in high.items():
-            if name.startswith("__"):
-                continue
-            if not isinstance(name, str):
-                errors.append(
-                    "ID '{}' in SLS '{}' is not formed as a string, but is a {}".format(
-                        name, body["__sls__"], type(name).__name__
-                    )
-                )
+        if not errors:
+            for name, body in high.items():
+                try:
+                    if name.startswith("__"):
+                        continue
+                except (AttributeError, TypeError):
+                    # Do not traceback on non string state ID
+                    # handle the error properly
+                    pass
+
+                if not isinstance(name, str):
+                    errors.append(
+                        "ID '{}' in SLS '{}' is not formed as a string, but is a {}. It may need to be quoted".format(
+                            name, body["__sls__"], type(name).__name__
+                        )
+                    )
         [... the remainder of the verification loop carries over unchanged, re-indented one level under the new ``if not errors:`` guard: the dictionary/list shape checks, the whitespace-in-function check, the require/watch/prereq/onchanges requisite validation, the recursive-requisite detection, the single-key-dict check, and the "No function declared" / "Too many functions declared" errors ...]
         return errors

     def order_chunks(self, chunks):
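Background for the new try/except and the "may need to be quoted" hint (changelog #65210): with a purely numeric state ID, the YAML renderer produces an ``int`` key, and ``name.startswith("__")`` then raises ``AttributeError``. A small illustration, assuming PyYAML is available (illustrative only, not Salt's own rendering pipeline):

import yaml

# An SLS body whose ID is written without quotes.
sls = """
123:
  test.succeed_without_changes: []
"""
high = yaml.safe_load(sls)
state_id = next(iter(high))
print(type(state_id).__name__)  # int
# state_id.startswith("__") would raise AttributeError here;
# quoting the ID in the SLS file ("123":) keeps it a string.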
Docstring updates for the ``docker_image.present`` / ``docker_image.absent`` states:

@@ -101,6 +101,9 @@ def present(
         docker_image.present:
           - tag: mytag

+    name
+        The name of the docker image.
+
     tag
         Tag name for the image. Required when using ``build``, ``load``, or
         ``sls`` to create the image, but optional if pulling from a repository.

@@ -146,10 +149,14 @@ def present(
         .. versionchanged:: 2018.3.0
             The ``tag`` must be manually specified using the ``tag`` argument.

-    force : False
+    force
         Set this parameter to ``True`` to force Salt to pull/build/load the
         image even if it is already present.

     insecure_registry
         If ``True``, the Docker client will permit the use of insecure
         (non-HTTPS) registries.

     client_timeout
         Timeout in seconds for the Docker client. This is not a timeout for
         the state, but for receiving a response from the API.

@@ -212,6 +219,10 @@ def present(
         ``pillar_roots`` or an external Pillar source.

         .. versionadded:: 2018.3.0

+    kwargs
+        Additional keyword arguments to pass to
+        :py:func:`docker.build <salt.modules.dockermod.build>`
     """
     ret = {"name": name, "changes": {}, "result": False, "comment": ""}

@@ -375,6 +386,9 @@ def absent(name=None, images=None, force=False):
     specified either using ``repo:tag`` notation, or just the repo name (in
     which case a tag of ``latest`` is assumed).

+    name
+        The name of the docker image.
+
     images
         Run this state on more than one image at a time. The following two
         examples accomplish the same thing:

@@ -401,7 +415,7 @@ def absent(name=None, images=None, force=False):
     all the deletions in a single run, rather than executing the state
     separately on each image (as it would in the first example).

-    force : False
+    force
         Salt will fail to remove any images currently in use by a container.
         Set this option to true to remove the image even if it is already
         present.
Option parser hunks (``class OptionParser(optparse.OptionParser)`` and its mix-ins): most hunks are one-line conversions of ``str.format`` calls to f-strings.

@@ -141,7 +141,7 @@     "%prog {}".format(self.VERSION)  ->  f"%prog {self.VERSION}"
@@ -187,7 +187,7 @@     "process_{}".format(option_key)  ->  f"process_{option_key}"
@@ -274,7 +274,7 @@     "{}\n".format(msg)  ->  f"{msg}\n"
@@ -287,7 +287,7 @@     "{}: error: {}\n".format(self.get_prog_name(), msg)  ->  f"{self.get_prog_name()}: error: {msg}\n"
@@ -408,7 +408,7 @@     "'{}' file does not exist.\n".format(self.options.saltfile)  ->  f"'{self.options.saltfile}' file does not exist.\n"
@@ -422,7 +422,7 @@     "{}: error: {}\n".format(self.get_prog_name(), error.message)  ->  f"{self.get_prog_name()}: error: {error.message}\n"
@@ -568,7 +568,7 @@     "Failed to load configuration: {}".format(exc)  ->  f"Failed to load configuration: {exc}"
@@ -611,7 +611,7 @@     "'{}'".format(n)  ->  f"'{n}'"   (console log-level help)
@@ -629,7 +629,7 @@     "Log file path. Default: '{}'.".format(self._default_logging_logfile_)  ->  f"Log file path. Default: '{self._default_logging_logfile_}'."
@@ -637,7 +637,7 @@     "'{}'".format(n)  ->  f"'{n}'"   (logfile log-level help)
@@ -858,9 +858,7 @@     os.path.join(user_salt_dir, "{}.log".format(logfile_basename))  ->  os.path.join(user_salt_dir, f"{logfile_basename}.log")
@@ -915,7 +913,7 @@     "Specify user to run {}.".format(self.get_prog_name())  ->  f"Specify user to run {self.get_prog_name()}."
@@ -928,14 +926,12 @@   "Run the {} as a daemon.".format(self.get_prog_name())  ->  f"Run the {self.get_prog_name()} as a daemon."
                        os.path.join(syspaths.PIDFILE_DIR, "{}.pid".format(self.get_prog_name()))  ->  os.path.join(syspaths.PIDFILE_DIR, f"{self.get_prog_name()}.pid")
@@ -1058,7 +1054,7 @@   "{} Exited.".format(msg)  ->  f"{msg} Exited."
@@ -1175,7 +1171,7 @@   funcname = "process_{}".format(option.dest)  ->  funcname = f"process_{option.dest}"
@@ -1433,7 +1429,7 @@   funcname = "process_{}".format(option.dest)  ->  funcname = f"process_{option.dest}"
@@ -1452,9 +1448,7 @@   "{}: Access denied: {}".format(self.options.output_file, exc)  ->  f"{self.options.output_file}: Access denied: {exc}"
@@ -1709,7 +1703,7 @@   funcname = "process_{}".format(option.dest)  ->  funcname = f"process_{option.dest}"
@@ -1909,7 +1903,7 @@   "'{}' is not a valid JID".format(self.options.jid)  ->  f"'{self.options.jid}' is not a valid JID"
@@ -2755,9 +2749,7 @@   "'{}' is not a valid argument to '--list'".format(self.options.list)  ->  f"'{self.options.list}' is not a valid argument to '--list'"

Two hunks in the salt-key option parser change behaviour rather than formatting:

@@ -2649,7 +2643,7 @@ class SaltKeyOptionParser(
         default=".",
         help=(
             "Set the directory to save the generated keypair, only "
-            "works with \"gen_keys_dir\" option. Default: '%default'."
+            "works with \"--gen-keys\" option. Default: '%default'."
         ),
     )

@@ -2767,10 +2759,11 @@ class SaltKeyOptionParser(

     def process_gen_keys_dir(self):
         # Schedule __create_keys_dir() to run if there's a value for
-        # --create-keys-dir
-        self._mixin_after_parsed_funcs.append(
-            self.__create_keys_dir
-        )  # pylint: disable=no-member
+        # --gen-keys-dir
+        if self.options.gen_keys:
+            self._mixin_after_parsed_funcs.append(
+                self.__create_keys_dir
+            )  # pylint: disable=no-member

     def __create_keys_dir(self):
         if not os.path.isdir(self.config["gen_keys_dir"]):
@@ -31,13 +31,6 @@ try:
 except ImportError:
     HAS_GRP = False

-try:
-    import pysss
-
-    HAS_PYSSS = True
-except ImportError:
-    HAS_PYSSS = False
-
 try:
     import salt.utils.win_functions

@@ -289,30 +282,35 @@ def get_group_list(user, include_default=True):
         return []
     group_names = None
     ugroups = set()
-    if hasattr(os, "getgrouplist"):
-        # Try os.getgrouplist, available in python >= 3.3
-        log.trace("Trying os.getgrouplist for '%s'", user)
-        try:
-            user_group_list = os.getgrouplist(user, pwd.getpwnam(user).pw_gid)
-            group_names = [
-                _group.gr_name
-                for _group in grp.getgrall()
-                if _group.gr_gid in user_group_list
-            ]
-        except Exception:  # pylint: disable=broad-except
-            pass
-    elif HAS_PYSSS:
-        # Try pysss.getgrouplist
-        log.trace("Trying pysss.getgrouplist for '%s'", user)
-        try:
-            group_names = list(pysss.getgrouplist(user))
-        except Exception:  # pylint: disable=broad-except
-            pass
+    # Try os.getgrouplist, available in python >= 3.3
+    log.trace("Trying os.getgrouplist for '%s'", user)
+    try:
+        user_group_list = sorted(os.getgrouplist(user, pwd.getpwnam(user).pw_gid))
+        local_grall = _getgrall()
+        local_gids = sorted(lgrp.gr_gid for lgrp in local_grall)
+        max_idx = -1
+        local_max = local_gids[max_idx]
+        while local_max >= 65000:
+            max_idx -= 1
+            local_max = local_gids[max_idx]
+        user_group_list_local = [lgrp for lgrp in user_group_list if lgrp <= local_max]
+        user_group_list_remote = [rgrp for rgrp in user_group_list if rgrp > local_max]
+        local_group_names = [
+            _group.gr_name
+            for _group in local_grall
+            if _group.gr_gid in user_group_list_local
+        ]
+        remote_group_names = [
+            grp.getgrgid(group_id).gr_name for group_id in user_group_list_remote
+        ]
+        group_names = local_group_names + remote_group_names
+    except Exception:  # pylint: disable=broad-except
+        pass

     if group_names is None:
         # Fall back to generic code
         # Include the user's default group to match behavior of
-        # os.getgrouplist() and pysss.getgrouplist()
+        # os.getgrouplist()
         log.trace("Trying generic group list for '%s'", user)
         group_names = [g.gr_name for g in grp.getgrall() if user in g.gr_mem]
         try:

@@ -389,3 +387,24 @@ def get_gid(group=None):
         return grp.getgrnam(group).gr_gid
     except KeyError:
         return None
+
+
+def _getgrall(root=None):
+    """
+    Alternative implemetantion for getgrall, that uses only /etc/group
+    """
+    ret = []
+    root = "/" if not root else root
+    etc_group = os.path.join(root, "etc/group")
+    with salt.utils.files.fopen(etc_group) as fp_:
+        for line in fp_:
+            line = salt.utils.stringutils.to_unicode(line)
+            comps = line.strip().split(":")
+            # Generate a getgrall compatible output
+            comps[2] = int(comps[2])
+            if comps[3]:
+                comps[3] = [mem.strip() for mem in comps[3].split(",")]
+            else:
+                comps[3] = []
+            ret.append(grp.struct_group(comps))
+    return ret
@ -44,7 +44,7 @@ class PublishModuleTest(ModuleCase, SaltReturnAssertsMixin):
|
|||
self.assertTrue(name in ret)
|
||||
|
||||
self.assertEqual(ret["cheese"], "spam")
|
||||
self.assertEqual(ret["__pub_arg"], [{"cheese": "spam"}])
|
||||
self.assertEqual(ret["__pub_arg"], [{"__kwarg__": True, "cheese": "spam"}])
|
||||
self.assertEqual(ret["__pub_id"], "minion")
|
||||
self.assertEqual(ret["__pub_fun"], "test.kwarg")
|
||||
|
||||
|
@ -125,7 +125,7 @@ class PublishModuleTest(ModuleCase, SaltReturnAssertsMixin):
|
|||
self.assertTrue(name in ret)
|
||||
|
||||
self.assertEqual(ret["cheese"], "spam")
|
||||
self.assertEqual(ret["__pub_arg"], [{"cheese": "spam"}])
|
||||
self.assertEqual(ret["__pub_arg"], [{"__kwarg__": True, "cheese": "spam"}])
|
||||
self.assertEqual(ret["__pub_id"], "minion")
|
||||
self.assertEqual(ret["__pub_fun"], "test.kwarg")
|
||||
|
||||
|
|
|
@ -353,3 +353,38 @@ def test_onlyif_req(state, subtests):
|
|||
assert ret.result is False
|
||||
assert ret.changes
|
||||
assert ret.comment == "Failure!"
|
||||
|
||||
|
||||
def test_listen_requisite_not_exist(state, state_tree):
|
||||
"""
|
||||
Tests a simple state using the listen requisite
|
||||
when the state id does not exist
|
||||
"""
|
||||
sls_contents = """
|
||||
successful_changing_state:
|
||||
cmd.run:
|
||||
- name: echo "Successful Change"
|
||||
|
||||
non_changing_state:
|
||||
test.succeed_without_changes
|
||||
|
||||
test_listening_change_state:
|
||||
cmd.run:
|
||||
- name: echo "Listening State"
|
||||
- listen:
|
||||
- cmd: successful_changing_state
|
||||
|
||||
test_listening_non_changing_state:
|
||||
cmd.run:
|
||||
- name: echo "Only run once"
|
||||
- listen:
|
||||
- test: non_changing_state_not_exist
|
||||
"""
|
||||
with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree):
|
||||
ret = state.sls("requisite")
|
||||
assert (
|
||||
ret.raw[
|
||||
"Listen_Error_|-listen_non_changing_state_not_exist_|-listen_test_|-Listen_Error"
|
||||
]["comment"]
|
||||
== "Referenced state test: non_changing_state_not_exist does not exist"
|
||||
)
|
||||
|
|
|
@ -7,6 +7,37 @@ pytestmark = [
|
|||
pytest.mark.core_test,
|
||||
]
|
||||
|
||||
import salt.modules.cmdmod as cmd
|
||||
import salt.modules.config as config
|
||||
import salt.modules.grains as grains
|
||||
import salt.modules.saltutil as saltutil
|
||||
import salt.modules.state as state_mod
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules(minion_opts):
|
||||
return {
|
||||
state_mod: {
|
||||
"__opts__": minion_opts,
|
||||
"__salt__": {
|
||||
"config.option": config.option,
|
||||
"config.get": config.get,
|
||||
"saltutil.is_running": saltutil.is_running,
|
||||
"grains.get": grains.get,
|
||||
"cmd.run": cmd.run,
|
||||
},
|
||||
},
|
||||
config: {
|
||||
"__opts__": minion_opts,
|
||||
},
|
||||
saltutil: {
|
||||
"__opts__": minion_opts,
|
||||
},
|
||||
grains: {
|
||||
"__opts__": minion_opts,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def test_requisites_mixed_require_prereq_use_1(state, state_tree):
|
||||
"""
|
||||
|
@ -401,3 +432,23 @@ def test_issue_30161_unless_and_onlyif_together(state, state_tree, tmp_path):
|
|||
}
|
||||
for slsid in _expected:
|
||||
assert ret[slsid].comment == _expected[slsid]["comment"]
|
||||
|
||||
|
||||
def test_requisites_mixed_illegal_req(state_tree):
|
||||
"""
|
||||
Call sls file containing several requisites.
|
||||
When one of the requisites is illegal.
|
||||
"""
|
||||
sls_contents = """
|
||||
A:
|
||||
cmd.run:
|
||||
- name: echo A
|
||||
B:
|
||||
cmd.run:
|
||||
- name: echo B
|
||||
- require:
|
||||
- cmd: ["A"]
|
||||
"""
|
||||
with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree):
|
||||
ret = state_mod.sls("requisite")
|
||||
assert ret == ["Illegal requisite \"['A']\", please check your syntax.\n"]
|
||||
|
|
|
@ -7,10 +7,16 @@ import time
|
|||
import pytest
|
||||
|
||||
import salt.loader
|
||||
import salt.modules.cmdmod as cmd
|
||||
import salt.modules.config as config
|
||||
import salt.modules.grains as grains
|
||||
import salt.modules.saltutil as saltutil
|
||||
import salt.modules.state as state_mod
|
||||
import salt.utils.atomicfile
|
||||
import salt.utils.files
|
||||
import salt.utils.path
|
||||
import salt.utils.platform
|
||||
import salt.utils.state as state_util
|
||||
import salt.utils.stringutils
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
@ -22,6 +28,32 @@ pytestmark = [
|
|||
]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules(minion_opts):
|
||||
return {
|
||||
state_mod: {
|
||||
"__opts__": minion_opts,
|
||||
"__salt__": {
|
||||
"config.option": config.option,
|
||||
"config.get": config.get,
|
||||
"saltutil.is_running": saltutil.is_running,
|
||||
"grains.get": grains.get,
|
||||
"cmd.run": cmd.run,
|
||||
},
|
||||
"__utils__": {"state.check_result": state_util.check_result},
|
||||
},
|
||||
config: {
|
||||
"__opts__": minion_opts,
|
||||
},
|
||||
saltutil: {
|
||||
"__opts__": minion_opts,
|
||||
},
|
||||
grains: {
|
||||
"__opts__": minion_opts,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
def _check_skip(grains):
|
||||
if grains["os"] == "SUSE":
|
||||
return True
|
||||
|
@ -1032,3 +1064,20 @@ def test_state_sls_defaults(state, state_tree):
|
|||
for state_return in ret:
|
||||
assert state_return.result is True
|
||||
assert "echo 1" in state_return.comment
|
||||
|
||||
|
||||
def test_state_sls_mock_ret(state_tree):
|
||||
"""
|
||||
test state.sls when mock=True is passed
|
||||
"""
|
||||
sls_contents = """
|
||||
echo1:
|
||||
cmd.run:
|
||||
- name: "echo 'This is a test!'"
|
||||
"""
|
||||
with pytest.helpers.temp_file("mock.sls", sls_contents, state_tree):
|
||||
ret = state_mod.sls("mock", mock=True)
|
||||
assert (
|
||||
ret["cmd_|-echo1_|-echo 'This is a test!'_|-run"]["comment"]
|
||||
== "Not called, mocked"
|
||||
)
|
||||
|
|
44
tests/pytests/functional/utils/user/test__getgrall.py
Normal file
44
tests/pytests/functional/utils/user/test__getgrall.py
Normal file
|
@ -0,0 +1,44 @@
|
|||
from textwrap import dedent
|
||||
|
||||
import pytest
|
||||
|
||||
pytest.importorskip("grp")
|
||||
|
||||
import grp
|
||||
|
||||
import salt.utils.user
|
||||
|
||||
|
||||
@pytest.fixture(scope="function")
|
||||
def etc_group(tmp_path):
|
||||
etcgrp = tmp_path / "etc" / "group"
|
||||
etcgrp.parent.mkdir()
|
||||
etcgrp.write_text(
|
||||
dedent(
|
||||
"""games:x:50:
|
||||
docker:x:959:debian,salt
|
||||
salt:x:1000:"""
|
||||
)
|
||||
)
|
||||
return etcgrp
|
||||
|
||||
|
||||
def test__getgrall(etc_group):
|
||||
group_lines = [
|
||||
["games", "x", 50, []],
|
||||
["docker", "x", 959, ["debian", "salt"]],
|
||||
["salt", "x", 1000, []],
|
||||
]
|
||||
expected_grall = [grp.struct_group(comps) for comps in group_lines]
|
||||
|
||||
grall = salt.utils.user._getgrall(root=str(etc_group.parent.parent))
|
||||
|
||||
assert grall == expected_grall
|
||||
|
||||
|
||||
def test__getgrall_bad_format(etc_group):
|
||||
with etc_group.open("a") as _fp:
|
||||
_fp.write("\n# some comment here\n")
|
||||
|
||||
with pytest.raises(IndexError):
|
||||
salt.utils.user._getgrall(root=str(etc_group.parent.parent))
|
|
@ -337,6 +337,13 @@ def test_keys_generation(salt_key_cli, tmp_path):
|
|||
filename.chmod(0o700)
|
||||
|
||||
|
||||
def test_gen_keys_dir_without_gen_keys(salt_key_cli, tmp_path):
|
||||
gen_keys_path = tmp_path / "temp-gen-keys-path"
|
||||
ret = salt_key_cli.run("--gen-keys-dir", str(gen_keys_path))
|
||||
assert ret.returncode == 0
|
||||
assert not gen_keys_path.exists()
|
||||
|
||||
|
||||
def test_keys_generation_keysize_min(salt_key_cli, tmp_path):
|
||||
ret = salt_key_cli.run(
|
||||
"--gen-keys", "minibar", "--gen-keys-dir", str(tmp_path), "--keysize", "1024"
|
||||
|
|
669
tests/pytests/unit/state/test_reactor_compiler.py
Normal file
669
tests/pytests/unit/state/test_reactor_compiler.py
Normal file
|
@ -0,0 +1,669 @@
|
|||
import logging
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.minion
|
||||
import salt.state
|
||||
from salt.utils.odict import OrderedDict
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.core_test,
|
||||
]
|
||||
|
||||
|
||||
def test_compiler_render_template(minion_opts, tmp_path):
|
||||
"""
|
||||
Test Compiler.render_template
|
||||
"""
|
||||
minion = "poc-minion"
|
||||
kwargs = {
|
||||
"tag": f"salt/minion/{minion}/start",
|
||||
"data": {
|
||||
"id": minion,
|
||||
"cmd": "_minion_event",
|
||||
"pretag": None,
|
||||
"data": f"Minion {minion} started at Thu Sep 14 07:31:04 2023",
|
||||
"tag": f"salt/minion/{minion}/start",
|
||||
"_stamp": "2023-09-14T13:31:05.000316",
|
||||
},
|
||||
}
|
||||
|
||||
reactor_file = tmp_path / "reactor.sls"
|
||||
content = f"""
|
||||
highstate_run:
|
||||
local.state.apply:
|
||||
- tgt: {minion}
|
||||
- args:
|
||||
- mods: test
|
||||
"""
|
||||
with salt.utils.files.fopen(reactor_file, "w") as fp:
|
||||
fp.write(content)
|
||||
|
||||
mminion = salt.minion.MasterMinion(minion_opts)
|
||||
comp = salt.state.Compiler(minion_opts, mminion.rend)
|
||||
ret = comp.render_template(template=str(reactor_file), kwargs=kwargs)
|
||||
assert ret["highstate_run"]["local"][0]["tgt"] == minion
|
||||
assert ret["highstate_run"]["local"][1]["args"][0]["mods"] == "test"
|
||||
|
||||
|
||||
def test_compiler_render_template_doesnotexist(minion_opts, tmp_path):
|
||||
"""
|
||||
Test Compiler.render_template when
|
||||
the reactor file does not exist
|
||||
"""
|
||||
minion = "poc-minion"
|
||||
kwargs = {
|
||||
"tag": f"salt/minion/{minion}/start",
|
||||
"data": {
|
||||
"id": minion,
|
||||
"cmd": "_minion_event",
|
||||
"pretag": None,
|
||||
"data": f"Minion {minion} started at Thu Sep 14 07:31:04 2023",
|
||||
"tag": f"salt/minion/{minion}/start",
|
||||
"_stamp": "2023-09-14T13:31:05.000316",
|
||||
},
|
||||
}
|
||||
|
||||
reactor_file = tmp_path / "reactor.sls"
|
||||
mminion = salt.minion.MasterMinion(minion_opts)
|
||||
comp = salt.state.Compiler(minion_opts, mminion.rend)
|
||||
mock_pad = MagicMock(return_value=None)
|
||||
patch_pad = patch.object(comp, "pad_funcs", mock_pad)
|
||||
with patch_pad:
|
||||
ret = comp.render_template(template=str(reactor_file), kwargs=kwargs)
|
||||
assert ret == {}
|
||||
mock_pad.assert_not_called()
|
||||
|
||||
|
||||
def test_compiler_pad_funcs(minion_opts, tmp_path):
|
||||
"""
|
||||
Test Compiler.pad_funcs
|
||||
"""
|
||||
high = OrderedDict(
|
||||
[
|
||||
(
|
||||
"highstate_run",
|
||||
OrderedDict(
|
||||
[
|
||||
(
|
||||
"local.state.apply",
|
||||
[
|
||||
OrderedDict([("tgt", "poc-minion")]),
|
||||
OrderedDict(
|
||||
[("args", [OrderedDict([("mods", "test")])])]
|
||||
),
|
||||
],
|
||||
)
|
||||
]
|
||||
),
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
exp = OrderedDict(
|
||||
[
|
||||
(
|
||||
"highstate_run",
|
||||
OrderedDict(
|
||||
[
|
||||
(
|
||||
"local",
|
||||
[
|
||||
OrderedDict([("tgt", "poc-minion")]),
|
||||
OrderedDict(
|
||||
[("args", [OrderedDict([("mods", "test")])])]
|
||||
),
|
||||
"state.apply",
|
||||
],
|
||||
)
|
||||
]
|
||||
),
|
||||
)
|
||||
]
|
||||
)
|
||||
mminion = salt.minion.MasterMinion(minion_opts)
|
||||
comp = salt.state.Compiler(minion_opts, mminion.rend)
|
||||
ret = comp.pad_funcs(high)
|
||||
assert ret == exp
|
||||
|
||||
|
||||
def test_compiler_pad_funcs_short_sls(minion_opts, tmp_path):
|
||||
"""
|
||||
Test Compiler.pad_funcs when using a shorter
|
||||
sls with no extra arguments
|
||||
"""
|
||||
high = OrderedDict([("master_pub", "wheel.key.master_key_str")])
|
||||
exp = OrderedDict([("master_pub", {"wheel": ["key.master_key_str"]})])
|
||||
|
||||
mminion = salt.minion.MasterMinion(minion_opts)
|
||||
comp = salt.state.Compiler(minion_opts, mminion.rend)
|
||||
ret = comp.pad_funcs(high)
|
||||
assert ret == exp
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"high,exp",
|
||||
[
|
||||
(
|
||||
{
|
||||
"master_pub": {
|
||||
"wheel": ["key.master_key_str"],
|
||||
"__sls__": "/srv/reactor/start.sls",
|
||||
}
|
||||
},
|
||||
[],
|
||||
),
|
||||
(set(), ["High data is not a dictionary and is invalid"]),
|
||||
(
|
||||
{
|
||||
1234: {
|
||||
"wheel": ["key.master_key_str"],
|
||||
"__sls__": "/srv/reactor/start.sls",
|
||||
}
|
||||
},
|
||||
[
|
||||
"ID '1234' in SLS '/srv/reactor/start.sls' is not formed as a string, but is a int. It may need to be quoted"
|
||||
],
|
||||
),
|
||||
(
|
||||
{
|
||||
b"test": {
|
||||
"wheel": ["key.master_key_str"],
|
||||
"__sls__": "/srv/reactor/start.sls",
|
||||
}
|
||||
},
|
||||
[
|
||||
"ID 'b'test'' in SLS '/srv/reactor/start.sls' is not formed as a string, but is a bytes. It may need to be quoted"
|
||||
],
|
||||
),
|
||||
(
|
||||
{
|
||||
True: {
|
||||
"wheel": ["key.master_key_str"],
|
||||
"__sls__": "/srv/reactor/start.sls",
|
||||
}
|
||||
},
|
||||
[
|
||||
"ID 'True' in SLS '/srv/reactor/start.sls' is not formed as a string, but is a bool. It may need to be quoted"
|
||||
],
|
||||
),
|
||||
(
|
||||
{"master_pub": ["wheel", "key.master_key_str"]},
|
||||
[
|
||||
"The type master_pub in ['wheel', 'key.master_key_str'] is not formatted as a dictionary"
|
||||
],
|
||||
),
|
||||
(
|
||||
{
|
||||
"master_pub": {
|
||||
"wheel": {"key.master_key_str"},
|
||||
"__sls__": "/srv/reactor/start.sls",
|
||||
}
|
||||
},
|
||||
[
|
||||
"State 'master_pub' in SLS '/srv/reactor/start.sls' is not formed as a list"
|
||||
],
|
||||
),
|
||||
(
|
||||
{
|
||||
"master_pub": {
|
||||
"wheel": ["key. master_key_str"],
|
||||
"__sls__": "/srv/reactor/start.sls",
|
||||
}
|
||||
},
|
||||
[
|
||||
'The function "key. master_key_str" in state "master_pub" in SLS "/srv/reactor/start.sls" has whitespace, a function with whitespace is not supported, perhaps this is an argument that is missing a ":"'
|
||||
],
|
||||
),
|
||||
(
|
||||
{
|
||||
"master_pub": {
|
||||
"wheel": ["key.master_key_str "],
|
||||
"__sls__": "/srv/reactor/start.sls",
|
||||
}
|
||||
},
|
||||
[],
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_compiler_verify_high_short_sls(minion_opts, tmp_path, high, exp):
|
||||
"""
|
||||
Test Compiler.verify_high when using
|
||||
a shorter sls with know extra arguments
|
||||
"""
|
||||
mminion = salt.minion.MasterMinion(minion_opts)
|
||||
comp = salt.state.Compiler(minion_opts, mminion.rend)
|
||||
ret = comp.verify_high(high)
|
||||
# empty is successful. Means we have no errors
|
||||
assert ret == exp
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
"high,exp",
|
||||
[
|
||||
(
|
||||
{
|
||||
"add_test_1": OrderedDict(
|
||||
[
|
||||
(
|
||||
"local",
|
||||
[
|
||||
OrderedDict([("tgt", "poc-minion")]),
|
||||
OrderedDict(
|
||||
[
|
||||
(
|
||||
"args",
|
||||
[
|
||||
OrderedDict(
|
||||
[("cmd", "touch /tmp/test1")]
|
||||
)
|
||||
],
|
||||
)
|
||||
]
|
||||
),
|
||||
"cmd.run",
|
||||
],
|
||||
),
|
||||
("__sls__", "/srv/reactor/start.sls"),
|
||||
]
|
||||
),
|
||||
"add_test_2": OrderedDict(
|
||||
[
|
||||
(
|
||||
"local",
|
||||
[
|
||||
OrderedDict([("tgt", "poc-minion")]),
|
||||
OrderedDict(
|
||||
[
|
||||
(
|
||||
"args",
|
||||
[
|
||||
OrderedDict(
|
||||
[("cmd", "touch /tmp/test2")]
|
||||
)
|
||||
],
|
||||
)
|
||||
]
|
||||
),
|
||||
OrderedDict(
|
||||
[
|
||||
(
|
||||
"require",
|
||||
[OrderedDict([("local", "add_test_1")])],
|
||||
)
|
||||
]
|
||||
),
|
||||
"cmd.run",
|
||||
],
|
||||
),
|
||||
("__sls__", "/srv/reactor/start.sls"),
|
||||
]
|
||||
),
|
||||
},
|
||||
[],
|
||||
),
|
||||
(
|
||||
{
|
||||
"add_test_1": OrderedDict(
|
||||
[
|
||||
(
|
||||
"local",
|
||||
[
|
||||
OrderedDict([("tgt", "poc-minion")]),
|
||||
OrderedDict(
|
||||
[
|
||||
(
|
||||
"args",
|
||||
[
|
||||
OrderedDict(
|
||||
[("cmd", "touch /tmp/test1")]
|
||||
)
|
||||
],
|
||||
)
|
||||
]
|
||||
),
|
||||
"cmd.run",
|
||||
],
|
||||
),
|
||||
("__sls__", "/srv/reactor/start.sls"),
|
||||
]
|
||||
),
|
||||
"add_test_2": OrderedDict(
|
||||
[
|
||||
(
|
||||
"local",
|
||||
[
|
||||
OrderedDict([("tgt", "poc-minion")]),
|
||||
OrderedDict(
|
||||
[
|
||||
(
|
||||
"args",
|
||||
[
|
||||
OrderedDict(
|
||||
[("cmd", "touch /tmp/test2")]
|
||||
)
|
||||
],
|
||||
)
|
||||
]
|
||||
),
|
||||
OrderedDict([("require", {"local": "add_test_1"})]),
|
||||
"cmd.run",
|
||||
],
|
||||
),
|
||||
("__sls__", "/srv/reactor/start.sls"),
|
||||
]
|
||||
),
|
||||
},
|
||||
[
|
||||
"The require statement in state 'add_test_2' in SLS '/srv/reactor/start.sls' needs to be formed as a list"
|
||||
],
|
||||
),
|
||||
(
|
||||
{
|
||||
"add_test_1": OrderedDict(
|
||||
[
|
||||
(
|
||||
"local",
|
||||
[
|
||||
OrderedDict([("tgt", "poc-minion")]),
|
||||
OrderedDict(
|
||||
[
|
||||
(
|
||||
"args",
|
||||
[
|
||||
OrderedDict(
|
||||
[("cmd", "touch /tmp/test1")]
|
||||
)
|
||||
],
|
||||
)
|
||||
]
|
||||
),
|
||||
"cmd.run",
|
||||
],
|
||||
),
|
||||
("__sls__", "/srv/reactor/start.sls"),
|
||||
]
|
||||
),
|
||||
"add_test_2": OrderedDict(
|
||||
[
|
||||
(
|
||||
"local.cmd.run",
|
||||
[
|
||||
OrderedDict([("tgt", "poc-minion")]),
|
||||
OrderedDict(
|
||||
[
|
||||
(
|
||||
"args",
|
||||
[
|
||||
OrderedDict(
|
||||
[("cmd", "touch /tmp/test2")]
|
||||
)
|
||||
],
|
||||
)
|
||||
]
|
||||
),
|
||||
OrderedDict([("require", {"local": "add_test_1"})]),
|
||||
"cmd.run",
|
||||
],
|
||||
),
|
||||
("__sls__", "/srv/reactor/start.sls"),
|
||||
]
|
||||
),
|
||||
},
|
||||
[
|
||||
"The require statement in state 'add_test_2' in SLS '/srv/reactor/start.sls' needs to be formed as a list",
|
||||
"Too many functions declared in state 'local.cmd.run' in SLS '/srv/reactor/start.sls'",
|
||||
],
|
||||
),
|
||||
(
|
||||
{
|
||||
"add_test_1": OrderedDict(
|
||||
[
|
||||
(
|
||||
"local",
|
||||
[
|
||||
OrderedDict([("tgt", "poc-minion")]),
|
||||
OrderedDict(
|
||||
[("args", ([("cmd", "touch /tmp/test1")]))]
|
||||
),
|
||||
"cmd.run",
|
||||
],
|
||||
),
|
||||
("__sls__", "/srv/reactor/start.sls"),
|
||||
]
|
||||
),
|
||||
"add_test_2": OrderedDict(
|
||||
[
|
||||
(
|
||||
"local",
|
||||
[
|
||||
OrderedDict([("tgt", "poc-minion")]),
|
||||
OrderedDict(
|
||||
[
|
||||
(
|
||||
"args",
|
||||
[
|
||||
OrderedDict(
|
||||
[("cmd", "touch /tmp/test2")]
|
||||
)
|
||||
],
|
||||
)
|
||||
]
|
||||
),
|
||||
OrderedDict([("require", ([("local", "add_test_1")]))]),
|
||||
"cmd.run",
|
||||
],
|
||||
),
|
||||
("__sls__", "/srv/reactor/start.sls"),
|
||||
]
|
||||
),
|
||||
},
|
||||
[
|
||||
"Requisite declaration ('local', 'add_test_1') in SLS /srv/reactor/start.sls is not formed as a single key dictionary"
|
||||
],
|
||||
),
|
||||
(
|
||||
{
|
||||
"add_test_1": OrderedDict(
|
||||
[
|
||||
(
|
||||
"local",
|
||||
[
|
||||
OrderedDict([("tgt", "poc-minion")]),
|
||||
OrderedDict(
|
||||
[
|
||||
(
|
||||
"args",
|
||||
[
|
||||
OrderedDict(
|
||||
[("cmd", "touch /tmp/test1")]
|
||||
)
|
||||
],
|
||||
)
|
||||
]
|
||||
),
|
||||
"cmd.run",
|
||||
],
|
||||
),
|
||||
("__sls__", "/srv/reactor/start.sls"),
|
||||
]
|
||||
),
|
||||
"add_test_2": OrderedDict(
|
||||
[
|
||||
(
|
||||
"local",
|
||||
[
|
||||
OrderedDict([("tgt", "poc-minion")]),
|
||||
OrderedDict(
|
||||
[
|
||||
(
|
||||
"args",
|
||||
[
|
||||
OrderedDict(
|
||||
[("cmd", "touch /tmp/test2")]
|
||||
)
|
||||
],
|
||||
)
|
||||
]
|
||||
),
|
||||
OrderedDict(
|
||||
[
|
||||
(
|
||||
"require",
|
||||
[
|
||||
OrderedDict(
|
||||
[("local", (["add_test_1"]))]
|
||||
)
|
||||
],
|
||||
)
|
||||
]
|
||||
),
|
||||
"cmd.run",
|
||||
],
|
||||
),
|
||||
("__sls__", "/srv/reactor/start.sls"),
|
||||
]
|
||||
),
|
||||
},
|
||||
["Illegal requisite \"['add_test_1']\", is SLS /srv/reactor/start.sls\n"],
|
||||
),
|
||||
(
|
||||
{
|
||||
"add_test_1": OrderedDict(
|
||||
[
|
||||
(
|
||||
"local",
|
||||
[
|
||||
OrderedDict([("tgt", "poc-minion")]),
|
||||
OrderedDict(
|
||||
[
|
||||
(
|
||||
"args",
|
||||
[
|
||||
OrderedDict(
|
||||
[("cmd", "touch /tmp/test1")]
|
||||
)
|
||||
],
|
||||
)
|
||||
]
|
||||
),
|
||||
"cmd.run",
|
||||
],
|
||||
),
|
||||
("__sls__", "/srv/reactor/start.sls"),
|
||||
]
|
||||
),
|
||||
"add_test_2": OrderedDict(
|
||||
[
|
||||
(
|
||||
"local",
|
||||
[
|
||||
OrderedDict([("tgt", "poc-minion")]),
|
||||
OrderedDict(
|
||||
[
|
||||
(
|
||||
"args",
|
||||
[
|
||||
OrderedDict(
|
||||
[("cmd", "touch /tmp/test2")]
|
||||
)
|
||||
],
|
||||
)
|
||||
]
|
||||
),
|
||||
OrderedDict(
|
||||
[
|
||||
(
|
||||
"require",
|
||||
[OrderedDict([("local", "add_test_2")])],
|
||||
)
|
||||
]
|
||||
),
|
||||
"cmd.run",
|
||||
],
|
||||
),
|
||||
("__sls__", "/srv/reactor/start.sls"),
|
||||
]
|
||||
),
|
||||
},
|
||||
[
|
||||
'A recursive requisite was found, SLS "/srv/reactor/start.sls" ID "add_test_2" ID "add_test_2"'
|
||||
],
|
||||
),
|
||||
(
|
||||
{
|
||||
"add_test_1": OrderedDict(
|
||||
[
|
||||
(
|
||||
"local",
|
||||
[
|
||||
OrderedDict([("tgt", "poc-minion")]),
|
||||
OrderedDict(
|
||||
[
|
||||
(
|
||||
"args",
|
||||
[
|
||||
OrderedDict(
|
||||
[("cmd", "touch /tmp/test1")]
|
||||
)
|
||||
],
|
||||
)
|
||||
]
|
||||
),
|
||||
"cmd.run",
|
||||
],
|
||||
),
|
||||
("__sls__", "/srv/reactor/start.sls"),
|
||||
]
|
||||
),
|
||||
"add_test_2": OrderedDict(
|
||||
[
|
||||
(
|
||||
"local",
|
||||
[
|
||||
OrderedDict([("tgt", "poc-minion")]),
|
||||
OrderedDict(
|
||||
[
|
||||
(
|
||||
"args",
|
||||
[
|
||||
OrderedDict(
|
||||
[("cmd", "touch /tmp/test2")]
|
||||
)
|
||||
],
|
||||
)
|
||||
]
|
||||
),
|
||||
OrderedDict(
|
||||
[
|
||||
(
|
||||
"require",
|
||||
[OrderedDict([("local", "add_test_1")])],
|
||||
)
|
||||
]
|
||||
),
|
||||
"cmd.run",
|
||||
],
|
||||
),
|
||||
("__sls__", "/srv/reactor/start.sls"),
|
||||
]
|
||||
),
|
||||
},
|
||||
[],
|
||||
),
|
||||
],
|
||||
)
|
||||
def test_compiler_verify_high_sls_requisites(minion_opts, tmp_path, high, exp):
|
||||
"""
|
||||
Test Compiler.verify_high when using
|
||||
a sls with requisites
|
||||
"""
|
||||
mminion = salt.minion.MasterMinion(minion_opts)
|
||||
comp = salt.state.Compiler(minion_opts, mminion.rend)
|
||||
ret = comp.verify_high(high)
|
||||
# empty is successful. Means we have no errors
|
||||
assert ret == exp
|
|
@ -38,6 +38,16 @@ def test_format_log_non_ascii_character():
|
|||
salt.state.format_log(ret)
|
||||
|
||||
|
||||
def test_format_log_list(caplog):
|
||||
"""
|
||||
Test running format_log when ret is not a dictionary
|
||||
"""
|
||||
ret = ["test1", "test2"]
|
||||
salt.state.format_log(ret)
|
||||
assert "INFO" in caplog.text
|
||||
assert f"{ret}" in caplog.text
|
||||
|
||||
|
||||
def test_render_error_on_invalid_requisite(minion_opts):
|
||||
"""
|
||||
Test that the state compiler correctly deliver a rendering
|
||||
|
|
|
@ -8,6 +8,7 @@ import tornado.gen
|
|||
import tornado.testing
|
||||
|
||||
import salt.minion
|
||||
import salt.modules.test as test_mod
|
||||
import salt.syspaths
|
||||
import salt.utils.crypt
|
||||
import salt.utils.event as event
|
||||
|
@ -1109,3 +1110,19 @@ async def test_syndic_async_req_channel(syndic_opts):
|
|||
syndic.pub_channel = MagicMock()
|
||||
syndic.tune_in_no_block()
|
||||
assert isinstance(syndic.async_req_channel, salt.channel.client.AsyncReqChannel)
|
||||
|
||||
|
||||
@pytest.mark.slow_test
|
||||
def test_load_args_and_kwargs(minion_opts):
|
||||
"""
|
||||
Ensure load_args_and_kwargs performs correctly
|
||||
"""
|
||||
_args = [{"max": 40, "__kwarg__": True}]
|
||||
ret = salt.minion.load_args_and_kwargs(test_mod.rand_sleep, _args)
|
||||
assert ret == ([], {"max": 40})
|
||||
assert all([True if "__kwarg__" in item else False for item in _args])
|
||||
|
||||
# Test invalid arguments
|
||||
_args = [{"max_sleep": 40, "__kwarg__": True}]
|
||||
with pytest.raises(salt.exceptions.SaltInvocationError):
|
||||
ret = salt.minion.load_args_and_kwargs(test_mod.rand_sleep, _args)
|
||||
|
|
29
tests/pytests/unit/utils/test_user.py
Normal file
29
tests/pytests/unit/utils/test_user.py
Normal file
|
@ -0,0 +1,29 @@
|
|||
from types import SimpleNamespace
|
||||
|
||||
import pytest
|
||||
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
pytest.importorskip("grp")
|
||||
|
||||
import grp
|
||||
|
||||
import salt.utils.user
|
||||
|
||||
|
||||
def test_get_group_list():
|
||||
getpwname = SimpleNamespace(pw_gid=1000)
|
||||
getgrgid = MagicMock(side_effect=[SimpleNamespace(gr_name="remote")])
|
||||
group_lines = [
|
||||
["games", "x", 50, []],
|
||||
["salt", "x", 1000, []],
|
||||
]
|
||||
getgrall = [grp.struct_group(comps) for comps in group_lines]
|
||||
with patch("os.getgrouplist", MagicMock(return_value=[50, 1000, 12000])), patch(
|
||||
"pwd.getpwnam", MagicMock(return_value=getpwname)
|
||||
), patch("salt.utils.user._getgrall", MagicMock(return_value=getgrall)), patch(
|
||||
"grp.getgrgid", getgrgid
|
||||
):
|
||||
group_list = salt.utils.user.get_group_list("salt")
|
||||
assert group_list == ["games", "remote", "salt"]
|
||||
getgrgid.assert_called_once()
|
|
@ -10,6 +10,7 @@ import os
|
|||
import pathlib
|
||||
import shutil
|
||||
import tarfile
|
||||
import tempfile
|
||||
import zipfile
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
|
@ -94,8 +95,18 @@ def debian(
|
|||
os.environ[key] = value
|
||||
env_args.extend(["-e", key])
|
||||
|
||||
ctx.run("ln", "-sf", "pkg/debian/", ".")
|
||||
ctx.run("debuild", *env_args, "-uc", "-us")
|
||||
constraints = ["setuptools-scm<8"]
|
||||
with tempfile.NamedTemporaryFile(
|
||||
"w", prefix="reqs-constraints-", suffix=".txt", delete=False
|
||||
) as tfile:
|
||||
with open(tfile.name, "w", encoding="utf-8") as wfh:
|
||||
for req in constraints:
|
||||
wfh.write(f"{req}\n")
|
||||
env = os.environ.copy()
|
||||
env["PIP_CONSTRAINT"] = str(tfile.name)
|
||||
|
||||
ctx.run("ln", "-sf", "pkg/debian/", ".")
|
||||
ctx.run("debuild", *env_args, "-uc", "-us", env=env)
|
||||
|
||||
ctx.info("Done")
|
||||
|
||||
|
@ -160,8 +171,20 @@ def rpm(
|
|||
for key, value in new_env.items():
|
||||
os.environ[key] = value
|
||||
|
||||
spec_file = checkout / "pkg" / "rpm" / "salt.spec"
|
||||
ctx.run("rpmbuild", "-bb", f"--define=_salt_src {checkout}", str(spec_file))
|
||||
constraints = ["setuptools-scm<8"]
|
||||
with tempfile.NamedTemporaryFile(
|
||||
"w", prefix="reqs-constraints-", suffix=".txt", delete=False
|
||||
) as tfile:
|
||||
with open(tfile.name, "w", encoding="utf-8") as wfh:
|
||||
for req in constraints:
|
||||
wfh.write(f"{req}\n")
|
||||
env = os.environ.copy()
|
||||
env["PIP_CONSTRAINT"] = str(tfile.name)
|
||||
|
||||
spec_file = checkout / "pkg" / "rpm" / "salt.spec"
|
||||
ctx.run(
|
||||
"rpmbuild", "-bb", f"--define=_salt_src {checkout}", str(spec_file), env=env
|
||||
)
|
||||
|
||||
ctx.info("Done")
|
||||
|
||||
|
@ -549,40 +572,51 @@ def onedir_dependencies(
|
|||
)
|
||||
_check_pkg_build_files_exist(ctx, requirements_file=requirements_file)
|
||||
|
||||
ctx.run(
|
||||
str(python_bin),
|
||||
"-m",
|
||||
"pip",
|
||||
"install",
|
||||
"-U",
|
||||
"wheel",
|
||||
)
|
||||
ctx.run(
|
||||
str(python_bin),
|
||||
"-m",
|
||||
"pip",
|
||||
"install",
|
||||
"-U",
|
||||
"pip>=22.3.1,<23.0",
|
||||
)
|
||||
ctx.run(
|
||||
str(python_bin),
|
||||
"-m",
|
||||
"pip",
|
||||
"install",
|
||||
"-U",
|
||||
"setuptools>=65.6.3,<66",
|
||||
)
|
||||
ctx.run(
|
||||
str(python_bin),
|
||||
"-m",
|
||||
"pip",
|
||||
"install",
|
||||
*install_args,
|
||||
"-r",
|
||||
str(requirements_file),
|
||||
env=env,
|
||||
)
|
||||
constraints = ["setuptools-scm<8"]
|
||||
with tempfile.NamedTemporaryFile(
|
||||
"w", prefix="reqs-constraints-", suffix=".txt", delete=False
|
||||
) as tfile:
|
||||
with open(tfile.name, "w", encoding="utf-8") as wfh:
|
||||
for req in constraints:
|
||||
wfh.write(f"{req}\n")
|
||||
env["PIP_CONSTRAINT"] = str(tfile.name)
|
||||
ctx.run(
|
||||
str(python_bin),
|
||||
"-m",
|
||||
"pip",
|
||||
"install",
|
||||
"-U",
|
||||
"wheel",
|
||||
env=env,
|
||||
)
|
||||
ctx.run(
|
||||
str(python_bin),
|
||||
"-m",
|
||||
"pip",
|
||||
"install",
|
||||
"-U",
|
||||
"pip>=22.3.1,<23.0",
|
||||
env=env,
|
||||
)
|
||||
ctx.run(
|
||||
str(python_bin),
|
||||
"-m",
|
||||
"pip",
|
||||
"install",
|
||||
"-U",
|
||||
"setuptools>=65.6.3,<66",
|
||||
env=env,
|
||||
)
|
||||
ctx.run(
|
||||
str(python_bin),
|
||||
"-m",
|
||||
"pip",
|
||||
"install",
|
||||
*install_args,
|
||||
"-r",
|
||||
str(requirements_file),
|
||||
env=env,
|
||||
)
|
||||
|
||||
|
||||
@build.command(
|
||||
|
|
Loading…
Add table
Reference in a new issue