Merge branch '3006.x' into merge/3007.x/3006.x

This commit is contained in:
Daniel A. Wozniak 2025-03-07 15:05:05 -07:00
commit c695e0bcff
114 changed files with 1444 additions and 1254 deletions

View file

@ -29,21 +29,13 @@ runs:
steps:
- name: Download Cached Deps Onedir Package Directory
id: onedir-bare-cache
uses: ./.github/actions/cache
- name: Install Salt Packaging Dependencies into Relenv Onedir
uses: ./.github/actions/build-onedir-deps
with:
path: artifacts/${{ inputs.package-name }}
key: >
${{ inputs.cache-prefix }}|${{ inputs.python-version }}|deps|${{ inputs.platform }}|${{ inputs.arch }}|${{ inputs.package-name }}|${{
hashFiles(
format('{0}/.relenv/**/*.xz', github.workspace),
'requirements/static/pkg/*/*.txt',
'.github/actions/build-onedir-deps/action.yml',
'.github/workflows/build-deps-onedir-*.yml',
'cicd/shared-gh-workflows-context.yml'
)
}}
platform: ${{ inputs.platform }}
arch: ${{ inputs.arch }}
python-version: "${{ inputs.python-version }}"
cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ inputs.salt-version }}
- name: Download Source Tarball
uses: actions/download-artifact@v4

View file

@ -92,3 +92,9 @@ OkZFOjhCOjI3OjFDOjFBOkJEOjUxOjQ2OjE4OjBBOjhFOjVBOjI1OjQzOjQzOjZGOkRBXHJcbmE9c2V0
dXA6YWN0aXZlXHJcbiIsICJ0eXBlIjogImFuc3dlciJ9
-- Message received --
```
SSH to your local port.
```
ssh -o StrictHostKeychecking=no -o TCPKeepAlive=no -o StrictHostKeyChecking=no -vv -p 5222 runner@localhost
```

View file

@ -6,6 +6,7 @@ import io
import json
import logging
import os
import signal
import sys
import textwrap
import time
@ -77,6 +78,42 @@ def print_pastable(data, message="offer"):
sys.stdout.flush()
async def read_from_stdin():
loop = asyncio.get_event_loop()
line = await loop.run_in_executor(
None, input, "-- Please enter a message from remote party --\n"
)
data = line
while line:
try:
line = await loop.run_in_executor(None, input)
except EOFError:
break
data += line
print("-- Message received --")
return data
class Channels:
def __init__(self, channels=None):
if channels is None:
channels = []
self.channels = channels
def add(self, channel):
self.channels.append(channel)
def close(self):
for channel in self.channels:
channel.close()
class ProxyConnection:
def __init__(self, pc, channel):
self.pc = pc
self.channel = channel
class ProxyClient:
def __init__(self, args, channel):
@ -219,29 +256,7 @@ class ProxyServer:
log.exception("WTF")
class ProxyConnection:
def __init__(self, pc, channel):
self.pc = pc
self.channel = channel
async def read_from_stdin():
loop = asyncio.get_event_loop()
line = await loop.run_in_executor(
None, input, "-- Please enter a message from remote party --\n"
)
data = line
while line:
try:
line = await loop.run_in_executor(None, input)
except EOFError:
break
data += line
print("-- Message received --")
return data
async def run_answer(pc, args):
async def run_answer(stop, pc, args):
"""
Top level offer answer server.
"""
@ -270,11 +285,11 @@ async def run_answer(pc, args):
elif obj is BYE:
print("Exiting")
while True:
while not stop.is_set():
await asyncio.sleep(0.3)
async def run_offer(pc, args):
async def run_offer(stop, pc, args):
"""
Top level offer server this will estabilsh a data channel and start a tcp
server on the port provided. New connections to the server will start the
@ -324,10 +339,14 @@ async def run_offer(pc, args):
elif obj is BYE:
print("Exiting")
while True:
while not stop.is_set():
await asyncio.sleep(0.3)
async def signal_handler(stop, pc):
stop.set()
if __name__ == "__main__":
if sys.platform == "win32":
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
@ -343,16 +362,22 @@ if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.INFO)
stop = asyncio.Event()
pc = RTCPeerConnection()
if args.role == "offer":
coro = run_offer(pc, args)
coro = run_offer(stop, pc, args)
else:
coro = run_answer(pc, args)
coro = run_answer(stop, pc, args)
# run event loop
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
for signame in ("SIGINT", "SIGTERM"):
loop.add_signal_handler(
getattr(signal, signame),
lambda: asyncio.create_task(signal_handler(stop, pc)),
)
try:
loop.run_until_complete(coro)
except KeyboardInterrupt:

View file

@ -1,192 +0,0 @@
---
name: Build Packaging Dependencies Onedir
on:
workflow_call:
inputs:
salt-version:
type: string
required: true
description: The Salt version to set prior to building packages.
cache-seed:
required: true
type: string
description: Seed used to invalidate caches
relenv-version:
required: true
type: string
description: The version of relenv to use
python-version:
required: true
type: string
description: The version of python to use with relenv
matrix:
required: true
type: string
description: Json job matrix config
linux_arm_runner:
required: true
type: string
description: Json job matrix config
env:
RELENV_DATA: "${{ github.workspace }}/.relenv"
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
PIP_DISABLE_PIP_VERSION_CHECK: "1"
jobs:
build-deps-linux:
name: Linux
if: ${{ toJSON(fromJSON(inputs.matrix)['linux']) != '[]' }}
runs-on:
- ${{ matrix.arch == 'x86_64' && 'ubuntu-24.04' || inputs.linux_arm_runner }}
strategy:
fail-fast: false
matrix:
include: ${{ fromJSON(inputs.matrix)['linux'] }}
env:
USE_S3_CACHE: 'false'
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: '3.10'
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-seed }}|build-deps|linux|${{ matrix.arch }}
- name: Setup Relenv
id: setup-relenv
uses: ./.github/actions/setup-relenv
with:
platform: linux
arch: ${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}
version: ${{ inputs.relenv-version }}
cache-seed: ${{ inputs.cache-seed }}
python-version: ${{ inputs.python-version }}
- name: Install Salt Packaging Dependencies into Relenv Onedir
uses: ./.github/actions/build-onedir-deps
with:
platform: linux
arch: ${{ matrix.arch }}
python-version: "${{ inputs.python-version }}"
cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }}
build-deps-macos:
name: macOS
if: ${{ toJSON(fromJSON(inputs.matrix)['macos']) != '[]' }}
strategy:
fail-fast: false
max-parallel: 2
matrix:
include: ${{ fromJSON(inputs.matrix)['macos'] }}
runs-on:
- ${{ matrix.arch == 'arm64' && 'macos-14' || 'macos-13' }}
env:
USE_S3_CACHE: 'false'
PIP_INDEX_URL: https://pypi.org/simple
steps:
- name: "Check cores"
shell: bash
run: sysctl -n hw.ncpu
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
- uses: actions/checkout@v4
- name: Set up Python 3.10
uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-seed }}|build-deps|macos
- name: Setup Relenv
id: setup-relenv
uses: ./.github/actions/setup-relenv
with:
platform: macos
arch: ${{ matrix.arch }}
version: ${{ inputs.relenv-version }}
cache-seed: ${{ inputs.cache-seed }}
python-version: ${{ inputs.python-version }}
- name: Install Salt Packaging Dependencies into Relenv Onedir
uses: ./.github/actions/build-onedir-deps
with:
platform: macos
arch: ${{ matrix.arch }}
python-version: "${{ inputs.python-version }}"
cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }}
build-deps-windows:
name: Windows
if: ${{ toJSON(fromJSON(inputs.matrix)['windows']) != '[]' }}
strategy:
fail-fast: false
max-parallel: 2
matrix:
include: ${{ fromJSON(inputs.matrix)['windows'] }}
runs-on: windows-latest
env:
USE_S3_CACHE: 'false'
PIP_INDEX_URL: https://pypi.org/simple
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
- uses: actions/checkout@v4
- name: Set up Python 3.10
uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-seed }}|build-deps|windows|${{ matrix.arch }}
- name: Setup Relenv
id: setup-relenv
uses: ./.github/actions/setup-relenv
with:
platform: windows
arch: ${{ matrix.arch }}
version: ${{ inputs.relenv-version }}
cache-seed: ${{ inputs.cache-seed }}
python-version: ${{ inputs.python-version }}
- name: Install Salt Packaging Dependencies into Relenv Onedir
uses: ./.github/actions/build-onedir-deps
with:
platform: windows
arch: ${{ matrix.arch }}
python-version: "${{ inputs.python-version }}"
cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }}

View file

@ -108,7 +108,7 @@ jobs:
- ${{ matrix.arch == 'arm64' && 'macos-14' || 'macos-13' }}
env:
PIP_INDEX_URL: https://pypi.org/simple
USE_S3_CACHE: 'false'
steps:
- name: "Check cores"
shell: bash
@ -125,16 +125,6 @@ jobs:
with:
python-version: "3.10"
- name: Setup Relenv
id: setup-relenv
uses: ./.github/actions/setup-relenv
with:
platform: macos
arch: ${{ matrix.arch }}
version: ${{ inputs.relenv-version }}
cache-seed: ${{ inputs.cache-seed }}
python-version: ${{ inputs.python-version }}
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
@ -146,6 +136,16 @@ jobs:
with:
salt-version: "${{ inputs.salt-version }}"
- name: Setup Relenv
id: setup-relenv
uses: ./.github/actions/setup-relenv
with:
platform: macos
arch: ${{ matrix.arch }}
version: ${{ inputs.relenv-version }}
cache-seed: ${{ inputs.cache-seed }}
python-version: ${{ inputs.python-version }}
- name: Install Salt into Relenv Onedir
uses: ./.github/actions/build-onedir-salt
with:
@ -166,7 +166,7 @@ jobs:
runs-on: windows-latest
env:
PIP_INDEX_URL: https://pypi.org/simple
USE_S3_CACHE: 'false'
steps:
- name: "Throttle Builds"
@ -181,16 +181,6 @@ jobs:
with:
python-version: "3.10"
- name: Setup Relenv
id: setup-relenv
uses: ./.github/actions/setup-relenv
with:
platform: windows
arch: ${{ matrix.arch }}
version: ${{ inputs.relenv-version }}
cache-seed: ${{ inputs.cache-seed }}
python-version: ${{ inputs.python-version }}
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
@ -202,6 +192,16 @@ jobs:
with:
salt-version: "${{ inputs.salt-version }}"
- name: Setup Relenv
id: setup-relenv
uses: ./.github/actions/setup-relenv
with:
platform: windows
arch: ${{ matrix.arch }}
version: ${{ inputs.relenv-version }}
cache-seed: ${{ inputs.cache-seed }}
python-version: ${{ inputs.python-version }}
- name: Install Salt into Relenv Onedir
uses: ./.github/actions/build-onedir-salt
with:

View file

@ -239,7 +239,7 @@ jobs:
lint:
name: Lint
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['lint'] }}
if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['lint'] }}
uses: ./.github/workflows/lint-action.yml
needs:
- prepare-workflow
@ -257,7 +257,7 @@ jobs:
name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}"
runs-on:
- ubuntu-22.04
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['prepare-release'] }}
if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['prepare-release'] }}
needs:
- prepare-workflow
steps:
@ -379,7 +379,7 @@ jobs:
build-docs:
name: Documentation
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-docs'] }}
if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-docs'] }}
needs:
- prepare-workflow
- build-source-tarball
@ -390,7 +390,7 @@ jobs:
build-source-tarball:
name: Build Source Tarball
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-source-tarball'] }}
if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-source-tarball'] }}
needs:
- prepare-workflow
- prepare-release
@ -419,33 +419,18 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
build-deps-onedir:
name: Build Onedir Dependencies
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-deps-onedir'] }}
needs:
- prepare-workflow
uses: ./.github/workflows/build-deps-onedir.yml
with:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
relenv-version: "0.18.0"
python-version: "3.10.15"
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
build-salt-onedir:
name: Build Salt Onedir
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-salt-onedir'] }}
if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-salt-onedir'] }}
needs:
- prepare-workflow
- build-deps-onedir
- build-source-tarball
uses: ./.github/workflows/build-salt-onedir.yml
with:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
relenv-version: "0.18.0"
python-version: "3.10.15"
relenv-version: "0.18.1"
python-version: "3.10.16"
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
@ -459,8 +444,8 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
relenv-version: "0.18.0"
python-version: "3.10.15"
relenv-version: "0.18.1"
python-version: "3.10.16"
source: "onedir"
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
@ -476,7 +461,7 @@ jobs:
nox-version: 2022.8.7
python-version: "3.10"
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16
nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}"
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
@ -493,7 +478,7 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
nox-version: 2022.8.7
python-version: "3.10"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] }}
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['pkg-test-matrix']) }}
@ -511,7 +496,7 @@ jobs:
python-version: "3.10"
testrun: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['testrun']) }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] }}
workflow-slug: ci
default-timeout: 180
@ -520,7 +505,7 @@ jobs:
combine-all-code-coverage:
name: Combine Code Coverage
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] == false }}
if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] == false }}
runs-on: ubuntu-22.04
env:
PIP_INDEX_URL: https://pypi.org/simple
@ -650,7 +635,7 @@ jobs:
# This step is just so we can make github require this step, to pass checks
# on a pull request instead of requiring all
name: Set the ${{ github.workflow }} Pipeline Exit Status
if: always()
if: ${{ !cancelled() && always() }}
runs-on: ubuntu-22.04
needs:
- prepare-workflow
@ -658,7 +643,6 @@ jobs:
- lint
- nsis-tests
- build-docs
- build-deps-onedir
- build-salt-onedir
- combine-all-code-coverage
- build-ci-deps

View file

@ -296,7 +296,7 @@ jobs:
lint:
name: Lint
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['lint'] }}
if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['lint'] }}
uses: ./.github/workflows/lint-action.yml
needs:
- prepare-workflow
@ -314,7 +314,7 @@ jobs:
name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}"
runs-on:
- ubuntu-22.04
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['prepare-release'] }}
if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['prepare-release'] }}
needs:
- prepare-workflow
steps:
@ -436,7 +436,7 @@ jobs:
build-docs:
name: Documentation
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-docs'] }}
if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-docs'] }}
needs:
- prepare-workflow
- build-source-tarball
@ -447,7 +447,7 @@ jobs:
build-source-tarball:
name: Build Source Tarball
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-source-tarball'] }}
if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-source-tarball'] }}
needs:
- prepare-workflow
- prepare-release
@ -476,33 +476,18 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
build-deps-onedir:
name: Build Onedir Dependencies
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-deps-onedir'] }}
needs:
- prepare-workflow
uses: ./.github/workflows/build-deps-onedir.yml
with:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
relenv-version: "0.18.0"
python-version: "3.10.15"
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
build-salt-onedir:
name: Build Salt Onedir
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-salt-onedir'] }}
if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-salt-onedir'] }}
needs:
- prepare-workflow
- build-deps-onedir
- build-source-tarball
uses: ./.github/workflows/build-salt-onedir.yml
with:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
relenv-version: "0.18.0"
python-version: "3.10.15"
relenv-version: "0.18.1"
python-version: "3.10.16"
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
@ -516,8 +501,8 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
relenv-version: "0.18.0"
python-version: "3.10.15"
relenv-version: "0.18.1"
python-version: "3.10.16"
source: "onedir"
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
@ -536,8 +521,8 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
relenv-version: "0.18.0"
python-version: "3.10.15"
relenv-version: "0.18.1"
python-version: "3.10.16"
source: "src"
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
@ -557,7 +542,7 @@ jobs:
nox-version: 2022.8.7
python-version: "3.10"
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16
nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}"
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
@ -574,7 +559,7 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
nox-version: 2022.8.7
python-version: "3.10"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16
skip-code-coverage: true
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['pkg-test-matrix']) }}
@ -592,7 +577,7 @@ jobs:
python-version: "3.10"
testrun: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['testrun']) }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16
skip-code-coverage: true
workflow-slug: nightly
default-timeout: 360
@ -602,7 +587,7 @@ jobs:
# This step is just so we can make github require this step, to pass checks
# on a pull request instead of requiring all
name: Set the ${{ github.workflow }} Pipeline Exit Status
if: always()
if: ${{ !cancelled() && always() }}
runs-on: ubuntu-22.04
environment: nightly
needs:
@ -613,7 +598,6 @@ jobs:
- lint
- nsis-tests
- build-docs
- build-deps-onedir
- build-salt-onedir
- build-pkgs-src
- build-ci-deps

View file

@ -48,14 +48,14 @@ jobs:
cache-seed: ${{ inputs.cache-seed }}
- name: Check ALL Files On Branch
if: github.event_name != 'pull_request'
if: ${{ !cancelled() && github.event_name != 'pull_request' }}
env:
SKIP: lint-salt,lint-tests,remove-import-headers,pyupgrade
run: |
pre-commit run --show-diff-on-failure --color=always --all-files
- name: Check Changed Files On PR
if: github.event_name == 'pull_request' && fromJSON(inputs.changed-files)['repo']
if: ${{ !cancelled() && github.event_name == 'pull_request' && fromJSON(inputs.changed-files)['repo'] }}
env:
SKIP: lint-salt,lint-tests
GH_ACTIONS_ANNOTATE: "1"
@ -63,6 +63,6 @@ jobs:
pre-commit run --show-diff-on-failure --color=always --files ${{ join(fromJSON(inputs.changed-files)['repo_files'], ' ') }}
- name: Check Docs On Deleted Files
if: github.event_name == 'pull_request' && fromJSON(inputs.changed-files)['deleted']
if: ${{ !cancelled() && github.event_name == 'pull_request' && fromJSON(inputs.changed-files)['deleted'] }}
run: |
pre-commit run --show-diff-on-failure --color=always check-docs --files ${{ join(fromJSON(inputs.changed-files)['deleted_files'], ' ') }}

View file

@ -186,7 +186,7 @@ jobs:
nox-version: 2022.8.7
python-version: "3.10"
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16
nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}"
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
@ -422,7 +422,7 @@ jobs:
# This step is just so we can make github require this step, to pass checks
# on a pull request instead of requiring all
name: Set the ${{ github.workflow }} Pipeline Exit Status
if: always()
if: ${{ !cancelled() && always() }}
runs-on: ubuntu-22.04
needs:
- check-requirements

View file

@ -286,7 +286,7 @@ jobs:
lint:
name: Lint
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['lint'] }}
if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['lint'] }}
uses: ./.github/workflows/lint-action.yml
needs:
- prepare-workflow
@ -304,7 +304,7 @@ jobs:
name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}"
runs-on:
- ubuntu-22.04
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['prepare-release'] }}
if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['prepare-release'] }}
needs:
- prepare-workflow
steps:
@ -426,7 +426,7 @@ jobs:
build-docs:
name: Documentation
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-docs'] }}
if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-docs'] }}
needs:
- prepare-workflow
- build-source-tarball
@ -437,7 +437,7 @@ jobs:
build-source-tarball:
name: Build Source Tarball
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-source-tarball'] }}
if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-source-tarball'] }}
needs:
- prepare-workflow
- prepare-release
@ -466,33 +466,18 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
build-deps-onedir:
name: Build Onedir Dependencies
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-deps-onedir'] }}
needs:
- prepare-workflow
uses: ./.github/workflows/build-deps-onedir.yml
with:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
relenv-version: "0.18.0"
python-version: "3.10.15"
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
build-salt-onedir:
name: Build Salt Onedir
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-salt-onedir'] }}
if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-salt-onedir'] }}
needs:
- prepare-workflow
- build-deps-onedir
- build-source-tarball
uses: ./.github/workflows/build-salt-onedir.yml
with:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
relenv-version: "0.18.0"
python-version: "3.10.15"
relenv-version: "0.18.1"
python-version: "3.10.16"
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
@ -506,8 +491,8 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
relenv-version: "0.18.0"
python-version: "3.10.15"
relenv-version: "0.18.1"
python-version: "3.10.16"
source: "onedir"
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
@ -523,7 +508,7 @@ jobs:
nox-version: 2022.8.7
python-version: "3.10"
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16
nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}"
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
@ -540,7 +525,7 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
nox-version: 2022.8.7
python-version: "3.10"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16
skip-code-coverage: true
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['pkg-test-matrix']) }}
@ -558,7 +543,7 @@ jobs:
python-version: "3.10"
testrun: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['testrun']) }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16
skip-code-coverage: true
workflow-slug: scheduled
default-timeout: 360
@ -568,7 +553,7 @@ jobs:
# This step is just so we can make github require this step, to pass checks
# on a pull request instead of requiring all
name: Set the ${{ github.workflow }} Pipeline Exit Status
if: always()
if: ${{ !cancelled() && always() }}
runs-on: ubuntu-22.04
needs:
- workflow-requirements
@ -578,7 +563,6 @@ jobs:
- lint
- nsis-tests
- build-docs
- build-deps-onedir
- build-salt-onedir
- build-ci-deps
- test-packages

View file

@ -33,6 +33,22 @@ jobs:
- name: Checkout Source Code
uses: actions/checkout@v4
- name: Set up Python 3.10
uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ssh-debug
- name: Install Nox
run: |
python3 -m pip install 'nox==2022.8.7'
env:
PIP_INDEX_URL: https://pypi.org/simple
- uses: ./.github/actions/ssh-tunnel
with:
public_key: ${{ inputs.public_key }}

View file

@ -278,7 +278,7 @@ jobs:
lint:
name: Lint
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['lint'] }}
if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['lint'] }}
uses: ./.github/workflows/lint-action.yml
needs:
- prepare-workflow
@ -296,7 +296,7 @@ jobs:
name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}"
runs-on:
- ubuntu-22.04
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['prepare-release'] }}
if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['prepare-release'] }}
needs:
- prepare-workflow
steps:
@ -419,7 +419,7 @@ jobs:
build-docs:
name: Documentation
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-docs'] }}
if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-docs'] }}
needs:
- prepare-workflow
- build-source-tarball
@ -430,7 +430,7 @@ jobs:
build-source-tarball:
name: Build Source Tarball
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-source-tarball'] }}
if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-source-tarball'] }}
needs:
- prepare-workflow
- prepare-release
@ -459,33 +459,18 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
build-deps-onedir:
name: Build Onedir Dependencies
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-deps-onedir'] }}
needs:
- prepare-workflow
uses: ./.github/workflows/build-deps-onedir.yml
with:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
relenv-version: "0.18.0"
python-version: "3.10.15"
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
build-salt-onedir:
name: Build Salt Onedir
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-salt-onedir'] }}
if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-salt-onedir'] }}
needs:
- prepare-workflow
- build-deps-onedir
- build-source-tarball
uses: ./.github/workflows/build-salt-onedir.yml
with:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
relenv-version: "0.18.0"
python-version: "3.10.15"
relenv-version: "0.18.1"
python-version: "3.10.16"
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
@ -499,8 +484,8 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
relenv-version: "0.18.0"
python-version: "3.10.15"
relenv-version: "0.18.1"
python-version: "3.10.16"
source: "onedir"
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
@ -519,8 +504,8 @@ jobs:
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
relenv-version: "0.18.0"
python-version: "3.10.15"
relenv-version: "0.18.1"
python-version: "3.10.16"
source: "src"
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
@ -540,7 +525,7 @@ jobs:
nox-version: 2022.8.7
python-version: "3.10"
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16
nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}"
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
@ -557,7 +542,7 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
nox-version: 2022.8.7
python-version: "3.10"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16
skip-code-coverage: true
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['pkg-test-matrix']) }}
@ -575,7 +560,7 @@ jobs:
python-version: "3.10"
testrun: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['testrun']) }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.16
skip-code-coverage: true
workflow-slug: staging
default-timeout: 180
@ -616,7 +601,7 @@ jobs:
publish-pypi:
name: Publish to PyPi(test)
if: ${{ inputs.skip-test-pypi-publish != true && github.event.repository.fork != true }}
if: ${{ !cancelled() && inputs.skip-test-pypi-publish != true && github.event.repository.fork != true }}
needs:
- prepare-workflow
- upload-release-artifacts
@ -670,11 +655,10 @@ jobs:
draft-release:
name: Draft Github Release
if: |
always() && (needs.test.result == 'success' || needs.test.result == 'skipped') &&
if: ${{ !cancelled() && (needs.test.result == 'success' || needs.test.result == 'skipped') &&
(needs.test-packages.result == 'success' || needs.test-packages.result == 'skipped') &&
needs.prepare-workflow.result == 'success' && needs.build-salt-onedir.result == 'success' &&
needs.build-pkgs-onedir.result == 'success' && needs.pre-commit.result == 'success'
needs.build-pkgs-onedir.result == 'success' && needs.pre-commit.result == 'success' }}
needs:
- prepare-workflow
- pre-commit
@ -695,7 +679,7 @@ jobs:
# This step is just so we can make github require this step, to pass checks
# on a pull request instead of requiring all
name: Set the ${{ github.workflow }} Pipeline Exit Status
if: always()
if: ${{ !cancelled() && always() }}
runs-on: ubuntu-22.04
needs:
- check-requirements
@ -704,7 +688,6 @@ jobs:
- lint
- nsis-tests
- build-docs
- build-deps-onedir
- build-salt-onedir
- build-pkgs-src
- upload-release-artifacts

View file

@ -29,7 +29,7 @@
lint:
<%- do conclusion_needs.append('lint') %>
name: Lint
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }}
if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }}
uses: ./.github/workflows/lint-action.yml
needs:
- prepare-workflow
@ -58,7 +58,7 @@
name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}"
runs-on:
- ubuntu-22.04
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }}
if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }}
needs:
- prepare-workflow
steps:
@ -190,7 +190,7 @@
<{ job_name }>:
<%- do conclusion_needs.append(job_name) %>
name: Documentation
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }}
if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }}
needs:
- prepare-workflow
- build-source-tarball
@ -207,7 +207,7 @@
<{ job_name }>:
name: Build Source Tarball
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }}
if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }}
needs:
- prepare-workflow
- prepare-release
@ -238,37 +238,15 @@
<%- endif %>
<%- set job_name = "build-deps-onedir" %>
<%- if includes.get(job_name, True) %>
<{ job_name }>:
<%- do conclusion_needs.append(job_name) %>
name: Build Onedir Dependencies
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }}
needs:
- prepare-workflow
uses: ./.github/workflows/build-deps-onedir.yml
with:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
relenv-version: "<{ relenv_version }>"
python-version: "<{ python_version }>"
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
<%- endif %>
<%- set job_name = "build-salt-onedir" %>
<%- if includes.get(job_name, True) %>
<{ job_name }>:
<%- do conclusion_needs.append(job_name) %>
name: Build Salt Onedir
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }}
if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }}
needs:
- prepare-workflow
- build-deps-onedir
- build-source-tarball
uses: ./.github/workflows/build-salt-onedir.yml
with:
@ -306,7 +284,7 @@
combine-all-code-coverage:
<%- do conclusion_needs.append("combine-all-code-coverage") %>
name: Combine Code Coverage
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] == false }}
if: ${{ !cancelled() && fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] == false }}
runs-on: ubuntu-22.04
env:
PIP_INDEX_URL: https://pypi.org/simple

View file

@ -319,7 +319,7 @@ jobs:
# This step is just so we can make github require this step, to pass checks
# on a pull request instead of requiring all
name: Set the ${{ github.workflow }} Pipeline Exit Status
if: always()
if: ${{ !cancelled() && always() }}
runs-on: ubuntu-22.04
<%- if workflow_slug == "nightly" %>
environment: <{ workflow_slug }>

View file

@ -116,7 +116,7 @@ on:
publish-pypi:
<%- do conclusion_needs.append('publish-pypi') %>
name: Publish to PyPi(test)
if: ${{ inputs.skip-test-pypi-publish != true && github.event.repository.fork != true }}
if: ${{ !cancelled() && inputs.skip-test-pypi-publish != true && github.event.repository.fork != true }}
needs:
- prepare-workflow
- upload-release-artifacts
@ -178,11 +178,10 @@ on:
draft-release:
name: Draft Github Release
if: |
always() && (needs.test.result == 'success' || needs.test.result == 'skipped') &&
if: ${{ !cancelled() && (needs.test.result == 'success' || needs.test.result == 'skipped') &&
(needs.test-packages.result == 'success' || needs.test-packages.result == 'skipped') &&
needs.prepare-workflow.result == 'success' && needs.build-salt-onedir.result == 'success' &&
needs.build-pkgs-onedir.result == 'success' && needs.pre-commit.result == 'success'
needs.build-pkgs-onedir.result == 'success' && needs.pre-commit.result == 'success' }}
needs:
- prepare-workflow
- pre-commit

View file

@ -71,7 +71,7 @@ jobs:
test-linux:
name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }} ${{ matrix.transport }}${{ matrix.fips && '(fips)' || '' }}${{ matrix.test-group && ' ' || '' }}${{ matrix.test-group && matrix.test-group || '' }}
runs-on: ${{ matrix.arch == 'x86_64' && 'ubuntu-24.04' || inputs.linux_arm_runner }}
if: toJSON(fromJSON(inputs.matrix)['linux-x86_64']) != '[]'
if: ${{ !cancelled() && toJSON(fromJSON(inputs.matrix)['linux-x86_64']) != '[]' }}
# Full test runs. Each chunk should never take more than 2 hours.
# Partial test runs(no chunk parallelization), 6 Hours
timeout-minutes: ${{ fromJSON(inputs.testrun)['type'] == 'full' && inputs.default-timeout || 360 }}
@ -80,10 +80,6 @@ jobs:
matrix:
include: ${{ fromJSON(inputs.matrix)['linux-x86_64'] }}
steps:
- name: Set up Python ${{ inputs.python-version }}
uses: actions/setup-python@v5
with:
python-version: "${{ inputs.python-version }}"
- name: "Throttle Builds"
shell: bash
@ -98,6 +94,16 @@ jobs:
- name: Checkout Source Code
uses: actions/checkout@v4
- name: Setup Python ${{ inputs.python-version }}
uses: actions/setup-python@v5
with:
python-version: "${{ inputs.python-version }}"
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-prefix }}
- name: Free Disk Space Before Build
run: |
echo "Disk space before cleanup:"
@ -154,39 +160,28 @@ jobs:
free -h
- name: "Create container ${{ matrix.container }}"
env:
GITHUB_ACTIONS: true
CI: true
SKIP_REQUIREMENTS_INSTALL: 1
PRINT_TEST_SELECTION: 0
PRINT_TEST_PLAN_ONLY: 0
PRINT_SYSTEM_INFO: 0
RERUN_FAILURES: 1
GITHUB_ACTIONS_PIPELINE: 1
SKIP_INITIAL_ONEDIR_FAILURES: 1
SKIP_INITIAL_GH_ACTIONS_FAILURES: 1
SKIP_CODE_COVERAGE: ${{ inputs.skip-code-coverage && '1' || '0' }}
CONVERAGE_CONTEXT: ${{ matrix.slug }}
COLUMNS: 190
PIP_INDEX_URL: "${{ vars.PIP_INDEX_URL }}"
PIP_TRUSTED_HOST: "${{ vars.PIP_TRUSTED_HOST }}"
PIP_EXTRA_INDEX_URL: "${{ vars.PIP_EXTRA_INDEX_URL }}"
PIP_DISABLE_PIP_VERSION_CHECK: 1
RAISE_DEPRECATIONS_RUNTIME_ERRORS: 1
SALT_TRANSPORT: ${{ matrix.transport }}
run: |
/usr/bin/docker \
create --name ${{ github.run_id }}_salt-test \
--workdir /__w/salt/salt \
--privileged \
--ulimit="nofile=262144:262144" \
-e "HOME=/github/home" \
-e GITHUB_ACTIONS=true \
-e CI=true \
-e SKIP_REQUIREMENTS_INSTALL=1 \
-e PRINT_TEST_SELECTION=0 \
-e PRINT_TEST_PLAN_ONLY=0 \
-e PRINT_SYSTEM_INFO=0 \
-e RERUN_FAILURES=1 \
-e GITHUB_ACTIONS_PIPELINE=1 \
-e SKIP_INITIAL_ONEDIR_FAILURES=1 \
-e SKIP_INITIAL_GH_ACTIONS_FAILURES=1 \
-e SKIP_CODE_COVERAGE=${{ inputs.skip-code-coverage && '1' || '0' }} \
-e CONVERAGE_CONTEXT=${{ matrix.slug }} \
-e COLUMNS=190 \
-e PIP_INDEX_URL=${{ vars.PIP_INDEX_URL }} \
-e PIP_TRUSTED_HOST=${{ vars.PIP_TRUSTED_HOST }} \
-e PIP_EXTRA_INDEX_URL=${{ vars.PIP_EXTRA_INDEX_URL }} \
-e PIP_DISABLE_PIP_VERSION_CHECK="1" \
-e RAISE_DEPRECATIONS_RUNTIME_ERRORS="1" \
-e SALT_TRANSPORT=${{ matrix.transport }} \
-e LANG="en_US.UTF-8" \
-e SHELL=/bin/bash \
-v "/home/runner/work":"/__w" \
-v "/tmp/":"/var/lib/docker" \
--entrypoint "/usr/lib/systemd/systemd" \
${{ matrix.container }} \
--systemd --unit rescue.target
tools container create ${{ matrix.container }} --name ${{ github.run_id }}_salt-test
- name: "Start container ${{ matrix.container }}"
run: |
@ -387,7 +382,7 @@ jobs:
test-linux-arm64:
name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }} ${{ matrix.transport }}${{ matrix.fips && '(fips)' || '' }}${{ matrix.test-group && ' ' || '' }}${{ matrix.test-group && matrix.test-group || '' }}
runs-on: ${{ matrix.arch == 'x86_64' && 'ubuntu-22.04' || inputs.linux_arm_runner }}
if: toJSON(fromJSON(inputs.matrix)['linux-arm64']) != '[]'
if: ${{ !cancelled() && toJSON(fromJSON(inputs.matrix)['linux-arm64']) != '[]' }}
# Full test runs. Each chunk should never take more than 2 hours.
# Partial test runs(no chunk parallelization), 6 Hours
timeout-minutes: ${{ fromJSON(inputs.testrun)['type'] == 'full' && inputs.default-timeout || 360 }}
@ -396,10 +391,6 @@ jobs:
matrix:
include: ${{ fromJSON(inputs.matrix)['linux-arm64'] }}
steps:
- name: Set up Python ${{ inputs.python-version }}
uses: actions/setup-python@v5
with:
python-version: "${{ inputs.python-version }}"
- name: "Throttle Builds"
shell: bash
@ -411,9 +402,25 @@ jobs:
run: |
echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
- name: Checkout Source Code
uses: actions/checkout@v4
- name: Setup Python ${{ inputs.python-version }}
uses: actions/setup-python@v5
with:
python-version: "${{ inputs.python-version }}"
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-prefix }}
- name: "Set `TIMESTAMP` environment variable"
shell: bash
run: |
echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
- name: Free Disk Space Before Build
run: |
echo "Disk space before cleanup:"
@ -470,39 +477,28 @@ jobs:
free -h
- name: "Create container ${{ matrix.container }}"
env:
GITHUB_ACTIONS: true
CI: true
SKIP_REQUIREMENTS_INSTALL: 1
PRINT_TEST_SELECTION: 0
PRINT_TEST_PLAN_ONLY: 0
PRINT_SYSTEM_INFO: 0
RERUN_FAILURES: 1
GITHUB_ACTIONS_PIPELINE: 1
SKIP_INITIAL_ONEDIR_FAILURES: 1
SKIP_INITIAL_GH_ACTIONS_FAILURES: 1
SKIP_CODE_COVERAGE: ${{ inputs.skip-code-coverage && '1' || '0' }}
CONVERAGE_CONTEXT: ${{ matrix.slug }}
COLUMNS: 190
PIP_INDEX_URL: "${{ vars.PIP_INDEX_URL }}"
PIP_TRUSTED_HOST: "${{ vars.PIP_TRUSTED_HOST }}"
PIP_EXTRA_INDEX_URL: "${{ vars.PIP_EXTRA_INDEX_URL }}"
PIP_DISABLE_PIP_VERSION_CHECK: 1
RAISE_DEPRECATIONS_RUNTIME_ERRORS: 1
SALT_TRANSPORT: ${{ matrix.transport }}
run: |
/usr/bin/docker \
create --name ${{ github.run_id }}_salt-test \
--workdir /__w/salt/salt \
--privileged \
--ulimit="nofile=262144:262144" \
-e "HOME=/github/home" \
-e GITHUB_ACTIONS=true \
-e CI=true \
-e SKIP_REQUIREMENTS_INSTALL=1 \
-e PRINT_TEST_SELECTION=0 \
-e PRINT_TEST_PLAN_ONLY=0 \
-e PRINT_SYSTEM_INFO=0 \
-e RERUN_FAILURES=1 \
-e GITHUB_ACTIONS_PIPELINE=1 \
-e SKIP_INITIAL_ONEDIR_FAILURES=1 \
-e SKIP_INITIAL_GH_ACTIONS_FAILURES=1 \
-e SKIP_CODE_COVERAGE=${{ inputs.skip-code-coverage && '1' || '0' }} \
-e CONVERAGE_CONTEXT=${{ matrix.slug }} \
-e COLUMNS=190 \
-e PIP_INDEX_URL=${{ vars.PIP_INDEX_URL }} \
-e PIP_TRUSTED_HOST=${{ vars.PIP_TRUSTED_HOST }} \
-e PIP_EXTRA_INDEX_URL=${{ vars.PIP_EXTRA_INDEX_URL }} \
-e PIP_DISABLE_PIP_VERSION_CHECK="1" \
-e RAISE_DEPRECATIONS_RUNTIME_ERRORS="1" \
-e SALT_TRANSPORT=${{ matrix.transport }} \
-e LANG="en_US.UTF-8" \
-e SHELL=/bin/bash \
-v "/home/runner/work":"/__w" \
-v "/tmp/":"/var/lib/docker" \
--entrypoint "/usr/lib/systemd/systemd" \
${{ matrix.container }} \
--systemd --unit rescue.target
tools container create ${{ matrix.container }} --name ${{ github.run_id }}_salt-test
- name: "Start container ${{ matrix.container }}"
run: |
@ -705,7 +701,7 @@ jobs:
runs-on: ${{ matrix.runner }}
# Full test runs. Each chunk should never take more than 2 hours.
# Partial test runs(no chunk parallelization), 6 Hours
if: toJSON(fromJSON(inputs.matrix)['macos']) != '[]'
if: ${{ !cancelled() && toJSON(fromJSON(inputs.matrix)['macos']) != '[]' }}
timeout-minutes: ${{ fromJSON(inputs.testrun)['type'] == 'full' && inputs.default-timeout || 360 }}
strategy:
fail-fast: false
@ -726,6 +722,11 @@ jobs:
- name: Checkout Source Code
uses: actions/checkout@v4
- name: Setup Python ${{ inputs.python-version }}
uses: actions/setup-python@v5
with:
python-version: "${{ inputs.python-version }}"
- name: Setup Salt Version
run: |
echo "${{ inputs.salt-version }}" > salt/_version.txt
@ -751,12 +752,6 @@ jobs:
uses: actions/download-artifact@v4
with:
name: nox-macos-${{ matrix.arch }}-${{ inputs.nox-session }}
- name: Set up Python ${{ inputs.python-version }}
uses: actions/setup-python@v5
with:
python-version: "${{ inputs.python-version }}"
- name: Install Nox
run: |
python3 -m pip install 'nox==${{ inputs.nox-version }}'
@ -983,7 +978,7 @@ jobs:
test-windows:
name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }} ${{ matrix.transport }}${{ matrix.test-group && ' ' || '' }}${{ matrix.test-group && matrix.test-group || '' }}
if: toJSON(fromJSON(inputs.matrix)['windows']) != '[]'
if: ${{ !cancelled() && toJSON(fromJSON(inputs.matrix)['windows']) != '[]' }}
runs-on: ${{ matrix.slug }}
# Full test runs. Each chunk should never take more than 2 hours.
# Partial test runs(no chunk parallelization), 6 Hours
@ -1031,7 +1026,7 @@ jobs:
run: |
echo true
- name: Set up Python ${{ inputs.python-version }}
- name: Setup Python ${{ inputs.python-version }}
uses: actions/setup-python@v5
with:
python-version: "${{ inputs.python-version }}"
@ -1349,7 +1344,7 @@ jobs:
run: |
tree -a artifacts
- name: Set up Python ${{ inputs.python-version }}
- name: Setup Python ${{ inputs.python-version }}
uses: actions/setup-python@v5
with:
python-version: "${{ inputs.python-version }}"

View file

@ -65,7 +65,7 @@ jobs:
test-linux:
name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }}
runs-on: ${{ matrix.arch == 'x86_64' && 'ubuntu-24.04' || inputs.linux_arm_runner }}
if: ${{ toJSON(fromJSON(inputs.matrix)['linux']) != '[]' }}
if: ${{ !cancelled() && toJSON(fromJSON(inputs.matrix)['linux']) != '[]' }}
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
strategy:
fail-fast: false
@ -86,6 +86,16 @@ jobs:
- name: Checkout Source Code
uses: actions/checkout@v4
- name: Set up Python ${{ inputs.python-version }}
uses: actions/setup-python@v5
with:
python-version: "${{ inputs.python-version }}"
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-prefix }}
- name: Download Packages
uses: actions/download-artifact@v4
with:
@ -105,11 +115,6 @@ jobs:
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
- name: Set up Python ${{ inputs.python-version }}
uses: actions/setup-python@v5
with:
python-version: "${{ inputs.python-version }}"
- name: Install Nox
run: |
python3 -m pip install 'nox==${{ inputs.nox-version }}'
@ -135,7 +140,7 @@ jobs:
- name: "Create container ${{ matrix.container }}"
run: |
/usr/bin/docker create --name ${{ github.run_id }}_salt-test-pkg --workdir /__w/salt/salt --privileged -e "HOME=/github/home" -e GITHUB_ACTIONS=true -e CI=true -v "/var/run/docker.sock":"/var/run/docker.sock" -v "/home/runner/work":"/__w" -v "/home/runner/work/_temp":"/__w/_temp" -v "/home/runner/work/_actions":"/__w/_actions" -v "/opt/hostedtoolcache":"/__t" -v "/home/runner/work/_temp/_github_home":"/github/home" -v "/home/runner/work/_temp/_github_workflow":"/github/workflow" --entrypoint "/usr/lib/systemd/systemd" ${{ matrix.container }} --systemd --unit rescue.target
tools container create ${{ matrix.container }} --name ${{ github.run_id }}_salt-test-pkg
- name: "Start container ${{ matrix.container }}"
run: |
@ -145,31 +150,21 @@ jobs:
run: |
docker exec ${{ github.run_id}}_salt-test-pkg python3 -m nox --force-color -e decompress-dependencies -- linux ${{ matrix.arch }}
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-prefix }}
- name: List Free Space
run: |
df -h || true
- name: Show System Info
env:
SKIP_REQUIREMENTS_INSTALL: "1"
PRINT_SYSTEM_INFO_ONLY: "1"
run: |
docker exec ${{ github.run_id }}_salt-test-pkg python3 -m nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }}
docker exec \
-e SKIP_REQUIREMENTS_INSTALL=1 \
-e PRINT_SYSTEM_INFO_ONLY=1 \
${{ github.run_id }}_salt-test-pkg python3 -m nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }}
- name: Run Package Tests
env:
SKIP_REQUIREMENTS_INSTALL: "1"
RERUN_FAILURES: "1"
GITHUB_ACTIONS_PIPELINE: "1"
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
COVERAGE_CONTEXT: ${{ matrix.slug }}
run: |
/usr/bin/docker exec ${{ github.run_id }}_salt-test-pkg \
docker exec \
${{ github.run_id }}_salt-test-pkg \
python3 -m nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }} \
${{ matrix.version && format('--prev-version={0}', matrix.version) || ''}}
@ -197,7 +192,7 @@ jobs:
test-macos:
name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }}
runs-on: ${{ matrix.runner }}
if: ${{ toJSON(fromJSON(inputs.matrix)['macos']) != '[]' }}
if: ${{ !cancelled() && toJSON(fromJSON(inputs.matrix)['macos']) != '[]' }}
timeout-minutes: 150 # 2 & 1/2 Hours - More than this and something is wrong (MacOS needs a little more time)
strategy:
fail-fast: false
@ -325,7 +320,7 @@ jobs:
name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }}
runs-on: ${{ matrix.slug }}
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
if: ${{ toJSON(fromJSON(inputs.matrix)['windows']) != '[]' }}
if: ${{ !cancelled() && toJSON(fromJSON(inputs.matrix)['windows']) != '[]' }}
strategy:
fail-fast: false
matrix:

View file

@ -30,7 +30,7 @@ repos:
# case-insensitive filesystem like MacOS HFS+ or Windows FAT.
- id: check-json # Attempts to load all json files to verify syntax.
- id: check-symlinks # Checks for symlinks which do not point to anything.
- id: debug-statements # Check for debugger imports and py37+ breakpoint() calls in python source.
- id: debug-statements # Check for debugger imports and breakpoint() calls in python source.
exclude: >
(?x)^(
templates/.*
@ -145,21 +145,6 @@ repos:
rev: "4.8"
hooks:
- id: pip-tools-compile
alias: compile-pkg-linux-3.8-zmq-requirements
name: Linux Packaging Py3.8 ZeroMQ Requirements
files: ^requirements/((base|zeromq|crypto)\.txt|static/pkg/(linux\.in|py3\.8/linux\.txt))$
pass_filenames: false
args:
- -v
- --build-isolation
- --py-version=3.8
- --platform=linux
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- --no-emit-index-url
- requirements/static/pkg/linux.in
- id: pip-tools-compile
alias: compile-pkg-linux-3.9-zmq-requirements
name: Linux Packaging Py3.9 ZeroMQ Requirements
@ -220,22 +205,6 @@ repos:
- --no-emit-index-url
- requirements/static/pkg/linux.in
- id: pip-tools-compile
alias: compile-pkg-freebsd-3.8-zmq-requirements
name: FreeBSD Packaging Py3.8 ZeroMQ Requirements
files: ^requirements/((base|zeromq|crypto)\.txt|static/pkg/(freebsd\.in|py3\.8/freebsd\.txt))$
pass_filenames: false
args:
- -v
- --build-isolation
- --py-version=3.8
- --platform=freebsd
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- --no-emit-index-url
- requirements/static/pkg/freebsd.in
- id: pip-tools-compile
alias: compile-pkg-freebsd-3.9-zmq-requirements
name: FreeBSD Packaging Py3.9 ZeroMQ Requirements
@ -352,20 +321,6 @@ repos:
- --no-emit-index-url
- requirements/static/pkg/darwin.in
- id: pip-tools-compile
alias: compile-pkg-windows-3.8-zmq-requirements
name: Windows Packaging Py3.8 ZeroMQ Requirements
files: ^requirements/((base|zeromq|crypto|windows)\.txt|static/pkg/(windows\.in|py3\.8/windows\.txt))$
pass_filenames: false
args:
- -v
- --build-isolation
- --py-version=3.8
- --platform=windows
- --include=requirements/windows.txt
- --no-emit-index-url
- requirements/static/pkg/windows.in
- id: pip-tools-compile
alias: compile-pkg-windows-3.9-zmq-requirements
name: Windows Packaging Py3.9 ZeroMQ Requirements
@ -425,23 +380,6 @@ repos:
# <---- Packaging Requirements -------------------------------------------------------------------------------------
# ----- CI Requirements ------------------------------------------------------------------------------------------->
- id: pip-tools-compile
alias: compile-ci-linux-3.8-zmq-requirements
name: Linux CI Py3.8 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(linux\.in|common\.in)|py3\.8/linux\.txt))$
pass_filenames: false
args:
- -v
- --build-isolation
- --py-version=3.8
- --platform=linux
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- --include=requirements/pytest.txt
- --include=requirements/static/pkg/linux.in
- --include=requirements/static/ci/common.in
- --no-emit-index-url
- requirements/static/ci/linux.in
- id: pip-tools-compile
alias: compile-ci-linux-3.9-zmq-requirements
@ -515,20 +453,6 @@ repos:
- --no-emit-index-url
- requirements/static/ci/linux.in
- id: pip-tools-compile
alias: compile-ci-linux-crypto-3.8-requirements
name: Linux CI Py3.8 Crypto Requirements
files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.8/linux-crypto\.txt))$
pass_filenames: false
args:
- -v
- --build-isolation
- --py-version=3.8
- --platform=linux
- --out-prefix=linux
- --no-emit-index-url
- requirements/static/ci/crypto.in
- id: pip-tools-compile
alias: compile-ci-linux-crypto-3.9-requirements
name: Linux CI Py3.9 Crypto Requirements
@ -585,25 +509,6 @@ repos:
- --no-emit-index-url
- requirements/static/ci/crypto.in
- id: pip-tools-compile
alias: compile-ci-freebsd-3.8-zmq-requirements
name: FreeBSD CI Py3.8 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/((freebsd|common)\.in|py3\.8/freebsd\.txt)))$
pass_filenames: false
args:
- -v
- --build-isolation
- --py-version=3.8
- --platform=freebsd
- --include=requirements/base.txt
- --include=requirements/zeromq.txt
- --include=requirements/pytest.txt
- --include=requirements/static/pkg/freebsd.in
- --include=requirements/static/ci/common.in
- --no-emit-index-url
- requirements/static/ci/freebsd.in
- id: pip-tools-compile
alias: compile-ci-freebsd-3.9-zmq-requirements
name: FreeBSD CI Py3.9 ZeroMQ Requirements
@ -676,23 +581,10 @@ repos:
- --no-emit-index-url
- requirements/static/ci/freebsd.in
- id: pip-tools-compile
alias: compile-ci-freebsd-crypto-3.8-requirements
name: FreeBSD CI Py3.8 Crypto Requirements
files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.8/freebsd-crypto\.txt))$
pass_filenames: false
args:
- -v
- --build-isolation
- --py-version=3.8
- --platform=freebsd
- --out-prefix=freebsd
- --no-emit-index-url
- requirements/static/ci/crypto.in
- id: pip-tools-compile
alias: compile-ci-freebsd-crypto-3.9-requirements
name: FreeBSD CI Py3.9 Crypto Requirements
files: ^requirements/(crypto\.txt|static/ci/crypto\.in)$
files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.9/freebsd-crypto\.txt))$
pass_filenames: false
args:
@ -871,23 +763,6 @@ repos:
- --no-emit-index-url
- requirements/static/ci/crypto.in
- id: pip-tools-compile
alias: compile-ci-windows-3.8-zmq-requirements
name: Windows CI Py3.8 ZeroMQ Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/((windows|common)\.in|py3\.8/windows\.txt)))$
pass_filenames: false
args:
- -v
- --build-isolation
- --py-version=3.8
- --platform=windows
- --include=requirements/windows.txt
- --include=requirements/pytest.txt
- --include=requirements/static/pkg/windows.in
- --include=requirements/static/ci/common.in
- --no-emit-index-url
- requirements/static/ci/windows.in
- id: pip-tools-compile
alias: compile-ci-windows-3.9-zmq-requirements
name: Windows CI Py3.9 ZeroMQ Requirements
@ -956,20 +831,6 @@ repos:
- --no-emit-index-url
- requirements/static/ci/windows.in
- id: pip-tools-compile
alias: compile-ci-windows-crypto-3.8-requirements
name: Windows CI Py3.8 Crypto Requirements
files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.8/windows-crypto\.txt))$
pass_filenames: false
args:
- -v
- --build-isolation
- --py-version=3.8
- --platform=windows
- --out-prefix=windows
- --no-emit-index-url
- requirements/static/ci/crypto.in
- id: pip-tools-compile
alias: compile-ci-windows-crypto-3.9-requirements
name: Windows CI Py3.9 Crypto Requirements
@ -1029,18 +890,6 @@ repos:
# <---- CI Requirements --------------------------------------------------------------------------------------------
# ----- Cloud CI Requirements ------------------------------------------------------------------------------------->
- id: pip-tools-compile
alias: compile-ci-cloud-3.8-requirements
name: Cloud CI Py3.8 Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/(pkg/linux\.in|ci/((cloud|common)\.in|py3\.8/cloud\.txt)))$
pass_filenames: false
args:
- -v
- --build-isolation
- --py-version=3.8
- --no-emit-index-url
- requirements/static/ci/cloud.in
- id: pip-tools-compile
alias: compile-ci-cloud-3.9-requirements
name: Cloud CI Py3.9 Requirements
@ -1096,19 +945,6 @@ repos:
# <---- Cloud CI Requirements --------------------------------------------------------------------------------------
# ----- Doc CI Requirements --------------------------------------------------------------------------------------->
- id: pip-tools-compile
alias: compile-doc-requirements
name: Docs CI Py3.8 Requirements
files: ^requirements/((base|zeromq|pytest)\.txt|static/ci/(docs|common|linux)\.in|static/pkg/linux\.in|static/pkg/.*/linux\.txt)$
pass_filenames: false
args:
- -v
- --build-isolation
- --py-version=3.8
- --platform=linux
- --no-emit-index-url
- requirements/static/ci/docs.in
- id: pip-tools-compile
alias: compile-doc-requirements
name: Docs CI Py3.9 Requirements
@ -1167,19 +1003,6 @@ repos:
# ----- Lint CI Requirements -------------------------------------------------------------------------------------->
- id: pip-tools-compile
alias: compile-ci-lint-3.8-requirements
name: Lint CI Py3.8 Requirements
files: ^requirements/((base|zeromq)\.txt|static/(pkg/linux\.in|ci/(linux\.in|common\.in|lint\.in|py3\.8/linux\.txt)))$
pass_filenames: false
args:
- -v
- --build-isolation
- --py-version=3.8
- --platform=linux
- --no-emit-index-url
- requirements/static/ci/lint.in
- id: pip-tools-compile
alias: compile-ci-lint-3.9-requirements
name: Lint CI Py3.9 Requirements
@ -1240,19 +1063,6 @@ repos:
# <---- Lint CI Requirements ---------------------------------------------------------------------------------------
# ----- Changelog ------------------------------------------------------------------------------------------------->
- id: pip-tools-compile
alias: compile-ci-changelog-3.8-requirements
name: Changelog CI Py3.8 Requirements
files: ^requirements/static/ci/(changelog\.in|py3\.8/(changelog|linux)\.txt)$
pass_filenames: false
args:
- -v
- --build-isolation
- --py-version=3.8
- --platform=linux
- --no-emit-index-url
- requirements/static/ci/changelog.in
- id: pip-tools-compile
alias: compile-ci-changelog-3.9-requirements
name: Changelog CI Py3.9 Requirements
@ -1401,8 +1211,8 @@ repos:
rev: v3.15.1
hooks:
- id: pyupgrade
name: Upgrade code for Py3.8+
args: [--py38-plus, --keep-mock]
name: Upgrade code to Py3.10+
args: [--py310-plus, --keep-mock]
exclude: >
(?x)^(
salt/client/ssh/ssh_py_shim.py

2
changelog/44736.fixed.md Normal file
View file

@ -0,0 +1,2 @@
Commands on Windows are now prefixed with ``cmd /c`` so that compound
commands (commands separated by ``&&``) run properly when using ``runas``

2
changelog/59977.fixed.md Normal file
View file

@ -0,0 +1,2 @@
Fixed an issue on Windows where checking success_retcodes when using the
runas parameter would fail. Now success_retcodes are checked correctly

2
changelog/60884.fixed.md Normal file
View file

@ -0,0 +1,2 @@
Fix an issue with cmd.script in Windows so that the exit code from a script will
be passed through to the retcode of the state

1
changelog/61416.fixed.md Normal file
View file

@ -0,0 +1 @@
Ensure file clients for runner, wheel, local and caller are available from the client_cache if called upon.

1
changelog/66592.fixed.md Normal file
View file

@ -0,0 +1 @@
Fix minion config option startup_states

4
changelog/66637.fixed.md Normal file
View file

@ -0,0 +1,4 @@
Fixes an issue when getting account names using the get_name function in the
win_dacl.py salt util. Capability SIDs return ``None``. SIDs for deleted
accounts return the SID. SIDs for domain accounts where the system is not
connected to the domain return the SID.

1
changelog/66932.fixed.md Normal file
View file

@ -0,0 +1 @@
Ensure minion start event coroutines are run

1
changelog/67057.fixed.md Normal file
View file

@ -0,0 +1 @@
Added support for dnf5 (backport from 3007) and update to its new command syntax changes since 2023

1
changelog/67091.fixed.md Normal file
View file

@ -0,0 +1 @@
Fix yumpkg module for Python<3.8

1
changelog/67177.fixed.md Normal file
View file

@ -0,0 +1 @@
Added support and tests for dnf5 to services_need_restart for yum packages

View file

@ -0,0 +1 @@
Removed dependency on bsdmainutils package for Debian and Ubuntu

1
changelog/67722.fixed.md Normal file
View file

@ -0,0 +1 @@
Use os.walk to traverse git branches, and no longer replace slash '/' in git branch names

View file

@ -0,0 +1 @@
Removed support for end-of-life Python 3.7 and 3.8 from pre-commit and requirements

1
changelog/67733.fixed.md Normal file
View file

@ -0,0 +1 @@
Set correct virtual grain in systemd based Podman containers

1
changelog/67743.fixed.md Normal file
View file

@ -0,0 +1 @@
Corrected option --upgrades for dnf[5] for function list_upgrades

1
changelog/67769.fixed.md Normal file
View file

@ -0,0 +1 @@
Corrected dnf5 option --downloadonly for dnf5 install

2
changelog/67792.fixed.md Normal file
View file

@ -0,0 +1,2 @@
Upgrade relenv to 0.18.1, which includes Python 3.10.16 and OpenSSL 3.2.4.
OpenSSL 3.2.4 fixes CVE-2024-12797 and CVE-2024-13176

2
changelog/67794.fixed.md Normal file
View file

@ -0,0 +1,2 @@
Update jinja2 to 3.1.5, advisories GHSA-q2x7-8rv6-6q7h and GHSA-gmj6-6f8f-6699
Update urllib3 to 1.26.18 advisories GHSA-34jh-p97f-mpxf

View file

@ -1,6 +1,6 @@
nox_version: "2022.8.7"
python_version: "3.10.15"
relenv_version: "0.18.0"
python_version: "3.10.16"
relenv_version: "0.18.1"
release_branches:
- "3006.x"
- "3007.x"

View file

@ -79,8 +79,7 @@ Package: salt-minion
Architecture: amd64 arm64
Replaces: salt-common (<= 3006.4)
Breaks: salt-common (<= 3006.4)
Depends: bsdmainutils,
dctrl-tools,
Depends: dctrl-tools,
salt-common (= ${source:Version}),
${misc:Depends}
Recommends: debconf-utils, dmidecode, net-tools

View file

@ -1,6 +1,6 @@
--constraint=constraints.txt
Jinja2
Jinja2>=3.1.5
jmespath
msgpack>=1.0.0
PyYAML
@ -16,6 +16,8 @@ packaging>=21.3
looseversion
tornado>=6.3.3
aiohttp>=3.9.0
urllib3>=1.26.18
croniter>=0.3.0,!=0.3.22; sys_platform != 'win32'
# We need contextvars for salt-ssh.

View file

@ -13,7 +13,7 @@ click==8.1.3
# towncrier
incremental==22.10.0
# via towncrier
jinja2==3.1.4
jinja2==3.1.6
# via
# -c requirements/static/ci/py3.10/linux.txt
# towncrier

View file

@ -196,7 +196,7 @@ jaraco.text==3.11.1
# via
# -c requirements/static/ci/../pkg/py3.10/darwin.txt
# jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via
# -c requirements/static/ci/../pkg/py3.10/darwin.txt
# -r requirements/base.txt
@ -540,6 +540,7 @@ typing-extensions==4.8.0
urllib3==1.26.18
# via
# -c requirements/static/ci/../pkg/py3.10/darwin.txt
# -r requirements/base.txt
# botocore
# docker
# kubernetes

View file

@ -62,7 +62,7 @@ jaraco.text==3.11.1
# via
# -c requirements/static/ci/py3.10/linux.txt
# jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via
# -c requirements/static/ci/py3.10/linux.txt
# -r requirements/static/ci/docs.in

View file

@ -195,7 +195,7 @@ jaraco.text==3.11.1
# via
# -c requirements/static/ci/../pkg/py3.10/freebsd.txt
# jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via
# -c requirements/static/ci/../pkg/py3.10/freebsd.txt
# -r requirements/base.txt
@ -545,6 +545,7 @@ typing-extensions==4.8.0
urllib3==1.26.18
# via
# -c requirements/static/ci/../pkg/py3.10/freebsd.txt
# -r requirements/base.txt
# botocore
# docker
# kubernetes

View file

@ -219,7 +219,7 @@ jaraco.text==3.11.1
# via
# -c requirements/static/ci/../pkg/py3.10/linux.txt
# jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via
# -c requirements/static/ci/../pkg/py3.10/linux.txt
# -r requirements/base.txt
@ -609,6 +609,7 @@ typing-extensions==4.8.0
urllib3==1.26.18
# via
# -c requirements/static/ci/../pkg/py3.10/linux.txt
# -r requirements/base.txt
# botocore
# docker
# kubernetes

View file

@ -193,7 +193,7 @@ jaraco.text==3.11.1
# via
# -c requirements/static/ci/../pkg/py3.10/windows.txt
# jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via
# -c requirements/static/ci/../pkg/py3.10/windows.txt
# -r requirements/base.txt
@ -492,6 +492,7 @@ typing-extensions==4.8.0
urllib3==1.26.18
# via
# -c requirements/static/ci/../pkg/py3.10/windows.txt
# -r requirements/base.txt
# botocore
# docker
# kubernetes

View file

@ -13,7 +13,7 @@ click==8.1.3
# towncrier
incremental==17.5.0
# via towncrier
jinja2==3.1.4
jinja2==3.1.6
# via
# -c requirements/static/ci/py3.11/linux.txt
# towncrier

View file

@ -189,7 +189,7 @@ jaraco.text==3.11.1
# via
# -c requirements/static/ci/../pkg/py3.11/darwin.txt
# jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via
# -c requirements/static/ci/../pkg/py3.11/darwin.txt
# -r requirements/base.txt
@ -531,6 +531,7 @@ typing-extensions==4.8.0
urllib3==1.26.18
# via
# -c requirements/static/ci/../pkg/py3.11/darwin.txt
# -r requirements/base.txt
# botocore
# docker
# kubernetes

View file

@ -62,7 +62,7 @@ jaraco.text==3.11.1
# via
# -c requirements/static/ci/py3.11/linux.txt
# jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via
# -c requirements/static/ci/py3.11/linux.txt
# -r requirements/static/ci/docs.in

View file

@ -188,7 +188,7 @@ jaraco.text==3.11.1
# via
# -c requirements/static/ci/../pkg/py3.11/freebsd.txt
# jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via
# -c requirements/static/ci/../pkg/py3.11/freebsd.txt
# -r requirements/base.txt
@ -537,6 +537,7 @@ typing-extensions==4.8.0
urllib3==1.26.18
# via
# -c requirements/static/ci/../pkg/py3.11/freebsd.txt
# -r requirements/base.txt
# botocore
# docker
# kubernetes

View file

@ -210,7 +210,7 @@ jaraco.text==3.11.1
# via
# -c requirements/static/ci/../pkg/py3.11/linux.txt
# jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via
# -c requirements/static/ci/../pkg/py3.11/linux.txt
# -r requirements/base.txt
@ -599,6 +599,7 @@ typing-extensions==4.8.0
urllib3==1.26.18
# via
# -c requirements/static/ci/../pkg/py3.11/linux.txt
# -r requirements/base.txt
# botocore
# docker
# kubernetes

View file

@ -186,7 +186,7 @@ jaraco.text==3.11.1
# via
# -c requirements/static/ci/../pkg/py3.11/windows.txt
# jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via
# -c requirements/static/ci/../pkg/py3.11/windows.txt
# -r requirements/base.txt
@ -483,6 +483,7 @@ typing-extensions==4.8.0
urllib3==1.26.18
# via
# -c requirements/static/ci/../pkg/py3.11/windows.txt
# -r requirements/base.txt
# botocore
# docker
# kubernetes

View file

@ -13,7 +13,7 @@ click==8.1.3
# towncrier
incremental==17.5.0
# via towncrier
jinja2==3.1.4
jinja2==3.1.6
# via
# -c requirements/static/ci/py3.12/linux.txt
# towncrier

View file

@ -259,7 +259,7 @@ jaraco.text==3.11.1
# -c requirements/static/ci/../pkg/py3.12/linux.txt
# -c requirements/static/ci/py3.12/linux.txt
# jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via
# -c requirements/static/ci/../pkg/py3.12/linux.txt
# -c requirements/static/ci/py3.12/linux.txt
@ -764,6 +764,7 @@ urllib3==1.26.18
# via
# -c requirements/static/ci/../pkg/py3.12/linux.txt
# -c requirements/static/ci/py3.12/linux.txt
# -r requirements/base.txt
# botocore
# docker
# kubernetes

View file

@ -189,7 +189,7 @@ jaraco.text==3.11.1
# via
# -c requirements/static/ci/../pkg/py3.12/darwin.txt
# jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via
# -c requirements/static/ci/../pkg/py3.12/darwin.txt
# -r requirements/base.txt
@ -531,6 +531,7 @@ typing-extensions==4.8.0
urllib3==1.26.18
# via
# -c requirements/static/ci/../pkg/py3.12/darwin.txt
# -r requirements/base.txt
# botocore
# docker
# kubernetes

View file

@ -111,7 +111,7 @@ jaraco.text==3.11.1
# via
# -c requirements/static/ci/py3.12/linux.txt
# jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via
# -c requirements/static/ci/py3.12/linux.txt
# -r requirements/base.txt
@ -284,6 +284,7 @@ uc-micro-py==1.0.1
urllib3==1.26.18
# via
# -c requirements/static/ci/py3.12/linux.txt
# -r requirements/base.txt
# requests
yarl==1.9.4
# via

View file

@ -188,7 +188,7 @@ jaraco.text==3.11.1
# via
# -c requirements/static/ci/../pkg/py3.12/freebsd.txt
# jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via
# -c requirements/static/ci/../pkg/py3.12/freebsd.txt
# -r requirements/base.txt
@ -537,6 +537,7 @@ typing-extensions==4.8.0
urllib3==1.26.18
# via
# -c requirements/static/ci/../pkg/py3.12/freebsd.txt
# -r requirements/base.txt
# botocore
# docker
# kubernetes

View file

@ -286,7 +286,7 @@ jaraco.text==3.11.1
# -c requirements/static/ci/../pkg/py3.12/linux.txt
# -c requirements/static/ci/py3.12/linux.txt
# jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via
# -c requirements/static/ci/../pkg/py3.12/linux.txt
# -c requirements/static/ci/py3.12/linux.txt
@ -771,6 +771,7 @@ urllib3==1.26.18
# via
# -c requirements/static/ci/../pkg/py3.12/linux.txt
# -c requirements/static/ci/py3.12/linux.txt
# -r requirements/base.txt
# botocore
# docker
# kubernetes

View file

@ -210,7 +210,7 @@ jaraco.text==3.11.1
# via
# -c requirements/static/ci/../pkg/py3.12/linux.txt
# jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via
# -c requirements/static/ci/../pkg/py3.12/linux.txt
# -r requirements/base.txt
@ -599,6 +599,7 @@ typing-extensions==4.8.0
urllib3==1.26.18
# via
# -c requirements/static/ci/../pkg/py3.12/linux.txt
# -r requirements/base.txt
# botocore
# docker
# kubernetes

View file

@ -186,7 +186,7 @@ jaraco.text==3.11.1
# via
# -c requirements/static/ci/../pkg/py3.12/windows.txt
# jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via
# -c requirements/static/ci/../pkg/py3.12/windows.txt
# -r requirements/base.txt
@ -483,6 +483,7 @@ typing-extensions==4.8.0
urllib3==1.26.18
# via
# -c requirements/static/ci/../pkg/py3.12/windows.txt
# -r requirements/base.txt
# botocore
# docker
# kubernetes

View file

@ -1,10 +0,0 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.8/freebsd-crypto.txt requirements/static/ci/crypto.in
#
m2crypto==0.38.0
# via -r requirements/static/ci/crypto.in
pycryptodome==3.19.1
# via -r requirements/static/ci/crypto.in

View file

@ -1,10 +0,0 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.8/linux-crypto.txt requirements/static/ci/crypto.in
#
m2crypto==0.38.0
# via -r requirements/static/ci/crypto.in
pycryptodome==3.19.1
# via -r requirements/static/ci/crypto.in

View file

@ -1,10 +0,0 @@
#
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.8/windows-crypto.txt requirements/static/ci/crypto.in
#
m2crypto==0.38.0
# via -r requirements/static/ci/crypto.in
pycryptodome==3.19.1
# via -r requirements/static/ci/crypto.in

View file

@ -13,7 +13,7 @@ click==8.1.3
# towncrier
incremental==22.10.0
# via towncrier
jinja2==3.1.4
jinja2==3.1.6
# via
# -c requirements/static/ci/py3.9/linux.txt
# towncrier

View file

@ -196,7 +196,7 @@ jaraco.text==3.11.1
# via
# -c requirements/static/ci/../pkg/py3.9/darwin.txt
# jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via
# -c requirements/static/ci/../pkg/py3.9/darwin.txt
# -r requirements/base.txt
@ -541,6 +541,7 @@ typing-extensions==4.8.0
urllib3==1.26.18
# via
# -c requirements/static/ci/../pkg/py3.9/darwin.txt
# -r requirements/base.txt
# botocore
# docker
# kubernetes

View file

@ -66,7 +66,7 @@ jaraco.text==3.11.1
# via
# -c requirements/static/ci/py3.9/linux.txt
# jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via
# -c requirements/static/ci/py3.9/linux.txt
# -r requirements/static/ci/docs.in

View file

@ -195,7 +195,7 @@ jaraco.text==3.11.1
# via
# -c requirements/static/ci/../pkg/py3.9/freebsd.txt
# jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via
# -c requirements/static/ci/../pkg/py3.9/freebsd.txt
# -r requirements/base.txt
@ -546,6 +546,7 @@ typing-extensions==4.8.0
urllib3==1.26.18
# via
# -c requirements/static/ci/../pkg/py3.9/freebsd.txt
# -r requirements/base.txt
# botocore
# docker
# kubernetes

View file

@ -214,7 +214,7 @@ jaraco.text==3.11.1
# via
# -c requirements/static/ci/../pkg/py3.9/linux.txt
# jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via
# -c requirements/static/ci/../pkg/py3.9/linux.txt
# -r requirements/base.txt
@ -598,6 +598,7 @@ typing-extensions==4.8.0
urllib3==1.26.18
# via
# -c requirements/static/ci/../pkg/py3.9/linux.txt
# -r requirements/base.txt
# botocore
# docker
# kubernetes

View file

@ -193,7 +193,7 @@ jaraco.text==3.11.1
# via
# -c requirements/static/ci/../pkg/py3.9/windows.txt
# jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via
# -c requirements/static/ci/../pkg/py3.9/windows.txt
# -r requirements/base.txt
@ -494,6 +494,7 @@ typing-extensions==4.8.0
urllib3==1.26.18
# via
# -c requirements/static/ci/../pkg/py3.9/windows.txt
# -r requirements/base.txt
# botocore
# docker
# kubernetes

View file

@ -63,7 +63,7 @@ jaraco.functools==3.7.0
# tempora
jaraco.text==3.11.1
# via jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via -r requirements/base.txt
jmespath==1.0.1
# via -r requirements/base.txt
@ -133,7 +133,9 @@ typing-extensions==4.8.0
# pydantic
# pydantic-core
urllib3==1.26.18
# via requests
# via
# -r requirements/base.txt
# requests
yarl==1.9.4
# via aiohttp
zc.lockfile==3.0.post1

View file

@ -63,7 +63,7 @@ jaraco.functools==3.7.0
# tempora
jaraco.text==3.11.1
# via jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via -r requirements/base.txt
jmespath==1.0.1
# via -r requirements/base.txt
@ -133,7 +133,9 @@ typing-extensions==4.8.0
# pydantic
# pydantic-core
urllib3==1.26.18
# via requests
# via
# -r requirements/base.txt
# requests
yarl==1.9.4
# via aiohttp
zc.lockfile==3.0.post1

View file

@ -63,7 +63,7 @@ jaraco.functools==3.7.0
# tempora
jaraco.text==3.11.1
# via jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via -r requirements/base.txt
jmespath==1.0.1
# via -r requirements/base.txt
@ -135,7 +135,9 @@ typing-extensions==4.8.0
# pydantic
# pydantic-core
urllib3==1.26.18
# via requests
# via
# -r requirements/base.txt
# requests
yarl==1.9.4
# via aiohttp
zc.lockfile==3.0.post1

View file

@ -69,7 +69,7 @@ jaraco.functools==3.7.0
# tempora
jaraco.text==3.11.1
# via jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via -r requirements/base.txt
jmespath==1.0.1
# via -r requirements/base.txt
@ -149,7 +149,9 @@ typing-extensions==4.8.0
# pydantic
# pydantic-core
urllib3==1.26.18
# via requests
# via
# -r requirements/base.txt
# requests
wmi==1.5.1 ; sys_platform == "win32"
# via -r requirements/base.txt
xmltodict==0.13.0 ; sys_platform == "win32"

View file

@ -61,7 +61,7 @@ jaraco.functools==3.7.0
# tempora
jaraco.text==3.11.1
# via jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via -r requirements/base.txt
jmespath==1.0.1
# via -r requirements/base.txt
@ -131,7 +131,9 @@ typing-extensions==4.8.0
# pydantic
# pydantic-core
urllib3==1.26.18
# via requests
# via
# -r requirements/base.txt
# requests
yarl==1.9.4
# via aiohttp
zc.lockfile==3.0.post1

View file

@ -61,7 +61,7 @@ jaraco.functools==3.7.0
# tempora
jaraco.text==3.11.1
# via jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via -r requirements/base.txt
jmespath==1.0.1
# via -r requirements/base.txt
@ -133,7 +133,9 @@ typing-extensions==4.8.0
# pydantic
# pydantic-core
urllib3==1.26.18
# via requests
# via
# -r requirements/base.txt
# requests
yarl==1.9.4
# via aiohttp
zc.lockfile==3.0.post1

View file

@ -61,7 +61,7 @@ jaraco.functools==3.7.0
# tempora
jaraco.text==3.11.1
# via jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via -r requirements/base.txt
jmespath==1.0.1
# via -r requirements/base.txt
@ -135,7 +135,9 @@ typing-extensions==4.8.0
# pydantic
# pydantic-core
urllib3==1.26.18
# via requests
# via
# -r requirements/base.txt
# requests
yarl==1.9.4
# via aiohttp
zc.lockfile==3.0.post1

View file

@ -67,7 +67,7 @@ jaraco.functools==3.7.0
# tempora
jaraco.text==3.11.1
# via jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via -r requirements/base.txt
jmespath==1.0.1
# via -r requirements/base.txt
@ -147,7 +147,9 @@ typing-extensions==4.8.0
# pydantic
# pydantic-core
urllib3==1.26.18
# via requests
# via
# -r requirements/base.txt
# requests
wmi==1.5.1 ; sys_platform == "win32"
# via -r requirements/base.txt
xmltodict==0.13.0 ; sys_platform == "win32"

View file

@ -61,7 +61,7 @@ jaraco.functools==3.7.0
# tempora
jaraco.text==3.11.1
# via jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via -r requirements/base.txt
jmespath==1.0.1
# via -r requirements/base.txt
@ -131,7 +131,9 @@ typing-extensions==4.8.0
# pydantic
# pydantic-core
urllib3==1.26.18
# via requests
# via
# -r requirements/base.txt
# requests
yarl==1.9.4
# via aiohttp
zc.lockfile==3.0.post1

View file

@ -61,7 +61,7 @@ jaraco.functools==3.7.0
# tempora
jaraco.text==3.11.1
# via jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via -r requirements/base.txt
jmespath==1.0.1
# via -r requirements/base.txt
@ -133,7 +133,9 @@ typing-extensions==4.8.0
# pydantic
# pydantic-core
urllib3==1.26.18
# via requests
# via
# -r requirements/base.txt
# requests
yarl==1.9.4
# via aiohttp
zc.lockfile==3.0.post1

View file

@ -61,7 +61,7 @@ jaraco.functools==3.7.0
# tempora
jaraco.text==3.11.1
# via jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via -r requirements/base.txt
jmespath==1.0.1
# via -r requirements/base.txt
@ -135,7 +135,9 @@ typing-extensions==4.8.0
# pydantic
# pydantic-core
urllib3==1.26.18
# via requests
# via
# -r requirements/base.txt
# requests
yarl==1.9.4
# via aiohttp
zc.lockfile==3.0.post1

View file

@ -67,7 +67,7 @@ jaraco.functools==3.7.0
# tempora
jaraco.text==3.11.1
# via jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via -r requirements/base.txt
jmespath==1.0.1
# via -r requirements/base.txt
@ -147,7 +147,9 @@ typing-extensions==4.8.0
# pydantic
# pydantic-core
urllib3==1.26.18
# via requests
# via
# -r requirements/base.txt
# requests
wmi==1.5.1 ; sys_platform == "win32"
# via -r requirements/base.txt
xmltodict==0.13.0 ; sys_platform == "win32"

View file

@ -63,7 +63,7 @@ jaraco.functools==3.7.0
# tempora
jaraco.text==3.11.1
# via jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via -r requirements/base.txt
jmespath==1.0.1
# via -r requirements/base.txt
@ -133,7 +133,9 @@ typing-extensions==4.8.0
# pydantic
# pydantic-core
urllib3==1.26.18
# via requests
# via
# -r requirements/base.txt
# requests
yarl==1.9.4
# via aiohttp
zc.lockfile==3.0.post1

View file

@ -63,7 +63,7 @@ jaraco.functools==3.7.0
# tempora
jaraco.text==3.11.1
# via jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via -r requirements/base.txt
jmespath==1.0.1
# via -r requirements/base.txt
@ -133,7 +133,9 @@ typing-extensions==4.8.0
# pydantic
# pydantic-core
urllib3==1.26.18
# via requests
# via
# -r requirements/base.txt
# requests
yarl==1.9.4
# via aiohttp
zc.lockfile==3.0.post1

View file

@ -63,7 +63,7 @@ jaraco.functools==3.7.0
# tempora
jaraco.text==3.11.1
# via jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via -r requirements/base.txt
jmespath==1.0.1
# via -r requirements/base.txt
@ -135,7 +135,9 @@ typing-extensions==4.8.0
# pydantic
# pydantic-core
urllib3==1.26.18
# via requests
# via
# -r requirements/base.txt
# requests
yarl==1.9.4
# via aiohttp
zc.lockfile==3.0.post1

View file

@ -69,7 +69,7 @@ jaraco.functools==3.7.0
# tempora
jaraco.text==3.11.1
# via jaraco.collections
jinja2==3.1.4
jinja2==3.1.6
# via -r requirements/base.txt
jmespath==1.0.1
# via -r requirements/base.txt
@ -150,7 +150,9 @@ typing-extensions==4.8.0
# pydantic
# pydantic-core
urllib3==1.26.18
# via requests
# via
# -r requirements/base.txt
# requests
wmi==1.5.1 ; sys_platform == "win32"
# via -r requirements/base.txt
xmltodict==0.13.0 ; sys_platform == "win32"

View file

@ -299,6 +299,8 @@ class SaltLoggingClass(LOGGING_LOGGER_CLASS, metaclass=LoggingMixinMeta):
except TypeError:
# Python < 3.8 - We still need this for salt-ssh since it will use
# the system python, and not our onedir.
# stacklevel was introduced in Py 3.8
# must be running on old OS with Python 3.6 or 3.7
LOGGING_LOGGER_CLASS._log(
self,
level,

View file

@ -930,6 +930,10 @@ def _virtual(osdata):
grains["virtual"] = "container"
grains["virtual_subtype"] = "LXC"
break
elif "podman" in output:
grains["virtual"] = "container"
grains["virtual_subtype"] = "Podman"
break
elif "amazon" in output:
grains["virtual"] = "Nitro"
grains["virtual_subtype"] = "Amazon EC2"

View file

@ -2508,7 +2508,7 @@ class Minion(MinionBase):
else:
data["fun"] = "state.highstate"
data["arg"] = []
self._handle_decoded_payload(data)
self.io_loop.add_callback(self._handle_decoded_payload, data)
def _refresh_grains_watcher(self, refresh_interval_in_minutes):
"""
@ -2529,6 +2529,7 @@ class Minion(MinionBase):
}
)
@salt.ext.tornado.gen.coroutine
def _fire_master_minion_start(self):
include_grains = False
if self.opts["start_event_grains"]:
@ -2536,13 +2537,13 @@ class Minion(MinionBase):
# Send an event to the master that the minion is live
if self.opts["enable_legacy_startup_events"]:
# Old style event. Defaults to False in 3001 release.
self._fire_master_main(
yield self._fire_master_main(
"Minion {} started at {}".format(self.opts["id"], time.asctime()),
"minion_start",
include_startup_grains=include_grains,
)
# send name spaced event
self._fire_master_main(
yield self._fire_master_main(
"Minion {} started at {}".format(self.opts["id"], time.asctime()),
tagify([self.opts["id"], "start"], "minion"),
include_startup_grains=include_grains,
@ -2981,7 +2982,7 @@ class Minion(MinionBase):
# make the schedule to use the new 'functions' loader
self.schedule.functions = self.functions
self.pub_channel.on_recv(self._handle_payload)
self._fire_master_minion_start()
yield self._fire_master_minion_start()
log.info("Minion is ready to receive requests!")
# update scheduled job to run with the new master addr
@ -3230,7 +3231,7 @@ class Minion(MinionBase):
self.setup_scheduler(before_connect=True)
self.sync_connect_master()
if self.connected:
self._fire_master_minion_start()
self.io_loop.add_callback(self._fire_master_minion_start)
log.info("Minion is ready to receive requests!")
# Make sure to gracefully handle SIGUSR1
@ -3273,7 +3274,8 @@ class Minion(MinionBase):
"minion is running under an init system."
)
self._fire_master_main(
self.io_loop.add_callback(
self._fire_master_main,
"ping",
"minion_ping",
timeout_handler=ping_timeout_handler,

View file

@ -10,7 +10,6 @@ import ast
import itertools
import logging
import os
from typing import Dict, List
import salt.utils.doc
import salt.utils.files
@ -36,7 +35,7 @@ def _get_module_name(tree, filename: str) -> str:
return module_name
def _get_func_aliases(tree) -> Dict:
def _get_func_aliases(tree) -> dict:
"""
Get __func_alias__ dict for mapping function names
"""
@ -54,7 +53,7 @@ def _get_func_aliases(tree) -> Dict:
return fun_aliases
def _get_args(function: str) -> Dict:
def _get_args(function: str) -> dict:
"""
Given a function def, returns arguments and defaults
"""
@ -128,7 +127,7 @@ def _parse_module_docs(module_path, mod_name=None):
return salt.utils.doc.strip_rst(ret)
def _parse_module_functions(module_py: str, return_type: str) -> Dict:
def _parse_module_functions(module_py: str, return_type: str) -> dict:
"""
Parse module files for proper module_name and function name, then gather
functions and possibly arguments
@ -161,7 +160,7 @@ def _parse_module_functions(module_py: str, return_type: str) -> Dict:
return ret
def _get_files(name=False, type="states", return_type="args") -> List:
def _get_files(name=False, type="states", return_type="args") -> list:
"""
Determine if modules/states directories or files are requested

View file

@ -283,7 +283,10 @@ def _prep_powershell_cmd(win_shell, cmd, encoded_cmd):
new_cmd.append("-Command")
if isinstance(cmd, list):
cmd = " ".join(cmd)
new_cmd.append(f"& {cmd.strip()}")
# We need to append $LASTEXITCODE here to return the actual exit code
# from the script. Otherwise, it will always return 1 on any non-zero
# exit code failure. Issue: #60884
new_cmd.append(f"& {cmd.strip()}; exit $LASTEXITCODE")
elif encoded_cmd:
new_cmd.extend(["-EncodedCommand", f"{cmd}"])
else:
@ -293,10 +296,10 @@ def _prep_powershell_cmd(win_shell, cmd, encoded_cmd):
# Commands that are a specific keyword behave differently. They fail if
# you add a "&" to the front. Add those here as we find them:
keywords = ["$", "&", ".", "Configuration"]
keywords = ["$", "&", ".", "Configuration", "try"]
for keyword in keywords:
if cmd.startswith(keyword):
if cmd.lower().startswith(keyword.lower()):
new_cmd.extend(["-Command", f"{cmd.strip()}"])
break
else:
@ -455,8 +458,6 @@ def _run(
if isinstance(cmd, (list, tuple)):
cmd = " ".join(cmd)
return win_runas(cmd, runas, password, cwd)
if runas and salt.utils.platform.is_darwin():
# We need to insert the user simulation into the command itself and not
# just run it from the environment on macOS as that method doesn't work
@ -489,7 +490,7 @@ def _run(
# hang.
runas = None
if runas:
if runas and not salt.utils.platform.is_windows():
# Save the original command before munging it
try:
pwd.getpwnam(runas)
@ -510,7 +511,7 @@ def _run(
else:
use_sudo = True
if runas or group:
if (runas or group) and not salt.utils.platform.is_windows():
try:
# Getting the environment for the runas user
# Use markers to thwart any stdout noise
@ -749,90 +750,104 @@ def _run(
if not use_vt:
# This is where the magic happens
try:
if runas and salt.utils.platform.is_windows():
# We can't use TimedProc with runas on Windows
if change_windows_codepage:
salt.utils.win_chcp.set_codepage_id(windows_codepage)
try:
proc = salt.utils.timed_subprocess.TimedProc(cmd, **new_kwargs)
except OSError as exc:
msg = "Unable to run command '{}' with the context '{}', reason: {}".format(
cmd if output_loglevel is not None else "REDACTED",
new_kwargs,
exc,
)
raise CommandExecutionError(msg)
try:
proc.run()
except TimedProcTimeoutError as exc:
ret["stdout"] = str(exc)
ret["stderr"] = ""
ret["retcode"] = None
ret["pid"] = proc.process.pid
# ok return code for timeouts?
ret["retcode"] = 1
return ret
finally:
ret = win_runas(cmd, runas, password, cwd)
if change_windows_codepage:
salt.utils.win_chcp.set_codepage_id(previous_windows_codepage)
if output_loglevel != "quiet" and output_encoding is not None:
log.debug(
"Decoding output from command %s using %s encoding",
cmd,
output_encoding,
)
else:
try:
if change_windows_codepage:
salt.utils.win_chcp.set_codepage_id(windows_codepage)
try:
proc = salt.utils.timed_subprocess.TimedProc(cmd, **new_kwargs)
except OSError as exc:
msg = "Unable to run command '{}' with the context '{}', reason: {}".format(
cmd if output_loglevel is not None else "REDACTED",
new_kwargs,
exc,
)
raise CommandExecutionError(msg)
try:
out = salt.utils.stringutils.to_unicode(
proc.stdout, encoding=output_encoding
)
except TypeError:
# stdout is None
out = ""
except UnicodeDecodeError:
out = salt.utils.stringutils.to_unicode(
proc.stdout, encoding=output_encoding, errors="replace"
)
if output_loglevel != "quiet":
log.error(
"Failed to decode stdout from command %s, non-decodable "
"characters have been replaced",
_log_cmd(cmd),
try:
proc.run()
except TimedProcTimeoutError as exc:
ret["stdout"] = str(exc)
ret["stderr"] = ""
ret["retcode"] = None
ret["pid"] = proc.process.pid
# ok return code for timeouts?
ret["retcode"] = 1
return ret
finally:
if change_windows_codepage:
salt.utils.win_chcp.set_codepage_id(previous_windows_codepage)
if output_loglevel != "quiet" and output_encoding is not None:
log.debug(
"Decoding output from command %s using %s encoding",
cmd,
output_encoding,
)
try:
err = salt.utils.stringutils.to_unicode(
proc.stderr, encoding=output_encoding
)
except TypeError:
# stderr is None
err = ""
except UnicodeDecodeError:
err = salt.utils.stringutils.to_unicode(
proc.stderr, encoding=output_encoding, errors="replace"
)
if output_loglevel != "quiet":
log.error(
"Failed to decode stderr from command %s, non-decodable "
"characters have been replaced",
_log_cmd(cmd),
try:
out = salt.utils.stringutils.to_unicode(
proc.stdout, encoding=output_encoding
)
except TypeError:
# stdout is None
out = ""
except UnicodeDecodeError:
out = salt.utils.stringutils.to_unicode(
proc.stdout, encoding=output_encoding, errors="replace"
)
if output_loglevel != "quiet":
log.error(
"Failed to decode stdout from command %s, non-decodable "
"characters have been replaced",
_log_cmd(cmd),
)
try:
err = salt.utils.stringutils.to_unicode(
proc.stderr, encoding=output_encoding
)
except TypeError:
# stderr is None
err = ""
except UnicodeDecodeError:
err = salt.utils.stringutils.to_unicode(
proc.stderr, encoding=output_encoding, errors="replace"
)
if output_loglevel != "quiet":
log.error(
"Failed to decode stderr from command %s, non-decodable "
"characters have been replaced",
_log_cmd(cmd),
)
# Encoded commands dump CLIXML data in stderr. It's not an actual error
if encoded_cmd and "CLIXML" in err:
err = ""
if rstrip:
if out is not None:
out = out.rstrip()
if err is not None:
err = err.rstrip()
ret["pid"] = proc.process.pid
ret["retcode"] = proc.process.returncode
ret["stdout"] = out
ret["stderr"] = err
# Encoded commands dump CLIXML data in stderr. It's not an actual error
if encoded_cmd and "CLIXML" in err:
err = ""
if rstrip:
if out is not None:
out = out.rstrip()
if err is not None:
err = err.rstrip()
ret["pid"] = proc.process.pid
ret["retcode"] = proc.process.returncode
if ret["retcode"] in success_retcodes:
ret["retcode"] = 0
ret["stdout"] = out
ret["stderr"] = err
if any(
[stdo in ret["stdout"] for stdo in success_stdout]
+ [stde in ret["stderr"] for stde in success_stderr]
@ -4096,16 +4111,16 @@ def powershell(
# ConvertTo-JSON is only available on PowerShell 3.0 and later
psversion = shell_info("powershell")["psversion"]
if salt.utils.versions.version_cmp(psversion, "2.0") == 1:
cmd += " | ConvertTo-JSON"
cmd += " | ConvertTo-JSON "
if depth is not None:
cmd += f" -Depth {depth}"
cmd += f"-Depth {depth} "
# Put the whole command inside a try / catch block
# Some errors in PowerShell are not "Terminating Errors" and will not be
# caught in a try/catch block. For example, the `Get-WmiObject` command will
# often return a "Non Terminating Error". To fix this, make sure
# `-ErrorAction Stop` is set in the powershell command
cmd = "try {" + cmd + '} catch { "{}" }'
cmd = "try { " + cmd + ' } catch { "{}" }'
if encode_cmd:
# Convert the cmd to UTF-16LE without a BOM and base64 encode.
@ -4117,7 +4132,7 @@ def powershell(
cmd = salt.utils.stringutils.to_str(cmd)
encoded_cmd = True
else:
cmd = f"{{{cmd}}}"
cmd = f"{{ {cmd} }}"
encoded_cmd = False
# Retrieve the response, while overriding shell with 'powershell'

View file

@ -497,13 +497,14 @@ def get_group(path, follow_symlinks=True):
def uid_to_user(uid):
"""
Convert a uid to a user name
Convert a User ID (uid) to a username
Args:
uid (str): The user id to lookup
Returns:
str: The name of the user
str: The name of the user. The ``uid`` will be returned if there is no
corresponding username
CLI Example:

View file

@ -15,6 +15,9 @@ Support for YUM/DNF
.. versionadded:: 3003
Support for ``tdnf`` on Photon OS.
.. versionadded:: 3006.10
Support for ``dnf5`` on Fedora 41
.. versionadded:: 3007.0
Support for ``dnf5`` on Fedora 39
"""
@ -1034,7 +1037,7 @@ def list_upgrades(refresh=True, **kwargs):
cmd = ["--quiet"]
cmd.extend(options)
cmd.extend(["list", "upgrades" if _yum() in ("dnf", "dnf5") else "updates"])
cmd.extend(["list", "--upgrades" if _yum() in ("dnf", "dnf5") else "updates"])
out = _call_yum(cmd, ignore_retcode=True)
if out["retcode"] != 0 and "Error:" in out:
return {}
@ -1058,7 +1061,7 @@ def list_downloaded(**kwargs):
salt '*' pkg.list_downloaded
"""
CACHE_DIR = os.path.join("/var/cache/", _yum())
CACHE_DIR = os.path.join("/var/cache", _yum())
ret = {}
for root, dirnames, filenames in salt.utils.path.os_walk(CACHE_DIR):
@ -1428,8 +1431,8 @@ def install(
'version': '<new-version>',
'arch': '<new-arch>'}}}
"""
if (version := kwargs.get("version")) is not None:
kwargs["version"] = str(version)
if kwargs.get("version") is not None:
kwargs["version"] = str(kwargs["version"])
options = _get_options(**kwargs)
if salt.utils.data.is_true(refresh):
@ -1987,7 +1990,7 @@ def upgrade(
salt '*' pkg.upgrade security=True exclude='kernel*'
"""
if _yum() in ("dnf", "dnf5") and not obsoletes:
# for dnf we can just disable obsoletes
# for dnf[5] we can just disable obsoletes
_setopt = [
opt
for opt in salt.utils.args.split_input(kwargs.pop("setopt", []))
@ -2079,7 +2082,7 @@ def remove(name=None, pkgs=None, **kwargs): # pylint: disable=W0613
On minions running systemd>=205, `systemd-run(1)`_ is now used to
isolate commands which modify installed packages from the
``salt-minion`` daemon's control group. This is done to keep systemd
from killing any yum/dnf commands spawned by Salt when the
from killing any yum/dnf[5] commands spawned by Salt when the
``salt-minion`` service is restarted. (see ``KillMode`` in the
`systemd.kill(5)`_ manpage for more information). If desired, usage of
`systemd-run(1)`_ can be suppressed by setting a :mod:`config option
@ -2183,7 +2186,7 @@ def purge(name=None, pkgs=None, **kwargs): # pylint: disable=W0613
On minions running systemd>=205, `systemd-run(1)`_ is now used to
isolate commands which modify installed packages from the
``salt-minion`` daemon's control group. This is done to keep systemd
from killing any yum/dnf commands spawned by Salt when the
from killing any yum/dnf[5] commands spawned by Salt when the
``salt-minion`` service is restarted. (see ``KillMode`` in the
`systemd.kill(5)`_ manpage for more information). If desired, usage of
`systemd-run(1)`_ can be suppressed by setting a :mod:`config option
@ -3324,12 +3327,12 @@ def download(*packages, **kwargs):
.. versionadded:: 2015.5.0
Download packages to the local disk. Requires ``yumdownloader`` from
``yum-utils`` package.
``yum-utils`` or ``dnf-utils`` package.
.. note::
``yum-utils`` will already be installed on the minion if the package
was installed from the Fedora / EPEL repositories.
``yum-utils`` or ``dnf-utils`` will already be installed on the minion
if the package was installed from the EPEL / Fedora repositories.
CLI Example:
@ -3344,7 +3347,7 @@ def download(*packages, **kwargs):
if not packages:
raise SaltInvocationError("No packages were specified")
CACHE_DIR = "/var/cache/yum/packages"
CACHE_DIR = os.path.join("/var/cache", _yum(), "packages")
if not os.path.exists(CACHE_DIR):
os.makedirs(CACHE_DIR)
cached_pkgs = os.listdir(CACHE_DIR)
@ -3525,12 +3528,17 @@ def services_need_restart(**kwargs):
salt '*' pkg.services_need_restart
"""
if _yum() != "dnf":
raise CommandExecutionError("dnf is required to list outdated services.")
if _yum() not in ("dnf", "dnf5"):
raise CommandExecutionError(
"dnf or dnf5 is required to list outdated services."
)
if not salt.utils.systemd.booted(__context__):
raise CommandExecutionError("systemd is required to list outdated services.")
cmd = ["dnf", "--quiet", "needs-restarting"]
if _yum() == "dnf5":
cmd = ["dnf5", "--quiet", "needs-restarting"]
else:
cmd = ["dnf", "--quiet", "needs-restarting"]
dnf_output = __salt__["cmd.run_stdout"](cmd, python_shell=False)
if not dnf_output:
return []

View file

@ -185,7 +185,12 @@ class HANDLE(wintypes.HANDLE):
def Close(self, CloseHandle=kernel32.CloseHandle):
if self and not getattr(self, "closed", False):
CloseHandle(self.Detach())
try:
CloseHandle(self.Detach())
except OSError:
# Suppress the error when there is no handle (WinError 6)
if ctypes.get_last_error() == 6:
pass
__del__ = Close

View file

@ -492,9 +492,7 @@ class GitProvider:
self._cache_basename = "_"
if self.id.startswith("__env__"):
try:
self._cache_basename = self.get_checkout_target().replace(
"/", "-"
) # replace '/' with '-' to not cause trouble with file-system
self._cache_basename = self.get_checkout_target()
except AttributeError:
log.critical(
@ -2801,6 +2799,33 @@ class GitBase:
try:
# Find and place fetch_request file for all the other branches for this repo
repo_work_hash = os.path.split(repo.get_salt_working_dir())[0]
branches = [
os.path.relpath(path, repo_work_hash)
for (path, subdirs, files) in os.walk(repo_work_hash)
if not subdirs
]
for branch in branches:
# Don't place fetch request in current branch being updated
if branch == repo.get_cache_basename():
continue
branch_salt_dir = salt.utils.path.join(repo_work_hash, branch)
fetch_path = salt.utils.path.join(
branch_salt_dir, "fetch_request"
)
if os.path.isdir(branch_salt_dir):
try:
with salt.utils.files.fopen(fetch_path, "w"):
pass
except OSError as exc: # pylint: disable=broad-except
log.error(
"Failed to make fetch request: %s %s",
fetch_path,
exc,
exc_info=True,
)
else:
log.error("Failed to make fetch request: %s", fetch_path)
for branch in os.listdir(repo_work_hash):
# Don't place fetch request in current branch being updated
if branch == repo.get_cache_basename():

View file

@ -646,7 +646,7 @@ def query(
decode_body=decode_body,
)
return ret
except (socket.herror, OSError, socket.timeout, socket.gaierror) as exc:
except (socket.herror, OSError, TimeoutError, socket.gaierror) as exc:
if status is True:
ret["status"] = 0
ret["error"] = str(exc)

View file

@ -464,22 +464,42 @@ class ReactWrap:
"""
Wrap RunnerClient for executing :ref:`runner modules <all-salt.runners>`
"""
# pylint: disable=unsupported-membership-test,unsupported-assignment-operation
if "runner" not in self.client_cache:
log.debug("reactor edge case: re-populating client_cache for runner")
low = {"state": "runner"}
self.populate_client_cache(low)
return self.pool.fire_async(self.client_cache["runner"].low, args=(fun, kwargs))
def wheel(self, fun, **kwargs):
"""
Wrap Wheel to enable executing :ref:`wheel modules <all-salt.wheel>`
"""
# pylint: disable=unsupported-membership-test,unsupported-assignment-operation
if "wheel" not in self.client_cache:
log.debug("reactor edge case: re-populating client_cache for wheel")
low = {"state": "wheel"}
self.populate_client_cache(low)
return self.pool.fire_async(self.client_cache["wheel"].low, args=(fun, kwargs))
def local(self, fun, tgt, **kwargs):
"""
Wrap LocalClient for running :ref:`execution modules <all-salt.modules>`
"""
# pylint: disable=unsupported-membership-test,unsupported-assignment-operation
if "local" not in self.client_cache:
log.debug("reactor edge case: re-populating client_cache for local")
low = {"state": "local"}
self.populate_client_cache(low)
self.client_cache["local"].cmd_async(tgt, fun, **kwargs)
def caller(self, fun, **kwargs):
"""
Wrap LocalCaller to execute remote exec functions locally on the Minion
"""
# pylint: disable=unsupported-membership-test,unsupported-assignment-operation
if "caller" not in self.client_cache:
log.debug("reactor edge case: re-populating client_cache for caller")
low = {"state": "caller"}
self.populate_client_cache(low)
self.client_cache["caller"].cmd(fun, *kwargs["arg"], **kwargs["kwarg"])

View file

@ -125,7 +125,7 @@ should match what you see when you look at the properties for an object.
- subfolders_only: Applies to all containers beneath this object
- files_only: Applies to all file objects beneath this object
.. NOTE::
.. note::
'applies to' properties can only be modified on directories. Files
will always be ``this_folder_only``.
@ -883,10 +883,7 @@ def dacl(obj_name=None, obj_type="file"):
"""
# Get the principal from the sid (object sid)
sid = win32security.ConvertSidToStringSid(ace[2])
try:
principal = get_name(sid)
except CommandExecutionError:
principal = sid
principal = get_name(sid)
# Get the ace type
ace_type = self.ace_type[ace[0][0]]
@ -1194,14 +1191,17 @@ def get_name(principal):
principal (str):
Find the Normalized name based on this. Can be a PySID object, a SID
string, or a user name in any capitalization.
string, or a username in any capitalization.
.. note::
Searching based on the user name can be slow on hosts connected
Searching based on the username can be slow on hosts connected
to large Active Directory domains.
Returns:
str: The name that corresponds to the passed principal
str: The username that corresponds to the passed principal. If there is
no corresponding username, the string SID will be returned.
Capability SIDs will return ``None``.
Usage:
@ -1246,7 +1246,7 @@ def get_name(principal):
name = f"NT Service\\{name}"
return name
except (pywintypes.error, TypeError) as exc:
except pywintypes.error as exc:
# Microsoft introduced the concept of Capability SIDs in Windows 8
# https://docs.microsoft.com/en-us/windows/security/identity-protection/access-control/security-identifiers#capability-sids
# https://support.microsoft.com/en-us/help/4502539/some-sids-do-not-resolve-into-friendly-names
@ -1254,11 +1254,27 @@ def get_name(principal):
# These types of SIDs do not resolve, so we'll just ignore them for this
# All capability SIDs begin with `S-1-15-3`, so we'll only throw an
# error when the sid does not begin with `S-1-15-3`
if not str_sid.startswith("S-1-15-3"):
message = f'Error resolving "{principal}"'
if type(exc) == pywintypes.error:
win_error = win32api.FormatMessage(exc.winerror).rstrip("\n")
message = f"{message}: {win_error}"
# 1332: No mapping between account names and security IDs was done
if exc.winerror == 1332:
# Capability SID, return None
if str_sid.startswith("S-1-15-3"):
log.debug("Name mapping not available for capability SID: %s", str_sid)
return None
# User does not exist on the system or is on a disconnected domain
# Return the SID
else:
log.debug(
"Could not resolve SID: %s\nThe user has either been removed "
"from the system or is a domain user and the system is not "
"connected to the domain",
str_sid,
)
return str_sid
# Some other unknown error
else:
message = f'Error resolving "{principal}: {exc.strerror}"'
log.exception(message)
raise CommandExecutionError(message, exc)
@ -2242,13 +2258,19 @@ def _check_perms(obj_name, obj_type, new_perms, access_mode, ret, test_mode=Fals
cur_perms = get_permissions(obj_name=obj_name, obj_type=obj_type)
changes = {}
for user in new_perms:
applies_to_text = ""
# Check that user exists:
try:
user_name = get_name(principal=user)
except CommandExecutionError:
user_name = get_name(principal=user)
# username will be the SID if there is no corresponding username
if user_name == get_sid_string(principal=user):
ret["comment"].append(
'{} Perms: User "{}" missing from Target System'.format(
"{} Perms: Could not find a corresponding username for: {}".format(
access_mode.capitalize(), user
)
)
continue
if user_name is None:
ret["comment"].append(
"{} Perms: Skipping Capability SID: {}".format(
access_mode.capitalize(), user
)
)
@ -2471,7 +2493,7 @@ def check_perms(
log.debug("Resetting permissions for %s", obj_name)
cur_perms = get_permissions(obj_name=obj_name, obj_type=obj_type)
for user_name in cur_perms["Not Inherited"]:
# case insensitive dictionary search
# case-insensitive dictionary search
if user_name not in {get_name(k) for k in (grant_perms or {})}:
if "grant" in cur_perms["Not Inherited"][user_name]:
ret["changes"].setdefault("remove_perms", {})
@ -2489,7 +2511,7 @@ def check_perms(
ret["changes"]["remove_perms"].update(
{user_name: cur_perms["Not Inherited"][user_name]}
)
# case insensitive dictionary search
# case-insensitive dictionary search
if user_name not in {get_name(k) for k in (deny_perms or {})}:
if "deny" in cur_perms["Not Inherited"][user_name]:
ret["changes"].setdefault("remove_perms", {})
@ -2541,7 +2563,7 @@ def check_perms(
log.debug("Resetting permissions for %s", obj_name)
cur_perms = get_permissions(obj_name=obj_name, obj_type=obj_type)
for user_name in cur_perms["Not Inherited"]:
# case insensitive dictionary search
# case-insensitive dictionary search
if user_name not in {get_name(k) for k in (grant_perms or {})}:
if "grant" in cur_perms["Not Inherited"][user_name]:
rm_permissions(
@ -2550,7 +2572,7 @@ def check_perms(
ace_type="grant",
obj_type=obj_type,
)
# case insensitive dictionary search
# case-insensitive dictionary search
if user_name not in {get_name(k) for k in (deny_perms or {})}:
if "deny" in cur_perms["Not Inherited"][user_name]:
rm_permissions(
@ -2582,14 +2604,9 @@ def _set_perms(obj_dacl, obj_type, new_perms, cur_perms, access_mode):
ret = {}
for user in new_perms:
# Check that user exists:
try:
user_name = get_name(user)
except CommandExecutionError:
log.debug(
'%s Perms: User "%s" missing from Target System',
access_mode.capitalize(),
user,
)
user_name = get_name(user)
# We want to skip unmapped usernames
if user_name == get_sid_string(user):
continue
# Get applies_to

View file

@ -187,8 +187,10 @@ def runas(cmdLine, username, password=None, cwd=None):
| win32process.CREATE_SUSPENDED
)
flags = win32con.STARTF_USESTDHANDLES
flags |= win32con.STARTF_USESHOWWINDOW
startup_info = salt.platform.win.STARTUPINFO(
dwFlags=win32con.STARTF_USESTDHANDLES,
dwFlags=flags,
hStdInput=stdin_read.handle,
hStdOutput=stdout_write.handle,
hStdError=stderr_write.handle,
@ -197,6 +199,9 @@ def runas(cmdLine, username, password=None, cwd=None):
# Create the environment for the user
env = create_env(user_token, False)
if "&&" in cmdLine:
cmdLine = f'cmd /c "{cmdLine}"'
hProcess = None
try:
# Start the process in a suspended state.
@ -286,13 +291,18 @@ def runas_unpriv(cmd, username, password, cwd=None):
dupin = salt.platform.win.DuplicateHandle(srchandle=stdin, inherit=True)
# Get startup info structure
flags = win32con.STARTF_USESTDHANDLES
flags |= win32con.STARTF_USESHOWWINDOW
startup_info = salt.platform.win.STARTUPINFO(
dwFlags=win32con.STARTF_USESTDHANDLES,
dwFlags=flags,
hStdInput=dupin,
hStdOutput=c2pwrite,
hStdError=errwrite,
)
if "&&" in cmd:
cmd = f'cmd /c "{cmd}"'
try:
# Run command and return process info structure
process_info = salt.platform.win.CreateProcessWithLogonW(

View file

@ -0,0 +1,50 @@
import pytest
pytestmark = [
pytest.mark.core_test,
pytest.mark.windows_whitelisted,
]
@pytest.fixture(scope="module")
def account():
with pytest.helpers.create_account() as _account:
yield _account
@pytest.mark.skip_unless_on_windows(reason="Minion is not Windows")
@pytest.mark.parametrize(
"exit_code, return_code, result",
[
(300, 0, True),
(299, 299, False),
],
)
def test_windows_script_exitcode(modules, state_tree, exit_code, return_code, result):
ret = modules.state.single(
"cmd.run", name=f"cmd.exe /c exit {exit_code}", success_retcodes=[2, 44, 300]
)
assert ret.result is result
assert ret.filtered["changes"]["retcode"] == return_code
@pytest.mark.skip_unless_on_windows(reason="Minion is not Windows")
@pytest.mark.parametrize(
"exit_code, return_code, result",
[
(300, 0, True),
(299, 299, False),
],
)
def test_windows_script_exitcode_runas(
modules, state_tree, exit_code, return_code, result, account
):
ret = modules.state.single(
"cmd.run",
name=f"cmd.exe /c exit {exit_code}",
success_retcodes=[2, 44, 300],
runas=account.username,
password=account.password,
)
assert ret.result is result
assert ret.filtered["changes"]["retcode"] == return_code

View file

@ -13,6 +13,17 @@ def cmd(modules):
return modules.cmd
@pytest.fixture(scope="module")
def exitcode_script(state_tree):
exit_code = 12345
script_contents = f"""
Write-Host "Expected exit code: {exit_code}"
exit {exit_code}
"""
with pytest.helpers.temp_file("exit_code.ps1", script_contents, state_tree):
yield exit_code
@pytest.fixture(params=["powershell", "pwsh"])
def shell(request):
"""
@ -85,3 +96,9 @@ def test_windows_script_args_powershell_runas(cmd, shell, account, issue_56195):
)
assert ret["stdout"] == password
@pytest.mark.skip_unless_on_windows(reason="Minion is not Windows")
def test_windows_script_exitcode(cmd, shell, exitcode_script):
ret = cmd.script("salt://exit_code.ps1", shell=shell, saltenv="base")
assert ret["retcode"] == exitcode_script

View file

@ -0,0 +1,56 @@
"""
Test the win_runas util
"""
import pytest
import salt.utils.win_runas as win_runas
pytestmark = [
pytest.mark.windows_whitelisted,
pytest.mark.skip_unless_on_windows,
]
@pytest.fixture
def user():
with pytest.helpers.create_account() as account:
yield account
@pytest.mark.parametrize(
"cmd, expected",
[
("hostname && whoami", "username"),
("hostname && echo foo", "foo"),
("hostname && python --version", "Python"),
],
)
def test_compound_runas(user, cmd, expected):
if expected == "username":
expected = user.username
result = win_runas.runas(
cmdLine=cmd,
username=user.username,
password=user.password,
)
assert expected in result["stdout"]
@pytest.mark.parametrize(
"cmd, expected",
[
("hostname && whoami", "username"),
("hostname && echo foo", "foo"),
("hostname && python --version", "Python"),
],
)
def test_compound_runas_unpriv(user, cmd, expected):
if expected == "username":
expected = user.username
result = win_runas.runas_unpriv(
cmd=cmd,
username=user.username,
password=user.password,
)
assert expected in result["stdout"]

Some files were not shown because too many files have changed in this diff Show more