Merge pull request #67209 from dwoz/merge/3007.x/3006.x

Merge forward 3006.x to 3007.x
This commit is contained in:
Daniel Wozniak 2025-02-16 21:21:47 -07:00 committed by GitHub
commit 64a91d459f
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
198 changed files with 8235 additions and 16907 deletions

View file

@ -6,6 +6,3 @@ contact_links:
- name: Salt-Users Forum - name: Salt-Users Forum
url: https://groups.google.com/forum/#!forum/salt-users url: https://groups.google.com/forum/#!forum/salt-users
about: Please ask and answer questions here. about: Please ask and answer questions here.
- name: Salt on LiberaChat
url: https://web.libera.chat/#salt
about: Please ask and answer questions here.

View file

@ -8,7 +8,7 @@ assignees: ''
--- ---
### Description of the tech debt to be addressed, include links and screenshots ### Description of the tech debt to be addressed, include links and screenshots
<!-- Note: Please direct questions to the salt-users google group, IRC or Community Discord. --> <!-- Note: Please direct questions to the salt-users google group, GitHub Discussions or Community Discord. -->
### Versions Report ### Versions Report
(Provided by running `salt --versions-report`. Please also mention any differences in master/minion versions.) (Provided by running `salt --versions-report`. Please also mention any differences in master/minion versions.)

View file

@ -1,14 +1,5 @@
self-hosted-runner: self-hosted-runner:
# Labels of self-hosted runner in array of string # Labels of self-hosted runner in array of string
labels: labels:
- bastion - linux-x86_64
- x86_64 - linux-arm64
- arm64
- aarch64
- amd64
- repo-nightly
- repo-staging
- repo-release
- medium
- large
- macos-13-xlarge

View file

@ -26,10 +26,6 @@ inputs:
description: 'Check if a cache entry exists for the given input(s) (key, restore-keys) without downloading the cache' description: 'Check if a cache entry exists for the given input(s) (key, restore-keys) without downloading the cache'
default: 'false' default: 'false'
required: false required: false
save-always:
description: 'Run the post step to save the cache even if another step before fails'
default: 'false'
required: false
outputs: outputs:
cache-hit: cache-hit:
@ -49,7 +45,6 @@ runs:
echo "GHA_CACHE_ENABLE_CROSS_OS_ARCHIVE=${{ inputs.enableCrossOsArchive }}" | tee -a "${GITHUB_ENV}" echo "GHA_CACHE_ENABLE_CROSS_OS_ARCHIVE=${{ inputs.enableCrossOsArchive }}" | tee -a "${GITHUB_ENV}"
echo "GHA_CACHE_FAIL_ON_CACHE_MISS=${{ inputs.fail-on-cache-miss }}" | tee -a "${GITHUB_ENV}" echo "GHA_CACHE_FAIL_ON_CACHE_MISS=${{ inputs.fail-on-cache-miss }}" | tee -a "${GITHUB_ENV}"
echo "GHA_CACHE_LOOKUP_ONLY=${{ inputs.lookup-only }}" | tee -a "${GITHUB_ENV}" echo "GHA_CACHE_LOOKUP_ONLY=${{ inputs.lookup-only }}" | tee -a "${GITHUB_ENV}"
echo "GHA_CACHE_SAVE_ALWAYS=${{ inputs.save-always }}" | tee -a "${GITHUB_ENV}"
echo "GHA_CACHE_RESTORE_KEYS=${{ inputs.restore-keys }}" | tee -a "${GITHUB_ENV}" echo "GHA_CACHE_RESTORE_KEYS=${{ inputs.restore-keys }}" | tee -a "${GITHUB_ENV}"
echo "GHA_CACHE_UPLOAD_CHUNK_SIZE=${{ inputs.upload-chunk-size }}" | tee -a "${GITHUB_ENV}" echo "GHA_CACHE_UPLOAD_CHUNK_SIZE=${{ inputs.upload-chunk-size }}" | tee -a "${GITHUB_ENV}"
@ -63,7 +58,6 @@ runs:
enableCrossOsArchive: ${{ env.GHA_CACHE_ENABLE_CROSS_OS_ARCHIVE }} enableCrossOsArchive: ${{ env.GHA_CACHE_ENABLE_CROSS_OS_ARCHIVE }}
fail-on-cache-miss: ${{ env.GHA_CACHE_FAIL_ON_CACHE_MISS }} fail-on-cache-miss: ${{ env.GHA_CACHE_FAIL_ON_CACHE_MISS }}
lookup-only: ${{ env.GHA_CACHE_LOOKUP_ONLY }} lookup-only: ${{ env.GHA_CACHE_LOOKUP_ONLY }}
save-always: ${{ env.GHA_CACHE_SAVE_ALWAYS }}
restore-keys: ${{ env.GHA_CACHE_RESTORE_KEYS }} restore-keys: ${{ env.GHA_CACHE_RESTORE_KEYS }}
upload-chunk-size: ${{ env.GHA_CACHE_UPLOAD_CHUNK_SIZE }} upload-chunk-size: ${{ env.GHA_CACHE_UPLOAD_CHUNK_SIZE }}
@ -97,7 +91,6 @@ runs:
enableCrossOsArchive: ${{ env.GHA_CACHE_ENABLE_CROSS_OS_ARCHIVE }} enableCrossOsArchive: ${{ env.GHA_CACHE_ENABLE_CROSS_OS_ARCHIVE }}
fail-on-cache-miss: ${{ env.GHA_CACHE_FAIL_ON_CACHE_MISS }} fail-on-cache-miss: ${{ env.GHA_CACHE_FAIL_ON_CACHE_MISS }}
lookup-only: ${{ env.GHA_CACHE_LOOKUP_ONLY }} lookup-only: ${{ env.GHA_CACHE_LOOKUP_ONLY }}
save-always: ${{ env.GHA_CACHE_SAVE_ALWAYS }}
restore-keys: ${{ env.GHA_CACHE_RESTORE_KEYS }} restore-keys: ${{ env.GHA_CACHE_RESTORE_KEYS }}
upload-chunk-size: ${{ env.GHA_CACHE_UPLOAD_CHUNK_SIZE }} upload-chunk-size: ${{ env.GHA_CACHE_UPLOAD_CHUNK_SIZE }}

View file

@ -54,10 +54,13 @@ runs:
working-directory: ${{ inputs.cwd }} working-directory: ${{ inputs.cwd }}
run: | run: |
PYTHON_EXE=${{ steps.tools-virtualenv.outputs.python-executable }} PYTHON_EXE=${{ steps.tools-virtualenv.outputs.python-executable }}
${PYTHON_EXE} -m ensurepip --upgrade
(${PYTHON_EXE} -m pip install --help | grep break-system-packages > /dev/null 2>&1) && exitcode=0 || exitcode=1 (${PYTHON_EXE} -m pip install --help | grep break-system-packages > /dev/null 2>&1) && exitcode=0 || exitcode=1
if [ $exitcode -eq 0 ]; then if [ $exitcode -eq 0 ]; then
${PYTHON_EXE} -m pip install --break-system-packages --upgrade setuptools
${PYTHON_EXE} -m pip install --break-system-packages -r requirements/static/ci/py${{ steps.get-python-version.outputs.version }}/tools.txt ${PYTHON_EXE} -m pip install --break-system-packages -r requirements/static/ci/py${{ steps.get-python-version.outputs.version }}/tools.txt
else else
${PYTHON_EXE} -m pip install --upgrade setuptools
${PYTHON_EXE} -m pip install -r requirements/static/ci/py${{ steps.get-python-version.outputs.version }}/tools.txt ${PYTHON_EXE} -m pip install -r requirements/static/ci/py${{ steps.get-python-version.outputs.version }}/tools.txt
fi fi

94
.github/actions/ssh-tunnel/README.md vendored Normal file
View file

@ -0,0 +1,94 @@
# SSH Tunnel
The ssh-tunnel action will create a reverse tunnel over webrtc to port 22 on the runner.
## Usage
In order to use this action you must have a sdp offer from your local host and a ssh key pair.
Start with creating an sdp offer on your local machine. Provide these values to the ssh-tunnel
action and wait for output from the action with the sdp reply. Provide the reply to the local
rtcforward.py process by pasting it to stdin. If all goes well the local port on your machine
will be forwarded to the ssh port on the runner.
### Getting an sdp offer
To get an sdp offer start rtcforward.py on your local machine with the offer command.
You can also specify which port on the local machine will be used for the tunnel.
``` bash
$ python3 .github/actions/ssh-tunnel/rtcforward.py offer --port 5222
```
rtcforward.py will create an offer and display it in your terminal. (This example offer has been truncated)
After showing the offer the `rtcforward.py` process will wait for a reply.
```
-- offer --
eyJzZHAiOiAidj0wXHJcbm89LSAzOTQ3Mzg4NjUzIDM5NDczODg2NTMgSU4gSVA0IDAuMC4wLjBcclxu
cz0tXHJcbnQ9MCAwXHJcbmE9Z3JvdXA6QlVORExFIDBcclxuYT1tc2lkLXNlbWFudGljOldNUyAqXHJc
bm09YXBwbGljYXRpb24gMzUyNjkgRFRMUy9TQ1RQIDUwMDBcclxuYz1JTiBJUDQgMTkyLjE2OC4wLjIw
IHVkcCAxNjk0NDk4ODE1IDE4NC4xNzkuMjEwLjE1MiAzNTI2OSB0eXAgc3JmbHggcmFkZHIgMTkyLjE2
OC4wLjIwMSBycG9ydCAzNTI2OVxyXG5hPWNhbmRpZGF0ZTozZWFjMzJiZTZkY2RkMTAwZDcwMTFiNWY0
NTo4Qzo2MDoxMTpFQTo3NzpDMTo5RTo1QTo3QzpDQzowRDowODpFQzo2NDowQToxM1xyXG5hPWZpbmdl
cnByaW50OnNoYS01MTIgNjY6MzI6RUQ6MDA6N0I6QjY6NTQ6NzA6MzE6OTA6M0I6Mjg6Q0I6QTk6REU6
MzQ6QjI6NDY6NzE6NUI6MjM6ODA6Nzg6Njg6RDA6QTA6QTg6MjU6QkY6MDQ6ODY6NUY6OTA6QUY6MUQ6
QjA6QzY6ODA6QUY6OTc6QTI6MkM6NDI6QUU6MkI6Q0Q6Mjk6RUQ6MkI6ODc6NTU6ODg6NDY6QTM6ODk6
OEY6ODk6OTE6QTE6QTI6NDM6NTc6M0E6MjZcclxuYT1zZXR1cDphY3RwYXNzXHJcbiIsICJ0eXBlIjog
Im9mZmVyIn0=
-- end offer --
-- Please enter a message from remote party --
```
### Getting an sdp answer
Provide the offer to the ssh-tunnel action. When the action runs, an answer to the offer will be generated.
In the action output you will see that the offer was received, and the generated reply will be shown.
```
-- Please enter a message from remote party --
-- Message received --
-- reply --
eyJzZHAiOiAidj0wXHJcbm89LSAzOTQ3Mzg3NDcxIDM5NDczODc0NzEgSU4gSVA0IDAuMC4wLjBcclxu
cz0tXHJcbnQ9MCAwXHJcbmE9Z3JvdXA6QlVORExFIDBcclxuYT1tc2lkLXNlbWFudGljOldNUyAqXHJc
bm09YXBwbGljYXRpb24gNTcwMzkgRFRMUy9TQ1RQIDUwMDBcclxuYz1JTiBJUDQgMTkyLjE2OC42NC4x
MFxyXG5hPW1pZDowXHJcbmE9c2N0cG1hcDo1MDAwIHdlYnJ0Yy1kYXRhY2hhbm5lbCA2NTUzNVxyXG5h
MTc6MEI6RTA6OTA6QUM6RjU6RTk6RUI6Q0E6RUE6NTY6REI6NTA6QTk6REY6NTU6MzY6MkM6REI6OUE6
MDc6Mzc6QTM6NDc6NjlcclxuYT1maW5nZXJwcmludDpzaGEtNTEyIDMyOjRDOjk0OkRDOjNFOkU5OkU3
OjNCOjc5OjI4OjZDOjc5OkFEOkVDOjIzOkJDOjRBOjRBOjE5OjlCOjg5OkE3OkE2OjZBOjAwOjJFOkM5
OkE0OjlEOjAwOjM0OjFFOjRDOkVGOjcwOkY5OkNBOjg0OjlEOjcxOjI5OkVCOkIxOkREOkFEOjg5OjUx
OkZFOjhCOjI3OjFDOjFBOkJEOjUxOjQ2OjE4OjBBOjhFOjVBOjI1OjQzOjQzOjZGOkRBXHJcbmE9c2V0
dXA6YWN0aXZlXHJcbiIsICJ0eXBlIjogImFuc3dlciJ9
-- end reply --
```
# Finalizing the tunnel
Paste the sdp reply from the running action into the running `rtcforward.py` process that created the offer.
After receiving the reply you will see `-- Message received --` and the tunnel will be created.
```
-- offer --
eyJzZHAiOiAidj0wXHJcbm89LSAzOTQ3Mzg4NjUzIDM5NDczODg2NTMgSU4gSVA0IDAuMC4wLjBcclxu
cz0tXHJcbnQ9MCAwXHJcbmE9Z3JvdXA6QlVORExFIDBcclxuYT1tc2lkLXNlbWFudGljOldNUyAqXHJc
bm09YXBwbGljYXRpb24gMzUyNjkgRFRMUy9TQ1RQIDUwMDBcclxuYz1JTiBJUDQgMTkyLjE2OC4wLjIw
IHVkcCAxNjk0NDk4ODE1IDE4NC4xNzkuMjEwLjE1MiAzNTI2OSB0eXAgc3JmbHggcmFkZHIgMTkyLjE2
OC4wLjIwMSBycG9ydCAzNTI2OVxyXG5hPWNhbmRpZGF0ZTozZWFjMzJiZTZkY2RkMTAwZDcwMTFiNWY0
NTo4Qzo2MDoxMTpFQTo3NzpDMTo5RTo1QTo3QzpDQzowRDowODpFQzo2NDowQToxM1xyXG5hPWZpbmdl
cnByaW50OnNoYS01MTIgNjY6MzI6RUQ6MDA6N0I6QjY6NTQ6NzA6MzE6OTA6M0I6Mjg6Q0I6QTk6REU6
MzQ6QjI6NDY6NzE6NUI6MjM6ODA6Nzg6Njg6RDA6QTA6QTg6MjU6QkY6MDQ6ODY6NUY6OTA6QUY6MUQ6
QjA6QzY6ODA6QUY6OTc6QTI6MkM6NDI6QUU6MkI6Q0Q6Mjk6RUQ6MkI6ODc6NTU6ODg6NDY6QTM6ODk6
OEY6ODk6OTE6QTE6QTI6NDM6NTc6M0E6MjZcclxuYT1zZXR1cDphY3RwYXNzXHJcbiIsICJ0eXBlIjog
Im9mZmVyIn0=
-- end offer --
-- Please enter a message from remote party --
eyJzZHAiOiAidj0wXHJcbm89LSAzOTQ3Mzg3NDcxIDM5NDczODc0NzEgSU4gSVA0IDAuMC4wLjBcclxu
cz0tXHJcbnQ9MCAwXHJcbmE9Z3JvdXA6QlVORExFIDBcclxuYT1tc2lkLXNlbWFudGljOldNUyAqXHJc
bm09YXBwbGljYXRpb24gNTcwMzkgRFRMUy9TQ1RQIDUwMDBcclxuYz1JTiBJUDQgMTkyLjE2OC42NC4x
MFxyXG5hPW1pZDowXHJcbmE9c2N0cG1hcDo1MDAwIHdlYnJ0Yy1kYXRhY2hhbm5lbCA2NTUzNVxyXG5h
MTc6MEI6RTA6OTA6QUM6RjU6RTk6RUI6Q0E6RUE6NTY6REI6NTA6QTk6REY6NTU6MzY6MkM6REI6OUE6
MDc6Mzc6QTM6NDc6NjlcclxuYT1maW5nZXJwcmludDpzaGEtNTEyIDMyOjRDOjk0OkRDOjNFOkU5OkU3
OjNCOjc5OjI4OjZDOjc5OkFEOkVDOjIzOkJDOjRBOjRBOjE5OjlCOjg5OkE3OkE2OjZBOjAwOjJFOkM5
OkE0OjlEOjAwOjM0OjFFOjRDOkVGOjcwOkY5OkNBOjg0OjlEOjcxOjI5OkVCOkIxOkREOkFEOjg5OjUx
OkZFOjhCOjI3OjFDOjFBOkJEOjUxOjQ2OjE4OjBBOjhFOjVBOjI1OjQzOjQzOjZGOkRBXHJcbmE9c2V0
dXA6YWN0aXZlXHJcbiIsICJ0eXBlIjogImFuc3dlciJ9
-- Message received --
```

107
.github/actions/ssh-tunnel/action.yml vendored Normal file
View file

@ -0,0 +1,107 @@
---
name: ssh-tunnel
description: SSH Reverse Tunnel

inputs:
  public_key:
    required: true
    type: string
    description: Public key to accept for reverse tunnel. Warning, this should not be the public key for the 'private_key' input.
  offer:
    required: true
    type: string
    description: RTC offer
  debug:
    required: false
    type: bool
    default: false
    description: Run sshd with debug enabled.

runs:
  using: composite
  steps:
    - uses: actions/checkout@v4
    - uses: actions/setup-python@v5
      with:
        python-version: '3.10'

    # Windows runners do not ship an SSH server; install Win32-OpenSSH.
    - name: Install ssh
      if: ${{ runner.os == 'Windows' }}
      shell: powershell
      run: |
        python3.exe -m pip install requests
        python3.exe .github/actions/ssh-tunnel/installssh.py

    - name: Start SSH
      shell: bash
      run: |
        if [ "$RUNNER_OS" = "Windows" ]; then
          powershell.exe -command "Start-Service sshd"
        elif [ "$RUNNER_OS" = "macOS" ]; then
          sudo launchctl load -w /System/Library/LaunchDaemons/ssh.plist
        else
          sudo systemctl start ssh
        fi

    - name: Show sshd configuration
      shell: bash
      run: |
        if [ "$RUNNER_OS" = "Linux" ]; then
          cat /etc/ssh/sshd_config
        elif [ "$RUNNER_OS" = "macOS" ]; then
          cat /private/etc/ssh/sshd_config
        else
          cat "C:\ProgramData\ssh\sshd_config"
        fi

    - name: Add ssh public key
      shell: bash
      run: |
        if [ "$RUNNER_OS" = "Linux" ]; then
          mkdir -p /home/runner/.ssh
          chmod 700 /home/runner/.ssh
          touch /home/runner/.ssh/authorized_keys
          echo "${{ inputs.public_key }}" | tee -a /home/runner/.ssh/authorized_keys
        elif [ "$RUNNER_OS" = "macOS" ]; then
          mkdir -p /Users/runner/.ssh
          chmod 700 /Users/runner/.ssh
          touch /Users/runner/.ssh/authorized_keys
          echo "${{ inputs.public_key }}" | tee -a /Users/runner/.ssh/authorized_keys
        else
          echo "${{ inputs.public_key }}" | tee -a "C:\ProgramData\ssh\administrators_authorized_keys"
        fi

    # In debug mode the service is stopped so sshd can be re-run in the
    # foreground with -d in the "Create rtc tunnel" step below.
    - name: Stop SSHD
      if: ${{ inputs.debug }}
      shell: bash
      run: |
        if [ "${{ inputs.debug }}" = "true" ]; then
          if [ "$RUNNER_OS" = "Windows" ]; then
            powershell.exe -command "Stop-Service sshd"
          elif [ "$RUNNER_OS" = "macOS" ]; then
            sudo launchctl unload /System/Library/LaunchDaemons/ssh.plist
          else
            sudo systemctl stop ssh
          fi
        fi

    - name: Create rtc tunnel
      shell: bash
      run: |
        if [ "${{ inputs.debug }}" = "true" ]; then
          if [ "$RUNNER_OS" = "Windows" ]; then
            ./OpenSSH-Win64/sshd.exe -d &
          elif [ "$RUNNER_OS" = "macOS" ]; then
            sudo /usr/sbin/sshd -d &
          else
            sudo mkdir -p /run/sshd
            sudo chmod 755 /run/sshd
            sudo /usr/sbin/sshd -d &
          fi
        fi
        if [ "$RUNNER_OS" = "Windows" ]; then
          python3 -m pip install aiortc
        else
          python3 -m pip install aiortc uvloop
        fi
        echo '${{ inputs.offer }}' | python .github/actions/ssh-tunnel/rtcforward.py --port 22 answer

View file

@ -0,0 +1,44 @@
"""
"""
import pathlib
import subprocess
import zipfile
import requests
fwrule = """
New-NetFirewallRule `
-Name sshd `
-DisplayName 'OpenSSH SSH Server' `
-Enabled True `
-Direction Inbound `
-Protocol TCP `
-Action Allow `
-LocalPort 22 `
-Program "{}"
"""
def start_ssh_server():
    """
    Download Win32-OpenSSH, install the sshd service, and open TCP port 22
    in the Windows firewall.

    Side effects: writes ``openssh.zip`` and ``fwrule.ps1`` to the current
    directory, extracts ``OpenSSH-Win64``, and runs two PowerShell scripts.
    """
    resp = requests.get(
        "https://github.com/PowerShell/Win32-OpenSSH/releases/download/v9.8.1.0p1-Preview/OpenSSH-Win64.zip",
        allow_redirects=True,
    )
    with open("openssh.zip", "wb") as fp:
        fp.write(resp.content)
    with zipfile.ZipFile("openssh.zip") as fp:
        fp.extractall()
    install_script = pathlib.Path("./OpenSSH-Win64/install-sshd.ps1").resolve()
    print(f"{install_script}")
    subprocess.call(["powershell.exe", f"{install_script}"])
    # The firewall rule's -Program must point at the extracted sshd.exe.
    with open("fwrule.ps1", "w") as fp:
        fp.write(fwrule.format(install_script.parent / "sshd.exe"))
    subprocess.call(["powershell.exe", "fwrule.ps1"])


if __name__ == "__main__":
    start_ssh_server()

361
.github/actions/ssh-tunnel/rtcforward.py vendored Normal file
View file

@ -0,0 +1,361 @@
import argparse
import asyncio
import base64
import concurrent
import io
import json
import logging
import os
import sys
import textwrap
import time
aiortc = None
try:
import aiortc.exceptions
from aiortc import RTCIceCandidate, RTCPeerConnection, RTCSessionDescription
from aiortc.contrib.signaling import BYE, add_signaling_arguments, create_signaling
except ImportError:
pass
uvloop = None
try:
import uvloop
except ImportError:
pass
if sys.platform == "win32":
if not aiortc:
print("Please run 'pip install aiortc' and try again.")
sys.exit(1)
asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
else:
if not aiortc or not uvloop:
print("Please run 'pip install aiortc uvloop' and try again.")
sys.exit(1)
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
log = logging.getLogger(__name__)
def object_from_string(message_str):
    """
    Deserialize a JSON signaling message into an aiortc object.

    Returns an RTCSessionDescription for "answer"/"offer" messages, an
    RTCIceCandidate for "candidate" messages, BYE for "bye" messages,
    and None for anything else.
    """
    message = json.loads(message_str)
    if message["type"] in ["answer", "offer"]:
        return RTCSessionDescription(**message)
    elif message["type"] == "candidate" and message["candidate"]:
        # candidate_from_sdp is not imported at module level; import it
        # here so this branch does not raise NameError.
        from aiortc.sdp import candidate_from_sdp

        candidate = candidate_from_sdp(message["candidate"].split(":", 1)[1])
        candidate.sdpMid = message["id"]
        candidate.sdpMLineIndex = message["label"]
        return candidate
    elif message["type"] == "bye":
        return BYE
def object_to_string(obj):
    """
    Serialize an aiortc signaling object (session description, ICE
    candidate, or BYE) to a deterministic JSON string.
    """
    if isinstance(obj, RTCSessionDescription):
        message = {"sdp": obj.sdp, "type": obj.type}
    elif isinstance(obj, RTCIceCandidate):
        # candidate_to_sdp is not imported at module level; import it here
        # so this branch does not raise NameError.
        from aiortc.sdp import candidate_to_sdp

        message = {
            "candidate": "candidate:" + candidate_to_sdp(obj),
            "id": obj.sdpMid,
            "label": obj.sdpMLineIndex,
            "type": "candidate",
        }
    else:
        assert obj is BYE
        message = {"type": "bye"}
    return json.dumps(message, sort_keys=True)
def print_pastable(data, message="offer"):
    """
    Print ``data`` framed by ``-- <message> --`` / ``-- end <message> --``
    markers, flushing stdout after every line so the block is complete in a
    live (buffered) log stream and can be copy/pasted.
    """
    for line in (f"-- {message} --", f"{data}", f"-- end {message} --"):
        print(line)
        sys.stdout.flush()
class ProxyClient:
    """
    Answer-side proxy. For each new connection announced on the shared
    control channel, set up a dedicated RTCPeerConnection/datachannel pair
    and proxy its traffic to the local TCP port (``args.port``).
    """

    def __init__(self, args, channel):
        self.args = args  # parsed CLI args; only .port is used here
        self.channel = channel  # control datachannel shared with the offer side

    def start(self):
        # Register on_message as the control channel's "message" handler.
        self.channel.on("message")(self.on_message)

    def on_message(self, message):
        """
        Handle one control-channel message describing a new client
        connection on the remote side; negotiate a sub peer connection.
        """
        msg = json.loads(message)
        key = msg["key"]
        data = msg["data"]
        log.debug("new connection message %s", key)
        pc = RTCPeerConnection()

        @pc.on("datachannel")
        def on_channel(channel):
            log.info("Sub channel established %s", key)
            asyncio.ensure_future(self.handle_channel(channel))

        async def finalize_connection():
            obj = object_from_string(data)
            if isinstance(obj, RTCSessionDescription):
                await pc.setRemoteDescription(obj)
                if obj.type == "offer":
                    # send answer
                    await pc.setLocalDescription(await pc.createAnswer())
                    msg = {"key": key, "data": object_to_string(pc.localDescription)}
                    self.channel.send(json.dumps(msg))
            elif isinstance(obj, RTCIceCandidate):
                await pc.addIceCandidate(obj)
            elif obj is BYE:
                log.warning("Exiting")

        asyncio.ensure_future(finalize_connection())

    async def handle_channel(self, channel):
        """
        Proxy bytes between a per-connection datachannel and a local TCP
        connection to 127.0.0.1:``args.port``.
        """
        try:
            reader, writer = await asyncio.open_connection("127.0.0.1", self.args.port)
            log.info("opened connection to port %s", self.args.port)

            @channel.on("message")
            def on_message(message):
                log.debug("rtc to socket %r", message)
                writer.write(message)
                asyncio.ensure_future(writer.drain())

            while True:
                data = await reader.read(100)
                if data:
                    log.debug("socket to rtc %r", data)
                    channel.send(data)
        except Exception:
            log.exception("Error while proxying datachannel to local socket")
class ProxyServer:
    """
    Offer-side proxy. Listens on a local TCP port; for every client that
    connects, creates a new RTCPeerConnection plus datachannel and
    negotiates it over the shared control channel.
    """

    def __init__(self, args, channel):
        self.args = args
        self.channel = channel
        self.connections = {}  # "<host>:<port>" -> ProxyConnection

    async def start(self):
        @self.channel.on("message")
        def handle_message(message):
            asyncio.ensure_future(self.handle_message(message))

        self.server = await asyncio.start_server(
            self.new_connection, "127.0.0.1", self.args.port
        )
        log.info("Listening on port %s", self.args.port)
        async with self.server:
            await self.server.serve_forever()

    async def handle_message(self, message):
        """
        Process a signaling message for one of the per-client peer
        connections (session description, ICE candidate, or BYE).
        """
        msg = json.loads(message)
        key = msg["key"]
        pc = self.connections[key].pc
        channel = self.connections[key].channel
        obj = object_from_string(msg["data"])
        if isinstance(obj, RTCSessionDescription):
            await pc.setRemoteDescription(obj)
            if obj.type == "offer":
                # send answer
                await pc.setLocalDescription(await pc.createAnswer())
                msg = {
                    "key": key,
                    "data": object_to_string(pc.localDescription),
                }
                self.channel.send(json.dumps(msg))
        elif isinstance(obj, RTCIceCandidate):
            await pc.addIceCandidate(obj)
        elif obj is BYE:
            print("Exiting")

    async def new_connection(self, reader, writer):
        """
        asyncio.start_server callback: wire a newly accepted TCP client to
        a fresh peer connection/datachannel and send the offer over the
        control channel.
        """
        try:
            info = writer.get_extra_info("peername")
            key = f"{info[0]}:{info[1]}"
            log.info("Connection from %s", key)
            pc = RTCPeerConnection()
            # Label the channel with the connection key (the original
            # passed the literal string "{key}" — a missing f-prefix).
            channel = pc.createDataChannel(f"{key}")

            async def readerproxy():
                while True:
                    data = await reader.read(100)
                    if data:
                        log.debug("socket to rtc %r", data)
                        try:
                            channel.send(data)
                        except aiortc.exceptions.InvalidStateError:
                            log.error(
                                "Channel was in an invalid state %s, bailing reader coroutine",
                                key,
                            )
                            break

            @channel.on("open")
            def on_open():
                asyncio.ensure_future(readerproxy())

            @channel.on("message")
            def on_message(message):
                log.debug("rtc to socket %r", message)
                writer.write(message)
                asyncio.ensure_future(writer.drain())

            self.connections[key] = ProxyConnection(pc, channel)
            await pc.setLocalDescription(await pc.createOffer())
            msg = {
                "key": key,
                "data": object_to_string(pc.localDescription),
            }
            log.debug("Send new offer")
            self.channel.send(json.dumps(msg, sort_keys=True))
        except Exception:
            log.exception("Failed to set up proxied connection")
class ProxyConnection:
    """
    Container pairing an RTCPeerConnection with its proxy datachannel, so
    ProxyServer can look both up by connection key.
    """

    def __init__(self, pc, channel):
        self.pc = pc
        self.channel = channel
async def read_from_stdin():
    """
    Collect a multi-line pasted message from stdin without blocking the
    event loop (each input() call runs in the default executor). Reading
    stops at EOF or at the first empty line.
    """
    loop = asyncio.get_event_loop()
    prompt = "-- Please enter a message from remote party --\n"
    data = await loop.run_in_executor(None, input, prompt)
    line = data
    while line:
        try:
            line = await loop.run_in_executor(None, input)
        except EOFError:
            break
        data += line
    print("-- Message received --")
    return data
async def run_answer(pc, args):
    """
    Top level offer answer server.

    Wait for a base64 encoded offer on stdin, answer it, print the base64
    encoded reply for pasting back to the offer side, then keep the
    coroutine alive so the tunnel stays up.
    """

    @pc.on("datachannel")
    def on_datachannel(channel):
        log.info("Channel created")
        client = ProxyClient(args, channel)
        client.start()

    pasted = await read_from_stdin()
    obj = object_from_string(base64.b64decode(pasted))
    if isinstance(obj, RTCSessionDescription):
        log.debug("received rtc session description")
        await pc.setRemoteDescription(obj)
        if obj.type == "offer":
            await pc.setLocalDescription(await pc.createAnswer())
            encoded = base64.b64encode(object_to_string(pc.localDescription).encode())
            reply = os.linesep.join(textwrap.wrap(encoded.decode(), 80))
            print_pastable(reply, "reply")
    elif isinstance(obj, RTCIceCandidate):
        log.debug("received rtc ice candidate")
        await pc.addIceCandidate(obj)
    elif obj is BYE:
        print("Exiting")
    # Keep the event loop (and the tunnel) alive indefinitely.
    while True:
        await asyncio.sleep(0.3)
async def run_offer(pc, args):
    """
    Top level offer server. This will establish a data channel and start a
    tcp server on the port provided. New connections to the server will
    start the creation of a new rtc connection and a new data channel used
    for proxying the client's connection to the remote side.
    """
    control_channel = pc.createDataChannel("main")
    log.info("Created control channel.")

    async def start_server():
        """
        Start the proxy server. The proxy server will create a local port and
        handle creation of additional rtc peer connections for each new client
        to the proxy server port.
        """
        server = ProxyServer(args, control_channel)
        await server.start()

    @control_channel.on("open")
    def on_open():
        """
        Start the proxy server when the control channel is connected.
        """
        asyncio.ensure_future(start_server())

    await pc.setLocalDescription(await pc.createOffer())
    data = object_to_string(pc.localDescription).encode()
    data = base64.b64encode(data)
    data = os.linesep.join(textwrap.wrap(data.decode(), 80))
    print_pastable(data, "offer")
    data = await read_from_stdin()
    data = base64.b64decode(data.encode())
    obj = object_from_string(data)
    if isinstance(obj, RTCSessionDescription):
        log.debug("received rtc session description")
        await pc.setRemoteDescription(obj)
        if obj.type == "offer":
            # The remote party sent an offer instead of the expected
            # answer: answer it and print the reply. (The original code
            # called an undefined ``signaling`` object here, which would
            # raise NameError.)
            await pc.setLocalDescription(await pc.createAnswer())
            reply = base64.b64encode(object_to_string(pc.localDescription).encode())
            print_pastable(os.linesep.join(textwrap.wrap(reply.decode(), 80)), "reply")
    elif isinstance(obj, RTCIceCandidate):
        log.debug("received rtc ice candidate")
        await pc.addIceCandidate(obj)
    elif obj is BYE:
        print("Exiting")
    # Keep the event loop (and the tunnel) alive indefinitely.
    while True:
        await asyncio.sleep(0.3)
if __name__ == "__main__":
    # Windows needs the selector event loop for aiortc compatibility.
    if sys.platform == "win32":
        asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
    parser = argparse.ArgumentParser(description="Port proxy")
    parser.add_argument("role", choices=["offer", "answer"])
    parser.add_argument("--port", type=int, default=11224)
    parser.add_argument("--verbose", "-v", action="count", default=None)
    args = parser.parse_args()
    # No -v -> WARNING, a single -v -> INFO, -vv (or more) -> DEBUG.
    if args.verbose is None:
        logging.basicConfig(level=logging.WARNING)
    elif args.verbose > 1:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)
    pc = RTCPeerConnection()
    coro = run_offer(pc, args) if args.role == "offer" else run_answer(pc, args)
    # run event loop
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    try:
        loop.run_until_complete(coro)
    except KeyboardInterrupt:
        pass
    finally:
        loop.run_until_complete(pc.close())

12
.github/config.yml vendored
View file

@ -11,18 +11,16 @@ newIssueWelcomeComment: >
Also, check out some of our community Also, check out some of our community
resources including: resources including:
- [Community Wiki](https://github.com/saltstack/community/wiki)
- [Salts Contributor Guide](https://docs.saltproject.io/en/master/topics/development/contributing.html) - [Salts Contributor Guide](https://docs.saltproject.io/en/master/topics/development/contributing.html)
- [Join our Community Discord](https://discord.com/invite/J7b7EscrAs) - [Join our Community Discord](https://discord.com/invite/J7b7EscrAs)
- [IRC on LiberaChat](https://web.libera.chat/#salt)
- [Salt Project YouTube channel](https://www.youtube.com/channel/UCpveTIucFx9ljGelW63-BWg) - [Salt Project YouTube channel](https://www.youtube.com/channel/UCpveTIucFx9ljGelW63-BWg)
- [Salt Project Twitch channel](https://www.twitch.tv/saltprojectoss) - [Community Wiki](https://github.com/saltstack/community/wiki)
There are lots of ways to get involved in our community. Every month, there are around a dozen There are lots of ways to get involved in our community. Every month, there are around a dozen
opportunities to meet with other contributors and the Salt Core team and collaborate in real opportunities to meet with other contributors and the Salt Core team and collaborate in real
time. The best way to keep track is by subscribing to the Salt Community Events Calendar. time. The best way to keep track is by subscribing to the Salt Community Events Calendar.
If you have additional questions, email us at saltproject@vmware.com. Were glad If you have additional questions, email us at saltproject.pdl@broadcom.com. Were glad
youve joined our community and look forward to doing awesome things with youve joined our community and look forward to doing awesome things with
you! you!
@ -37,18 +35,16 @@ newPRWelcomeComment: >
Also, check out some of our community Also, check out some of our community
resources including: resources including:
- [Community Wiki](https://github.com/saltstack/community/wiki)
- [Salts Contributor Guide](https://docs.saltproject.io/en/master/topics/development/contributing.html) - [Salts Contributor Guide](https://docs.saltproject.io/en/master/topics/development/contributing.html)
- [Join our Community Discord](https://discord.com/invite/J7b7EscrAs) - [Join our Community Discord](https://discord.com/invite/J7b7EscrAs)
- [IRC on LiberaChat](https://web.libera.chat/#salt)
- [Salt Project YouTube channel](https://www.youtube.com/channel/UCpveTIucFx9ljGelW63-BWg) - [Salt Project YouTube channel](https://www.youtube.com/channel/UCpveTIucFx9ljGelW63-BWg)
- [Salt Project Twitch channel](https://www.twitch.tv/saltprojectoss) - [Community Wiki](https://github.com/saltstack/community/wiki)
There are lots of ways to get involved in our community. Every month, there are around a dozen There are lots of ways to get involved in our community. Every month, there are around a dozen
opportunities to meet with other contributors and the Salt Core team and collaborate in real opportunities to meet with other contributors and the Salt Core team and collaborate in real
time. The best way to keep track is by subscribing to the Salt Community Events Calendar. time. The best way to keep track is by subscribing to the Salt Community Events Calendar.
If you have additional questions, email us at saltproject@vmware.com. Were glad If you have additional questions, email us at saltproject.pdl@broadcom.com. Were glad
youve joined our community and look forward to doing awesome things with youve joined our community and look forward to doing awesome things with
you! you!

View file

@ -34,6 +34,14 @@ on:
type: string type: string
description: The onedir package name to use description: The onedir package name to use
default: salt default: salt
matrix:
required: true
type: string
description: Json job matrix config
linux_arm_runner:
required: true
type: string
description: Json job matrix config
env: env:
@ -48,54 +56,22 @@ env:
jobs: jobs:
generate-matrix:
name: Generate Matrix
runs-on: ubuntu-latest
outputs:
matrix-include: ${{ steps.generate-matrix.outputs.matrix }}
env:
PIP_INDEX_URL: https://pypi.org/simple
steps:
- uses: actions/setup-python@v5
with:
python-version: '3.10'
- name: "Throttle Builds"
shell: bash
run: |
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
- name: Checkout Source Code
uses: actions/checkout@v4
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-prefix }}
env:
PIP_INDEX_URL: https://pypi.org/simple
- name: Generate Test Matrix
id: generate-matrix
run: |
tools ci deps-matrix
linux-dependencies: linux-dependencies:
name: Linux name: Linux
needs: if: ${{ toJSON(fromJSON(inputs.matrix)['linux']) != '[]' }}
- generate-matrix
runs-on: runs-on:
- self-hosted - ${{ matrix.arch == 'x86_64' && 'ubuntu-24.04' || inputs.linux_arm_runner }}
- linux
- bastion
env: env:
USE_S3_CACHE: 'true' USE_S3_CACHE: 'false'
timeout-minutes: 90 timeout-minutes: 90
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include)['linux'] }} include: ${{ fromJSON(inputs.matrix)['linux'] }}
steps: steps:
- uses: actions/setup-python@v5
with:
python-version: '3.10'
- name: "Throttle Builds" - name: "Throttle Builds"
shell: bash shell: bash
@ -105,6 +81,10 @@ jobs:
- name: Checkout Source Code - name: Checkout Source Code
uses: actions/checkout@v4 uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: '3.10'
- name: Cache nox.linux.${{ matrix.arch }}.tar.* for session ${{ inputs.nox-session }} - name: Cache nox.linux.${{ matrix.arch }}.tar.* for session ${{ inputs.nox-session }}
id: nox-dependencies-cache id: nox-dependencies-cache
uses: ./.github/actions/cache uses: ./.github/actions/cache
@ -138,53 +118,34 @@ jobs:
with: with:
cache-prefix: ${{ inputs.cache-prefix }}-build-deps-ci cache-prefix: ${{ inputs.cache-prefix }}-build-deps-ci
- name: Get Salt Project GitHub Actions Bot Environment - name: Install System Dependencies
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: | run: |
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") echo true
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
- name: Start VM - name: Install Nox
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
id: spin-up-vm
run: |
tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ matrix.distro-slug }}
- name: List Free Space
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: | run: |
tools --timestamps vm ssh ${{ matrix.distro-slug }} -- df -h || true python3 -m pip install 'nox==${{ inputs.nox-version }}'
- name: Upload Checkout To VM
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm rsync ${{ matrix.distro-slug }}
- name: Install Dependencies - name: Install Dependencies
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
env:
PRINT_TEST_SELECTION: "0"
PRINT_SYSTEM_INFO: "0"
RELENV_BUILDENV: "1"
run: | run: |
tools --timestamps vm install-dependencies --nox-session=${{ inputs.nox-session }} ${{ matrix.distro-slug }} nox --install-only -e ${{ inputs.nox-session }}
- name: Cleanup .nox Directory - name: Cleanup .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: | run: |
tools --timestamps vm pre-archive-cleanup ${{ matrix.distro-slug }} nox --force-color -e "pre-archive-cleanup(pkg=False)"
- name: Compress .nox Directory - name: Compress .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: | run: |
tools --timestamps vm compress-dependencies ${{ matrix.distro-slug }} nox --force-color -e compress-dependencies -- linux ${{ matrix.arch }}
- name: Download Compressed .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm download-dependencies ${{ matrix.distro-slug }}
- name: Destroy VM
if: always() && steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm destroy --no-wait ${{ matrix.distro-slug }}
- name: Upload Nox Requirements Tarball - name: Upload Nox Requirements Tarball
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v4
@ -194,14 +155,13 @@ jobs:
macos-dependencies: macos-dependencies:
name: MacOS name: MacOS
needs: runs-on: ${{ matrix.arch == 'x86_64' && 'macos-13' || 'macos-14' }}
- generate-matrix if: ${{ toJSON(fromJSON(inputs.matrix)['macos']) != '[]' }}
runs-on: ${{ matrix.distro-slug == 'macos-13-arm64' && 'macos-13-xlarge' || matrix.distro-slug }}
timeout-minutes: 90 timeout-minutes: 90
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include)['macos'] }} include: ${{ fromJSON(inputs.matrix)['macos'] }}
env: env:
PIP_INDEX_URL: https://pypi.org/simple PIP_INDEX_URL: https://pypi.org/simple
steps: steps:
@ -280,21 +240,19 @@ jobs:
name: nox-macos-${{ matrix.arch }}-${{ inputs.nox-session }} name: nox-macos-${{ matrix.arch }}-${{ inputs.nox-session }}
path: nox.macos.${{ matrix.arch }}.tar.* path: nox.macos.${{ matrix.arch }}.tar.*
windows-dependencies: windows-dependencies:
needs:
- generate-matrix
name: Windows name: Windows
runs-on: runs-on: windows-latest
- self-hosted if: ${{ toJSON(fromJSON(inputs.matrix)['windows']) != '[]' }}
- linux
- bastion
env: env:
USE_S3_CACHE: 'true' USE_S3_CACHE: 'false'
GITHUB_WORKSPACE: 'C:\Windows\Temp\testing'
timeout-minutes: 90 timeout-minutes: 90
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include)['windows'] }} include: ${{ fromJSON(inputs.matrix)['windows'] }}
steps: steps:
- name: "Throttle Builds" - name: "Throttle Builds"
@ -302,6 +260,10 @@ jobs:
run: | run: |
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t" t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
- name: "Show environment"
run: |
env
- name: Checkout Source Code - name: Checkout Source Code
uses: actions/checkout@v4 uses: actions/checkout@v4
@ -327,10 +289,11 @@ jobs:
cd artifacts cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-windows-${{ matrix.arch }}.tar.xz tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-windows-${{ matrix.arch }}.tar.xz
- name: PyPi Proxy - name: Set up Python ${{ inputs.python-version }}
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: | uses: actions/setup-python@v5
sed -i '7s;^;--index-url=${{ vars.PIP_INDEX_URL }} --trusted-host ${{ vars.PIP_TRUSTED_HOST }} --extra-index-url=${{ vars.PIP_EXTRA_INDEX_URL }}\n;' requirements/static/ci/*/*.txt with:
python-version: "${{ inputs.python-version }}"
- name: Setup Python Tools Scripts - name: Setup Python Tools Scripts
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
@ -338,53 +301,33 @@ jobs:
with: with:
cache-prefix: ${{ inputs.cache-prefix }}-build-deps-ci cache-prefix: ${{ inputs.cache-prefix }}-build-deps-ci
- name: Get Salt Project GitHub Actions Bot Environment - name: Install System Dependencies
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: | run: |
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") echo true
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
- name: Start VM - name: Install Nox
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
id: spin-up-vm
run: |
tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ matrix.distro-slug }}
- name: List Free Space
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: | run: |
tools --timestamps vm ssh ${{ matrix.distro-slug }} -- df -h || true python3 -m pip install 'nox==${{ inputs.nox-version }}'
- name: Upload Checkout To VM
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm rsync ${{ matrix.distro-slug }}
- name: Install Dependencies - name: Install Dependencies
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
env:
PRINT_TEST_SELECTION: "0"
PRINT_SYSTEM_INFO: "0"
run: | run: |
tools --timestamps vm install-dependencies --nox-session=${{ inputs.nox-session }} ${{ matrix.distro-slug }} nox --install-only -e ${{ inputs.nox-session }}
- name: Cleanup .nox Directory - name: Cleanup .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: | run: |
tools --timestamps vm pre-archive-cleanup ${{ matrix.distro-slug }} nox --force-color -e "pre-archive-cleanup(pkg=False)"
- name: Compress .nox Directory - name: Compress .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: | run: |
tools --timestamps vm compress-dependencies ${{ matrix.distro-slug }} nox --force-color -e compress-dependencies -- windows ${{ matrix.arch }}
- name: Download Compressed .nox Directory
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm download-dependencies ${{ matrix.distro-slug }}
- name: Destroy VM
if: always() && steps.nox-dependencies-cache.outputs.cache-hit != 'true'
run: |
tools --timestamps vm destroy --no-wait ${{ matrix.distro-slug }}
- name: Upload Nox Requirements Tarball - name: Upload Nox Requirements Tarball
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v4

View file

@ -8,12 +8,6 @@ on:
type: string type: string
required: true required: true
description: The Salt version to set prior to building packages. description: The Salt version to set prior to building packages.
github-hosted-runners:
type: boolean
required: true
self-hosted-runners:
type: boolean
required: true
cache-seed: cache-seed:
required: true required: true
type: string type: string
@ -26,6 +20,14 @@ on:
required: true required: true
type: string type: string
description: The version of python to use with relenv description: The version of python to use with relenv
matrix:
required: true
type: string
description: Json job matrix config
linux_arm_runner:
required: true
type: string
description: Json job matrix config
env: env:
RELENV_DATA: "${{ github.workspace }}/.relenv" RELENV_DATA: "${{ github.workspace }}/.relenv"
@ -41,20 +43,15 @@ jobs:
build-deps-linux: build-deps-linux:
name: Linux name: Linux
if: ${{ inputs.self-hosted-runners }} if: ${{ toJSON(fromJSON(inputs.matrix)['linux']) != '[]' }}
runs-on:
- ${{ matrix.arch == 'x86_64' && 'ubuntu-24.04' || inputs.linux_arm_runner }}
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
arch: include: ${{ fromJSON(inputs.matrix)['linux'] }}
- x86_64
- arm64
runs-on:
- self-hosted
- linux
- medium
- ${{ matrix.arch }}
env: env:
USE_S3_CACHE: 'true' USE_S3_CACHE: 'false'
steps: steps:
- name: "Throttle Builds" - name: "Throttle Builds"
@ -64,6 +61,10 @@ jobs:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: '3.10'
- name: Setup Python Tools Scripts - name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts
with: with:
@ -89,19 +90,23 @@ jobs:
build-deps-macos: build-deps-macos:
name: macOS name: macOS
if: ${{ inputs.github-hosted-runners }} if: ${{ toJSON(fromJSON(inputs.matrix)['macos']) != '[]' }}
strategy: strategy:
fail-fast: false fail-fast: false
max-parallel: 2 max-parallel: 2
matrix: matrix:
arch: ${{ github.event.repository.fork && fromJSON('["x86_64"]') || fromJSON('["x86_64", "arm64"]') }} include: ${{ fromJSON(inputs.matrix)['macos'] }}
runs-on: runs-on:
- ${{ matrix.arch == 'arm64' && 'macos-13-xlarge' || 'macos-12' }} - ${{ matrix.arch == 'arm64' && 'macos-14' || 'macos-13' }}
env: env:
USE_S3_CACHE: 'false' USE_S3_CACHE: 'false'
PIP_INDEX_URL: https://pypi.org/simple PIP_INDEX_URL: https://pypi.org/simple
steps: steps:
- name: "Check cores"
shell: bash
run: sysctl -n hw.ncpu
- name: "Throttle Builds" - name: "Throttle Builds"
shell: bash shell: bash
run: | run: |
@ -139,14 +144,12 @@ jobs:
build-deps-windows: build-deps-windows:
name: Windows name: Windows
if: ${{ inputs.github-hosted-runners }} if: ${{ toJSON(fromJSON(inputs.matrix)['windows']) != '[]' }}
strategy: strategy:
fail-fast: false fail-fast: false
max-parallel: 2 max-parallel: 2
matrix: matrix:
arch: include: ${{ fromJSON(inputs.matrix)['windows'] }}
- x86
- amd64
runs-on: windows-latest runs-on: windows-latest
env: env:
USE_S3_CACHE: 'false' USE_S3_CACHE: 'false'

View file

@ -24,15 +24,15 @@ jobs:
build: build:
name: Build name: Build
runs-on: runs-on:
- ubuntu-latest - ubuntu-22.04
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
docs-output: docs-output:
- linkcheck # XXX re-enable lintcheck and spellcheck then fix the errors
- spellcheck # - linkcheck
# - spellcheck
- html - html
# - pdf
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4

View file

@ -36,6 +36,14 @@ on:
required: true required: true
type: string type: string
description: Seed used to invalidate caches description: Seed used to invalidate caches
matrix:
required: true
type: string
description: Json job matrix config
linux_arm_runner:
required: true
type: string
description: Json job matrix config
env: env:
COLUMNS: 190 COLUMNS: 190
@ -46,19 +54,199 @@ env:
jobs: jobs:
build-deb-packages:
name: DEB
if: ${{ toJSON(fromJSON(inputs.matrix)['linux']) != '[]' }}
runs-on:
- ${{ matrix.arch == 'x86_64' && 'ubuntu-24.04' || inputs.linux_arm_runner }}
strategy:
fail-fast: false
matrix:
include: ${{ fromJSON(inputs.matrix)['linux'] }}
container:
image: ghcr.io/saltstack/salt-ci-containers/packaging:debian-12
steps:
# Checkout here so we can easily use custom actions
- uses: actions/checkout@v4
# We need a more recent rustc
- name: Install a more recent `rustc`
if: ${{ inputs.source == 'src' }}
uses: actions-rust-lang/setup-rust-toolchain@v1
- name: Set rust environment variables
if: ${{ inputs.source == 'src' }}
run: |
CARGO_HOME=${CARGO_HOME:-${HOME}/.cargo}
export CARGO_HOME
echo "CARGO_HOME=${CARGO_HOME}" | tee -a "${GITHUB_ENV}"
echo "${CARGO_HOME}/bin" | tee -a "${GITHUB_PATH}"
# Checkout here for the build process
- name: Checkout in build directory
uses: actions/checkout@v4
with:
path:
pkgs/checkout/
- name: Download Onedir Tarball as an Artifact
uses: actions/download-artifact@v4
with:
name: salt-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz
path: pkgs/checkout/artifacts/
- name: Download Release Patch
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
uses: actions/download-artifact@v4
with:
name: salt-${{ inputs.salt-version }}.patch
path: pkgs/checkout/
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cwd: pkgs/checkout/
cache-prefix: ${{ inputs.cache-prefix }}
- name: Setup Salt Version
id: setup-salt-version
uses: ./.github/actions/setup-salt-version
with:
salt-version: "${{ inputs.salt-version }}"
cwd: pkgs/checkout/
- name: Configure Git
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
working-directory: pkgs/checkout/
run: |
tools pkg configure-git
- name: Apply release patch
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
working-directory: pkgs/checkout/
run: |
tools pkg apply-release-patch salt-${{ inputs.salt-version }}.patch --delete
- name: Build Deb
working-directory: pkgs/checkout/
run: |
tools pkg build deb --relenv-version=${{ inputs.relenv-version }} --python-version=${{ inputs.python-version }} ${{
inputs.source == 'onedir' &&
format('--onedir=salt-{0}-onedir-linux-{1}.tar.xz', inputs.salt-version, matrix.arch)
||
format('--arch={0}', matrix.arch)
}}
- name: Cleanup
run: |
rm -rf pkgs/checkout/
- name: Set Artifact Name
id: set-artifact-name
run: |
if [ "${{ inputs.source }}" != "src" ]; then
echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb" >> "$GITHUB_OUTPUT"
else
echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb-from-src" >> "$GITHUB_OUTPUT"
fi
- name: Upload DEBs
uses: actions/upload-artifact@v4
with:
name: ${{ steps.set-artifact-name.outputs.artifact-name }}
path: ${{ github.workspace }}/pkgs/*
retention-days: 7
if-no-files-found: error
build-rpm-packages:
name: RPM
if: ${{ toJSON(fromJSON(inputs.matrix)['linux']) != '[]' }}
runs-on:
- ${{ matrix.arch == 'x86_64' && 'ubuntu-24.04' || inputs.linux_arm_runner }}
strategy:
fail-fast: false
matrix:
include: ${{ fromJSON(inputs.matrix)['linux'] }}
container:
image: ghcr.io/saltstack/salt-ci-containers/packaging:rockylinux-9
steps:
- uses: actions/checkout@v4
- name: Download Onedir Tarball as an Artifact
uses: actions/download-artifact@v4
with:
name: salt-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz
path: artifacts/
- name: Download Release Patch
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
uses: actions/download-artifact@v4
with:
name: salt-${{ inputs.salt-version }}.patch
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-prefix }}
- name: Setup Salt Version
id: setup-salt-version
uses: ./.github/actions/setup-salt-version
with:
salt-version: "${{ inputs.salt-version }}"
- name: Configure Git
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
tools pkg configure-git
- name: Apply release patch
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
tools pkg apply-release-patch salt-${{ inputs.salt-version }}.patch --delete
- name: Build RPM
run: |
tools pkg build rpm --relenv-version=${{ inputs.relenv-version }} --python-version=${{ inputs.python-version }} ${{
inputs.source == 'onedir' &&
format('--onedir=salt-{0}-onedir-linux-{1}.tar.xz', inputs.salt-version, matrix.arch)
||
format('--arch={0}', matrix.arch)
}}
- name: Set Artifact Name
id: set-artifact-name
run: |
if [ "${{ inputs.source }}" != "src" ]; then
echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm" >> "$GITHUB_OUTPUT"
else
echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm-from-src" >> "$GITHUB_OUTPUT"
fi
- name: Upload RPMs
uses: actions/upload-artifact@v4
with:
name: ${{ steps.set-artifact-name.outputs.artifact-name }}
path: ~/rpmbuild/RPMS/${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}/*.rpm
retention-days: 7
if-no-files-found: error
build-macos-pkgs: build-macos-pkgs:
name: macOS name: macOS
if: ${{ toJSON(fromJSON(inputs.matrix)['macos']) != '[]' }}
environment: ${{ inputs.environment }} environment: ${{ inputs.environment }}
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
arch: ${{ github.event.repository.fork && fromJSON('["x86_64"]') || fromJSON('["x86_64", "arm64"]') }} include: ${{ fromJSON(inputs.matrix)['macos'] }}
source:
- ${{ inputs.source }}
env: env:
PIP_INDEX_URL: https://pypi.org/simple PIP_INDEX_URL: https://pypi.org/simple
runs-on: runs-on:
- ${{ matrix.arch == 'arm64' && 'macos-13-xlarge' || 'macos-12' }} - ${{ matrix.arch == 'arm64' && 'macos-14' || 'macos-13' }}
steps: steps:
- name: Check Package Signing Enabled - name: Check Package Signing Enabled
@ -162,212 +350,15 @@ jobs:
retention-days: 7 retention-days: 7
if-no-files-found: error if-no-files-found: error
build-deb-packages:
name: DEB
runs-on:
- self-hosted
- linux
- medium
- ${{ matrix.arch }}
strategy:
fail-fast: false
matrix:
arch:
- x86_64
- arm64
source:
- ${{ inputs.source }}
container:
image: ghcr.io/saltstack/salt-ci-containers/packaging:debian-12
steps:
# Checkout here so we can easily use custom actions
- uses: actions/checkout@v4
# We need a more recent rustc
- name: Install a more recent `rustc`
if: ${{ inputs.source == 'src' }}
uses: actions-rust-lang/setup-rust-toolchain@v1
- name: Set rust environment variables
if: ${{ inputs.source == 'src' }}
run: |
CARGO_HOME=${CARGO_HOME:-${HOME}/.cargo}
export CARGO_HOME
echo "CARGO_HOME=${CARGO_HOME}" | tee -a "${GITHUB_ENV}"
echo "${CARGO_HOME}/bin" | tee -a "${GITHUB_PATH}"
# Checkout here for the build process
- name: Checkout in build directory
uses: actions/checkout@v4
with:
path:
pkgs/checkout/
- name: Download Onedir Tarball as an Artifact
uses: actions/download-artifact@v4
with:
name: salt-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz
path: pkgs/checkout/artifacts/
- name: Download Release Patch
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
uses: actions/download-artifact@v4
with:
name: salt-${{ inputs.salt-version }}.patch
path: pkgs/checkout/
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cwd: pkgs/checkout/
cache-prefix: ${{ inputs.cache-prefix }}
- name: Setup Salt Version
id: setup-salt-version
uses: ./.github/actions/setup-salt-version
with:
salt-version: "${{ inputs.salt-version }}"
cwd: pkgs/checkout/
- name: Configure Git
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
working-directory: pkgs/checkout/
run: |
tools pkg configure-git
- name: Apply release patch
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
working-directory: pkgs/checkout/
run: |
tools pkg apply-release-patch salt-${{ inputs.salt-version }}.patch --delete
- name: Build Deb
working-directory: pkgs/checkout/
run: |
tools pkg build deb --relenv-version=${{ inputs.relenv-version }} --python-version=${{ inputs.python-version }} ${{
inputs.source == 'onedir' &&
format('--onedir=salt-{0}-onedir-linux-{1}.tar.xz', inputs.salt-version, matrix.arch)
||
format('--arch={0}', matrix.arch)
}}
- name: Cleanup
run: |
rm -rf pkgs/checkout/
- name: Set Artifact Name
id: set-artifact-name
run: |
if [ "${{ inputs.source }}" != "src" ]; then
echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb" >> "$GITHUB_OUTPUT"
else
echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb-from-src" >> "$GITHUB_OUTPUT"
fi
- name: Upload DEBs
uses: actions/upload-artifact@v4
with:
name: ${{ steps.set-artifact-name.outputs.artifact-name }}
path: ${{ github.workspace }}/pkgs/*
retention-days: 7
if-no-files-found: error
build-rpm-packages:
name: RPM
runs-on:
- self-hosted
- linux
- medium
- ${{ matrix.arch }}
strategy:
fail-fast: false
matrix:
arch:
- x86_64
- arm64
source:
- ${{ inputs.source }}
container:
image: ghcr.io/saltstack/salt-ci-containers/packaging:rockylinux-9
steps:
- uses: actions/checkout@v4
- name: Download Onedir Tarball as an Artifact
uses: actions/download-artifact@v4
with:
name: salt-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz
path: artifacts/
- name: Download Release Patch
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
uses: actions/download-artifact@v4
with:
name: salt-${{ inputs.salt-version }}.patch
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-prefix }}
- name: Setup Salt Version
id: setup-salt-version
uses: ./.github/actions/setup-salt-version
with:
salt-version: "${{ inputs.salt-version }}"
- name: Configure Git
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
tools pkg configure-git
- name: Apply release patch
if: ${{ startsWith(github.event.ref, 'refs/tags') == false }}
run: |
tools pkg apply-release-patch salt-${{ inputs.salt-version }}.patch --delete
- name: Build RPM
run: |
tools pkg build rpm --relenv-version=${{ inputs.relenv-version }} --python-version=${{ inputs.python-version }} ${{
inputs.source == 'onedir' &&
format('--onedir=salt-{0}-onedir-linux-{1}.tar.xz', inputs.salt-version, matrix.arch)
||
format('--arch={0}', matrix.arch)
}}
- name: Set Artifact Name
id: set-artifact-name
run: |
if [ "${{ inputs.source }}" != "src" ]; then
echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm" >> "$GITHUB_OUTPUT"
else
echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm-from-src" >> "$GITHUB_OUTPUT"
fi
- name: Upload RPMs
uses: actions/upload-artifact@v4
with:
name: ${{ steps.set-artifact-name.outputs.artifact-name }}
path: ~/rpmbuild/RPMS/${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}/*.rpm
retention-days: 7
if-no-files-found: error
build-windows-pkgs: build-windows-pkgs:
name: Windows name: Windows
if: ${{ toJSON(fromJSON(inputs.matrix)['windows']) != '[]' }}
environment: ${{ inputs.environment }} environment: ${{ inputs.environment }}
strategy: strategy:
fail-fast: false fail-fast: false
max-parallel: 2 max-parallel: 2
matrix: matrix:
arch: include: ${{ fromJSON(inputs.matrix)['windows'] }}
- x86
- amd64
source:
- ${{ inputs.source }}
runs-on: runs-on:
- windows-latest - windows-latest
env: env:

View file

@ -8,12 +8,6 @@ on:
type: string type: string
required: true required: true
description: The Salt version to set prior to building packages. description: The Salt version to set prior to building packages.
github-hosted-runners:
type: boolean
required: true
self-hosted-runners:
type: boolean
required: true
cache-seed: cache-seed:
required: true required: true
type: string type: string
@ -26,6 +20,14 @@ on:
required: true required: true
type: string type: string
description: The version of python to use with relenv description: The version of python to use with relenv
matrix:
type: string
required: true
description: Json config for build matrix
linux_arm_runner:
required: true
type: string
description: Json job matrix config
env: env:
RELENV_DATA: "${{ github.workspace }}/.relenv" RELENV_DATA: "${{ github.workspace }}/.relenv"
@ -39,21 +41,18 @@ env:
jobs: jobs:
build-salt-linux: build-salt-linux:
name: Linux name: Linux
if: ${{ inputs.self-hosted-runners }} if: ${{ toJSON(fromJSON(inputs.matrix)['linux']) != '[]' }}
env: env:
USE_S3_CACHE: 'true' USE_S3_CACHE: 'false'
runs-on:
- ${{ matrix.arch == 'x86_64' && 'ubuntu-24.04' || inputs.linux_arm_runner }}
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
arch: include: ${{ fromJSON(inputs.matrix)['linux'] }}
- x86_64
- arm64
runs-on:
- self-hosted
- linux
- ${{ matrix.arch }}
steps: steps:
- name: "Throttle Builds" - name: "Throttle Builds"
@ -63,6 +62,10 @@ jobs:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- uses: actions/setup-python@v5
with:
python-version: '3.10'
- name: Setup Python Tools Scripts - name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts
with: with:
@ -95,18 +98,22 @@ jobs:
build-salt-macos: build-salt-macos:
name: macOS name: macOS
if: ${{ inputs.github-hosted-runners }} if: ${{ toJSON(fromJSON(inputs.matrix)['macos']) != '[]' }}
strategy: strategy:
fail-fast: false fail-fast: false
max-parallel: 2 max-parallel: 2
matrix: matrix:
arch: ${{ github.event.repository.fork && fromJSON('["x86_64"]') || fromJSON('["x86_64", "arm64"]') }} include: ${{ fromJSON(inputs.matrix)['macos'] }}
runs-on: runs-on:
- ${{ matrix.arch == 'arm64' && 'macos-13-xlarge' || 'macos-12' }} - ${{ matrix.arch == 'arm64' && 'macos-14' || 'macos-13' }}
env: env:
PIP_INDEX_URL: https://pypi.org/simple PIP_INDEX_URL: https://pypi.org/simple
steps: steps:
- name: "Check cores"
shell: bash
run: sysctl -n hw.ncpu
- name: "Throttle Builds" - name: "Throttle Builds"
shell: bash shell: bash
run: | run: |
@ -150,14 +157,12 @@ jobs:
build-salt-windows: build-salt-windows:
name: Windows name: Windows
if: ${{ inputs.github-hosted-runners }} if: ${{ toJSON(fromJSON(inputs.matrix)['windows']) != '[]' }}
strategy: strategy:
fail-fast: false fail-fast: false
max-parallel: 2 max-parallel: 2
matrix: matrix:
arch: include: ${{ fromJSON(inputs.matrix)['windows'] }}
- x86
- amd64
runs-on: windows-latest runs-on: windows-latest
env: env:
PIP_INDEX_URL: https://pypi.org/simple PIP_INDEX_URL: https://pypi.org/simple

1676
.github/workflows/ci.yml vendored

File diff suppressed because it is too large Load diff

132
.github/workflows/draft-release.yml vendored Normal file
View file

@ -0,0 +1,132 @@
---
name: Draft Github Release
on:
workflow_call:
inputs:
salt-version:
type: string
required: true
description: The Salt version to set prior to building packages.
matrix:
required: true
type: string
description: Json job matrix config
build-matrix:
required: true
type: string
description: Json job matrix config
env:
COLUMNS: 190
PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
PIP_DISABLE_PIP_VERSION_CHECK: "1"
jobs:
list-artifacts:
name: List Artifacts
runs-on: ubuntu-22.04
steps:
# Checkout here so we can easily use custom actions
- uses: actions/download-artifact@v4
with:
path: artifacts/
- name: List Directory Structure
run: ls -R artifacts/
create-github-release:
name: Draft Release v${{ inputs.salt-version }}
runs-on: ubuntu-22.04
outputs:
upload_url: ${{ steps.create_release.outputs.upload_url }}
steps:
- name: Create Release
id: create_release
uses: actions/create-release@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
release_name: "Release v${{ inputs.salt-version }}"
tag_name: v${{ inputs.salt-version }}
draft: true
prerelease: false
- name: Release Output
run: echo "upload_url=${{ steps.create_release.outputs.upload_url }}" >> "$GITHUB_OUTPUT"
upload-source-tarball:
needs:
- create-github-release
uses: ./.github/workflows/release-artifact.yml
with:
name: salt-${{ inputs.salt-version }}.tar.gz
upload_url: ${{ needs.create-github-release.outputs.upload_url }}
upload-onedir:
needs:
- create-github-release
strategy:
matrix:
include: ${{ fromJSON(inputs.matrix) }}
uses: ./.github/workflows/release-artifact.yml
with:
name: salt-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.${{ matrix.platform == 'windows' && 'zip' || 'tar.xz' }}
upload_url: ${{ needs.create-github-release.outputs.upload_url }}
upload-deb-packages:
needs:
- create-github-release
strategy:
matrix:
include: ${{ fromJSON(inputs.build-matrix)['linux'] }}
uses: ./.github/workflows/release-artifact.yml
with:
name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb
upload_url: ${{ needs.create-github-release.outputs.upload_url }}
pattern: "*.deb"
upload-rpm-packages:
needs:
- create-github-release
strategy:
matrix:
include: ${{ fromJSON(inputs.build-matrix)['linux'] }}
uses: ./.github/workflows/release-artifact.yml
with:
name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm
upload_url: ${{ needs.create-github-release.outputs.upload_url }}
upload-mac-packages:
needs:
- create-github-release
strategy:
matrix:
include: ${{ fromJSON(inputs.build-matrix)['macos'] }}
uses: ./.github/workflows/release-artifact.yml
with:
name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-macos
upload_url: ${{ needs.create-github-release.outputs.upload_url }}
upload-windows-msi-packages:
needs:
- create-github-release
strategy:
matrix:
include: ${{ fromJSON(inputs.build-matrix)['windows'] }}
uses: ./.github/workflows/release-artifact.yml
with:
name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-MSI
upload_url: ${{ needs.create-github-release.outputs.upload_url }}
upload-windows-nsis-packages:
needs:
- create-github-release
strategy:
matrix:
include: ${{ fromJSON(inputs.build-matrix)['windows'] }}
uses: ./.github/workflows/release-artifact.yml
with:
name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-NSIS
upload_url: ${{ needs.create-github-release.outputs.upload_url }}

View file

@ -18,17 +18,13 @@ env:
jobs: jobs:
Salt: Salt:
name: Lint Salt's Source Code name: Lint Salt's Source Code
runs-on: ubuntu-latest runs-on: ubuntu-22.04
if: ${{ contains(fromJSON('["push", "schedule", "workflow_dispatch"]'), github.event_name) || fromJSON(inputs.changed-files)['salt'] || fromJSON(inputs.changed-files)['lint'] }} if: ${{ contains(fromJSON('["push", "schedule", "workflow_dispatch"]'), github.event_name) || fromJSON(inputs.changed-files)['salt'] || fromJSON(inputs.changed-files)['lint'] }}
container: container:
image: ghcr.io/saltstack/salt-ci-containers/python:3.10 image: ghcr.io/saltstack/salt-ci-containers/python:3.10
steps: steps:
- name: Install System Deps
run: |
apt-get update
apt-get install -y enchant-2 git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev
- name: Add Git Safe Directory - name: Add Git Safe Directory
run: | run: |
@ -62,18 +58,13 @@ jobs:
Tests: Tests:
name: Lint Salt's Test Suite name: Lint Salt's Test Suite
runs-on: ubuntu-latest runs-on: ubuntu-22.04
if: ${{ contains(fromJSON('["push", "schedule", "workflow_dispatch"]'), github.event_name) || fromJSON(inputs.changed-files)['tests'] || fromJSON(inputs.changed-files)['lint'] }} if: ${{ contains(fromJSON('["push", "schedule", "workflow_dispatch"]'), github.event_name) || fromJSON(inputs.changed-files)['tests'] || fromJSON(inputs.changed-files)['lint'] }}
container: container:
image: ghcr.io/saltstack/salt-ci-containers/python:3.10 image: ghcr.io/saltstack/salt-ci-containers/python:3.10
steps: steps:
- name: Install System Deps
run: |
echo "deb http://deb.debian.org/debian bookworm-backports main" >> /etc/apt/sources.list
apt-get update
apt-get install -y enchant-2 git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev
- name: Add Git Safe Directory - name: Add Git Safe Directory
run: | run: |

File diff suppressed because it is too large Load diff

View file

@ -21,21 +21,16 @@ jobs:
Pre-Commit: Pre-Commit:
name: Run Pre-Commit Against Salt name: Run Pre-Commit Against Salt
runs-on: ubuntu-latest runs-on: ubuntu-22.04
container: container:
image: ghcr.io/saltstack/salt-ci-containers/python:3.10 image: ghcr.io/saltstack/salt-ci-containers/testing:ubuntu-22.04
env: env:
PRE_COMMIT_COLOR: always PRE_COMMIT_COLOR: always
steps: steps:
- name: Install System Deps
run: |
apt-get update
apt-get install -y wget curl enchant-2 git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev rustc
- name: Add Git Safe Directory - name: Add Git Safe Directory
run: | run: |
git config --global --add safe.directory "$(pwd)" git config --global --add safe.directory "$(pwd)"

69
.github/workflows/release-artifact.yml vendored Normal file
View file

@ -0,0 +1,69 @@
---
name: Upload Release Artifact
on:
workflow_call:
inputs:
name:
type: string
required: true
description: The Salt version to set prior to building packages.
upload_url:
type: string
required: true
description: Release's upload url.
pattern:
type: string
required: false
description: Pattern of files to upload
jobs:
list-files:
name: List ${{ inputs.name }}
runs-on: ubuntu-22.04
outputs:
files: ${{ steps.list-files.outputs.files }}
steps:
- uses: actions/download-artifact@v4
with:
name: ${{ inputs.name }}
path: artifacts
- run: find artifacts -maxdepth 1 -type f -printf '%f\n'
- id: list-files
run: |
if [ "${{ inputs.pattern }}" != "" ]; then
echo files="$(find artifacts -maxdepth 1 -type f -name '${{ inputs.pattern }}' -printf '%f\n' | jq -Rnc '[inputs | { file: "\(.)" }]')" >> "$GITHUB_OUTPUT"
else
echo files="$(find artifacts -maxdepth 1 -type f -printf '%f\n' | jq -Rnc '[inputs | { file: "\(.)" }]')" >> "$GITHUB_OUTPUT"
fi
upload-files:
name: Upload ${{ matrix.file }} from ${{ inputs.name }}
runs-on: ubuntu-22.04
needs:
- list-files
strategy:
matrix:
include: ${{ fromJSON(needs.list-files.outputs.files) }}
steps:
- uses: actions/download-artifact@v4
with:
name: ${{ inputs.name }}
path: artifacts
- name: Detect type of ${{ matrix.file }}
id: file-type
run: echo "file_type=$( file --mime-type artifacts/${{ matrix.file }} )" >> "$GITHUB_OUTPUT"
- name: Upload ${{ matrix.file }}
id: upload-release-asset-source
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ inputs.upload_url }} # This pulls from the CREATE RELEASE step above, referencing it's ID to get its outputs object, which include a `upload_url`. See this blog post for more info: https://jasonet.co/posts/new-features-of-github-actions/#passing-data-to-future-steps
asset_path: artifacts/${{ matrix.file }}
asset_name: ${{ matrix.file }}
asset_content_type: ${{ steps.file-type.outputs.file_type }}

View file

@ -32,7 +32,7 @@ jobs:
permissions: permissions:
contents: write # for dev-drprasad/delete-tag-and-release to delete tags or releases contents: write # for dev-drprasad/delete-tag-and-release to delete tags or releases
name: Generate Tag and Github Release name: Generate Tag and Github Release
runs-on: ubuntu-latest runs-on: ubuntu-22.04
steps: steps:
- uses: dev-drprasad/delete-tag-and-release@v0.2.0 - uses: dev-drprasad/delete-tag-and-release@v0.2.0
if: github.event.inputs.reTag == 'true' if: github.event.inputs.reTag == 'true'

View file

@ -19,7 +19,7 @@ permissions:
jobs: jobs:
update-winrepo: update-winrepo:
name: Update Winrepo name: Update Winrepo
runs-on: ubuntu-latest runs-on: ubuntu-22.04
steps: steps:
- name: Checkout Salt - name: Checkout Salt

View file

@ -31,7 +31,6 @@ jobs:
runs-on: runs-on:
- self-hosted - self-hosted
- linux - linux
- repo-release
steps: steps:
- name: Checkout Salt - name: Checkout Salt

View file

@ -21,7 +21,7 @@ on:
env: env:
COLUMNS: 190 COLUMNS: 190
CACHE_SEED: SEED-2 # Bump the number to invalidate all caches CACHE_SEED: SEED-1 # Bump the number to invalidate all caches
RELENV_DATA: "${{ github.workspace }}/.relenv" RELENV_DATA: "${{ github.workspace }}/.relenv"
PIP_DISABLE_PIP_VERSION_CHECK: "1" PIP_DISABLE_PIP_VERSION_CHECK: "1"
RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1" RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
@ -37,7 +37,7 @@ jobs:
check-requirements: check-requirements:
name: Check Requirements name: Check Requirements
runs-on: ubuntu-latest runs-on: ubuntu-22.04
environment: release-check environment: release-check
steps: steps:
- name: Check For Admin Permission - name: Check For Admin Permission
@ -49,11 +49,9 @@ jobs:
prepare-workflow: prepare-workflow:
name: Prepare Workflow Run name: Prepare Workflow Run
runs-on: runs-on:
- self-hosted - linux-x86_64
- linux
- repo-release
env: env:
USE_S3_CACHE: 'true' USE_S3_CACHE: 'false'
environment: release environment: release
needs: needs:
- check-requirements - check-requirements
@ -63,6 +61,7 @@ jobs:
latest-release: ${{ steps.get-salt-releases.outputs.latest-release }} latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
releases: ${{ steps.get-salt-releases.outputs.releases }} releases: ${{ steps.get-salt-releases.outputs.releases }}
nox-archive-hash: ${{ steps.nox-archive-hash.outputs.nox-archive-hash }} nox-archive-hash: ${{ steps.nox-archive-hash.outputs.nox-archive-hash }}
config: ${{ steps.workflow-config.outputs.config }}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
@ -121,12 +120,15 @@ jobs:
run: | run: |
echo "nox-archive-hash=${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py', 'pkg/common/env-cleanup-rules.yml', '.github/workflows/build-deps-ci-action.yml') }}" | tee -a "$GITHUB_OUTPUT" echo "nox-archive-hash=${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py', 'pkg/common/env-cleanup-rules.yml', '.github/workflows/build-deps-ci-action.yml') }}" | tee -a "$GITHUB_OUTPUT"
- name: Define workflow config
id: workflow-config
run: |
tools ci workflow-config${{ inputs.skip-salt-pkg-download-test-suite && ' --skip-pkg-download-tests' || '' }} ${{ steps.setup-salt-version.outputs.salt-version }} ${{ github.event_name }} changed-files.json
download-onedir-artifact: download-onedir-artifact:
name: Download Staging Onedir Artifact name: Download Staging Onedir Artifact
runs-on: runs-on:
- self-hosted - linux-x86_64
- linux
- repo-release
env: env:
USE_S3_CACHE: 'true' USE_S3_CACHE: 'true'
environment: release environment: release
@ -186,13 +188,13 @@ jobs:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15 cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}" nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}"
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
backup: backup:
name: Backup name: Backup
runs-on: runs-on:
- self-hosted - linux-x86_64
- linux
- repo-release
needs: needs:
- prepare-workflow - prepare-workflow
env: env:
@ -223,9 +225,7 @@ jobs:
publish-repositories: publish-repositories:
name: Publish Repositories name: Publish Repositories
runs-on: runs-on:
- self-hosted - linux-x86_64
- linux
- repo-release
env: env:
USE_S3_CACHE: 'true' USE_S3_CACHE: 'true'
needs: needs:
@ -255,40 +255,17 @@ jobs:
run: | run: |
tools pkg repo publish release ${{ needs.prepare-workflow.outputs.salt-version }} tools pkg repo publish release ${{ needs.prepare-workflow.outputs.salt-version }}
pkg-download-tests:
name: Package Downloads
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
needs:
- prepare-workflow
- publish-repositories
- build-ci-deps
- download-onedir-artifact
uses: ./.github/workflows/test-package-downloads-action.yml
with:
nox-session: ci-test-onedir
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.15
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
environment: release
nox-version: 2022.8.7
python-version: "3.10"
skip-code-coverage: true
latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}"
secrets: inherit
release: release:
name: Release v${{ needs.prepare-workflow.outputs.salt-version }} name: Release v${{ needs.prepare-workflow.outputs.salt-version }}
if: ${{ always() && ! failure() && ! cancelled() }} if: ${{ always() && ! failure() && ! cancelled() }}
runs-on: runs-on:
- self-hosted - linux-x86_64
- linux
- repo-release
env: env:
USE_S3_CACHE: 'true' USE_S3_CACHE: 'true'
needs: needs:
- prepare-workflow - prepare-workflow
- backup - backup
- publish-repositories - publish-repositories
- pkg-download-tests
environment: release environment: release
steps: steps:
- name: Clone The Salt Repository - name: Clone The Salt Repository
@ -395,9 +372,7 @@ jobs:
- release - release
environment: release environment: release
runs-on: runs-on:
- self-hosted - linux-x86_64
- linux
- repo-release
env: env:
USE_S3_CACHE: 'true' USE_S3_CACHE: 'true'
steps: steps:
@ -443,18 +418,16 @@ jobs:
TWINE_PASSWORD: "${{ steps.get-secrets.outputs.twine-password }}" TWINE_PASSWORD: "${{ steps.get-secrets.outputs.twine-password }}"
run: | run: |
tools pkg pypi-upload artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz tools pkg pypi-upload artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
set-pipeline-exit-status: set-pipeline-exit-status:
# This step is just so we can make github require this step, to pass checks # This step is just so we can make github require this step, to pass checks
# on a pull request instead of requiring all # on a pull request instead of requiring all
name: Set the ${{ github.workflow }} Pipeline Exit Status name: Set the ${{ github.workflow }} Pipeline Exit Status
if: always() if: always()
runs-on: ubuntu-latest runs-on: ubuntu-22.04
needs: needs:
- check-requirements - check-requirements
- prepare-workflow - prepare-workflow
- publish-repositories - publish-repositories
- pkg-download-tests
- release - release
- publish-pypi - publish-pypi
- build-ci-deps - build-ci-deps
@ -480,8 +453,3 @@ jobs:
else else
exit 0 exit 0
fi fi
- name: Done
if: always()
run:
echo "All worflows finished"

File diff suppressed because it is too large Load diff

40
.github/workflows/ssh-debug.yml vendored Normal file
View file

@ -0,0 +1,40 @@
---
# Manually-dispatched workflow that opens an SSH debug tunnel to a chosen
# runner via the repo-local ssh-tunnel composite action.
name: SSH Debug
run-name: "SSH Debug ${{ inputs.runner }}"

on:
  workflow_dispatch:
    inputs:
      runner:
        type: string
        # Canonical lowercase YAML booleans (yamllint `truthy`); `True` is a
        # YAML 1.1-ism that some parsers reject or mis-type.
        required: true
        description: The runner to start a tunnel on.
      offer:
        type: string
        required: true
        description: SDP Offer
      public_key:
        type: string
        required: true
        description: Your public key for ssh access.
      debug:
        required: false
        type: boolean
        default: false
        description: Run sshd with debug enabled.

jobs:
  debug:
    runs-on: ${{ inputs.runner }}
    # Skip the job outright if no runner label was supplied, instead of
    # queueing forever on a non-existent runner.
    if: ${{ inputs.runner }}
    environment: ci
    steps:
      - name: Checkout Source Code
        uses: actions/checkout@v4
      - uses: ./.github/actions/ssh-tunnel
        with:
          public_key: ${{ inputs.public_key }}
          offer: ${{ inputs.offer }}
          debug: ${{ inputs.debug }}

File diff suppressed because it is too large Load diff

View file

@ -1,9 +1,10 @@
build-ci-deps: build-ci-deps:
<%- do test_salt_needs.append("build-ci-deps") %> <%- do test_salt_needs.append("build-ci-deps") %>
<%- do test_salt_linux_needs.append("build-ci-deps") %>
name: CI Deps name: CI Deps
<%- if workflow_slug != 'release' %> <%- if workflow_slug != 'release' %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-deps-ci'] }}
<%- endif %> <%- endif %>
needs: needs:
- prepare-workflow - prepare-workflow
@ -20,3 +21,5 @@
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }> cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}" nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}"
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}

View file

@ -23,12 +23,6 @@
with: with:
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
- name: Get Salt Project GitHub Actions Bot Environment
run: |
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
- name: Download DEB Packages - name: Download DEB Packages
uses: actions/download-artifact@v4 uses: actions/download-artifact@v4
with: with:

View file

@ -13,12 +13,6 @@
with: with:
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
- name: Get Salt Project GitHub Actions Bot Environment
run: |
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
- name: Download macOS x86_64 Packages - name: Download macOS x86_64 Packages
uses: actions/download-artifact@v4 uses: actions/download-artifact@v4
with: with:

View file

@ -13,12 +13,6 @@
with: with:
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
- name: Get Salt Project GitHub Actions Bot Environment
run: |
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
- name: Download Linux x86_64 Onedir Archive - name: Download Linux x86_64 Onedir Archive
uses: actions/download-artifact@v4 uses: actions/download-artifact@v4
with: with:

View file

@ -1,4 +1,9 @@
<%- for backend in ("onedir", "src") %> <%- if gh_environment != "ci" -%>
<%- set pkg_types = ("onedir", "src") %>
<%- else -%>
<%- set pkg_types = ("onedir",) %>
<%- endif -%>
<%- for backend in pkg_types %>
<%- set job_name = "build-pkgs-{}".format(backend) %> <%- set job_name = "build-pkgs-{}".format(backend) %>
<%- if backend == "src" %> <%- if backend == "src" %>
<%- do conclusion_needs.append(job_name) %> <%- do conclusion_needs.append(job_name) %>
@ -6,7 +11,7 @@
<{ job_name }>: <{ job_name }>:
name: Build Packages name: Build Packages
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['build-pkgs'] }}
needs: needs:
- prepare-workflow - prepare-workflow
- build-salt-onedir - build-salt-onedir
@ -17,11 +22,14 @@
relenv-version: "<{ relenv_version }>" relenv-version: "<{ relenv_version }>"
python-version: "<{ python_version }>" python-version: "<{ python_version }>"
source: "<{ backend }>" source: "<{ backend }>"
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
<%- if gh_environment != "ci" %> <%- if gh_environment != "ci" %>
environment: <{ gh_environment }> environment: <{ gh_environment }>
sign-macos-packages: false sign-macos-packages: false
sign-windows-packages: <% if gh_environment == 'nightly' -%> false <%- else -%> ${{ inputs.sign-windows-packages }} <%- endif %> sign-windows-packages: <% if gh_environment == 'nightly' -%> false <%- else -%> ${{ inputs.sign-windows-packages }} <%- endif %>
secrets: inherit secrets: inherit
<%- endif %> <%- endif %>
<%- endfor %> <%- endfor %>

View file

@ -1,35 +0,0 @@
<%- for type, display_name in (
("src", "Source"),
("deb", "DEB"),
("rpm", "RPM"),
("windows", "Windows"),
("macos", "macOS"),
("onedir", "Onedir"),
) %>
<%- set job_name = "build-{}-repo".format(type) %>
<%- do build_repo_needs.append(job_name) %>
<{ job_name }>:
name: Build Repository
environment: <{ gh_environment }>
runs-on:
- self-hosted
- linux
- repo-<{ gh_environment }>
env:
USE_S3_CACHE: 'true'
needs:
- prepare-workflow
<%- if type not in ("src", "onedir") %>
- build-pkgs-onedir
<%- elif type == 'onedir' %>
- build-salt-onedir
<%- elif type == 'src' %>
- build-source-tarball
- build-pkgs-src
<%- endif %>
<%- include "build-{}-repo.yml.jinja".format(type) %>
<%- endfor %>

View file

@ -23,12 +23,6 @@
with: with:
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
- name: Get Salt Project GitHub Actions Bot Environment
run: |
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
- name: Download RPM Packages - name: Download RPM Packages
uses: actions/download-artifact@v4 uses: actions/download-artifact@v4
with: with:

View file

@ -13,12 +13,6 @@
with: with:
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
- name: Get Salt Project GitHub Actions Bot Environment
run: |
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
- name: Download Source Tarball - name: Download Source Tarball
uses: actions/download-artifact@v4 uses: actions/download-artifact@v4
with: with:

View file

@ -12,7 +12,6 @@
<{ job_name }>: <{ job_name }>:
<%- do conclusion_needs.append(job_name) %> <%- do conclusion_needs.append(job_name) %>
name: Pre-Commit name: Pre-Commit
if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
uses: ./.github/workflows/pre-commit-action.yml uses: ./.github/workflows/pre-commit-action.yml
needs: needs:
- prepare-workflow - prepare-workflow
@ -30,7 +29,7 @@
lint: lint:
<%- do conclusion_needs.append('lint') %> <%- do conclusion_needs.append('lint') %>
name: Lint name: Lint
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['<{ job_name }>'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }}
uses: ./.github/workflows/lint-action.yml uses: ./.github/workflows/lint-action.yml
needs: needs:
- prepare-workflow - prepare-workflow
@ -44,7 +43,6 @@
<{ job_name }>: <{ job_name }>:
<%- do conclusion_needs.append(job_name) %> <%- do conclusion_needs.append(job_name) %>
name: NSIS Tests name: NSIS Tests
if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
uses: ./.github/workflows/nsis-tests.yml uses: ./.github/workflows/nsis-tests.yml
needs: needs:
- prepare-workflow - prepare-workflow
@ -58,31 +56,19 @@
<{ job_name }>: <{ job_name }>:
name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}" name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}"
<%- if prepare_actual_release %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['<{ job_name }>'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
runs-on: runs-on:
- self-hosted - ubuntu-22.04
- linux if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }}
- medium
- x86_64
<%- else %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['<{ job_name }>'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
runs-on: ubuntu-latest
<%- endif %>
needs: needs:
- prepare-workflow - prepare-workflow
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
<%- if not prepare_actual_release %>
- name: Set up Python 3.10 - name: Set up Python 3.10
uses: actions/setup-python@v5 uses: actions/setup-python@v5
with: with:
python-version: "3.10" python-version: "3.10"
<%- endif %>
- name: Setup Python Tools Scripts - name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts
with: with:
@ -204,7 +190,7 @@
<{ job_name }>: <{ job_name }>:
<%- do conclusion_needs.append(job_name) %> <%- do conclusion_needs.append(job_name) %>
name: Documentation name: Documentation
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['<{ job_name }>'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }}
needs: needs:
- prepare-workflow - prepare-workflow
- build-source-tarball - build-source-tarball
@ -221,11 +207,11 @@
<{ job_name }>: <{ job_name }>:
name: Build Source Tarball name: Build Source Tarball
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['<{ job_name }>'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }}
needs: needs:
- prepare-workflow - prepare-workflow
- prepare-release - prepare-release
runs-on: ubuntu-latest runs-on: ubuntu-22.04
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
@ -258,29 +244,28 @@
<{ job_name }>: <{ job_name }>:
<%- do conclusion_needs.append(job_name) %> <%- do conclusion_needs.append(job_name) %>
name: Build Dependencies Onedir name: Build Onedir Dependencies
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['<{ job_name }>'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }}
needs: needs:
- prepare-workflow - prepare-workflow
uses: ./.github/workflows/build-deps-onedir.yml uses: ./.github/workflows/build-deps-onedir.yml
with: with:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "<{ relenv_version }>" relenv-version: "<{ relenv_version }>"
python-version: "<{ python_version }>" python-version: "<{ python_version }>"
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
<%- endif %> <%- endif %>
<%- set job_name = "build-salt-onedir" %> <%- set job_name = "build-salt-onedir" %>
<%- if includes.get(job_name, True) %> <%- if includes.get(job_name, True) %>
<{ job_name }>: <{ job_name }>:
<%- do conclusion_needs.append(job_name) %> <%- do conclusion_needs.append(job_name) %>
name: Build Salt Onedir name: Build Salt Onedir
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['<{ job_name }>'] }} if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['<{ job_name }>'] }}
needs: needs:
- prepare-workflow - prepare-workflow
- build-deps-onedir - build-deps-onedir
@ -289,14 +274,13 @@
with: with:
cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
relenv-version: "<{ relenv_version }>" relenv-version: "<{ relenv_version }>"
python-version: "<{ python_version }>" python-version: "<{ python_version }>"
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
<%- endif %> <%- endif %>
<%- set job_name = "build-pkgs" %> <%- set job_name = "build-pkgs" %>
<%- if includes.get(job_name, True) %> <%- if includes.get(job_name, True) %>
<%- include "build-packages.yml.jinja" %> <%- include "build-packages.yml.jinja" %>
@ -322,8 +306,8 @@
combine-all-code-coverage: combine-all-code-coverage:
<%- do conclusion_needs.append("combine-all-code-coverage") %> <%- do conclusion_needs.append("combine-all-code-coverage") %>
name: Combine Code Coverage name: Combine Code Coverage
if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }} if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] == false }}
runs-on: ubuntu-latest runs-on: ubuntu-22.04
env: env:
PIP_INDEX_URL: https://pypi.org/simple PIP_INDEX_URL: https://pypi.org/simple
needs: needs:

View file

@ -5,7 +5,7 @@
<%- set prepare_workflow_skip_pkg_test_suite = prepare_workflow_skip_pkg_test_suite|default("") %> <%- set prepare_workflow_skip_pkg_test_suite = prepare_workflow_skip_pkg_test_suite|default("") %>
<%- set prepare_workflow_skip_pkg_download_test_suite = prepare_workflow_skip_pkg_download_test_suite|default("") %> <%- set prepare_workflow_skip_pkg_download_test_suite = prepare_workflow_skip_pkg_download_test_suite|default("") %>
<%- set prepare_workflow_salt_version_input = prepare_workflow_salt_version_input|default("") %> <%- set prepare_workflow_salt_version_input = prepare_workflow_salt_version_input|default("") %>
<%- set skip_test_coverage_check = skip_test_coverage_check|default("${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}") %> <%- set skip_test_coverage_check = skip_test_coverage_check|default("${{ fromJSON(needs.prepare-workflow.outputs.config)['skip_code_coverage'] }}") %>
<%- set gpg_key_id = "64CBBC8173D76B3F" %> <%- set gpg_key_id = "64CBBC8173D76B3F" %>
<%- set prepare_actual_release = prepare_actual_release | default(False) %> <%- set prepare_actual_release = prepare_actual_release | default(False) %>
<%- set gh_actions_workflows_python_version = "3.10" %> <%- set gh_actions_workflows_python_version = "3.10" %>
@ -34,7 +34,7 @@ on:
env: env:
COLUMNS: 190 COLUMNS: 190
CACHE_SEED: SEED-2 # Bump the number to invalidate all caches CACHE_SEED: SEED-1 # Bump the number to invalidate all caches
RELENV_DATA: "${{ github.workspace }}/.relenv" RELENV_DATA: "${{ github.workspace }}/.relenv"
PIP_DISABLE_PIP_VERSION_CHECK: "1" PIP_DISABLE_PIP_VERSION_CHECK: "1"
RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1" RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
@ -77,7 +77,8 @@ jobs:
prepare-workflow: prepare-workflow:
name: Prepare Workflow Run name: Prepare Workflow Run
runs-on: ubuntu-latest runs-on: ubuntu-22.04
environment: ci
<%- if prepare_workflow_if_check %> <%- if prepare_workflow_if_check %>
if: <{ prepare_workflow_if_check }> if: <{ prepare_workflow_if_check }>
<%- endif %> <%- endif %>
@ -88,12 +89,7 @@ jobs:
<%- endfor %> <%- endfor %>
<%- endif %> <%- endif %>
outputs: outputs:
jobs: ${{ steps.define-jobs.outputs.jobs }}
runners: ${{ steps.runner-types.outputs.runners }}
changed-files: ${{ steps.process-changed-files.outputs.changed-files }} changed-files: ${{ steps.process-changed-files.outputs.changed-files }}
os-labels: ${{ steps.get-pull-labels.outputs.os-labels }}
pull-labels: ${{ steps.get-pull-labels.outputs.test-labels }}
testrun: ${{ steps.define-testrun.outputs.testrun }}
salt-version: ${{ steps.setup-salt-version.outputs.salt-version }} salt-version: ${{ steps.setup-salt-version.outputs.salt-version }}
cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }}
latest-release: ${{ steps.get-salt-releases.outputs.latest-release }} latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
@ -101,6 +97,11 @@ jobs:
release-changelog-target: ${{ steps.get-release-changelog-target.outputs.release-changelog-target }} release-changelog-target: ${{ steps.get-release-changelog-target.outputs.release-changelog-target }}
testing-releases: ${{ steps.get-testing-releases.outputs.testing-releases }} testing-releases: ${{ steps.get-testing-releases.outputs.testing-releases }}
nox-archive-hash: ${{ steps.nox-archive-hash.outputs.nox-archive-hash }} nox-archive-hash: ${{ steps.nox-archive-hash.outputs.nox-archive-hash }}
config: ${{ steps.workflow-config.outputs.config }}
env:
LINUX_ARM_RUNNER: ${{ vars.LINUX_ARM_RUNNER }}
FULL_TESTRUN_SLUGS: ${{ vars.FULL_TESTRUN_SLUGS }}
PR_TESTRUN_SLUGS: ${{ vars.PR_TESTRUN_SLUGS }}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
@ -213,14 +214,6 @@ jobs:
salt-version: "<{ prepare_workflow_salt_version_input }>" salt-version: "<{ prepare_workflow_salt_version_input }>"
validate-version: true validate-version: true
- name: Get Pull Request Test Labels
id: get-pull-labels
if: ${{ github.event_name == 'pull_request'}}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
tools ci get-pr-test-labels --repository ${{ github.repository }}
- name: Get Hash For Nox Tarball Cache - name: Get Hash For Nox Tarball Cache
id: nox-archive-hash id: nox-archive-hash
run: | run: |
@ -259,18 +252,6 @@ jobs:
run: | run: |
echo '${{ steps.process-changed-files.outputs.changed-files }}' | jq -C '.' echo '${{ steps.process-changed-files.outputs.changed-files }}' | jq -C '.'
- name: Define Runner Types
id: runner-types
run: |
tools ci runner-types ${{ github.event_name }}
- name: Define Jobs To Run
id: define-jobs
run: |
tools ci define-jobs<{ prepare_workflow_skip_test_suite }><{
prepare_workflow_skip_pkg_test_suite }><{ prepare_workflow_skip_pkg_download_test_suite
}> ${{ github.event_name }} changed-files.json
- name: Get Salt Releases - name: Get Salt Releases
id: get-salt-releases id: get-salt-releases
env: env:
@ -285,18 +266,20 @@ jobs:
run: | run: |
tools ci get-testing-releases ${{ join(fromJSON(steps.get-salt-releases.outputs.releases), ' ') }} --salt-version ${{ steps.setup-salt-version.outputs.salt-version }} tools ci get-testing-releases ${{ join(fromJSON(steps.get-salt-releases.outputs.releases), ' ') }} --salt-version ${{ steps.setup-salt-version.outputs.salt-version }}
- name: Define Testrun - name: Define workflow config
id: define-testrun id: workflow-config
run: | run: |
tools ci define-testrun ${{ github.event_name }} changed-files.json tools ci workflow-config<{ prepare_workflow_skip_test_suite }><{
prepare_workflow_skip_pkg_test_suite }><{ prepare_workflow_skip_pkg_download_test_suite
}> ${{ steps.setup-salt-version.outputs.salt-version }} ${{ github.event_name }} changed-files.json
- name: Check Contents of generated testrun-changed-files.txt - name: Check Contents of generated testrun-changed-files.txt
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }} if: ${{ fromJSON(steps.workflow-config.outputs.config)['testrun']['type'] != 'full' }}
run: | run: |
cat testrun-changed-files.txt || true cat testrun-changed-files.txt || true
- name: Upload testrun-changed-files.txt - name: Upload testrun-changed-files.txt
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }} if: ${{ fromJSON(steps.workflow-config.outputs.config)['testrun']['type'] != 'full' }}
uses: actions/upload-artifact@v4 uses: actions/upload-artifact@v4
with: with:
name: testrun-changed-files.txt name: testrun-changed-files.txt
@ -310,18 +293,18 @@ jobs:
{# We can't yet use tokenless uploads with the codecov CLI {# We can't yet use tokenless uploads with the codecov CLI
- name: Install Codecov CLI - name: Install Codecov CLI
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['skip_code_coverage'] == false }} if: ${{ fromJSON(steps.define-testrun.outputs.config)['skip_code_coverage'] == false }}
run: | run: |
python3 -m pip install codecov-cli python3 -m pip install codecov-cli
- name: Save Commit Metadata In Codecov - name: Save Commit Metadata In Codecov
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['skip_code_coverage'] == false }} if: ${{ fromJSON(steps.define-testrun.outputs.config)['skip_code_coverage'] == false }}
run: | run: |
codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \ codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
create-commit --git-service github --sha ${{ github.sha }} create-commit --git-service github --sha ${{ github.sha }}
- name: Create Codecov Coverage Report - name: Create Codecov Coverage Report
if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['skip_code_coverage'] == false }} if: ${{ fromJSON(steps.define-testrun.outputs.config)['skip_code_coverage'] == false }}
run: | run: |
codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \ codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \
create-report --git-service github --sha ${{ github.sha }} create-report --git-service github --sha ${{ github.sha }}
@ -332,13 +315,12 @@ jobs:
<%- endif %> <%- endif %>
<%- endblock jobs %> <%- endblock jobs %>
set-pipeline-exit-status: set-pipeline-exit-status:
# This step is just so we can make github require this step, to pass checks # This step is just so we can make github require this step, to pass checks
# on a pull request instead of requiring all # on a pull request instead of requiring all
name: Set the ${{ github.workflow }} Pipeline Exit Status name: Set the ${{ github.workflow }} Pipeline Exit Status
if: always() if: always()
runs-on: ubuntu-latest runs-on: ubuntu-22.04
<%- if workflow_slug == "nightly" %> <%- if workflow_slug == "nightly" %>
environment: <{ workflow_slug }> environment: <{ workflow_slug }>
<%- endif %> <%- endif %>
@ -358,6 +340,10 @@ jobs:
<%- for need in test_repo_needs.iter(consume=True) %> <%- for need in test_repo_needs.iter(consume=True) %>
- <{ need }> - <{ need }>
<%- endfor %> <%- endfor %>
<%- if workflow_slug != "release" %>
- test-packages
- test
<%- endif %>
steps: steps:
- name: Get workflow information - name: Get workflow information
id: get-workflow-info id: get-workflow-info
@ -374,8 +360,3 @@ jobs:
else else
exit 0 exit 0
fi fi
- name: Done
if: always()
run:
echo "All worflows finished"

View file

@ -56,67 +56,5 @@ concurrency:
<%- block jobs %> <%- block jobs %>
<{- super() }> <{- super() }>
<%- if includes.get("build-repos", True) %>
<%- include "build-repos.yml.jinja" %>
<%- endif %>
publish-repositories:
<%- do conclusion_needs.append('publish-repositories') %>
name: Publish Repositories
if: ${{ always() && ! failure() && ! cancelled() }}
runs-on:
- self-hosted
- linux
- repo-<{ gh_environment }>
environment: <{ gh_environment }>
needs:
- prepare-workflow
- build-docs
<%- for need in build_repo_needs.iter(consume=True) %>
- <{ need }>
<%- endfor %>
<%- if workflow_slug == "nightly" %>
<%- for need in test_salt_needs.iter(consume=True) %>
- <{ need }>
<%- endfor %>
<%- endif %>
steps:
- uses: actions/checkout@v4
- name: Get Salt Project GitHub Actions Bot Environment
run: |
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}
- name: Download Repository Artifact
uses: actions/download-artifact@v4
with:
pattern: salt-${{ needs.prepare-workflow.outputs.salt-version }}-<{ gh_environment }>-repo-*
merge-multiple: true
path: repo/
- name: Decompress Repository Artifacts
run: |
find repo/ -type f -name '*.tar.gz' -print -exec tar xvf {} \;
find repo/ -type f -name '*.tar.gz' -print -exec rm -f {} \;
- name: Show Repository
run: |
tree -a artifacts/pkgs/repo/
- name: Upload Repository Contents (<{ gh_environment }>)
env:
SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }}
SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }}
run: |
tools pkg repo publish <{ gh_environment }> --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} artifacts/pkgs/repo/
<%- endblock jobs %> <%- endblock jobs %>

View file

@ -52,7 +52,7 @@ permissions:
<{ job_name }>: <{ job_name }>:
<%- do prepare_workflow_needs.append(job_name) %> <%- do prepare_workflow_needs.append(job_name) %>
name: Check Requirements name: Check Requirements
runs-on: ubuntu-latest runs-on: ubuntu-22.04
environment: <{ gh_environment }>-check environment: <{ gh_environment }>-check
steps: steps:
- name: Check For Admin Permission - name: Check For Admin Permission
@ -71,11 +71,9 @@ permissions:
prepare-workflow: prepare-workflow:
name: Prepare Workflow Run name: Prepare Workflow Run
runs-on: runs-on:
- self-hosted - linux-x86_64
- linux
- repo-<{ gh_environment }>
env: env:
USE_S3_CACHE: 'true' USE_S3_CACHE: 'false'
environment: <{ gh_environment }> environment: <{ gh_environment }>
<%- if prepare_workflow_needs %> <%- if prepare_workflow_needs %>
needs: needs:
@ -89,6 +87,7 @@ permissions:
latest-release: ${{ steps.get-salt-releases.outputs.latest-release }} latest-release: ${{ steps.get-salt-releases.outputs.latest-release }}
releases: ${{ steps.get-salt-releases.outputs.releases }} releases: ${{ steps.get-salt-releases.outputs.releases }}
nox-archive-hash: ${{ steps.nox-archive-hash.outputs.nox-archive-hash }} nox-archive-hash: ${{ steps.nox-archive-hash.outputs.nox-archive-hash }}
config: ${{ steps.workflow-config.outputs.config }}
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:
@ -147,6 +146,14 @@ permissions:
run: | run: |
echo "nox-archive-hash=<{ nox_archive_hashfiles }>" | tee -a "$GITHUB_OUTPUT" echo "nox-archive-hash=<{ nox_archive_hashfiles }>" | tee -a "$GITHUB_OUTPUT"
- name: Define workflow config
id: workflow-config
run: |
tools ci workflow-config<{ prepare_workflow_skip_test_suite }><{
prepare_workflow_skip_pkg_test_suite }><{ prepare_workflow_skip_pkg_download_test_suite
}> ${{ steps.setup-salt-version.outputs.salt-version }} ${{ github.event_name }} changed-files.json
<%- endblock prepare_workflow_job %> <%- endblock prepare_workflow_job %>
<%- endif %> <%- endif %>
@ -156,9 +163,7 @@ permissions:
download-onedir-artifact: download-onedir-artifact:
name: Download Staging Onedir Artifact name: Download Staging Onedir Artifact
runs-on: runs-on:
- self-hosted - linux-x86_64
- linux
- repo-<{ gh_environment }>
env: env:
USE_S3_CACHE: 'true' USE_S3_CACHE: 'true'
environment: <{ gh_environment }> environment: <{ gh_environment }>
@ -211,9 +216,7 @@ permissions:
backup: backup:
name: Backup name: Backup
runs-on: runs-on:
- self-hosted - linux-x86_64
- linux
- repo-<{ gh_environment }>
needs: needs:
- prepare-workflow - prepare-workflow
env: env:
@ -245,9 +248,7 @@ permissions:
<%- do conclusion_needs.append('publish-repositories') %> <%- do conclusion_needs.append('publish-repositories') %>
name: Publish Repositories name: Publish Repositories
runs-on: runs-on:
- self-hosted - linux-x86_64
- linux
- repo-<{ gh_environment }>
env: env:
USE_S3_CACHE: 'true' USE_S3_CACHE: 'true'
needs: needs:
@ -277,18 +278,12 @@ permissions:
run: | run: |
tools pkg repo publish <{ gh_environment }> ${{ needs.prepare-workflow.outputs.salt-version }} tools pkg repo publish <{ gh_environment }> ${{ needs.prepare-workflow.outputs.salt-version }}
<%- if includes.get("test-pkg-downloads", True) %>
<%- include "test-salt-pkg-repo-downloads.yml.jinja" %>
<%- endif %>
release: release:
<%- do conclusion_needs.append('release') %> <%- do conclusion_needs.append('release') %>
name: Release v${{ needs.prepare-workflow.outputs.salt-version }} name: Release v${{ needs.prepare-workflow.outputs.salt-version }}
if: ${{ always() && ! failure() && ! cancelled() }} if: ${{ always() && ! failure() && ! cancelled() }}
runs-on: runs-on:
- self-hosted - linux-x86_64
- linux
- repo-<{ gh_environment }>
env: env:
USE_S3_CACHE: 'true' USE_S3_CACHE: 'true'
needs: needs:
@ -402,9 +397,7 @@ permissions:
name: Restore Release Bucket From Backup name: Restore Release Bucket From Backup
if: ${{ always() && needs.backup.outputs.backup-complete == 'true' && (failure() || cancelled()) }} if: ${{ always() && needs.backup.outputs.backup-complete == 'true' && (failure() || cancelled()) }}
runs-on: runs-on:
- self-hosted - linux-x86_64
- linux
- repo-<{ gh_environment }>
env: env:
USE_S3_CACHE: 'true' USE_S3_CACHE: 'true'
needs: needs:
@ -445,9 +438,7 @@ permissions:
- restore #} - restore #}
environment: <{ gh_environment }> environment: <{ gh_environment }>
runs-on: runs-on:
- self-hosted - linux-x86_64
- linux
- repo-<{ gh_environment }>
env: env:
USE_S3_CACHE: 'true' USE_S3_CACHE: 'true'
steps: steps:

View file

@ -51,9 +51,9 @@ on:
<%- block concurrency %> <%- block concurrency %>
concurrency: #concurrency:
group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.repository }} # group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.repository }}
cancel-in-progress: false # cancel-in-progress: false
<%- endblock concurrency %> <%- endblock concurrency %>
@ -65,7 +65,7 @@ concurrency:
<{ job_name }>: <{ job_name }>:
<%- do prepare_workflow_needs.append(job_name) %> <%- do prepare_workflow_needs.append(job_name) %>
name: Check Requirements name: Check Requirements
runs-on: ubuntu-latest runs-on: ubuntu-22.04
environment: <{ gh_environment }>-check environment: <{ gh_environment }>-check
steps: steps:
- name: Check For Admin Permission - name: Check For Admin Permission
@ -86,21 +86,12 @@ concurrency:
needs: needs:
- prepare-workflow - prepare-workflow
- build-docs - build-docs
- build-src-repo
environment: <{ gh_environment }> environment: <{ gh_environment }>
runs-on: runs-on:
- self-hosted - ubuntu-22.04
- linux
- repo-<{ gh_environment }>
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
- name: Get Salt Project GitHub Actions Bot Environment
run: |
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
- name: Setup Python Tools Scripts - name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts
with: with:
@ -112,12 +103,6 @@ concurrency:
name: salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch name: salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch
path: artifacts/release path: artifacts/release
- name: Download Source Repository
uses: actions/download-artifact@v4
with:
name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-<{ gh_environment }>-src-repo
path: artifacts/release
- name: Download Release Documentation (HTML) - name: Download Release Documentation (HTML)
uses: actions/download-artifact@v4 uses: actions/download-artifact@v4
with: with:
@ -128,34 +113,6 @@ concurrency:
run: | run: |
tree -a artifacts/release tree -a artifacts/release
{#-
- name: Download Release Documentation (PDF)
uses: actions/download-artifact@v4
with:
name: Salt-${{ needs.prepare-workflow.outputs.salt-version }}.pdf
path: artifacts/release
#}
- name: Upload Release Artifacts
run: |
tools release upload-artifacts ${{ needs.prepare-workflow.outputs.salt-version }} artifacts/release
- name: Upload PyPi Artifacts
uses: actions/upload-artifact@v4
with:
name: pypi-artifacts
path: |
artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.asc
retention-days: 7
if-no-files-found: error
<%- if includes.get("test-pkg-downloads", True) %>
<%- include "test-salt-pkg-repo-downloads.yml.jinja" %>
<%- endif %>
publish-pypi: publish-pypi:
<%- do conclusion_needs.append('publish-pypi') %> <%- do conclusion_needs.append('publish-pypi') %>
name: Publish to PyPi(test) name: Publish to PyPi(test)
@ -174,9 +131,7 @@ concurrency:
<%- endfor %> <%- endfor %>
environment: <{ gh_environment }> environment: <{ gh_environment }>
runs-on: runs-on:
- self-hosted - ubuntu-22.04
- linux
- repo-<{ gh_environment }>
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
@ -221,4 +176,29 @@ concurrency:
run: | run: |
tools pkg pypi-upload --test artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz tools pkg pypi-upload --test artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz
draft-release:
name: Draft Github Release
if: |
always() && (needs.test.result == 'success' || needs.test.result == 'skipped') &&
(needs.test-packages.result == 'success' || needs.test-packages.result == 'skipped') &&
needs.prepare-workflow.result == 'success' && needs.build-salt-onedir.result == 'success' &&
needs.build-pkgs-onedir.result == 'success' && needs.pre-commit.result == 'success'
needs:
- prepare-workflow
- pre-commit
- build-salt-onedir
- build-pkgs-onedir
- test-packages
- test
permissions:
contents: write
pull-requests: read
id-token: write
uses: ./.github/workflows/draft-release.yml
with:
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['artifact-matrix']) }}
build-matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['build-matrix']) }}
<%- endblock jobs %> <%- endblock jobs %>

View file

@ -6,13 +6,12 @@
<%- do conclusion_needs.append(job_name) %> <%- do conclusion_needs.append(job_name) %>
name: Package Downloads name: Package Downloads
<%- if gh_environment == "staging" %> <%- if gh_environment == "staging" %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['test-pkg-download'] }}
<%- else %> <%- else %>
if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} if: ${{ inputs.skip-salt-pkg-download-test-suite == false }}
<%- endif %> <%- endif %>
needs: needs:
- prepare-workflow - prepare-workflow
- publish-repositories
- build-ci-deps - build-ci-deps
<%- if gh_environment == "release" %> <%- if gh_environment == "release" %>
- download-onedir-artifact - download-onedir-artifact

View file

@ -1,99 +1,19 @@
<%- for os in test_salt_pkg_listing["linux"] %> <%- set job_name = "test-packages" %>
<%- set job_name = "{}-pkg-tests{}".format(os.slug.replace(".", ""), os.fips and '-fips' or '') %>
<{ job_name }>: <{ job_name }>:
<%- do test_salt_pkg_needs.append(job_name) %> name: Test Package
name: <{ os.display_name }> Package Test<%- if os.fips %> (fips)<%- endif %> if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['test-pkg'] }}
<%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
<%- else %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
<%- endif %>
needs: needs:
- prepare-workflow - prepare-workflow
- build-pkgs-onedir - build-pkgs-onedir
- build-ci-deps - build-ci-deps
uses: ./.github/workflows/test-packages-action-linux.yml uses: ./.github/workflows/test-packages-action.yml
with: with:
distro-slug: <{ os.slug }>
nox-session: ci-test-onedir nox-session: ci-test-onedir
platform: linux
arch: <{ os.arch }>
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: <{ os.pkg_type }>
nox-version: <{ nox_version }> nox-version: <{ nox_version }>
python-version: "<{ gh_actions_workflows_python_version }>" python-version: "<{ gh_actions_workflows_python_version }>"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }> cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
skip-code-coverage: <{ skip_test_coverage_check }> skip-code-coverage: <{ skip_test_coverage_check }>
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
<%- if os.fips %> matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['pkg-test-matrix']) }}
fips: true linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
<%- endif %>
<%- endfor %>
<%- for os in test_salt_pkg_listing["macos"] %>
<%- set job_name = "{}-pkg-tests".format(os.slug.replace(".", "")) %>
<{ job_name }>:
<%- do test_salt_pkg_needs.append(job_name) %>
name: <{ os.display_name }> Package Test
<%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
<%- else %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
<%- endif %>
needs:
- prepare-workflow
- build-pkgs-onedir
- build-ci-deps
uses: ./.github/workflows/test-packages-action-macos.yml
with:
distro-slug: <{ os.slug }>
runner: <{ os.runner }>
nox-session: ci-test-onedir
platform: macos
arch: <{ os.arch }>
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: macos
nox-version: <{ nox_version }>
python-version: "<{ gh_actions_workflows_python_version }>"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
skip-code-coverage: <{ skip_test_coverage_check }>
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
<%- endfor %>
<%- for os in test_salt_pkg_listing["windows"] %>
<%- set job_name = "{}-{}-pkg-tests".format(os.slug.replace(".", ""), os.pkg_type.lower()) %>
<{ job_name }>:
<%- do test_salt_pkg_needs.append(job_name) %>
name: <{ os.display_name }> <{ os.pkg_type }> Package Test
<%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
<%- else %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
<%- endif %>
needs:
- prepare-workflow
- build-pkgs-onedir
- build-ci-deps
uses: ./.github/workflows/test-packages-action-windows.yml
with:
distro-slug: <{ os.slug }>
nox-session: ci-test-onedir
platform: windows
arch: <{ os.arch }>
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
pkg-type: <{ os.pkg_type }>
nox-version: <{ nox_version }>
python-version: "<{ gh_actions_workflows_python_version }>"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
skip-code-coverage: <{ skip_test_coverage_check }>
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }}
<%- endfor %>

View file

@ -1,103 +1,24 @@
{#- <%- if workflow_slug in ("nightly", "scheduled") %>
Full test runs. Each chunk should never take more than 2 hours. We allow 3, and on windows we add 30 more minutes. <%- set timeout_value = 360 %>
Partial test runs(no chunk parallelization), 6 Hours <%- else %>
#} <%- set timeout_value = 180 %>
<%- set full_testrun_timeout_value = 180 %> <%- endif %>
<%- set partial_testrun_timeout_value = 360 %> test:
<%- set windows_full_testrun_timeout_value = full_testrun_timeout_value + 30 %> name: Test Salt
if: ${{ fromJSON(needs.prepare-workflow.outputs.config)['jobs']['test'] }}
<%- for os in test_salt_listing["windows"] %>
<{ os.slug.replace(".", "") }>:
<%- do test_salt_needs.append(os.slug.replace(".", "")) %>
name: <{ os.display_name }> Test
<%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
<%- else %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
<%- endif %>
needs: needs:
- prepare-workflow - prepare-workflow
- build-ci-deps - build-ci-deps
uses: ./.github/workflows/test-action-windows.yml uses: ./.github/workflows/test-action.yml
with: with:
distro-slug: <{ os.slug }>
nox-session: ci-test-onedir nox-session: ci-test-onedir
platform: windows
arch: amd64
nox-version: <{ nox_version }> nox-version: <{ nox_version }>
gh-actions-python-version: "<{ gh_actions_workflows_python_version }>" python-version: "<{ gh_actions_workflows_python_version }>"
testrun: ${{ needs.prepare-workflow.outputs.testrun }} testrun: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['testrun']) }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }> cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
skip-code-coverage: <{ skip_test_coverage_check }> skip-code-coverage: <{ skip_test_coverage_check }>
workflow-slug: <{ workflow_slug }> workflow-slug: <{ workflow_slug }>
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && <{ windows_full_testrun_timeout_value }> || <{ partial_testrun_timeout_value }> }} default-timeout: <{ timeout_value }>
matrix: ${{ toJSON(fromJSON(needs.prepare-workflow.outputs.config)['test-matrix']) }}
<%- endfor %> linux_arm_runner: ${{ fromJSON(needs.prepare-workflow.outputs.config)['linux_arm_runner'] }}
<%- for os in test_salt_listing["macos"] %>
<{ os.slug.replace(".", "") }>:
<%- do test_salt_needs.append(os.slug.replace(".", "")) %>
name: <{ os.display_name }> Test
<%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }}
<%- else %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
<%- endif %>
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-macos.yml
with:
distro-slug: <{ os.slug }>
runner: <{ os.runner }>
nox-session: ci-test-onedir
platform: macos
arch: <{ os.arch }>
nox-version: <{ nox_version }>
gh-actions-python-version: "<{ gh_actions_workflows_python_version }>"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
skip-code-coverage: <{ skip_test_coverage_check }>
workflow-slug: <{ workflow_slug }>
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && <{ full_testrun_timeout_value }> || <{ partial_testrun_timeout_value }> }}
<%- endfor %>
<%- for os in test_salt_listing["linux"] %>
<%- set job_name = "{}{}".format(os.slug.replace(".", ""), os.fips and '-fips' or '') %>
<{ job_name }>:
<%- do test_salt_needs.append(job_name) %>
name: <{ os.display_name }> Test<%- if os.fips %> (fips)<%- endif %>
<%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }}
<%- else %>
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }}
<%- endif %>
needs:
- prepare-workflow
- build-ci-deps
uses: ./.github/workflows/test-action-linux.yml
with:
distro-slug: <{ os.slug }>
nox-session: ci-test-onedir
platform: linux
arch: <{ os.arch }>
nox-version: <{ nox_version }>
gh-actions-python-version: "<{ gh_actions_workflows_python_version }>"
testrun: ${{ needs.prepare-workflow.outputs.testrun }}
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}"
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }>
skip-code-coverage: <{ skip_test_coverage_check }>
workflow-slug: <{ workflow_slug }>
timeout-minutes: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['type'] == 'full' && <{ full_testrun_timeout_value }> || <{ partial_testrun_timeout_value }> }}
<%- if os.fips %>
fips: true
<%- endif %>
<%- endfor %>

View file

@ -6,7 +6,7 @@
<%- do conclusion_needs.append(job_name) %> <%- do conclusion_needs.append(job_name) %>
name: Trigger Branch Workflows name: Trigger Branch Workflows
if: ${{ github.event_name == 'schedule' && fromJSON(needs.workflow-requirements.outputs.requirements-met) }} if: ${{ github.event_name == 'schedule' && fromJSON(needs.workflow-requirements.outputs.requirements-met) }}
runs-on: ubuntu-latest runs-on: ubuntu-22.04
needs: needs:
- workflow-requirements - workflow-requirements

View file

@ -4,7 +4,7 @@
<{ job_name }>: <{ job_name }>:
<%- do prepare_workflow_needs.append(job_name) %> <%- do prepare_workflow_needs.append(job_name) %>
name: Check Workflow Requirements name: Check Workflow Requirements
runs-on: ubuntu-latest runs-on: ubuntu-22.04
outputs: outputs:
requirements-met: ${{ steps.check-requirements.outputs.requirements-met }} requirements-met: ${{ steps.check-requirements.outputs.requirements-met }}
steps: steps:

View file

@ -1,414 +0,0 @@
---
name: Test Artifact
on:
workflow_call:
inputs:
distro-slug:
required: true
type: string
description: The OS slug to run tests against
nox-session:
required: true
type: string
description: The nox session to run
testrun:
required: true
type: string
description: JSON string containing information about what and how to run the test suite
salt-version:
type: string
required: true
description: The Salt version to set prior to running tests.
cache-prefix:
required: true
type: string
description: Seed used to invalidate caches
platform:
required: true
type: string
description: The platform being tested
arch:
required: true
type: string
description: The platform arch being tested
nox-version:
required: true
type: string
description: The nox version to install
timeout-minutes:
required: true
type: number
description: Timeout, in minutes, for the test job
gh-actions-python-version:
required: false
type: string
description: The python version to run tests with
default: "3.10"
fips:
required: false
type: boolean
default: false
description: Test run with FIPS enabled
package-name:
required: false
type: string
description: The onedir package name to use
default: salt
skip-code-coverage:
required: false
type: boolean
description: Skip code coverage
default: false
workflow-slug:
required: false
type: string
description: Which workflow is running.
default: ci
env:
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
PIP_DISABLE_PIP_VERSION_CHECK: "1"
RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
jobs:
generate-matrix:
name: Test Matrix
runs-on: ubuntu-latest
outputs:
matrix-include: ${{ steps.generate-matrix.outputs.matrix }}
build-reports: ${{ steps.generate-matrix.outputs.build-reports }}
steps:
- uses: actions/setup-python@v5
with:
python-version: '3.10'
- name: "Throttle Builds"
shell: bash
run: |
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
- name: Checkout Source Code
uses: actions/checkout@v4
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-prefix }}
env:
PIP_INDEX_URL: https://pypi.org/simple
- name: Generate Test Matrix
id: generate-matrix
run: |
tools ci matrix --workflow=${{ inputs.workflow-slug }} ${{ fromJSON(inputs.testrun)['type'] == 'full' && '--full ' || '' }}${{ inputs.distro-slug }}
test:
name: Test
runs-on:
- self-hosted
- linux
- bastion
timeout-minutes: ${{ inputs.timeout-minutes }}
needs:
- generate-matrix
strategy:
fail-fast: false
matrix:
include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include) }}
env:
SALT_TRANSPORT: ${{ matrix.transport }}
TEST_GROUP: ${{ matrix.test-group || 1 }}
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
- name: "Set `TIMESTAMP` environment variable"
shell: bash
run: |
echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
- name: Checkout Source Code
uses: actions/checkout@v4
- name: Setup Salt Version
run: |
echo "${{ inputs.salt-version }}" > salt/_version.txt
- name: Download Onedir Tarball as an Artifact
uses: actions/download-artifact@v4
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
path: artifacts/
- name: Decompress Onedir Tarball
shell: bash
run: |
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
- name: Download nox.linux.${{ inputs.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
uses: actions/download-artifact@v4
with:
name: nox-linux-${{ inputs.arch }}-${{ inputs.nox-session }}
- name: PyPi Proxy
run: |
sed -i '7s;^;--index-url=${{ vars.PIP_INDEX_URL }} --trusted-host ${{ vars.PIP_TRUSTED_HOST }} --extra-index-url=${{ vars.PIP_EXTRA_INDEX_URL }}\n;' requirements/static/ci/*/*.txt
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-prefix }}
- name: Download testrun-changed-files.txt
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
uses: actions/download-artifact@v4
with:
name: testrun-changed-files.txt
- name: Get Salt Project GitHub Actions Bot Environment
run: |
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
- name: Start VM
id: spin-up-vm
env:
TESTS_CHUNK: ${{ matrix.tests-chunk }}
run: |
tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }}
- name: List Free Space
run: |
tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true
- name: Upload Checkout To VM
run: |
tools --timestamps vm rsync ${{ inputs.distro-slug }}
- name: Decompress .nox Directory
run: |
tools --timestamps vm decompress-dependencies ${{ inputs.distro-slug }}
- name: Show System Info
run: |
tools --timestamps --timeout-secs=1800 vm test --skip-requirements-install --print-system-information-only \
--nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }} \
${{ matrix.tests-chunk }}
- name: Run Changed Tests
id: run-fast-changed-tests
if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
run: |
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
--nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \
${{ matrix.tests-chunk }} -- --core-tests --slow-tests --suppress-no-test-exit-code \
--from-filenames=testrun-changed-files.txt
# NOTE(review): tail of a `test` job whose header is above this excerpt.
# Each step drives the suite on a remote VM via the `tools vm` helper;
# `matrix.*` values come from the generate-matrix job.
- name: Run Fast Tests
  id: run-fast-tests
  # Partial (non-full) testrun that selected the "fast" bucket.
  if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] }}
  run: |
    tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
      --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ (inputs.skip-code-coverage && matrix.tests-chunk != 'unit') && '--skip-code-coverage' || '' }} \
      ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} ${{ matrix.tests-chunk }}
- name: Run Slow Tests
  id: run-slow-tests
  if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] }}
  run: |
    tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
      --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \
      ${{ matrix.tests-chunk }} -- --no-fast-tests --slow-tests
- name: Run Core Tests
  id: run-core-tests
  if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] }}
  run: |
    tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
      --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \
      ${{ matrix.tests-chunk }} -- --no-fast-tests --core-tests
- name: Run Flaky Tests
  id: run-flaky-tests
  # Flaky-jail tests run on any testrun type when selected (no 'full' guard).
  if: ${{ fromJSON(inputs.testrun)['selected_tests']['flaky'] }}
  run: |
    tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
      --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \
      ${{ matrix.tests-chunk }} -- --no-fast-tests --flaky-jail
- name: Run Full Tests
  id: run-full-tests
  if: ${{ fromJSON(inputs.testrun)['type'] == 'full' }}
  run: |
    tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
      --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ (inputs.skip-code-coverage && matrix.tests-chunk != 'unit') && '--skip-code-coverage' || '' }} \
      -E TEST_GROUP ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} ${{ matrix.tests-chunk }} -- --slow-tests --core-tests \
      --test-group-count=${{ matrix.test-group-count || 1 }} --test-group=${{ matrix.test-group || 1 }}
- name: Combine Coverage Reports
  # Only meaningful if the VM actually came up and coverage is enabled.
  if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success'
  run: |
    tools --timestamps vm combine-coverage ${{ inputs.distro-slug }}
- name: Download Test Run Artifacts
  id: download-artifacts-from-vm
  if: always() && steps.spin-up-vm.outcome == 'success'
  run: |
    tools --timestamps vm download-artifacts ${{ inputs.distro-slug }}
    # Delete the salt onedir, we won't need it anymore and it will prevent
    # from it showing in the tree command below
    rm -rf artifacts/salt*
    tree -a artifacts
    if [ "${{ inputs.skip-code-coverage }}" != "true" ]; then
      mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}${{ inputs.fips && '.fips' || '' }}.${{ inputs.nox-session }}.${{ matrix.transport }}.${{ matrix.tests-chunk }}.grp${{ matrix.test-group || '1' }}
    fi
- name: Destroy VM
  if: always()
  run: |
    tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }} || true
# Artifact names embed distro/fips/session/transport/chunk/group/timestamp so
# every matrix leg uploads a unique artifact; the report job merges them later.
- name: Upload Code Coverage Test Run Artifacts
  if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
  uses: actions/upload-artifact@v4
  with:
    name: testrun-coverage-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-grp${{ matrix.test-group || '1' }}-${{ env.TIMESTAMP }}
    path: |
      artifacts/coverage/
    include-hidden-files: true
- name: Upload JUnit XML Test Run Artifacts
  if: always() && steps.download-artifacts-from-vm.outcome == 'success'
  uses: actions/upload-artifact@v4
  with:
    name: testrun-junit-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-grp${{ matrix.test-group || '1' }}-${{ env.TIMESTAMP }}
    path: |
      artifacts/xml-unittests-output/
    include-hidden-files: true
- name: Upload Test Run Log Artifacts
  if: always() && steps.download-artifacts-from-vm.outcome == 'success'
  uses: actions/upload-artifact@v4
  with:
    name: testrun-log-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-grp${{ matrix.test-group || '1' }}-${{ env.TIMESTAMP }}
    path: |
      artifacts/logs
    include-hidden-files: true
# Aggregates per-chunk artifacts from the `test` matrix into merged artifacts
# and coverage reports. Runs even when tests failed (always()), but not when
# they were cancelled/skipped.
report:
  name: Test Reports
  if: always() && fromJSON(needs.generate-matrix.outputs.build-reports) && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
  runs-on: ubuntu-latest
  needs:
    - test
    - generate-matrix
  env:
    PIP_INDEX_URL: https://pypi.org/simple
  steps:
    - name: Checkout Source Code
      uses: actions/checkout@v4
    - uses: actions/setup-python@v5
      with:
        python-version: '3.10'
    - name: "Throttle Builds"
      shell: bash
      # Random 1-30s sleep to spread concurrent artifact-API traffic.
      run: |
        t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
    - name: Merge JUnit XML Test Run Artifacts
      if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
      continue-on-error: true
      uses: actions/upload-artifact/merge@v4
      with:
        name: testrun-junit-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}
        pattern: testrun-junit-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}-*
        separate-directories: false
        delete-merged: true
    - name: Merge Log Test Run Artifacts
      if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
      continue-on-error: true
      uses: actions/upload-artifact/merge@v4
      with:
        name: testrun-log-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}
        pattern: testrun-log-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}-*
        separate-directories: false
        delete-merged: true
    - name: Merge Code Coverage Test Run Artifacts
      if: ${{ inputs.skip-code-coverage == false }}
      continue-on-error: true
      uses: actions/upload-artifact/merge@v4
      with:
        name: testrun-coverage-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}
        pattern: testrun-coverage-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}-*
        separate-directories: false
        delete-merged: true
    - name: Download Code Coverage Test Run Artifacts
      uses: actions/download-artifact@v4
      if: ${{ inputs.skip-code-coverage == false }}
      id: download-coverage-artifacts
      with:
        path: artifacts/coverage/
        pattern: testrun-coverage-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}*
        merge-multiple: true
    - name: Show Downloaded Test Run Artifacts
      if: ${{ inputs.skip-code-coverage == false }}
      run: |
        tree -a artifacts
    - name: Install Nox
      run: |
        python3 -m pip install 'nox==${{ inputs.nox-version }}'
    - name: Create XML Coverage Reports
      if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
      run: |
        nox --force-color -e create-xml-coverage-reports
        # `|| true` keeps the report job alive when a chunk produced no
        # coverage data (matches the sibling test-action workflow).
        mv artifacts/coverage/salt.xml artifacts/coverage/salt..${{ inputs.distro-slug }}${{ inputs.fips && '..fips' || '' }}..${{ inputs.nox-session }}.xml || true
        mv artifacts/coverage/tests.xml artifacts/coverage/tests..${{ inputs.distro-slug }}${{ inputs.fips && '..fips' || '' }}..${{ inputs.nox-session }}.xml || true
    - name: Report Salt Code Coverage
      if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
      continue-on-error: true
      run: |
        nox --force-color -e report-coverage -- salt
    - name: Report Combined Code Coverage
      if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
      continue-on-error: true
      run: |
        nox --force-color -e report-coverage
    - name: Rename Code Coverage DB
      if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
      continue-on-error: true
      run: |
        mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}${{ inputs.fips && '.fips' || '' }}.${{ inputs.nox-session }}
    - name: Upload Code Coverage DB
      if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
      uses: actions/upload-artifact@v4
      with:
        name: all-testrun-coverage-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.nox-session }}
        path: artifacts/coverage
        include-hidden-files: true

View file

@ -1,449 +0,0 @@
---
# Reusable (workflow_call) workflow: runs the Salt test suite against a
# macOS onedir build on a native macOS runner.
name: Test Artifact (macOS)

on:
  workflow_call:
    inputs:
      distro-slug:
        required: true
        type: string
        description: The OS slug to run tests against
      runner:
        required: true
        type: string
        description: The GitHub runner name
      nox-session:
        required: true
        type: string
        description: The nox session to run
      testrun:
        required: true
        type: string
        description: JSON string containing information about what and how to run the test suite
      gh-actions-python-version:
        required: false
        type: string
        description: The python version to run tests with
        default: "3.11"
      salt-version:
        type: string
        required: true
        description: The Salt version to set prior to running tests.
      cache-prefix:
        required: true
        type: string
        description: Seed used to invalidate caches
      platform:
        required: true
        type: string
        description: The platform being tested
      arch:
        required: true
        type: string
        description: The platform arch being tested
      nox-version:
        required: true
        type: string
        description: The nox version to install
      timeout-minutes:
        required: true
        type: number
        description: Timeout, in minutes, for the test job
      package-name:
        required: false
        type: string
        description: The onedir package name to use
        default: salt
      skip-code-coverage:
        required: false
        type: boolean
        description: Skip code coverage
        default: false
      workflow-slug:
        required: false
        type: string
        description: Which workflow is running.
        default: ci

env:
  COLUMNS: 190
  PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
  PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
  PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
  PIP_DISABLE_PIP_VERSION_CHECK: "1"
  RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"

jobs:
# Computes the test matrix (chunks/transports) consumed by the `test` job.
generate-matrix:
  name: Test Matrix
  runs-on: ubuntu-latest
  outputs:
    matrix-include: ${{ steps.generate-matrix.outputs.matrix }}
    build-reports: ${{ steps.generate-matrix.outputs.build-reports }}
  steps:
    - uses: actions/setup-python@v5
      with:
        python-version: '3.10'
    - name: "Throttle Builds"
      shell: bash
      # Random 1-30s sleep to spread out concurrent API/cache traffic.
      run: |
        t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
    - name: Checkout Source Code
      uses: actions/checkout@v4
    - name: Setup Python Tools Scripts
      uses: ./.github/actions/setup-python-tools-scripts
      with:
        cache-prefix: ${{ inputs.cache-prefix }}
      env:
        PIP_INDEX_URL: https://pypi.org/simple
    - name: Generate Test Matrix
      id: generate-matrix
      run: |
        tools ci matrix --workflow=${{ inputs.workflow-slug }} ${{ inputs.distro-slug }}
# Runs the selected test chunks directly on the macOS runner (no VM layer,
# unlike the Linux/Windows variants which drive a remote VM via `tools vm`).
test:
  name: Test
  runs-on: ${{ inputs.runner }}
  timeout-minutes: ${{ inputs.timeout-minutes }}
  needs:
    - generate-matrix
  strategy:
    fail-fast: false
    matrix:
      include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include) }}
  env:
    SALT_TRANSPORT: ${{ matrix.transport }}
  steps:
    - name: "Throttle Builds"
      shell: bash
      run: |
        t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
    # TIMESTAMP makes per-leg artifact names unique across reruns.
    - name: "Set `TIMESTAMP` environment variable"
      shell: bash
      run: |
        echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
    - name: Checkout Source Code
      uses: actions/checkout@v4
    - name: Setup Salt Version
      run: |
        echo "${{ inputs.salt-version }}" > salt/_version.txt
    - name: Download Onedir Tarball as an Artifact
      uses: actions/download-artifact@v4
      with:
        name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
        path: artifacts/
    - name: Decompress Onedir Tarball
      shell: bash
      run: |
        python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
        cd artifacts
        tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
    - name: Install System Dependencies
      run: |
        brew install tree
    - name: Download nox.macos.${{ inputs.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
      uses: actions/download-artifact@v4
      with:
        name: nox-macos-${{ inputs.arch }}-${{ inputs.nox-session }}
    - name: Set up Python ${{ inputs.gh-actions-python-version }}
      uses: actions/setup-python@v5
      with:
        python-version: "${{ inputs.gh-actions-python-version }}"
    - name: Install Nox
      run: |
        python3 -m pip install 'nox==${{ inputs.nox-version }}'
      env:
        PIP_INDEX_URL: https://pypi.org/simple
    - name: Decompress .nox Directory
      run: |
        nox --force-color -e decompress-dependencies -- macos ${{ inputs.arch }}
    - name: Download testrun-changed-files.txt
      if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
      uses: actions/download-artifact@v4
      with:
        name: testrun-changed-files.txt
    - name: Show System Info
      env:
        SKIP_REQUIREMENTS_INSTALL: "1"
        PRINT_SYSTEM_INFO_ONLY: "1"
      run: |
        sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }}
    # The Run * Tests steps below share the same env; only the pytest
    # selection flags after `--` differ per bucket.
    - name: Run Changed Tests
      id: run-fast-changed-tests
      if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
      env:
        SKIP_REQUIREMENTS_INSTALL: "1"
        PRINT_TEST_SELECTION: "0"
        PRINT_TEST_PLAN_ONLY: "0"
        PRINT_SYSTEM_INFO: "0"
        RERUN_FAILURES: "1"
        GITHUB_ACTIONS_PIPELINE: "1"
        SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
        SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
        COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
      run: |
        sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
          -k "mac or darwin" --core-tests --slow-tests --suppress-no-test-exit-code \
          --from-filenames=testrun-changed-files.txt
    - name: Run Fast Tests
      id: run-fast-tests
      if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] }}
      env:
        SKIP_REQUIREMENTS_INSTALL: "1"
        PRINT_TEST_SELECTION: "0"
        PRINT_TEST_PLAN_ONLY: "0"
        PRINT_SYSTEM_INFO: "0"
        RERUN_FAILURES: "1"
        GITHUB_ACTIONS_PIPELINE: "1"
        SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
        SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
        COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
      run: |
        sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
          -k "mac or darwin" --suppress-no-test-exit-code
    - name: Run Slow Tests
      id: run-slow-tests
      if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] }}
      env:
        SKIP_REQUIREMENTS_INSTALL: "1"
        PRINT_TEST_SELECTION: "0"
        PRINT_TEST_PLAN_ONLY: "0"
        PRINT_SYSTEM_INFO: "0"
        RERUN_FAILURES: "1"
        GITHUB_ACTIONS_PIPELINE: "1"
        SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
        SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
        COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
      run: |
        sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
          -k "mac or darwin" --suppress-no-test-exit-code --no-fast-tests --slow-tests
    - name: Run Core Tests
      id: run-core-tests
      if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] }}
      env:
        SKIP_REQUIREMENTS_INSTALL: "1"
        PRINT_TEST_SELECTION: "0"
        PRINT_TEST_PLAN_ONLY: "0"
        PRINT_SYSTEM_INFO: "0"
        RERUN_FAILURES: "1"
        GITHUB_ACTIONS_PIPELINE: "1"
        SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
        SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
        COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
      run: |
        sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
          -k "mac or darwin" --suppress-no-test-exit-code --no-fast-tests --core-tests
    - name: Run Flaky Tests
      id: run-flaky-tests
      # Flaky-jail tests run on any testrun type when selected.
      if: ${{ fromJSON(inputs.testrun)['selected_tests']['flaky'] }}
      env:
        SKIP_REQUIREMENTS_INSTALL: "1"
        PRINT_TEST_SELECTION: "0"
        PRINT_TEST_PLAN_ONLY: "0"
        PRINT_SYSTEM_INFO: "0"
        RERUN_FAILURES: "1"
        GITHUB_ACTIONS_PIPELINE: "1"
        SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
        SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
        COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
      run: |
        sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
          -k "mac or darwin" --suppress-no-test-exit-code --no-fast-tests --flaky-jail
    - name: Run Full Tests
      id: run-full-tests
      if: ${{ fromJSON(inputs.testrun)['type'] == 'full' }}
      env:
        SKIP_REQUIREMENTS_INSTALL: "1"
        PRINT_TEST_SELECTION: "0"
        PRINT_TEST_PLAN_ONLY: "0"
        PRINT_SYSTEM_INFO: "0"
        RERUN_FAILURES: "1"
        GITHUB_ACTIONS_PIPELINE: "1"
        SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
        SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}"
        COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
      run: |
        sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \
          --slow-tests --core-tests -k "mac or darwin"
    # Tests ran under sudo, so reclaim ownership before touching artifacts.
    - name: Fix file ownership
      run: |
        sudo chown -R "$(id -un)" .
    - name: Combine Coverage Reports
      if: always() && inputs.skip-code-coverage == false
      run: |
        nox --force-color -e combine-coverage
    - name: Prepare Test Run Artifacts
      id: download-artifacts-from-vm
      if: always()
      run: |
        # Delete the salt onedir, we won't need it anymore and it will prevent
        # from it showing in the tree command below
        rm -rf artifacts/salt*
        tree -a artifacts
        if [ "${{ inputs.skip-code-coverage }}" != "true" ]; then
          mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ inputs.nox-session }}.${{ matrix.transport }}.${{ matrix.tests-chunk }}
        fi
    - name: Upload Code Coverage Test Run Artifacts
      if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
      uses: actions/upload-artifact@v4
      with:
        name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-${{ env.TIMESTAMP }}
        path: |
          artifacts/coverage/
        include-hidden-files: true
    - name: Upload JUnit XML Test Run Artifacts
      if: always() && steps.download-artifacts-from-vm.outcome == 'success'
      uses: actions/upload-artifact@v4
      with:
        name: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-${{ env.TIMESTAMP }}
        path: |
          artifacts/xml-unittests-output/
        include-hidden-files: true
    - name: Upload Test Run Log Artifacts
      if: always() && steps.download-artifacts-from-vm.outcome == 'success'
      uses: actions/upload-artifact@v4
      with:
        name: testrun-log-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-${{ env.TIMESTAMP }}
        path: |
          artifacts/logs
        include-hidden-files: true
# Aggregates per-chunk macOS artifacts into merged artifacts and coverage
# reports. Runs even when test jobs failed, but not when cancelled/skipped.
report:
  name: Test Reports
  if: always() && fromJSON(needs.generate-matrix.outputs.build-reports) && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
  runs-on: ubuntu-latest
  needs:
    - test
    - generate-matrix
  env:
    PIP_INDEX_URL: https://pypi.org/simple
  steps:
    - name: Checkout Source Code
      uses: actions/checkout@v4
    - uses: actions/setup-python@v5
      with:
        python-version: '3.10'
    - name: "Throttle Builds"
      shell: bash
      # Random 1-30s sleep to spread concurrent artifact-API traffic.
      run: |
        t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
    - name: Merge JUnit XML Test Run Artifacts
      if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
      continue-on-error: true
      uses: actions/upload-artifact/merge@v4
      with:
        name: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
        pattern: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-*
        separate-directories: false
        delete-merged: true
    - name: Merge Log Test Run Artifacts
      if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
      continue-on-error: true
      uses: actions/upload-artifact/merge@v4
      with:
        name: testrun-log-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
        pattern: testrun-log-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-*
        separate-directories: false
        delete-merged: true
    - name: Merge Code Coverage Test Run Artifacts
      if: ${{ inputs.skip-code-coverage == false }}
      continue-on-error: true
      uses: actions/upload-artifact/merge@v4
      with:
        name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
        pattern: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-*
        separate-directories: false
        delete-merged: true
    - name: Download Code Coverage Test Run Artifacts
      uses: actions/download-artifact@v4
      if: ${{ inputs.skip-code-coverage == false }}
      id: download-coverage-artifacts
      with:
        path: artifacts/coverage/
        pattern: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}*
        merge-multiple: true
    - name: Show Downloaded Test Run Artifacts
      if: ${{ inputs.skip-code-coverage == false }}
      run: |
        tree -a artifacts
    - name: Set up Python ${{ inputs.gh-actions-python-version }}
      uses: actions/setup-python@v5
      with:
        python-version: "${{ inputs.gh-actions-python-version }}"
    - name: Install Nox
      run: |
        python3 -m pip install 'nox==${{ inputs.nox-version }}'
    - name: Create XML Coverage Reports
      if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
      run: |
        nox --force-color -e create-xml-coverage-reports
        # `|| true` keeps the report job alive when a chunk produced no
        # coverage data (matches the sibling test-action workflow).
        mv artifacts/coverage/salt.xml artifacts/coverage/salt..${{ inputs.distro-slug }}..${{ inputs.nox-session }}.xml || true
        mv artifacts/coverage/tests.xml artifacts/coverage/tests..${{ inputs.distro-slug }}..${{ inputs.nox-session }}.xml || true
    - name: Report Salt Code Coverage
      if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
      continue-on-error: true
      run: |
        nox --force-color -e report-coverage -- salt
    - name: Report Combined Code Coverage
      if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
      continue-on-error: true
      run: |
        nox --force-color -e report-coverage
    - name: Rename Code Coverage DB
      if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
      continue-on-error: true
      run: |
        mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ inputs.nox-session }}
    - name: Upload Code Coverage DB
      if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
      uses: actions/upload-artifact@v4
      with:
        name: all-testrun-coverage-artifacts-${{ inputs.distro-slug }}.${{ inputs.nox-session }}
        path: artifacts/coverage
        include-hidden-files: true

View file

@ -1,415 +0,0 @@
---
# Reusable (workflow_call) workflow: runs the Salt test suite against a
# onedir build on a cloud VM driven from a self-hosted bastion runner.
name: Test Artifact

on:
  workflow_call:
    inputs:
      distro-slug:
        required: true
        type: string
        description: The OS slug to run tests against
      nox-session:
        required: true
        type: string
        description: The nox session to run
      testrun:
        required: true
        type: string
        description: JSON string containing information about what and how to run the test suite
      salt-version:
        type: string
        required: true
        description: The Salt version to set prior to running tests.
      cache-prefix:
        required: true
        type: string
        description: Seed used to invalidate caches
      platform:
        required: true
        type: string
        description: The platform being tested
      arch:
        required: true
        type: string
        description: The platform arch being tested
      nox-version:
        required: true
        type: string
        description: The nox version to install
      timeout-minutes:
        required: true
        type: number
        description: Timeout, in minutes, for the test job
      gh-actions-python-version:
        required: false
        type: string
        description: The python version to run tests with
        default: "3.10"
      fips:
        required: false
        type: boolean
        default: false
        description: Test run with FIPS enabled
      package-name:
        required: false
        type: string
        description: The onedir package name to use
        default: salt
      skip-code-coverage:
        required: false
        type: boolean
        description: Skip code coverage
        default: false
      workflow-slug:
        required: false
        type: string
        description: Which workflow is running.
        default: ci

env:
  COLUMNS: 190
  AWS_MAX_ATTEMPTS: "10"
  AWS_RETRY_MODE: "adaptive"
  PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
  PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
  PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
  PIP_DISABLE_PIP_VERSION_CHECK: "1"
  RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"

jobs:
# Computes the test matrix (chunks/transports/groups) consumed by the `test`
# job; forwards the full/fips flags into the matrix tool.
generate-matrix:
  name: Test Matrix
  runs-on: ubuntu-latest
  outputs:
    matrix-include: ${{ steps.generate-matrix.outputs.matrix }}
    build-reports: ${{ steps.generate-matrix.outputs.build-reports }}
  steps:
    - uses: actions/setup-python@v5
      with:
        python-version: '3.10'
    - name: "Throttle Builds"
      shell: bash
      # Random 1-30s sleep to spread out concurrent API/cache traffic.
      run: |
        t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
    - name: Checkout Source Code
      uses: actions/checkout@v4
    - name: Setup Python Tools Scripts
      uses: ./.github/actions/setup-python-tools-scripts
      with:
        cache-prefix: ${{ inputs.cache-prefix }}
      env:
        PIP_INDEX_URL: https://pypi.org/simple
    - name: Generate Test Matrix
      id: generate-matrix
      run: |
        tools ci matrix --workflow=${{ inputs.workflow-slug }} ${{ fromJSON(inputs.testrun)['type'] == 'full' && '--full ' || '' }}${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }}
# Spins up a cloud VM from a self-hosted bastion runner and drives the test
# suite on it via the `tools vm` helper; artifacts are pulled back and
# uploaded per matrix leg.
test:
  name: Test
  runs-on:
    - self-hosted
    - linux
    - bastion
  timeout-minutes: ${{ inputs.timeout-minutes }}
  needs:
    - generate-matrix
  strategy:
    fail-fast: false
    matrix:
      include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include) }}
  env:
    SALT_TRANSPORT: ${{ matrix.transport }}
    TEST_GROUP: ${{ matrix.test-group || 1 }}
  steps:
    - name: "Throttle Builds"
      shell: bash
      run: |
        t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
    # TIMESTAMP makes per-leg artifact names unique across reruns.
    - name: "Set `TIMESTAMP` environment variable"
      shell: bash
      run: |
        echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
    - name: Checkout Source Code
      uses: actions/checkout@v4
    - name: Setup Salt Version
      run: |
        echo "${{ inputs.salt-version }}" > salt/_version.txt
    - name: Download Onedir Tarball as an Artifact
      uses: actions/download-artifact@v4
      with:
        name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
        path: artifacts/
    - name: Decompress Onedir Tarball
      shell: bash
      run: |
        python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
        cd artifacts
        tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
    - name: Download nox.windows.${{ inputs.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
      uses: actions/download-artifact@v4
      with:
        name: nox-windows-${{ inputs.arch }}-${{ inputs.nox-session }}
    # Prepend the internal PyPI proxy to every pinned requirements file.
    - name: PyPi Proxy
      run: |
        sed -i '7s;^;--index-url=${{ vars.PIP_INDEX_URL }} --trusted-host ${{ vars.PIP_TRUSTED_HOST }} --extra-index-url=${{ vars.PIP_EXTRA_INDEX_URL }}\n;' requirements/static/ci/*/*.txt
    - name: Setup Python Tools Scripts
      uses: ./.github/actions/setup-python-tools-scripts
      with:
        cache-prefix: ${{ inputs.cache-prefix }}
    - name: Download testrun-changed-files.txt
      if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
      uses: actions/download-artifact@v4
      with:
        name: testrun-changed-files.txt
    # Reads the spb:environment tag from EC2 instance metadata (IMDSv2).
    - name: Get Salt Project GitHub Actions Bot Environment
      run: |
        TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
        SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
        echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
    - name: Start VM
      id: spin-up-vm
      env:
        TESTS_CHUNK: ${{ matrix.tests-chunk }}
      run: |
        tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }}
    - name: List Free Space
      run: |
        tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true
    - name: Upload Checkout To VM
      run: |
        tools --timestamps vm rsync ${{ inputs.distro-slug }}
    - name: Decompress .nox Directory
      run: |
        tools --timestamps vm decompress-dependencies ${{ inputs.distro-slug }}
    - name: Show System Info
      run: |
        tools --timestamps --timeout-secs=1800 vm test --skip-requirements-install --print-system-information-only \
          --nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }} \
          ${{ matrix.tests-chunk }}
    # The Run * Tests steps below differ only in their selection flags;
    # fips comes from the matrix entry here (not the workflow input).
    - name: Run Changed Tests
      id: run-fast-changed-tests
      if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }}
      run: |
        tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
          --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \
          ${{ matrix.tests-chunk }} -- --core-tests --slow-tests --suppress-no-test-exit-code \
          --from-filenames=testrun-changed-files.txt
    - name: Run Fast Tests
      id: run-fast-tests
      if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] }}
      run: |
        tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
          --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ (inputs.skip-code-coverage && matrix.tests-chunk != 'unit') && '--skip-code-coverage' || '' }} \
          ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} ${{ matrix.tests-chunk }}
    - name: Run Slow Tests
      id: run-slow-tests
      if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] }}
      run: |
        tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
          --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \
          ${{ matrix.tests-chunk }} -- --no-fast-tests --slow-tests
    - name: Run Core Tests
      id: run-core-tests
      if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] }}
      run: |
        tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
          --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \
          ${{ matrix.tests-chunk }} -- --no-fast-tests --core-tests
    - name: Run Flaky Tests
      id: run-flaky-tests
      # Flaky-jail tests run on any testrun type when selected.
      if: ${{ fromJSON(inputs.testrun)['selected_tests']['flaky'] }}
      run: |
        tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
          --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \
          ${{ matrix.tests-chunk }} -- --no-fast-tests --flaky-jail
    - name: Run Full Tests
      id: run-full-tests
      if: ${{ fromJSON(inputs.testrun)['type'] == 'full' }}
      run: |
        tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \
          --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ (inputs.skip-code-coverage && matrix.tests-chunk != 'unit') && '--skip-code-coverage' || '' }} \
          -E TEST_GROUP ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} ${{ matrix.tests-chunk }} -- --slow-tests --core-tests \
          --test-group-count=${{ matrix.test-group-count || 1 }} --test-group=${{ matrix.test-group || 1 }}
    - name: Combine Coverage Reports
      if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success'
      run: |
        tools --timestamps vm combine-coverage ${{ inputs.distro-slug }}
    - name: Download Test Run Artifacts
      id: download-artifacts-from-vm
      if: always() && steps.spin-up-vm.outcome == 'success'
      run: |
        tools --timestamps vm download-artifacts ${{ inputs.distro-slug }}
        # Delete the salt onedir, we won't need it anymore and it will prevent
        # from it showing in the tree command below
        rm -rf artifacts/salt*
        tree -a artifacts
        if [ "${{ inputs.skip-code-coverage }}" != "true" ]; then
          mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ inputs.nox-session }}.${{ matrix.transport }}.${{ matrix.tests-chunk }}.grp${{ matrix.test-group || '1' }}
        fi
    - name: Destroy VM
      if: always()
      run: |
        tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }} || true
    - name: Upload Code Coverage Test Run Artifacts
      if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled'
      uses: actions/upload-artifact@v4
      with:
        name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-grp${{ matrix.test-group || '1' }}-${{ env.TIMESTAMP }}
        path: |
          artifacts/coverage/
        include-hidden-files: true
    - name: Upload JUnit XML Test Run Artifacts
      if: always() && steps.download-artifacts-from-vm.outcome == 'success'
      uses: actions/upload-artifact@v4
      with:
        name: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-grp${{ matrix.test-group || '1' }}-${{ env.TIMESTAMP }}
        path: |
          artifacts/xml-unittests-output/
        include-hidden-files: true
    - name: Upload Test Run Log Artifacts
      if: always() && steps.download-artifacts-from-vm.outcome == 'success'
      uses: actions/upload-artifact@v4
      with:
        name: testrun-log-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-grp${{ matrix.test-group || '1' }}-${{ env.TIMESTAMP }}
        path: |
          artifacts/logs
        include-hidden-files: true
# Aggregates per-chunk artifacts into merged artifacts and coverage reports.
# Runs even when test jobs failed (always()), but not when cancelled/skipped.
report:
  name: Test Reports
  if: always() && fromJSON(needs.generate-matrix.outputs.build-reports) && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
  runs-on: ubuntu-latest
  needs:
    - test
    - generate-matrix
  env:
    PIP_INDEX_URL: https://pypi.org/simple
  steps:
    - name: Checkout Source Code
      uses: actions/checkout@v4
    - uses: actions/setup-python@v5
      with:
        python-version: '3.10'
    - name: "Throttle Builds"
      shell: bash
      # Random 1-30s sleep to spread concurrent artifact-API traffic.
      run: |
        t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
    - name: Merge JUnit XML Test Run Artifacts
      if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
      continue-on-error: true
      uses: actions/upload-artifact/merge@v4
      with:
        name: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
        pattern: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-*
        separate-directories: false
        delete-merged: true
    - name: Merge Log Test Run Artifacts
      if: always() && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
      continue-on-error: true
      uses: actions/upload-artifact/merge@v4
      with:
        name: testrun-log-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
        pattern: testrun-log-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-*
        separate-directories: false
        delete-merged: true
    - name: Merge Code Coverage Test Run Artifacts
      if: ${{ inputs.skip-code-coverage == false }}
      continue-on-error: true
      uses: actions/upload-artifact/merge@v4
      with:
        name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}
        pattern: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-*
        separate-directories: false
        delete-merged: true
    - name: Download Code Coverage Test Run Artifacts
      uses: actions/download-artifact@v4
      if: ${{ inputs.skip-code-coverage == false }}
      id: download-coverage-artifacts
      with:
        path: artifacts/coverage/
        pattern: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}*
        merge-multiple: true
    - name: Show Downloaded Test Run Artifacts
      if: ${{ inputs.skip-code-coverage == false }}
      run: |
        tree -a artifacts
    - name: Install Nox
      run: |
        python3 -m pip install 'nox==${{ inputs.nox-version }}'
    - name: Create XML Coverage Reports
      if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled'
      run: |
        nox --force-color -e create-xml-coverage-reports
        mv artifacts/coverage/salt.xml artifacts/coverage/salt..${{ inputs.distro-slug }}..${{ inputs.nox-session }}.xml || true
        mv artifacts/coverage/tests.xml artifacts/coverage/tests..${{ inputs.distro-slug }}..${{ inputs.nox-session }}.xml || true
    - name: Report Salt Code Coverage
      if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
      continue-on-error: true
      run: |
        nox --force-color -e report-coverage -- salt
    - name: Report Combined Code Coverage
      if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
      continue-on-error: true
      run: |
        nox --force-color -e report-coverage
    - name: Rename Code Coverage DB
      if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
      continue-on-error: true
      run: |
        mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ inputs.nox-session }}
    - name: Upload Code Coverage DB
      if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success'
      uses: actions/upload-artifact@v4
      with:
        name: all-testrun-coverage-artifacts-${{ inputs.distro-slug }}.${{ inputs.nox-session }}
        path: artifacts/coverage
        include-hidden-files: true

1392
.github/workflows/test-action.yml vendored Normal file

File diff suppressed because it is too large Load diff

View file

@ -88,9 +88,7 @@ jobs:
needs: needs:
- generate-matrix - generate-matrix
runs-on: runs-on:
- self-hosted - ubuntu-latest
- linux
- bastion
env: env:
USE_S3_CACHE: 'true' USE_S3_CACHE: 'true'
environment: ${{ inputs.environment }} environment: ${{ inputs.environment }}
@ -497,9 +495,7 @@ jobs:
env: env:
USE_S3_CACHE: 'true' USE_S3_CACHE: 'true'
runs-on: runs-on:
- self-hosted - ubuntu-latest
- linux
- bastion
environment: ${{ inputs.environment }} environment: ${{ inputs.environment }}
timeout-minutes: 120 # 2 Hours - More than this and something is wrong timeout-minutes: 120 # 2 Hours - More than this and something is wrong
strategy: strategy:

View file

@ -1,279 +0,0 @@
name: Test Artifact
on:
workflow_call:
inputs:
distro-slug:
required: true
type: string
description: The OS slug to run tests against
platform:
required: true
type: string
description: The platform being tested
arch:
required: true
type: string
description: The platform arch being tested
pkg-type:
required: true
type: string
description: The platform arch being tested
salt-version:
type: string
required: true
description: The Salt version of the packages to install and test
cache-prefix:
required: true
type: string
description: Seed used to invalidate caches
testing-releases:
required: true
type: string
description: A JSON list of releases to test upgrades against
nox-version:
required: true
type: string
description: The nox version to install
python-version:
required: false
type: string
description: The python version to run tests with
default: "3.10"
fips:
required: false
type: boolean
default: false
description: Test run with FIPS enabled
package-name:
required: false
type: string
description: The onedir package name to use
default: salt
nox-session:
required: false
type: string
description: The nox session to run
default: ci-test-onedir
skip-code-coverage:
required: false
type: boolean
description: Skip code coverage
default: false
env:
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
PIP_DISABLE_PIP_VERSION_CHECK: "1"
RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
USE_S3_CACHE: 'true'
jobs:
generate-matrix:
name: Generate Matrix
runs-on:
# We need to run on our self-hosted runners because we need proper credentials
# for boto3 to scan through our repositories.
- self-hosted
- linux
- x86_64
outputs:
pkg-matrix-include: ${{ steps.generate-pkg-matrix.outputs.matrix }}
build-reports: ${{ steps.generate-pkg-matrix.outputs.build-reports }}
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
- name: Checkout Source Code
uses: actions/checkout@v4
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-prefix }}
- name: Generate Package Test Matrix
id: generate-pkg-matrix
run: |
tools ci pkg-matrix ${{ inputs.distro-slug }} \
${{ inputs.pkg-type }} --testing-releases ${{ join(fromJSON(inputs.testing-releases), ' ') }}
test:
name: Test
runs-on:
- self-hosted
- linux
- bastion
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
needs:
- generate-matrix
strategy:
fail-fast: false
matrix:
include: ${{ fromJSON(needs.generate-matrix.outputs.pkg-matrix-include) }}
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
- name: "Set `TIMESTAMP` environment variable"
shell: bash
run: |
echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
- name: Checkout Source Code
uses: actions/checkout@v4
- name: Download Packages
uses: actions/download-artifact@v4
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-${{ inputs.arch }}-${{ inputs.pkg-type }}
path: artifacts/pkg/
- name: Download Onedir Tarball as an Artifact
uses: actions/download-artifact@v4
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
path: artifacts/
- name: Decompress Onedir Tarball
shell: bash
run: |
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
- name: List Packages
run: |
tree artifacts/pkg/
- name: Download nox.linux.${{ inputs.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
uses: actions/download-artifact@v4
with:
name: nox-linux-${{ inputs.arch }}-${{ inputs.nox-session }}
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-prefix }}
- name: Get Salt Project GitHub Actions Bot Environment
run: |
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
- name: Start VM
id: spin-up-vm
run: |
tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }}
- name: List Free Space
run: |
tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true
- name: Upload Checkout To VM
run: |
tools --timestamps vm rsync ${{ inputs.distro-slug }}
- name: Decompress .nox Directory
run: |
tools --timestamps vm decompress-dependencies ${{ inputs.distro-slug }}
- name: Show System Info
run: |
tools --timestamps --timeout-secs=1800 vm test --skip-requirements-install --print-system-information-only \
--nox-session=${{ inputs.nox-session }}-pkgs ${{ inputs.distro-slug }} -- ${{ matrix.tests-chunk }}
- name: Run Package Tests
run: |
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install ${{ inputs.fips && '--fips ' || '' }}\
--nox-session=${{ inputs.nox-session }}-pkgs --rerun-failures ${{ inputs.distro-slug }} -- ${{ matrix.tests-chunk }} \
${{ matrix.version && format('--prev-version={0}', matrix.version) || ''}}
- name: Download Test Run Artifacts
id: download-artifacts-from-vm
if: always() && steps.spin-up-vm.outcome == 'success'
run: |
tools --timestamps vm download-artifacts ${{ inputs.distro-slug }}
# Delete the salt onedir, we won't need it anymore and it will prevent
# from it showing in the tree command below
rm -rf artifacts/salt*
tree -a artifacts
- name: Destroy VM
if: always()
run: |
tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }} || true
- name: Upload Test Run Artifacts
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
uses: actions/upload-artifact@v4
with:
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.pkg-type }}-${{ inputs.arch }}-${{ matrix.tests-chunk }}-${{ matrix.version || 'no-version'}}-${{ env.TIMESTAMP }}
path: |
artifacts/
!artifacts/pkg/*
!artifacts/salt/*
!artifacts/salt-*.tar.*
include-hidden-files: true
report:
name: Report
runs-on: ubuntu-latest
if: always() && fromJSON(needs.generate-matrix.outputs.build-reports) && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
needs:
- generate-matrix
- test
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
- name: "Throttle Builds"
shell: bash
run: |
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
- name: Wait For Artifacts
run: |
sleep 60
- name: Merge Test Run Artifacts
continue-on-error: true
uses: actions/upload-artifact/merge@v4
with:
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.pkg-type }}
pattern: pkg-testrun-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.pkg-type }}-*
separate-directories: true
delete-merged: true
- name: Wait For Artifacts 2
run: |
sleep 60
- name: Download Test Run Artifacts
id: download-test-run-artifacts
uses: actions/download-artifact@v4
with:
path: artifacts/
pattern: pkg-testrun-artifacts-${{ inputs.distro-slug }}${{ inputs.fips && '-fips' || '' }}-${{ inputs.pkg-type }}*
merge-multiple: true
- name: Show Test Run Artifacts
if: always() && steps.download-test-run-artifacts.outcome == 'success'
run: |
tree -a artifacts

View file

@ -1,280 +0,0 @@
name: Test Artifact
on:
workflow_call:
inputs:
distro-slug:
required: true
type: string
description: The OS slug to run tests against
runner:
required: true
type: string
description: The GitHub runner name
platform:
required: true
type: string
description: The platform being tested
arch:
required: true
type: string
description: The platform arch being tested
pkg-type:
required: true
type: string
description: The platform arch being tested
salt-version:
type: string
required: true
description: The Salt version of the packages to install and test
cache-prefix:
required: true
type: string
description: Seed used to invalidate caches
testing-releases:
required: true
type: string
description: A JSON list of releases to test upgrades against
nox-version:
required: true
type: string
description: The nox version to install
python-version:
required: false
type: string
description: The python version to run tests with
default: "3.10"
package-name:
required: false
type: string
description: The onedir package name to use
default: salt
nox-session:
required: false
type: string
description: The nox session to run
default: ci-test-onedir
skip-code-coverage:
required: false
type: boolean
description: Skip code coverage
default: false
env:
COLUMNS: 190
PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
PIP_DISABLE_PIP_VERSION_CHECK: "1"
RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
jobs:
generate-matrix:
name: Generate Matrix
runs-on:
# We need to run on our self-hosted runners because we need proper credentials
# for boto3 to scan through our repositories.
- self-hosted
- linux
- x86_64
outputs:
pkg-matrix-include: ${{ steps.generate-pkg-matrix.outputs.matrix }}
build-reports: ${{ steps.generate-pkg-matrix.outputs.build-reports }}
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
- name: Checkout Source Code
uses: actions/checkout@v4
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-prefix }}
- name: Generate Package Test Matrix
id: generate-pkg-matrix
run: |
tools ci pkg-matrix ${{ inputs.distro-slug }} ${{ inputs.pkg-type }} --testing-releases ${{ join(fromJSON(inputs.testing-releases), ' ') }}
test:
name: Test
runs-on: ${{ inputs.runner }}
timeout-minutes: 150 # 2 & 1/2 Hours - More than this and something is wrong (MacOS needs a little more time)
needs:
- generate-matrix
strategy:
fail-fast: false
matrix:
include: ${{ fromJSON(needs.generate-matrix.outputs.pkg-matrix-include) }}
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
- name: "Set `TIMESTAMP` environment variable"
shell: bash
run: |
echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
- name: Checkout Source Code
uses: actions/checkout@v4
- name: Download Packages
uses: actions/download-artifact@v4
with:
name: salt-${{ inputs.salt-version }}-${{ inputs.arch }}-${{ inputs.pkg-type }}
path: artifacts/pkg/
- name: Install System Dependencies
run: |
brew install tree
- name: List Packages
run: |
tree artifacts/pkg/
- name: Download Onedir Tarball as an Artifact
uses: actions/download-artifact@v4
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
path: artifacts/
- name: Decompress Onedir Tarball
shell: bash
run: |
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
- name: Set up Python ${{ inputs.python-version }}
uses: actions/setup-python@v5
with:
python-version: "${{ inputs.python-version }}"
- name: Install Nox
run: |
python3 -m pip install 'nox==${{ inputs.nox-version }}'
env:
PIP_INDEX_URL: https://pypi.org/simple
- name: Download nox.macos.${{ inputs.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
uses: actions/download-artifact@v4
with:
name: nox-macos-${{ inputs.arch }}-${{ inputs.nox-session }}
- name: Decompress .nox Directory
run: |
nox --force-color -e decompress-dependencies -- macos ${{ inputs.arch }}
- name: Show System Info
env:
SKIP_REQUIREMENTS_INSTALL: "1"
PRINT_SYSTEM_INFO_ONLY: "1"
run: |
sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }}
- name: Run Package Tests
env:
SKIP_REQUIREMENTS_INSTALL: "1"
PRINT_TEST_SELECTION: "0"
PRINT_TEST_PLAN_ONLY: "0"
PRINT_SYSTEM_INFO: "0"
RERUN_FAILURES: "1"
GITHUB_ACTIONS_PIPELINE: "1"
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
COVERAGE_CONTEXT: ${{ inputs.distro-slug }}
run: |
sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }} \
${{ matrix.version && format('--prev-version={0}', matrix.version) || ''}}
- name: Fix file ownership
run: |
sudo chown -R "$(id -un)" .
- name: Prepare Test Run Artifacts
id: download-artifacts-from-vm
if: always()
run: |
# Delete the salt onedir, we won't need it anymore and it will prevent
# from it showing in the tree command below
rm -rf artifacts/salt*
tree -a artifacts
- name: Upload Test Run Artifacts
if: always()
uses: actions/upload-artifact@v4
with:
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}-${{ inputs.arch }}-${{ matrix.tests-chunk }}-${{ matrix.version || 'no-version'}}-${{ env.TIMESTAMP }}
path: |
artifacts/
!artifacts/pkg/*
!artifacts/salt/*
!artifacts/salt-*.tar.*
include-hidden-files: true
report:
name: Report
runs-on: ubuntu-latest
if: always() && fromJSON(needs.generate-matrix.outputs.build-reports) && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
needs:
- generate-matrix
- test
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
- name: "Throttle Builds"
shell: bash
run: |
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
- name: Wait For Artifacts
run: |
sleep 60
- name: Merge Test Run Artifacts
continue-on-error: true
uses: actions/upload-artifact/merge@v4
with:
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}
pattern: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}-*
separate-directories: true
delete-merged: true
- name: Wait For Artifacts 2
run: |
sleep 60
- name: Download Test Run Artifacts
id: download-test-run-artifacts
uses: actions/download-artifact@v4
with:
path: artifacts/
pattern: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}*
merge-multiple: true
- name: Show Test Run Artifacts
if: always() && steps.download-test-run-artifacts.outcome == 'success'
run: |
tree -a artifacts
- name: Set up Python ${{ inputs.python-version }}
uses: actions/setup-python@v5
with:
python-version: "${{ inputs.python-version }}"
- name: Install Nox
run: |
python3 -m pip install 'nox==${{ inputs.nox-version }}'
env:
PIP_INDEX_URL: https://pypi.org/simple

View file

@ -1,277 +0,0 @@
name: Test Artifact
on:
workflow_call:
inputs:
distro-slug:
required: true
type: string
description: The OS slug to run tests against
platform:
required: true
type: string
description: The platform being tested
arch:
required: true
type: string
description: The platform arch being tested
pkg-type:
required: true
type: string
description: The platform arch being tested
salt-version:
type: string
required: true
description: The Salt version of the packages to install and test
cache-prefix:
required: true
type: string
description: Seed used to invalidate caches
testing-releases:
required: true
type: string
description: A JSON list of releases to test upgrades against
nox-version:
required: true
type: string
description: The nox version to install
python-version:
required: false
type: string
description: The python version to run tests with
default: "3.10"
fips:
required: false
type: boolean
default: false
description: Test run with FIPS enabled
package-name:
required: false
type: string
description: The onedir package name to use
default: salt
nox-session:
required: false
type: string
description: The nox session to run
default: ci-test-onedir
skip-code-coverage:
required: false
type: boolean
description: Skip code coverage
default: false
env:
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
PIP_DISABLE_PIP_VERSION_CHECK: "1"
RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
jobs:
generate-matrix:
name: Generate Matrix
runs-on:
# We need to run on our self-hosted runners because we need proper credentials
# for boto3 to scan through our repositories.
- self-hosted
- linux
- x86_64
outputs:
pkg-matrix-include: ${{ steps.generate-pkg-matrix.outputs.matrix }}
build-reports: ${{ steps.generate-pkg-matrix.outputs.build-reports }}
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
- name: Checkout Source Code
uses: actions/checkout@v4
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-prefix }}
- name: Generate Package Test Matrix
id: generate-pkg-matrix
run: |
tools ci pkg-matrix ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \
${{ inputs.pkg-type }} --testing-releases ${{ join(fromJSON(inputs.testing-releases), ' ') }}
test:
name: Test
runs-on:
- self-hosted
- linux
- bastion
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
needs:
- generate-matrix
strategy:
fail-fast: false
matrix:
include: ${{ fromJSON(needs.generate-matrix.outputs.pkg-matrix-include) }}
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
- name: "Set `TIMESTAMP` environment variable"
shell: bash
run: |
echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
- name: Checkout Source Code
uses: actions/checkout@v4
- name: Download Packages
uses: actions/download-artifact@v4
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-${{ inputs.arch }}-${{ inputs.pkg-type }}
path: artifacts/pkg/
- name: Download Onedir Tarball as an Artifact
uses: actions/download-artifact@v4
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
path: artifacts/
- name: Decompress Onedir Tarball
shell: bash
run: |
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz
- name: List Packages
run: |
tree artifacts/pkg/
- name: Download nox.windows.${{ inputs.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
uses: actions/download-artifact@v4
with:
name: nox-windows-${{ inputs.arch }}-${{ inputs.nox-session }}
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-prefix }}
- name: Get Salt Project GitHub Actions Bot Environment
run: |
TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30")
SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment)
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV"
- name: Start VM
id: spin-up-vm
run: |
tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }}
- name: List Free Space
run: |
tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true
- name: Upload Checkout To VM
run: |
tools --timestamps vm rsync ${{ inputs.distro-slug }}
- name: Decompress .nox Directory
run: |
tools --timestamps vm decompress-dependencies ${{ inputs.distro-slug }}
- name: Show System Info
run: |
tools --timestamps --timeout-secs=1800 vm test --skip-requirements-install --print-system-information-only \
--nox-session=${{ inputs.nox-session }}-pkgs ${{ inputs.distro-slug }} -- ${{ matrix.tests-chunk }}
- name: Run Package Tests
run: |
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install ${{ matrix.fips && '--fips ' || '' }}\
--nox-session=${{ inputs.nox-session }}-pkgs --rerun-failures ${{ inputs.distro-slug }} -- ${{ matrix.tests-chunk }} \
${{ matrix.version && format('--prev-version={0}', matrix.version) || ''}}
- name: Download Test Run Artifacts
id: download-artifacts-from-vm
if: always() && steps.spin-up-vm.outcome == 'success'
run: |
tools --timestamps vm download-artifacts ${{ inputs.distro-slug }}
# Delete the salt onedir, we won't need it anymore and it will prevent
# from it showing in the tree command below
rm -rf artifacts/salt*
tree -a artifacts
- name: Destroy VM
if: always()
run: |
tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }} || true
- name: Upload Test Run Artifacts
if: always() && steps.download-artifacts-from-vm.outcome == 'success'
uses: actions/upload-artifact@v4
with:
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}-${{ inputs.arch }}-${{ matrix.tests-chunk }}-${{ matrix.version || 'no-version'}}-${{ env.TIMESTAMP }}
path: |
artifacts/
!artifacts/pkg/*
!artifacts/salt/*
!artifacts/salt-*.tar.*
include-hidden-files: true
report:
name: Report
runs-on: ubuntu-latest
if: always() && fromJSON(needs.generate-matrix.outputs.build-reports) && needs.test.result != 'cancelled' && needs.test.result != 'skipped'
needs:
- generate-matrix
- test
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
- name: "Throttle Builds"
shell: bash
run: |
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
- name: Wait For Artifacts
run: |
sleep 60
- name: Merge Test Run Artifacts
uses: actions/upload-artifact/merge@v4
continue-on-error: true
with:
name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}
pattern: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}-*
separate-directories: true
delete-merged: true
- name: Wait For Artifacts 2
run: |
sleep 60
- name: Download Test Run Artifacts
id: download-test-run-artifacts
uses: actions/download-artifact@v4
with:
path: artifacts/
pattern: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ inputs.pkg-type }}*
merge-multiple: true
- name: Show Test Run Artifacts
if: always() && steps.download-test-run-artifacts.outcome == 'success'
run: |
tree -a artifacts

View file

@ -0,0 +1,516 @@
---
name: Test Packages
on:
workflow_call:
inputs:
salt-version:
type: string
required: true
description: The Salt version of the packages to install and test
cache-prefix:
required: true
type: string
description: Seed used to invalidate caches
testing-releases:
required: true
type: string
description: A JSON list of releases to test upgrades against
nox-version:
required: true
type: string
description: The nox version to install
python-version:
required: false
type: string
description: The python version to run tests with
default: "3.10"
nox-session:
required: false
type: string
description: The nox session to run
default: ci-test-onedir
skip-code-coverage:
required: false
type: boolean
description: Skip code coverage
default: false
package-name:
required: false
type: string
description: The onedir package name to use
default: salt
matrix:
required: true
type: string
description: Json job matrix config
linux_arm_runner:
required: true
type: string
description: Json job matrix config
env:
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: ${{ vars.PIP_INDEX_URL }}
PIP_TRUSTED_HOST: ${{ vars.PIP_TRUSTED_HOST }}
PIP_EXTRA_INDEX_URL: ${{ vars.PIP_EXTRA_INDEX_URL }}
PIP_DISABLE_PIP_VERSION_CHECK: "1"
RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1"
USE_S3_CACHE: 'false'
jobs:
test-linux:
name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }}
runs-on: ${{ matrix.arch == 'x86_64' && 'ubuntu-24.04' || inputs.linux_arm_runner }}
if: ${{ toJSON(fromJSON(inputs.matrix)['linux']) != '[]' }}
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
strategy:
fail-fast: false
matrix:
include: ${{ fromJSON(inputs.matrix)['linux'] }}
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
- name: "Set `TIMESTAMP` environment variable"
shell: bash
run: |
echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
- name: Checkout Source Code
uses: actions/checkout@v4
- name: Download Packages
uses: actions/download-artifact@v4
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-${{ matrix.arch }}-${{ matrix.pkg_type }}
path: artifacts/pkg/
- name: Download Onedir Tarball as an Artifact
uses: actions/download-artifact@v4
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
path: artifacts/
- name: Decompress Onedir Tarball
shell: bash
run: |
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
- name: Set up Python ${{ inputs.python-version }}
uses: actions/setup-python@v5
with:
python-version: "${{ inputs.python-version }}"
- name: Install Nox
run: |
python3 -m pip install 'nox==${{ inputs.nox-version }}'
env:
PIP_INDEX_URL: https://pypi.org/simple
- name: List Packages
run: |
tree artifacts/pkg/
- name: Download nox.linux.${{ matrix.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
uses: actions/download-artifact@v4
with:
name: nox-linux-${{ matrix.arch }}-${{ inputs.nox-session }}
- name: "Ensure docker is running"
run: |
sudo systemctl start containerd || exit 0
- name: "Pull container ${{ matrix.container }}"
run: |
docker pull ${{ matrix.container }}
- name: "Create container ${{ matrix.container }}"
run: |
/usr/bin/docker create --name ${{ github.run_id }}_salt-test-pkg --workdir /__w/salt/salt --privileged -e "HOME=/github/home" -e GITHUB_ACTIONS=true -e CI=true -v "/var/run/docker.sock":"/var/run/docker.sock" -v "/home/runner/work":"/__w" -v "/home/runner/work/_temp":"/__w/_temp" -v "/home/runner/work/_actions":"/__w/_actions" -v "/opt/hostedtoolcache":"/__t" -v "/home/runner/work/_temp/_github_home":"/github/home" -v "/home/runner/work/_temp/_github_workflow":"/github/workflow" --entrypoint "/usr/lib/systemd/systemd" ${{ matrix.container }} --systemd --unit rescue.target
- name: "Start container ${{ matrix.container }}"
run: |
/usr/bin/docker start ${{ github.run_id }}_salt-test-pkg
- name: Decompress .nox Directory
run: |
docker exec ${{ github.run_id}}_salt-test-pkg python3 -m nox --force-color -e decompress-dependencies -- linux ${{ matrix.arch }}
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
with:
cache-prefix: ${{ inputs.cache-prefix }}
- name: List Free Space
run: |
df -h || true
- name: Show System Info
env:
SKIP_REQUIREMENTS_INSTALL: "1"
PRINT_SYSTEM_INFO_ONLY: "1"
run: |
docker exec ${{ github.run_id }}_salt-test-pkg python3 -m nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }}
- name: Run Package Tests
env:
SKIP_REQUIREMENTS_INSTALL: "1"
RERUN_FAILURES: "1"
GITHUB_ACTIONS_PIPELINE: "1"
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
COVERAGE_CONTEXT: ${{ matrix.slug }}
run: |
/usr/bin/docker exec ${{ github.run_id }}_salt-test-pkg \
python3 -m nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }} \
${{ matrix.version && format('--prev-version={0}', matrix.version) || ''}}
- name: Upload Test Run Log Artifacts
if: always()
uses: actions/upload-artifact@v4
with:
name: pkg-testrun-log-artifacts-${{ matrix.slug }}-${{ inputs.nox-session }}${{ matrix.fips && '-fips' || '' }}-${{ matrix.pkg_type }}-${{ matrix.arch }}-${{ matrix.tests-chunk }}-${{ matrix.version || 'no-version'}}-${{ env.TIMESTAMP }}
path: |
artifacts/logs
include-hidden-files: true
- name: Upload Test Run Artifacts
if: always()
uses: actions/upload-artifact@v4
with:
name: pkg-testrun-artifacts-${{ matrix.slug }}${{ matrix.fips && '-fips' || '' }}-${{ matrix.pkg_type }}-${{ matrix.arch }}-${{ matrix.tests-chunk }}-${{ matrix.version || 'no-version'}}-${{ env.TIMESTAMP }}
path: |
artifacts/
!artifacts/pkg/*
!artifacts/salt/*
!artifacts/salt-*.tar.*
include-hidden-files: true
test-macos:
name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }}
runs-on: ${{ matrix.runner }}
if: ${{ toJSON(fromJSON(inputs.matrix)['macos']) != '[]' }}
timeout-minutes: 150 # 2 & 1/2 Hours - More than this and something is wrong (MacOS needs a little more time)
strategy:
fail-fast: false
matrix:
include: ${{ fromJSON(inputs.matrix)['macos'] }}
steps:
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
- name: "Set `TIMESTAMP` environment variable"
shell: bash
run: |
echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
- name: Checkout Source Code
uses: actions/checkout@v4
- name: Download Packages
uses: actions/download-artifact@v4
with:
name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-macos
path: artifacts/pkg/
- name: Install System Dependencies
run: |
brew install tree
- name: List Packages
run: |
tree artifacts/pkg/
- name: Download Onedir Tarball as an Artifact
# NOTE(review): tail of a macOS package-test job whose header is above this
# excerpt; leading indentation was lost in extraction — restore the original
# nesting before use.
# Fetch the pre-built Salt onedir tarball produced earlier in the pipeline.
uses: actions/download-artifact@v4
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
path: artifacts/
# Unpack the onedir tarball into artifacts/ so the package tests can target it.
- name: Decompress Onedir Tarball
shell: bash
run: |
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
- name: Set up Python ${{ inputs.python-version }}
uses: actions/setup-python@v5
with:
python-version: "${{ inputs.python-version }}"
# Install the exact nox version supplied by the calling workflow.
- name: Install Nox
run: |
python3 -m pip install 'nox==${{ inputs.nox-version }}'
env:
PIP_INDEX_URL: https://pypi.org/simple
# Pre-built .nox dependency tree for this session, built in an earlier job.
- name: Download nox.macos.${{ matrix.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
uses: actions/download-artifact@v4
with:
name: nox-macos-${{ matrix.arch }}-${{ inputs.nox-session }}
- name: Decompress .nox Directory
run: |
nox --force-color -e decompress-dependencies -- macos ${{ matrix.arch }}
# Dry pass: print system info only; dependencies already unpacked above, so
# the in-session requirements install is skipped.
- name: Show System Info
env:
SKIP_REQUIREMENTS_INSTALL: "1"
PRINT_SYSTEM_INFO_ONLY: "1"
run: |
sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }}
# Main test run; --prev-version is only passed for matrix entries that carry
# a `version` value (upgrade/downgrade scenarios).
- name: Run Package Tests
env:
SKIP_REQUIREMENTS_INSTALL: "1"
PRINT_TEST_SELECTION: "0"
PRINT_TEST_PLAN_ONLY: "0"
PRINT_SYSTEM_INFO: "0"
RERUN_FAILURES: "1"
GITHUB_ACTIONS_PIPELINE: "1"
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
COVERAGE_CONTEXT: ${{ matrix.slug }}
run: |
sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }} \
${{ matrix.version && format('--prev-version={0}', matrix.version) || ''}}
# Tests ran under sudo, so reclaim file ownership for the upload steps below.
- name: Fix file ownership
run: |
sudo chown -R "$(id -un)" .
# `if: always()` so artifacts are prepared even when the test step failed.
- name: Prepare Test Run Artifacts
id: download-artifacts-from-vm
if: always()
run: |
# Delete the salt onedir, we won't need it anymore and it will prevent
# from it showing in the tree command below
rm -rf artifacts/salt*
tree -a artifacts
- name: Upload Test Run Log Artifacts
if: always()
uses: actions/upload-artifact@v4
with:
name: pkg-testrun-log-artifacts-${{ matrix.slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-${{ matrix.version || 'no-version'}}-${{ env.TIMESTAMP }}
path: |
artifacts/logs
include-hidden-files: true
# Upload everything under artifacts/ except the package and onedir payloads,
# which are excluded with `!` patterns.
- name: Upload Test Run Artifacts
if: always()
uses: actions/upload-artifact@v4
with:
name: pkg-testrun-artifacts-${{ matrix.slug }}-${{ matrix.pkg_type }}-${{ matrix.arch }}-${{ matrix.tests-chunk }}-${{ matrix.version || 'no-version'}}-${{ env.TIMESTAMP }}
path: |
artifacts/
!artifacts/pkg/*
!artifacts/salt/*
!artifacts/salt-*.tar.*
include-hidden-files: true
# Windows package-test job. One matrix entry per slug/arch/pkg_type/tests-chunk
# combination, driven by the `windows` key of the caller-supplied matrix input.
# NOTE(review): leading indentation was lost in extraction — restore the
# original nesting before use.
test-windows:
name: ${{ matrix.display_name }} ${{ matrix.tests-chunk }}
runs-on: ${{ matrix.slug }}
timeout-minutes: 120 # 2 Hours - More than this and something is wrong
# Skip the whole job when the windows matrix is empty.
if: ${{ toJSON(fromJSON(inputs.matrix)['windows']) != '[]' }}
strategy:
# Let the remaining matrix entries finish even if one chunk fails.
fail-fast: false
matrix:
include: ${{ fromJSON(inputs.matrix)['windows'] }}
steps:
- name: Set up Python ${{ inputs.python-version }}
uses: actions/setup-python@v5
with:
python-version: "${{ inputs.python-version }}"
# Random 1-15s sleep to stagger concurrent matrix jobs.
- name: "Throttle Builds"
shell: bash
run: |
t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t"
# TIMESTAMP is appended to artifact names below to keep them unique.
- name: "Set `TIMESTAMP` environment variable"
shell: bash
run: |
echo "TIMESTAMP=$(date +%s)" | tee -a "$GITHUB_ENV"
- name: Checkout Source Code
uses: actions/checkout@v4
# Fetch the built Salt packages (MSI/NSIS etc., per pkg_type) under test.
- name: Download Packages
uses: actions/download-artifact@v4
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-${{ matrix.arch }}-${{ matrix.pkg_type }}
path: ./artifacts/pkg/
- name: Download Onedir Tarball as an Artifact
uses: actions/download-artifact@v4
with:
name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
path: ./artifacts/
- name: Decompress Onedir Tarball
shell: bash
run: |
python3 -c "import os; os.makedirs('artifacts', exist_ok=True)"
cd artifacts
tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz
- name: Install Nox
run: |
python3 -m pip install 'nox==${{ inputs.nox-version }}'
env:
PIP_INDEX_URL: https://pypi.org/simple
- run: python3 --version
# Pre-built .nox dependency tree for this session, built in an earlier job.
- name: Download nox.windows.${{ matrix.arch }}.tar.* artifact for session ${{ inputs.nox-session }}
uses: actions/download-artifact@v4
with:
name: nox-windows-${{ matrix.arch }}-${{ inputs.nox-session }}
- name: Decompress .nox Directory
run: |
nox --force-color -e decompress-dependencies -- windows ${{ matrix.arch }}
# Debug aids: confirm the expected layout and interpreters exist before
# running tests; failures here are tolerated (continue-on-error).
- name: List Important Directories
run: |
dir d:/
dir .
dir artifacts/
dir artifacts/pkg
dir .nox/ci-test-onedir/Scripts
- name: Check onedir python
continue-on-error: true
run: |
artifacts/salt/Scripts/python.exe --version
- name: Check nox python
continue-on-error: true
run: |
.nox/ci-test-onedir/Scripts/python.exe --version
# Dry pass: print system info only; in-session requirements install skipped.
- name: Show System Info
env:
SKIP_REQUIREMENTS_INSTALL: "1"
SKIP_CODE_COVERAGE: "1"
PRINT_SYSTEM_INFO_ONLY: "1"
PYTHONUTF8: "1"
run: |
nox --force-color -f noxfile.py -e "${{ inputs.nox-session }}-pkgs" -- '${{ matrix.tests-chunk }}' --log-cli-level=debug
# Main test run; --prev-version is only passed for matrix entries that carry
# a `version` value (upgrade/downgrade scenarios).
- name: Run Package Tests
env:
SKIP_REQUIREMENTS_INSTALL: "1"
PRINT_TEST_SELECTION: "0"
PRINT_TEST_PLAN_ONLY: "0"
PRINT_SYSTEM_INFO: "0"
RERUN_FAILURES: "1"
GITHUB_ACTIONS_PIPELINE: "1"
SKIP_INITIAL_ONEDIR_FAILURES: "1"
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1"
COVERAGE_CONTEXT: ${{ matrix.slug }}
OUTPUT_COLUMNS: "190"
PYTHONUTF8: "1"
run: >
nox --force-color -f noxfile.py -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }}
${{ matrix.version && format('--prev-version={0}', matrix.version) || ''}}
# `if: always()` so artifacts are prepared even when the test step failed.
# When coverage is enabled, rename the .coverage data file so per-matrix
# files can later be merged without collisions.
- name: Prepare Test Run Artifacts
id: download-artifacts-from-vm
if: always()
shell: bash
run: |
# Delete the salt onedir, we won't need it anymore and it will prevent
# from it showing in the tree command below
rm -rf artifacts/salt*
if [ "${{ inputs.skip-code-coverage }}" != "true" ]; then
mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ matrix.slug }}.${{ inputs.nox-session }}.${{ matrix.transport }}.${{ matrix.tests-chunk }}
fi
- name: Upload Test Run Log Artifacts
if: always()
uses: actions/upload-artifact@v4
with:
name: pkg-testrun-log-artifacts-${{ matrix.slug }}-${{ inputs.nox-session }}-${{ matrix.transport }}-${{ matrix.tests-chunk }}-${{ matrix.version || 'no-version'}}-${{ env.TIMESTAMP }}
path: |
artifacts/logs
include-hidden-files: true
# Upload everything under artifacts/ except the package and onedir payloads,
# which are excluded with `!` patterns.
- name: Upload Test Run Artifacts
if: always()
uses: actions/upload-artifact@v4
with:
name: pkg-testrun-artifacts-${{ matrix.slug }}-${{ matrix.pkg_type }}-${{ matrix.arch }}-${{ matrix.tests-chunk }}-${{ matrix.version || 'no-version'}}-${{ env.TIMESTAMP }}
path: |
artifacts/
!artifacts/pkg/*
!artifacts/salt/*
!artifacts/salt-*.tar.*
include-hidden-files: true
# Reporting job: merges the per-chunk package-test artifacts produced by the
# test jobs above into one artifact per slug/pkg_type, then downloads and
# lists them. NOTE: `if: ${{ false }}` means this job is currently disabled
# unconditionally. Leading indentation was lost in extraction — restore the
# original nesting before use.
report:
name: Report
runs-on: ubuntu-22.04
# Hard-disabled; kept in the workflow for future re-enablement.
if: ${{ false }}
needs:
- test-linux
- test-macos
- test-windows
strategy:
matrix:
include: ${{ fromJSON(inputs.matrix)['linux'] }}
steps:
- name: Checkout Source Code
uses: actions/checkout@v4
# Random 1-30s sleep to stagger concurrent matrix jobs.
- name: "Throttle Builds"
shell: bash
run: |
t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t"
# Fixed waits give the upstream uploads time to finish propagating.
- name: Wait For Artifacts
run: |
sleep 60
# Merge all per-chunk artifacts matching the pattern into a single artifact,
# deleting the originals (delete-merged) to save storage.
- name: Merge Test Run Artifacts
continue-on-error: true
uses: actions/upload-artifact/merge@v4
with:
name: pkg-testrun-artifacts-${{ matrix.slug }}${{ matrix.fips && '-fips' || '' }}-${{ matrix.pkg_type }}
pattern: pkg-testrun-artifacts-${{ matrix.slug }}${{ matrix.fips && '-fips' || '' }}-${{ matrix.pkg_type }}-*
separate-directories: true
delete-merged: true
- name: Wait For Artifacts 2
run: |
sleep 60
- name: Download Test Run Artifacts
id: download-test-run-artifacts
uses: actions/download-artifact@v4
with:
path: artifacts/
pattern: pkg-testrun-artifacts-${{ matrix.slug }}${{ matrix.fips && '-fips' || '' }}-${{ matrix.pkg_type }}*
merge-multiple: true
- name: Show Test Run Artifacts
if: always()
run: |
tree -a artifacts

30
.github/workflows/workflow-finished.yml vendored Normal file
View file

@ -0,0 +1,30 @@
# Auto-retry workflow: fires after CI/Nightly/Scheduled/Stage Release complete
# and re-runs only the failed jobs, capped at 5 attempts per run.
# NOTE(review): leading indentation was lost in extraction — restore the
# original nesting before use.
name: Workflow Finished
run-name: Workflow Finished ${{ github.event.workflow_run.display_title }} (${{ github.event.workflow_run.conclusion }})
on:
workflow_run:
workflows:
- CI
- Nightly
- Scheduled
- Stage Release
types:
- completed
permissions:
contents: read
pull-requests: read
# `actions: write` is required for `gh run rerun`.
actions: write
jobs:
restart-failed-jobs:
runs-on: ubuntu-latest
# Only retry genuine failures, and stop after the 5th attempt to avoid
# rerunning forever.
if: ${{ github.event.workflow_run.conclusion == 'failure' && github.event.workflow_run.run_attempt < 5 }}
steps:
- name: Restart failed jobs
env:
GH_REPO: ${{ github.repository }}
GH_TOKEN: ${{ github.token }}
run: |
gh run rerun ${{ github.event.workflow_run.id }} --failed

View file

@ -59,7 +59,7 @@ repos:
- id: tools - id: tools
alias: generate-workflows alias: generate-workflows
name: Generate GitHub Workflow Templates name: Generate GitHub Workflow Templates
files: ^(cicd/shared-gh-workflows-context\.yml|tools/precommit/workflows\.py|.github/workflows/.*)$ files: ^(cicd/shared-gh-workflows-context\.yml|tools/utils/__init__.py|tools/precommit/workflows\.py|.github/workflows/.*)$
pass_filenames: false pass_filenames: false
args: args:
- pre-commit - pre-commit

112
AUTHORS
View file

@ -8,114 +8,28 @@ Whos Who in Salt
The Man With the Plan The Man With the Plan
---------------------------- ----------------------------
Thomas S. Hatch is the creator of Salt. He was the founder, owner,
and maintainer who led the Salt project, as well as author of the majority
of the initial Salt code and documentation.
SaltStack, Inc. was acquired by VMware in 2020. In 2023, VMware was
acquired by Broadcom.
The Salt Project core team of developers are employed by Broadcom.
Documentation System Documentation System
---------------------------- ----------------------------
The initial documentation system was put together by Seth House.
Developers
----------------------------
Aaron Bull Schaefer <aaron@elasticdog.com>
Aaron Toponce <aaron.toponce@gmail.com>
Andrew Hammond <andrew.george.hammond@gmail.com>
Aditya Kulkarni <adi@saltstack.com>
Alexander Pyatkin <asp@thexyz.net>
Andre Sachs <andre@sachs.nom.za>
Andrew Colin Kissa <andrew@topdog.za.net>
Andrew Kuhnhausen <trane@errstr.com>
Antti Kaihola <akaihol+github@ambitone.com>
archme <archme.mail@gmail.com>
Brad Barden <brad@mifflinet.net>
Bret Palsson <bretep@gmail.com>
Brian Wagner <wags@wagsworld.net>
C. R. Oldham <cr@saltstack.com>
Carl Loa Odin <carlodin@gmail.com>
Carlo Pires <carlopires@gmail.com>
Chris Rebert <chris.rebert@hulu.com>
Chris Scheller <schelcj@umich.edu>
Christer Edwards <christer.edwards@gmail.com>
Clint Savage <herlo1@gmail.com>
Colton Myers <cmyers@saltstack.com>
Corey Quinn <corey@sequestered.net>
Corin Kochenower <ckochenower@saltstack.com>
Dan Garthwaite <dan@garthwaite.org>
Daniel Wallace <danielwallace at gtmanfred dot com>
David Boucha <boucha@gmail.com>
David Pravec <alekibango@pravec.tk>
deutsche
Dmitry Kuzmenko <dkuzmenko@saltstack.com>
Doug Renn <renn@nestegg.com>
Eivind Uggedal <eivind@uggedal.com>
epoelke@gmail.com <epoelke@heartflow.com>
Eric Poelke <epoelke@gmail.com>
Erik Nolte <enolte@beyondoblivion.com>
Evan Borgstrom <evan@fatbox.ca>
Forrest Alvarez <forrest.alvarez@gmail.com>
Fred Reimer <freimer@freimer.org>
Henrik Holmboe <henrik@holmboe.se>
Gareth J. Greenaway <gareth@wiked.org>
Jacob Albretsen <jakea@xmission.com>
Jed Glazner <jglazner@coldcrow.com>
Jeff Bauer <jbauer@rubic.com>
Jeff Hutchins <jhutchins@getjive.com>
Jeffrey C. Ollie <jeff@ocjtech.us>
Jeff Schroeder <jeffschroeder@computer.org>
Johnny Bergström
Jonas Buckner <buckner.jonas@gmail.com>
Jonathan Harker <k.jonathan.harker@hp.com>
Joseph Hall <joseph@saltstack.com>
Josmar Dias <josmarnet@gmail.com>
Kent Tenney <ktenney@gmail.com>
lexual
Marat Shakirov
Marc Abramowitz <marc+github@marc-abramowitz.com>
Martin Schnabel <mb0@mb0.org>
Mathieu Le Marec - Pasquet <kiorky@cryptelium.net>
Matt Black
Matthew Printz <hipokrit@gmail.com>
Matthias Teege <matthias-git@mteege.de>
Maxim Burgerhout <maxim@wzzrd.com>
Mickey Malone <mickey.malone@gmail.com>
Michael Steed <msteed@saltstack.com>
Mike Place <mp@saltstack.com>
Mircea Ulinic <ping@mirceaulinic.net>
Mitch Anderson <mitch@metauser.net>
Mostafa Hussein <mostafa.hussein91@gmail.com>
Nathaniel Whiteinge <seth@eseth.com>
Nicolas Delaby <nicolas.delaby@ezeep.com>
Nicole Thomas <nicole@saltstack.com>
Nigel Owen <nigelowen2.gmail.com>
Nitin Madhok <nmadhok@g.clemson.edu>
Oleg Anashkin <oleg.anashkin@gmail.com>
Pedro Algarvio <pedro@algarvio.me>
Peter Baumgartner
Pierre Carrier <pierre@spotify.com>
Rhys Elsmore <me@rhys.io>
Rafael Caricio <rafael@caricio.com>
Robert Fielding
Sean Channel <pentabular@gmail.com>
Seth House <seth@eseth.com>
Seth Vidal <skvidal@fedoraproject.org>
Stas Alekseev <stas.alekseev@gmail.com>
Thibault Cohen <titilambert@gmail.com>
Thomas Schreiber <tom@rizumu.us>
Thomas S Hatch <thatch45@gmail.com>
Tor Hveem <xt@bash.no>
Travis Cline <travis.cline@gmail.com>
Wieland Hoffmann <themineo+github@gmail.com>
Documentation is now primarily maintained by the Salt Project core team and
community members.
Growing Community Growing Community
-------------------------------- --------------------------------
Salt is a rapidly growing project with a large community, and has had more than
2,400 contributors over the years. To view all contributors, please check Github:
https://github.com/saltstack/salt/graphs/contributors

View file

@ -60,7 +60,7 @@ representative at an online or offline event.
Instances of abusive, harassing, or otherwise unacceptable behavior may be Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at reported to the community leaders responsible for enforcement at
conduct@saltstack.com. saltproject.pdl@broadcom.com.
All complaints will be reviewed and investigated promptly and fairly. All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the All community leaders are obligated to respect the privacy and security of the

View file

@ -8,7 +8,7 @@ ways you can help improve Salt:
- Use Salt and report bugs with clear, detailed descriptions. - Use Salt and report bugs with clear, detailed descriptions.
- Join a `working group <https://github.com/saltstack/community>`__ to - Join a `working group <https://github.com/saltstack/community>`__ to
collaborate with other contributors. collaborate with other contributors.
- Answer questions on platforms like `IRC <https://web.libera.chat/#salt>`__, - Answer questions on platforms like
the `community Discord <https://discord.com/invite/J7b7EscrAs>`__, the `community Discord <https://discord.com/invite/J7b7EscrAs>`__,
the `salt-users mailing list <https://groups.google.com/forum/#!forum/salt-users>`__, the `salt-users mailing list <https://groups.google.com/forum/#!forum/salt-users>`__,
`Server Fault <https://serverfault.com/questions/tagged/saltstack>`__, `Server Fault <https://serverfault.com/questions/tagged/saltstack>`__,
@ -135,7 +135,7 @@ Then activate it:
Sweet! Now you're ready to clone Salt so you can start hacking away! If Sweet! Now you're ready to clone Salt so you can start hacking away! If
you get stuck at any point, check out the resources at the beginning of you get stuck at any point, check out the resources at the beginning of
this guide. IRC and Discord are particularly helpful places to go. this guide. Discord and GitHub Discussions are particularly helpful places to go.
Get the source! Get the source!
@ -631,7 +631,7 @@ your PR is submitted during the week you should be able to expect some
kind of communication within that business day. If your tests are kind of communication within that business day. If your tests are
passing and we're not in a code freeze, ideally your code will be merged passing and we're not in a code freeze, ideally your code will be merged
that week or month. If you haven't heard from your assigned reviewer, ping them that week or month. If you haven't heard from your assigned reviewer, ping them
on GitHub, `irc <https://web.libera.chat/#salt>`__, or Community Discord. on GitHub or `Community Discord <https://discord.com/invite/J7b7EscrAs>`__.
It's likely that your reviewer will leave some comments that need It's likely that your reviewer will leave some comments that need
addressing - it may be a style change, or you forgot a changelog entry, addressing - it may be a style change, or you forgot a changelog entry,

View file

@ -6,18 +6,10 @@
:alt: PyPi Package Downloads :alt: PyPi Package Downloads
:target: https://pypi.org/project/salt :target: https://pypi.org/project/salt
.. image:: https://img.shields.io/lgtm/grade/python/github/saltstack/salt
:alt: PyPi Package Downloads
:target: https://lgtm.com/projects/g/saltstack/salt/context:python
.. image:: https://img.shields.io/badge/discord-SaltProject-blue.svg?logo=discord .. image:: https://img.shields.io/badge/discord-SaltProject-blue.svg?logo=discord
:alt: Salt Project Discord Community :alt: Salt Project Discord Community
:target: https://discord.com/invite/J7b7EscrAs :target: https://discord.com/invite/J7b7EscrAs
.. image:: https://img.shields.io/twitch/status/saltprojectoss
:alt: Salt Project Twitch Channel
:target: https://www.twitch.tv/saltprojectoss
.. image:: https://img.shields.io/reddit/subreddit-subscribers/saltstack?style=social .. image:: https://img.shields.io/reddit/subreddit-subscribers/saltstack?style=social
:alt: Salt Project subreddit :alt: Salt Project subreddit
:target: https://www.reddit.com/r/saltstack/ :target: https://www.reddit.com/r/saltstack/
@ -71,20 +63,21 @@ In addition to configuration management Salt can also:
About our sponsors About our sponsors
================== ==================
Salt powers VMware's `VMware Aria Automation Config`_
(previously vRealize Automation SaltStack Config / SaltStack Enterprise), and can be found Salt powers VMware by Broadcom's `Tanzu Salt`_
(previously Aria Automation Config / vRealize Automation SaltStack Config / SaltStack Enterprise), and can be found
under the hood of products from Juniper, Cisco, Cloudflare, Nutanix, SUSE, and under the hood of products from Juniper, Cisco, Cloudflare, Nutanix, SUSE, and
Tieto, to name a few. Tieto, to name a few.
The original sponsor of our community, SaltStack, was `acquired by VMware in 2020 <https://www.vmware.com/company/acquisitions/saltstack.html>`_. The original sponsor of our community, SaltStack, was acquired by VMware in 2020.
The Salt Project remains an open source ecosystem that VMware supports and `VMware was later acquired by Broadcom in 2023 <https://investors.broadcom.com/news-releases/news-release-details/broadcom-completes-acquisition-vmware>`__.
contributes to. VMware ensures the code integrity and quality of the Salt The Salt Project remains an open source ecosystem that Broadcom supports and
contributes to. Broadcom ensures the code integrity and quality of the Salt
modules by acting as the official sponsor and manager of the Salt project. Many modules by acting as the official sponsor and manager of the Salt project. Many
of the core Salt Project contributors are also VMware employees. This team of the core Salt Project contributors are also Broadcom employees. This team
carefully reviews and enhances the Salt modules to ensure speed, quality, and carefully reviews and enhances the Salt modules to ensure speed, quality, and
security. security.
Download and install Salt Download and install Salt
========================= =========================
Salt is tested and packaged to run on CentOS, Debian, RHEL, Ubuntu, MacOS, Salt is tested and packaged to run on CentOS, Debian, RHEL, Ubuntu, MacOS,
@ -93,9 +86,11 @@ Windows, and more. Download Salt and get started now. See
for more information. for more information.
To download and install Salt, see: To download and install Salt, see:
* `The Salt install guide <https://docs.saltproject.io/salt/install-guide/en/latest/index.html>`_
* `Salt Project repository <https://repo.saltproject.io/>`_
* `The Salt install guide <https://docs.saltproject.io/salt/install-guide/en/latest/index.html>`_
* `Salt Project Repository: Linux (RPM) <https://packages.broadcom.com/artifactory/saltproject-rpm>`__ - Where Salt ``rpm`` packages are officially stored and distributed.
* `Salt Project Repository: Linux (DEB) <https://packages.broadcom.com/artifactory/saltproject-deb>`__ - Where Salt ``deb`` packages are officially stored and distributed.
* `Salt Project Repository: GENERIC <https://packages.broadcom.com/artifactory/saltproject-generic>`__ - Where Salt Windows, macOS, etc. (non-rpm, non-deb) packages are officially stored and distributed.
Technical support Technical support
================= =================
@ -153,11 +148,9 @@ Please be sure to review our
`Code of Conduct <https://github.com/saltstack/salt/blob/master/CODE_OF_CONDUCT.md>`_. `Code of Conduct <https://github.com/saltstack/salt/blob/master/CODE_OF_CONDUCT.md>`_.
Also, check out some of our community resources including: Also, check out some of our community resources including:
* `Salt Project Community Wiki <https://github.com/saltstack/community/wiki>`_
* `Salt Project Community Discord`_ * `Salt Project Community Discord`_
* `Salt Project: IRC on LiberaChat <https://web.libera.chat/#salt>`_
* `Salt Project YouTube channel <https://www.youtube.com/channel/UCpveTIucFx9ljGelW63-BWg>`_ * `Salt Project YouTube channel <https://www.youtube.com/channel/UCpveTIucFx9ljGelW63-BWg>`_
* `Salt Project Twitch channel <https://www.twitch.tv/saltprojectoss>`_ * `Salt Project Community Notes and Wiki <https://github.com/saltstack/community/>`_
There are lots of ways to get involved in our community. Every month, there are There are lots of ways to get involved in our community. Every month, there are
around a dozen opportunities to meet with other contributors and the Salt Core around a dozen opportunities to meet with other contributors and the Salt Core
@ -165,10 +158,9 @@ team and collaborate in real time. The best way to keep track is by subscribing
to the **Salt Project Community Events Calendar** on the main to the **Salt Project Community Events Calendar** on the main
`<https://saltproject.io>`_ website. `<https://saltproject.io>`_ website.
If you have additional questions, email us at saltproject@vmware.com or reach out If you have additional questions, email us at saltproject.pdl@broadcom.com or reach out
directly to the Community Discord. We'd be glad to have you join our community! directly to the Community Discord. We'd be glad to have you join our community!
License License
======= =======
Salt is licensed under the Apache 2.0 license. Please Salt is licensed under the Apache 2.0 license. Please
@ -181,9 +173,7 @@ A complete list of attributions and dependencies can be found here:
`salt/DEPENDENCIES.md <https://github.com/saltstack/salt/blob/master/DEPENDENCIES.md>`_ `salt/DEPENDENCIES.md <https://github.com/saltstack/salt/blob/master/DEPENDENCIES.md>`_
.. _Salt Project Community Discord: https://discord.com/invite/J7b7EscrAs .. _Salt Project Community Discord: https://discord.com/invite/J7b7EscrAs
.. _VMware Aria Automation Config: https://www.vmware.com/products/vrealize-automation/saltstack-config.html .. _Tanzu Salt: https://www.vmware.com/products/app-platform/tanzu-salt
.. _Latest Salt Documentation: https://docs.saltproject.io/en/latest/ .. _Latest Salt Documentation: https://docs.saltproject.io/en/latest/
.. _Open an issue: https://github.com/saltstack/salt/issues/new/choose .. _Open an issue: https://github.com/saltstack/salt/issues/new/choose
.. _SECURITY.md: https://github.com/saltstack/salt/blob/master/SECURITY.md .. _SECURITY.md: https://github.com/saltstack/salt/blob/master/SECURITY.md
.. _Calendar html: https://outlook.office365.com/owa/calendar/105f69bacd4541baa849529aed37eb2d@vmware.com/434ec2155b2b4cce90144c87f0dd03d56626754050155294962/calendar.html
.. _Calendar ics: https://outlook.office365.com/owa/calendar/105f69bacd4541baa849529aed37eb2d@vmware.com/434ec2155b2b4cce90144c87f0dd03d56626754050155294962/calendar.ics

View file

@ -1,15 +1,8 @@
Get SaltStack Support and Help Get Salt Project Support and Help
============================== =================================
**IRC Chat** - Join the vibrant, helpful and positive SaltStack chat room in **Salt Project Discord** - Join the Salt Project Community Discord!
LiberaChat at #salt. There is no need to introduce yourself, or ask permission Use the following link to join the Discord server:
to join in, just help and be helped! Make sure to wait for an answer, sometimes
it may take a few moments for someone to reply.
`<https://web.libera.chat/#salt>`_
**SaltStack Slack** - Alongside IRC is our SaltStack Community Discord for the
SaltStack Working groups. Use the following link to request an invitation.
`<https://discord.com/invite/J7b7EscrAs>`_ `<https://discord.com/invite/J7b7EscrAs>`_
@ -20,13 +13,13 @@ anyone can help answer. Join the conversation!
`<https://groups.google.com/forum/#!forum/salt-users>`_ `<https://groups.google.com/forum/#!forum/salt-users>`_
You may subscribe to the list without a Google account by emailing You may subscribe to the list without a Google account by emailing
salt-users+subscribe@googlegroups.com and you may post to the list by emailing ``salt-users+subscribe@googlegroups.com`` and you may post to the list by emailing
salt-users@googlegroups.com ``salt-users@googlegroups.com``
**Reporting Issues** - To report an issue with Salt, please follow the **Reporting Issues** - To report an issue with Salt, please follow the
guidelines for filing bug reports: guidelines for filing bug reports:
`<https://docs.saltproject.io/en/master/topics/development/reporting_bugs.html>`_ `<https://docs.saltproject.io/en/master/topics/development/reporting_bugs.html>`_
**SaltStack Support** - If you need dedicated, prioritized support, please **Salt Project Support** - If you need dedicated, prioritized support, please
consider a SaltStack Support package that fits your needs: consider taking a look at the Enterprise product:
`<http://www.saltstack.com/support>`_ `Tanzu Salt <https://www.vmware.com/products/app-platform/tanzu-salt>`__

2
changelog/66992.fixed.md Normal file
View file

@ -0,0 +1,2 @@
Fixes an issue with the LGPO module when trying to parse ADMX/ADML files
that have a space in the XMLNS url in the policyDefinitionsResources header.

1
changelog/67017.fixed.md Normal file
View file

@ -0,0 +1 @@
Update for deprecation of hex in pygit2 1.15.0 and above

1
changelog/67019.fixed.md Normal file
View file

@ -0,0 +1 @@
Fixed blob path for salt.ufw in the firewall tutorial documentation

1
changelog/67020.fixed.md Normal file
View file

@ -0,0 +1 @@
Update locations for bootstrap scripts, to new infrastructure, GitHub releases for bootstrap

1
changelog/67058.fixed.md Normal file
View file

@ -0,0 +1 @@
Recognise newer AMD GPU devices

2
changelog/67122.fixed.md Normal file
View file

@ -0,0 +1,2 @@
Fixed an issue with making changes to the Windows Firewall when the
AllowInboundRules setting is set to True

View file

@ -1,13 +1,17 @@
nox_version: "2022.8.7" nox_version: "2022.8.7"
python_version: "3.10.15" python_version: "3.10.15"
relenv_version: "0.17.3" relenv_version: "0.18.0"
release_branches: release_branches:
- "3006.x" - "3006.x"
- "3007.x" - "3007.x"
mandatory_os_slugs: pr-testrun-slugs:
- ubuntu-24.04-pkg
- ubuntu-24.04
- rockylinux-9 - rockylinux-9
- amazonlinux-2023-arm64 - rockylinux-9-pkg
- photonos-5-arm64
- macos-12
- ubuntu-24.04-arm64
- windows-2022 - windows-2022
- windows-2022-pkg
- macos-15
- macos-15-pkg
full-testrun-slugs:
- all

View file

@ -248,8 +248,8 @@
</div> </div>
<div class="footerCol"> <div class="footerCol">
<h4>Community</h4> <h4>Community</h4>
<a href="http://saltstack.org">saltstack.org</a> <a href="http://saltproject.io">saltproject.io</a>
<a href="http://docs.saltstack.org/en/latest/">Documentation</a> <a href="http://docs.saltproject.io/en/latest/">Documentation</a>
<!-- <a href="#">Blogs</a> --> <!-- <a href="#">Blogs</a> -->
</div> </div>
</div> </div>

View file

@ -163,16 +163,11 @@
<!-- Collect the nav links, forms, and other content for toggling --> <!-- Collect the nav links, forms, and other content for toggling -->
<div class="collapse navbar-collapse" id="navbarCollapse"> <div class="collapse navbar-collapse" id="navbarCollapse">
<ul class="nav navbar-nav"> <ul class="nav navbar-nav">
<li><a href="/en/latest/">Overview</a></li> <li><a href="/en/latest/">Overview</a></li>
<li><a href="https://docs.saltproject.io/salt/user-guide/en/latest/">Salt User Guide</a></li> <li><a href="https://docs.saltproject.io/salt/user-guide/en/latest/">Salt User Guide</a></li>
<li><a href="/en/latest/contents.html">Documentation</a></li> <li><a href="/en/latest/contents.html">Documentation</a></li>
<li><a href="https://repo.saltproject.io">Downloads</a></li> <li><a href="https://packages.broadcom.com/artifactory/saltproject-generic/">Downloads</a></li>
<li><a href="/en/latest/topics/development/">Develop</a></li> <li><a href="/en/latest/topics/development/">Develop</a></li>
<!--<li><a href="/en/2016.3/faq/">FAQ</a></li>
<li><a href="/en/2016.3/samples/">Code Samples</a></li>-->
<!-- <li><a href="https://repo.saltproject.io" target="_blank">Downloads</a></li>-->
<!--<li><a href="http://saltstack.com/training" target="_blank">Training</a></li>
<li><a href="http://saltstack.com/support" target="_blank">Support</a></li>-->
</ul> </ul>
</div> </div>
</div> </div>
@ -295,7 +290,7 @@
{% if on_saltstack %} {% if on_saltstack %}
{# {#
{% if [True, False]|random %} {% if [True, False]|random %}
<a href="http://saltconf.com/register" target="_blank"><img class="nolightbox sidebar-banner center" src="{{ pathto('_static/images/banner-saltconf.png', 1) }}"/></a> <a href="http://saltproject.io" target="_blank"><img class="nolightbox sidebar-banner center" src="{{ pathto('_static/images/banner-saltconf.png', 1) }}"/></a>
{% else %} {% else %}
<a href="http://saltstack.com/support" target="_blank"><img class="nolightbox sidebar-banner center" src="{{ pathto('_static/images/banner-support.png', 1) }}"/></a> <a href="http://saltstack.com/support" target="_blank"><img class="nolightbox sidebar-banner center" src="{{ pathto('_static/images/banner-support.png', 1) }}"/></a>
{% endif %} #}--> {% endif %} #}-->

View file

@ -182,20 +182,6 @@ rst_prolog = """\
.. _`salt-announce`: https://groups.google.com/forum/#!forum/salt-announce .. _`salt-announce`: https://groups.google.com/forum/#!forum/salt-announce
.. _`salt-packagers`: https://groups.google.com/forum/#!forum/salt-packagers .. _`salt-packagers`: https://groups.google.com/forum/#!forum/salt-packagers
.. _`salt-discord`: https://discord.com/invite/J7b7EscrAs .. _`salt-discord`: https://discord.com/invite/J7b7EscrAs
.. |windownload| raw:: html
<p>Python3 x86: <a
href="https://repo.saltproject.io/windows/Salt-Minion-{release}-Py3-x86-Setup.exe"><strong>Salt-Minion-{release}-x86-Setup.exe</strong></a>
| <a href="https://repo.saltproject.io/windows/Salt-Minion-{release}-Py3-x86-Setup.exe.md5"><strong>md5</strong></a></p>
<p>Python3 AMD64: <a
href="https://repo.saltproject.io/windows/Salt-Minion-{release}-Py3-AMD64-Setup.exe"><strong>Salt-Minion-{release}-AMD64-Setup.exe</strong></a>
| <a href="https://repo.saltproject.io/windows/Salt-Minion-{release}-Py3-AMD64-Setup.exe.md5"><strong>md5</strong></a></p>
.. |osxdownloadpy3| raw:: html
<p>x86_64: <a href="https://repo.saltproject.io/osx/salt-{release}-py3-x86_64.pkg"><strong>salt-{release}-py3-x86_64.pkg</strong></a>
| <a href="https://repo.saltproject.io/osx/salt-{release}-py3-x86_64.pkg.md5"><strong>md5</strong></a></p>
""".format( """.format(
release=stripped_release release=stripped_release

View file

@ -30,7 +30,7 @@ SaltStack the company does make proprietary products which use Salt and its libr
I think I found a bug! What should I do? I think I found a bug! What should I do?
---------------------------------------- ----------------------------------------
The salt-users mailing list as well as the Community Discord can both be helpful
resources to confirm if others are seeing the issue and to assist with
immediate debugging.

View file

@ -146,11 +146,8 @@ Before installing the delta proxy minion, ensure that:
Install or upgrade Salt Install or upgrade Salt
----------------------- -----------------------
Ensure your Salt masters are running at least Salt version 3004. For instructions Ensure your Salt masters are running at least Salt version 3004. For instructions
on installing or upgrading Salt, see `repo.saltproject.io on installing or upgrading Salt, see the
<http://repo.saltproject.io/>`_. For RedHat systems, see `Install or Upgrade Salt `Salt Install Guide <https://docs.saltproject.io/salt/install-guide/en/latest/>`_.
<https://enterprise.saltproject.io/en/latest/docs/install-salt.html>`_.
.. _delta-proxy-install: .. _delta-proxy-install:

View file

@ -539,8 +539,6 @@ the module using the following field lists:
.. code-block:: text .. code-block:: text
:maintainer: Thomas Hatch <thatch@saltstack.com, Seth House <shouse@saltstack.com>
:maturity: new
:depends: python-mysqldb :depends: python-mysqldb
:platform: all :platform: all

View file

@ -31,6 +31,21 @@ which Salt Cloud is running. See
and using the Salt Minion Windows installer. and using the Salt Minion Windows installer.
Optionally WinRM can be used instead of `winexe` if the python module `pywinrm`
is available and WinRM is supported on the target Windows version. Information
on pywinrm can be found at the project home:
* `pywinrm project home`__
.. __: https://github.com/diyan/pywinrm
Additionally, a copy of the Salt Minion Windows installer must be present on
the system on which Salt Cloud is running. This installer may be downloaded
from saltstack.com:
* `SaltStack Download Area`__
.. __: https://packages.broadcom.com/artifactory/saltproject-generic/windows/
.. _new-pywinrm: .. _new-pywinrm:

View file

@ -221,14 +221,10 @@ The best way to create new Formula repositories for now is to create a
repository in your own account on GitHub and notify a SaltStack employee when repository in your own account on GitHub and notify a SaltStack employee when
it is ready. We will add you to the Contributors team on the it is ready. We will add you to the Contributors team on the
`saltstack-formulas`_ organization and help you transfer the repository over. `saltstack-formulas`_ organization and help you transfer the repository over.
Ping a SaltStack employee on IRC (`#salt`_ on LiberaChat), join the Join the ``#formulas`` channel on the `salt-discord`_
``#formulas`` channel on the `salt-discord`_ (bridged to ``#saltstack-formulas`` or send an email to the `salt-users`_ mailing list.
on LiberaChat) or send an email to the `salt-users`_ mailing list. Note that
IRC logs are available at http://ngxbot.nginx.org/logs/%23salt/ and archives
for FreeNode (up to mid-June 2021) https://logbot-archive.s3.amazonaws.com/freenode/salt.gz
and https://logbot-archive.s3.amazonaws.com/freenode/saltstack-formulas.gz.
There are a lot of repositories in that organization! Team members can manage Team members can manage
which repositories they are subscribed to on GitHub's watching page: which repositories they are subscribed to on GitHub's watching page:
https://github.com/watching. https://github.com/watching.
@ -246,7 +242,7 @@ your pull request has stayed open for more than a couple days feel free to
"selfie-merge" your own pull request. "selfie-merge" your own pull request.
.. _`at-mention`: https://help.github.com/en/github/writing-on-github/basic-writing-and-formatting-syntax#mentioning-people-and-teams .. _`at-mention`: https://help.github.com/en/github/writing-on-github/basic-writing-and-formatting-syntax#mentioning-people-and-teams
.. _`#salt`: https://web.libera.chat/#salt .. _`#salt`: https://discord.com/invite/J7b7EscrAs
Style Style
----- -----

View file

@ -97,7 +97,7 @@ When adding a new function or state, where possible try to use a
print(msg) print(msg)
If you are uncertain what version should be used, either consult a core If you are uncertain what version should be used, either consult a core
developer in IRC or bring this up when opening your :ref:`pull request developer in the Community Discord or bring this up when opening your :ref:`pull request
<installing-for-development>` and a core developer will let you know what <installing-for-development>` and a core developer will let you know what
version to add. Typically this will be the next element in the `periodic table version to add. Typically this will be the next element in the `periodic table
<https://en.wikipedia.org/wiki/List_of_chemical_elements>`_. <https://en.wikipedia.org/wiki/List_of_chemical_elements>`_.

View file

@ -174,8 +174,8 @@ exception is raised, causing the rendering to fail with the following message:
TemplateError: Custom Error TemplateError: Custom Error
Filters Custom Filters
======= ==============
Saltstack extends `builtin filters`_ with these custom filters: Saltstack extends `builtin filters`_ with these custom filters:
@ -405,8 +405,9 @@ This text will be wrapped in quotes.
.. versionadded:: 2017.7.0 .. versionadded:: 2017.7.0
Scan through string looking for a location where this regular expression Looks for a match for the specified regex anywhere in the string. If the string
produces a match. Returns ``None`` in case there were no matches found does not match the regex, this filter returns ``None``. If the string _does_
match the regex, then the `capture groups`_ for the regex will be returned.
Example: Example:
@ -420,6 +421,29 @@ Returns:
("defabcdef",) ("defabcdef",)
If the regex you use does not contain a capture group then the number of
capture groups will be zero, and a matching regex will return an empty tuple.
This means that the following ``if`` statement would evaluate as ``False``:
.. code-block:: jinja
{%- if 'foobar' | regex_search('foo') %}
If you do not need a capture group and are just looking to test if a string
matches a regex, then you should check to see if the filter returns ``None``:
.. code-block:: jinja
{%- if (some_var | regex_search('foo')) is not none %}
.. note::
In a Jinja statement, a null value (i.e. a Python ``None``) should be
expressed as ``none`` (i.e. lowercase). More info on this can be found in
the **Note** section here in the `jinja docs`_.
.. _`capture groups`: https://docs.python.org/3/library/re.html#re.Match.groups
.. _`jinja docs`: https://jinja.palletsprojects.com/en/stable/templates/#literals
.. jinja_ref:: regex_match .. jinja_ref:: regex_match
@ -428,8 +452,8 @@ Returns:
.. versionadded:: 2017.7.0 .. versionadded:: 2017.7.0
If zero or more characters at the beginning of string match this regular Works exactly like :jinja_ref:`regex_search`, but only checks for matches at
expression, otherwise returns ``None``. the _beginning_ of the string passed into this filter.
Example: Example:

View file

@ -176,7 +176,7 @@ to allow traffic on ``tcp/4505`` and ``tcp/4506``:
**Ubuntu** **Ubuntu**
Salt installs firewall rules in :blob:`/etc/ufw/applications.d/salt.ufw Salt installs firewall rules in :blob:`/etc/ufw/applications.d/salt.ufw
<pkg/salt.ufw>`. Enable with: <pkg/common/salt.ufw>`. Enable with:
.. code-block:: bash .. code-block:: bash

View file

@ -1284,7 +1284,10 @@ def decompress_dependencies(session):
if not os.path.isabs(resolved_link): if not os.path.isabs(resolved_link):
# Relative symlinks, resolve them # Relative symlinks, resolve them
resolved_link = os.path.join(scan_path, resolved_link) resolved_link = os.path.join(scan_path, resolved_link)
if not os.path.exists(resolved_link): prefix_check = False
if platform == "windows":
prefix_check = resolved_link.startswith("\\\\?")
if not os.path.exists(resolved_link) or prefix_check:
session.log("The symlink %r looks to be broken", resolved_link) session.log("The symlink %r looks to be broken", resolved_link)
# This is a broken link, fix it # This is a broken link, fix it
resolved_link_suffix = resolved_link.split( resolved_link_suffix = resolved_link.split(
@ -1839,13 +1842,24 @@ def ci_test_onedir_pkgs(session):
session_warn(session, "Replacing VirtualEnv instance...") session_warn(session, "Replacing VirtualEnv instance...")
ci_test_onedir_path = REPO_ROOT / ".nox" / "ci-test-onedir" ci_test_onedir_path = REPO_ROOT / ".nox" / "ci-test-onedir"
session._runner.venv = VirtualEnv( if hasattr(session._runner.venv, "venv_or_virtualenv"):
str(ci_test_onedir_path.relative_to(REPO_ROOT)), venv = session._runner.venv.venv_or_virtualenv == "venv"
interpreter=session._runner.func.python, session._runner.venv = VirtualEnv(
reuse_existing=True, str(ci_test_onedir_path.relative_to(REPO_ROOT)),
venv=session._runner.venv.venv_or_virtualenv == "venv", interpreter=session._runner.func.python,
venv_params=session._runner.venv.venv_params, reuse_existing=True,
) venv=venv,
venv_params=session._runner.venv.venv_params,
)
else:
venv = session._runner.venv.venv_backend in ("venv", "virtualenv")
session._runner.venv = VirtualEnv( # pylint: disable=unexpected-keyword-arg
str(ci_test_onedir_path.relative_to(REPO_ROOT)),
interpreter=session._runner.func.python,
reuse_existing=True,
venv_backend=session._runner.venv.venv_backend,
venv_params=session._runner.venv.venv_params,
)
os.environ["VIRTUAL_ENV"] = session._runner.venv.location os.environ["VIRTUAL_ENV"] = session._runner.venv.location
session._runner.venv.create() session._runner.venv.create()

View file

@ -86,55 +86,107 @@ fi
log "Symlink: Creating symlinks for salt..." log "Symlink: Creating symlinks for salt..."
ln -sf "$INSTALL_DIR/salt" "$SBIN_DIR/salt" ln -sf "$INSTALL_DIR/salt" "$SBIN_DIR/salt"
log "Symlink: Created Successfully" if [ -f "$SBIN_DIR/salt" ]; then
log "Symlink: Created Successfully"
else
log "Symlink: Failed to create symlink"
fi
log "Symlink: Creating symlinks for salt-api..." log "Symlink: Creating symlinks for salt-api..."
ln -sf "$INSTALL_DIR/salt-api" "$SBIN_DIR/salt-api" ln -sf "$INSTALL_DIR/salt-api" "$SBIN_DIR/salt-api"
log "Symlink: Created Successfully" if [ -f "$SBIN_DIR/salt-api" ]; then
log "Symlink: Created Successfully"
else
log "Symlink: Failed to create symlink"
fi
log "Symlink: Creating symlinks for salt-call..." log "Symlink: Creating symlinks for salt-call..."
ln -sf "$INSTALL_DIR/salt-call" "$SBIN_DIR/salt-call" ln -sf "$INSTALL_DIR/salt-call" "$SBIN_DIR/salt-call"
log "Symlink: Created Successfully" if [ -f "$SBIN_DIR/salt-call" ]; then
log "Symlink: Created Successfully"
else
log "Symlink: Failed to create symlink"
fi
log "Symlink: Creating symlinks for salt-cloud..." log "Symlink: Creating symlinks for salt-cloud..."
ln -sf "$INSTALL_DIR/salt-cloud" "$SBIN_DIR/salt-cloud" ln -sf "$INSTALL_DIR/salt-cloud" "$SBIN_DIR/salt-cloud"
log "Symlink: Created Successfully" if [ -f "$SBIN_DIR/salt-cloud" ]; then
log "Symlink: Created Successfully"
else
log "Symlink: Failed to create symlink"
fi
log "Symlink: Creating symlinks for salt-cp..." log "Symlink: Creating symlinks for salt-cp..."
ln -sf "$INSTALL_DIR/salt-cp" "$SBIN_DIR/salt-cp" ln -sf "$INSTALL_DIR/salt-cp" "$SBIN_DIR/salt-cp"
log "Symlink: Created Successfully" if [ -f "$SBIN_DIR/salt-cp" ]; then
log "Symlink: Created Successfully"
else
log "Symlink: Failed to create symlink"
fi
log "Symlink: Creating symlinks for salt-key..." log "Symlink: Creating symlinks for salt-key..."
ln -sf "$INSTALL_DIR/salt-key" "$SBIN_DIR/salt-key" ln -sf "$INSTALL_DIR/salt-key" "$SBIN_DIR/salt-key"
log "Symlink: Created Successfully" if [ -f "$SBIN_DIR/salt-key" ]; then
log "Symlink: Created Successfully"
else
log "Symlink: Failed to create symlink"
fi
log "Symlink: Creating symlinks for salt-master..." log "Symlink: Creating symlinks for salt-master..."
ln -sf "$INSTALL_DIR/salt-master" "$SBIN_DIR/salt-master" ln -sf "$INSTALL_DIR/salt-master" "$SBIN_DIR/salt-master"
log "Symlink: Created Successfully" if [ -f "$SBIN_DIR/salt-master" ]; then
log "Symlink: Created Successfully"
else
log "Symlink: Failed to create symlink"
fi
log "Symlink: Creating symlinks for salt-minion..." log "Symlink: Creating symlinks for salt-minion..."
ln -sf "$INSTALL_DIR/salt-minion" "$SBIN_DIR/salt-minion" ln -sf "$INSTALL_DIR/salt-minion" "$SBIN_DIR/salt-minion"
log "Symlink: Created Successfully" if [ -f "$SBIN_DIR/salt-minion" ]; then
log "Symlink: Created Successfully"
else
log "Symlink: Failed to create symlink"
fi
log "Symlink: Creating symlinks for salt-proxy..." log "Symlink: Creating symlinks for salt-proxy..."
ln -sf "$INSTALL_DIR/salt-proxy" "$SBIN_DIR/salt-proxy" ln -sf "$INSTALL_DIR/salt-proxy" "$SBIN_DIR/salt-proxy"
log "Symlink: Created Successfully" if [ -f "$SBIN_DIR/salt-proxy" ]; then
log "Symlink: Created Successfully"
else
log "Symlink: Failed to create symlink"
fi
log "Symlink: Creating symlinks for salt-run..." log "Symlink: Creating symlinks for salt-run..."
ln -sf "$INSTALL_DIR/salt-run" "$SBIN_DIR/salt-run" ln -sf "$INSTALL_DIR/salt-run" "$SBIN_DIR/salt-run"
log "Symlink: Created Successfully" if [ -f "$SBIN_DIR/salt-run" ]; then
log "Symlink: Created Successfully"
else
log "Symlink: Failed to create symlink"
fi
log "Symlink: Creating symlinks for spm..." log "Symlink: Creating symlinks for spm..."
ln -sf "$INSTALL_DIR/spm" "$SBIN_DIR/spm" ln -sf "$INSTALL_DIR/spm" "$SBIN_DIR/spm"
log "Symlink: Created Successfully" if [ -f "$SBIN_DIR/salt-spm" ]; then
log "Symlink: Created Successfully"
else
log "Symlink: Failed to create symlink"
fi
log "Symlink: Creating symlinks for salt-ssh..." log "Symlink: Creating symlinks for salt-ssh..."
ln -sf "$INSTALL_DIR/salt-ssh" "$SBIN_DIR/salt-ssh" ln -sf "$INSTALL_DIR/salt-ssh" "$SBIN_DIR/salt-ssh"
log "Symlink: Created Successfully" if [ -f "$SBIN_DIR/salt-ssh" ]; then
log "Symlink: Created Successfully"
else
log "Symlink: Failed to create symlink"
fi
log "Symlink: Creating symlinks for salt-syndic..." log "Symlink: Creating symlinks for salt-syndic..."
ln -sf "$INSTALL_DIR/salt-syndic" "$SBIN_DIR/salt-syndic" ln -sf "$INSTALL_DIR/salt-syndic" "$SBIN_DIR/salt-syndic"
log "Symlink: Created Successfully" if [ -f "$SBIN_DIR/salt-syndic" ]; then
log "Symlink: Created Successfully"
else
log "Symlink: Failed to create symlink"
fi
#------------------------------------------------------------------------------- #-------------------------------------------------------------------------------
# Add salt to paths.d # Add salt to paths.d

View file

@ -20,7 +20,7 @@ Version: 0.8.3
Release: 0 Release: 0
License: Apache-2.0 License: Apache-2.0
Summary: The api for Salt a parallel remote execution system Summary: The api for Salt a parallel remote execution system
Url: http://saltstack.org/ Url: http://saltproject.io/
Group: System/Monitoring Group: System/Monitoring
Source0: http://pypi.python.org/packages/source/s/%{name}/%{name}-%{version}.tar.gz Source0: http://pypi.python.org/packages/source/s/%{name}/%{name}-%{version}.tar.gz
Source1: salt-api Source1: salt-api

View file

@ -41,7 +41,7 @@ Release: 0
Summary: A parallel remote execution system Summary: A parallel remote execution system
License: Apache-2.0 License: Apache-2.0
Group: System/Monitoring Group: System/Monitoring
Url: http://saltstack.org/ Url: http://saltproject.io/
Source0: http://pypi.python.org/packages/source/s/%{name}/%{name}-%{version}.tar.gz Source0: http://pypi.python.org/packages/source/s/%{name}/%{name}-%{version}.tar.gz
Source1: README.SUSE Source1: README.SUSE
Source2: salt-tmpfiles.d Source2: salt-tmpfiles.d

View file

@ -1,6 +1,3 @@
# written by David Pravec
# - feel free to /msg alekibango on IRC if you want to talk about this file
# TODO: check if --config|-c was used and use configured config file for queries # TODO: check if --config|-c was used and use configured config file for queries
# TODO: solve somehow completion for salt -G pythonversion:[tab] # TODO: solve somehow completion for salt -G pythonversion:[tab]
# (not sure what to do with lists) # (not sure what to do with lists)

View file

@ -176,13 +176,10 @@ $BUILD_DIR = "$SCRIPT_DIR\buildenv"
$RELENV_DIR = "${env:LOCALAPPDATA}\relenv" $RELENV_DIR = "${env:LOCALAPPDATA}\relenv"
$SYS_PY_BIN = (python -c "import sys; print(sys.executable)") $SYS_PY_BIN = (python -c "import sys; print(sys.executable)")
$BLD_PY_BIN = "$BUILD_DIR\Scripts\python.exe" $BLD_PY_BIN = "$BUILD_DIR\Scripts\python.exe"
$SALT_DEP_URL = "https://repo.saltproject.io/windows/dependencies"
if ( $Architecture -eq "x64" ) { if ( $Architecture -eq "x64" ) {
$SALT_DEP_URL = "$SALT_DEP_URL/64"
$ARCH = "amd64" $ARCH = "amd64"
} else { } else {
$SALT_DEP_URL = "$SALT_DEP_URL/32"
$ARCH = "x86" $ARCH = "x86"
} }
@ -249,7 +246,7 @@ if ( $env:VIRTUAL_ENV ) {
#------------------------------------------------------------------------------- #-------------------------------------------------------------------------------
# Installing Relenv # Installing Relenv
#------------------------------------------------------------------------------- #-------------------------------------------------------------------------------
Write-Host "Installing Relenv: " -NoNewLine Write-Host "Installing Relenv ($RelenvVersion): " -NoNewLine
pip install relenv==$RelenvVersion --disable-pip-version-check | Out-Null pip install relenv==$RelenvVersion --disable-pip-version-check | Out-Null
$output = pip list --disable-pip-version-check $output = pip list --disable-pip-version-check
if ("relenv" -in $output.split()) { if ("relenv" -in $output.split()) {

View file

@ -46,7 +46,7 @@ $NSIS_DIR = "${env:ProgramFiles(x86)}\NSIS"
$NSIS_PLUG_A = "$NSIS_DIR\Plugins\x86-ansi" $NSIS_PLUG_A = "$NSIS_DIR\Plugins\x86-ansi"
$NSIS_PLUG_U = "$NSIS_DIR\Plugins\x86-unicode" $NSIS_PLUG_U = "$NSIS_DIR\Plugins\x86-unicode"
$NSIS_LIB_DIR = "$NSIS_DIR\Include" $NSIS_LIB_DIR = "$NSIS_DIR\Include"
$DEPS_URL = "https://repo.saltproject.io/windows/dependencies" $DEPS_URL = "https://github.com/saltstack/salt-windows-deps/raw/refs/heads/main/nsis"
#------------------------------------------------------------------------------- #-------------------------------------------------------------------------------
# Start the Script # Start the Script
@ -68,7 +68,7 @@ if ( Test-Path -Path "$check_file" ) {
Write-Result "Missing" -ForegroundColor Yellow Write-Result "Missing" -ForegroundColor Yellow
Write-Host "Downloading NSIS: " -NoNewline Write-Host "Downloading NSIS: " -NoNewline
$url = "$DEPS_URL/nsis-3.03-setup.exe" $url = "$DEPS_URL/nsis-3.10-setup.exe"
$file = "$env:TEMP\install_nsis.exe" $file = "$env:TEMP\install_nsis.exe"
Invoke-WebRequest -Uri $url -OutFile "$file" Invoke-WebRequest -Uri $url -OutFile "$file"
if ( Test-Path -Path "$file" ) { if ( Test-Path -Path "$file" ) {

View file

@ -81,11 +81,6 @@ $ARCH = $(. $PYTHON_BIN -c "import platform; print(platform.architectur
# Script Variables # Script Variables
$PROJECT_DIR = $(git rev-parse --show-toplevel) $PROJECT_DIR = $(git rev-parse --show-toplevel)
$SALT_DEPS = "$PROJECT_DIR\requirements\static\pkg\py$PY_VERSION\windows.txt" $SALT_DEPS = "$PROJECT_DIR\requirements\static\pkg\py$PY_VERSION\windows.txt"
if ( $ARCH -eq "64bit" ) {
$SALT_DEP_URL = "https://repo.saltproject.io/windows/dependencies/64"
} else {
$SALT_DEP_URL = "https://repo.saltproject.io/windows/dependencies/32"
}
if ( ! $SkipInstall ) { if ( ! $SkipInstall ) {
#------------------------------------------------------------------------------- #-------------------------------------------------------------------------------

View file

@ -77,7 +77,7 @@ function VerifyOrDownload ($local_file, $URL, $SHA256) {
#------------------------------------------------------------------------------- #-------------------------------------------------------------------------------
$WEBCACHE_DIR = "$env:TEMP\msi_build_cache_dir" $WEBCACHE_DIR = "$env:TEMP\msi_build_cache_dir"
$DEPS_URL = "https://repo.saltproject.io/windows/dependencies" $DEPS_URL = "https://github.com/saltstack/salt-windows-deps/raw/refs/heads/main/vcredist"
$PROJECT_DIR = $(git rev-parse --show-toplevel) $PROJECT_DIR = $(git rev-parse --show-toplevel)
$BUILD_DIR = "$PROJECT_DIR\pkg\windows\build" $BUILD_DIR = "$PROJECT_DIR\pkg\windows\build"
$BUILDENV_DIR = "$PROJECT_DIR\pkg\windows\buildenv" $BUILDENV_DIR = "$PROJECT_DIR\pkg\windows\buildenv"
@ -169,12 +169,12 @@ if ( ! "$env:WIX" ) {
#------------------------------------------------------------------------------- #-------------------------------------------------------------------------------
$RUNTIMES = @( $RUNTIMES = @(
("Microsoft_VC143_CRT_x64.msm", "64", "F209B8906063A79B0DFFBB55D3C20AC0A676252DD4F5377CFCD148C409C859EC"), ("Microsoft_VC143_CRT_x64.msm", "F209B8906063A79B0DFFBB55D3C20AC0A676252DD4F5377CFCD148C409C859EC"),
("Microsoft_VC143_CRT_x86.msm", "32", "B187BD73C7DC0BA353C5D3A6D9D4E63EF72435F8E68273466F30E5496C1A86F7") ("Microsoft_VC143_CRT_x86.msm", "B187BD73C7DC0BA353C5D3A6D9D4E63EF72435F8E68273466F30E5496C1A86F7")
) )
$RUNTIMES | ForEach-Object { $RUNTIMES | ForEach-Object {
$name, $arch, $hash = $_ $name, $hash = $_
VerifyOrDownload "$WEBCACHE_DIR\$name" "$DEPS_URL/$arch/$name" "$hash" VerifyOrDownload "$WEBCACHE_DIR\$name" "$DEPS_URL/$name" "$hash"
} }
#------------------------------------------------------------------------------- #-------------------------------------------------------------------------------

View file

@ -46,7 +46,7 @@ $WINDOWS_DIR = "$PROJECT_DIR\pkg\windows"
$NSIS_DIR = "$WINDOWS_DIR\nsis" $NSIS_DIR = "$WINDOWS_DIR\nsis"
$BUILDENV_DIR = "$WINDOWS_DIR\buildenv" $BUILDENV_DIR = "$WINDOWS_DIR\buildenv"
$NSIS_BIN = "$( ${env:ProgramFiles(x86)} )\NSIS\makensis.exe" $NSIS_BIN = "$( ${env:ProgramFiles(x86)} )\NSIS\makensis.exe"
$SALT_DEP_URL = "https://repo.saltproject.io/windows/dependencies/64" $SALT_DEP_URL = "https://github.com/saltstack/salt-windows-deps/raw/refs/heads/main/ssm/64/"
#------------------------------------------------------------------------------- #-------------------------------------------------------------------------------
# Script Start # Script Start

View file

@ -47,7 +47,7 @@ $NSIS_DIR = "$WINDOWS_DIR\nsis"
$BUILDENV_DIR = "$WINDOWS_DIR\buildenv" $BUILDENV_DIR = "$WINDOWS_DIR\buildenv"
$PREREQS_DIR = "$WINDOWS_DIR\prereqs" $PREREQS_DIR = "$WINDOWS_DIR\prereqs"
$NSIS_BIN = "$( ${env:ProgramFiles(x86)} )\NSIS\makensis.exe" $NSIS_BIN = "$( ${env:ProgramFiles(x86)} )\NSIS\makensis.exe"
$SALT_DEP_URL = "https://repo.saltproject.io/windows/dependencies/64" $SALT_DEP_URL = "https://github.com/saltstack/salt-windows-deps/raw/refs/heads/main/ssm/64/"
#------------------------------------------------------------------------------- #-------------------------------------------------------------------------------
# Script Start # Script Start

View file

@ -66,20 +66,20 @@ $PREREQ_DIR = "$SCRIPT_DIR\prereqs"
$SCRIPTS_DIR = "$BUILD_DIR\Scripts" $SCRIPTS_DIR = "$BUILD_DIR\Scripts"
$BUILD_CONF_DIR = "$BUILD_DIR\configs" $BUILD_CONF_DIR = "$BUILD_DIR\configs"
$SITE_PKGS_DIR = "$BUILD_DIR\Lib\site-packages" $SITE_PKGS_DIR = "$BUILD_DIR\Lib\site-packages"
$BUILD_SALT_DIR = "$SITE_PKGS_DIR\salt"
$PYTHON_BIN = "$SCRIPTS_DIR\python.exe" $PYTHON_BIN = "$SCRIPTS_DIR\python.exe"
$PY_VERSION = [Version]((Get-Command $PYTHON_BIN).FileVersionInfo.ProductVersion) $PY_VERSION = [Version]((Get-Command $PYTHON_BIN).FileVersionInfo.ProductVersion)
$PY_VERSION = "$($PY_VERSION.Major).$($PY_VERSION.Minor)" $PY_VERSION = "$($PY_VERSION.Major).$($PY_VERSION.Minor)"
$ARCH = $(. $PYTHON_BIN -c "import platform; print(platform.architecture()[0])") $PY_ARCH = $(. $PYTHON_BIN -c "import platform; print(platform.architecture()[0])")
$DEPS_URL = "https://github.com/saltstack/salt-windows-deps/raw/refs/heads/main"
if ( $ARCH -eq "64bit" ) { if ( $PY_ARCH -eq "64bit" ) {
$ARCH = "AMD64" $ARCH = "x64"
$ARCH_X = "x64" $SSM_URL = "$DEPS_URL/ssm/64/ssm-2.24-103-gdee49fc.exe"
$SALT_DEP_URL = "https://repo.saltproject.io/windows/dependencies/64" $VCREDIST_URL = "$DEPS_URL/vcredist"
} else { } else {
$ARCH = "x86" $ARCH = "x86"
$ARCH_X = "x86" $SSM_URL = "$DEPS_URL/ssm/32/ssm-2.24-103-gdee49fc.exe"
$SALT_DEP_URL = "https://repo.saltproject.io/windows/dependencies/32" $VCREDIST_URL = "$DEPS_URL/vcredist"
} }
#------------------------------------------------------------------------------- #-------------------------------------------------------------------------------
@ -155,14 +155,15 @@ if ( $PKG ) {
# Make sure ssm.exe is present. This is needed for VMtools # Make sure ssm.exe is present. This is needed for VMtools
if ( ! (Test-Path -Path "$BUILD_DIR\ssm.exe") ) { if ( ! (Test-Path -Path "$BUILD_DIR\ssm.exe") ) {
Write-Host "Copying SSM to Root: " -NoNewline Write-Host "Copying SSM $ARCH to Root: " -NoNewline
Invoke-WebRequest -Uri "$SALT_DEP_URL/ssm-2.24-103-gdee49fc.exe" -OutFile "$BUILD_DIR\ssm.exe" Invoke-WebRequest -Uri "$SSM_URL" -OutFile "$BUILD_DIR\ssm.exe"
if ( Test-Path -Path "$BUILD_DIR\ssm.exe" ) { if ( Test-Path -Path "$BUILD_DIR\ssm.exe" ) {
Write-Result "Success" -ForegroundColor Green Write-Result "Success" -ForegroundColor Green
} else { } else {
Write-Result "Failed" -ForegroundColor Red Write-Result "Failed" -ForegroundColor Red
exit 1 exit 1
} }
Write-Host $SSM_URL
} }
# Copy the multiminion scripts to the Build directory # Copy the multiminion scripts to the Build directory
@ -185,9 +186,9 @@ $scripts | ForEach-Object {
# Copy VCRedist 2022 to the prereqs directory # Copy VCRedist 2022 to the prereqs directory
New-Item -Path $PREREQ_DIR -ItemType Directory | Out-Null New-Item -Path $PREREQ_DIR -ItemType Directory | Out-Null
Write-Host "Copying VCRedist 2022 $ARCH_X to prereqs: " -NoNewline Write-Host "Copying VCRedist 2022 $ARCH to prereqs: " -NoNewline
$file = "vcredist_$ARCH_X`_2022.exe" $file = "vcredist_$ARCH`_2022.exe"
Invoke-WebRequest -Uri "$SALT_DEP_URL/$file" -OutFile "$PREREQ_DIR\$file" Invoke-WebRequest -Uri "$VCREDIST_URL\$file" -OutFile "$PREREQ_DIR\$file"
if ( Test-Path -Path "$PREREQ_DIR\$file" ) { if ( Test-Path -Path "$PREREQ_DIR\$file" ) {
Write-Result "Success" -ForegroundColor Green Write-Result "Success" -ForegroundColor Green
} else { } else {
@ -267,70 +268,20 @@ $directories | ForEach-Object {
} }
} }
Write-Host "Removing __pycache__ directories: " -NoNewline Write-Host "Removing unneeded files (.pyc, .chm): " -NoNewline
$found = Get-ChildItem -Path "$BUILD_DIR" -Filter "__pycache__" -Recurse $remove = "__pycache__",
$found | ForEach-Object { "*.pyc",
Remove-Item -Path "$($_.FullName)" -Recurse -Force
if ( Test-Path -Path "$($_.FullName)" ) {
Write-Result "Failed" -ForegroundColor Red
Write-Host "Failed to remove: $($_.FullName)"
exit 1
}
}
Write-Result "Success" -ForegroundColor Green
# If we try to remove *.pyc with the same Get-ChildItem that we used to remove
# __pycache__ directories, it won't be able to find them because they are no
# longer present
# This probably won't find any *.pyc files, but just in case
$remove = "*.pyc",
"*.chm" "*.chm"
$remove | ForEach-Object { $remove | ForEach-Object {
Write-Host "Removing unneeded $_ files: " -NoNewline $found = Get-ChildItem -Path "$BUILD_DIR\$_" -Recurse
$found = Get-ChildItem -Path "$BUILD_DIR" -Filter $_ -Recurse
$found | ForEach-Object { $found | ForEach-Object {
Remove-Item -Path "$($_.FullName)" -Recurse -Force Remove-Item -Path "$_" -Recurse -Force
if ( Test-Path -Path "$($_.FullName)" ) { if ( Test-Path -Path $_ ) {
Write-Result "Failed" -ForegroundColor Red Write-Result "Failed" -ForegroundColor Red
Write-Host "Failed to remove: $($_.FullName)" Write-Host "Failed to remove: $_"
exit 1 exit 1
} }
} }
Write-Result "Success" -ForegroundColor Green
}
#-------------------------------------------------------------------------------
# Set timestamps on Files
#-------------------------------------------------------------------------------
# We're doing this again in this script because we use python above to get the
# build architecture and that adds back some __pycache__ and *.pyc files
Write-Host "Getting commit time stamp: " -NoNewline
[DateTime]$origin = "1970-01-01 00:00:00"
$hash_time = $(git show -s --format=%at)
$time_stamp = $origin.AddSeconds($hash_time)
if ( $hash_time ) {
Write-Result "Success" -ForegroundColor Green
} else {
Write-Result "Failed" -ForegroundColor Red
exit 1
}
Write-Host "Setting time stamp on all salt files: " -NoNewline
$found = Get-ChildItem -Path $BUILD_DIR -Recurse
$found | ForEach-Object {
$_.CreationTime = $time_stamp
$_.LastAccessTime = $time_stamp
$_.LastWriteTime = $time_stamp
}
Write-Result "Success" -ForegroundColor Green
Write-Host "Setting time stamp on all prereq files: " -NoNewline
$found = Get-ChildItem -Path $PREREQ_DIR -Recurse
$found | ForEach-Object {
$_.CreationTime = $time_stamp
$_.LastAccessTime = $time_stamp
$_.LastWriteTime = $time_stamp
} }
Write-Result "Success" -ForegroundColor Green Write-Result "Success" -ForegroundColor Green

View file

@ -1,219 +0,0 @@
:: ############################################################################
::
:: FILE: sign.bat
::
:: DESCRIPTION: Signing and Hashing script for Salt builds on Windows.
:: Requires an official Code Signing Certificate and drivers
:: installed to sign the files. Generates hashes in MD5 and
:: SHA256 in a file of the same name with a `.md5` or
:: `.sha256` extension.
::
:: NOTE: This script is used internally by SaltStack to sign and
:: hash Windows Installer builds and uses resources not
:: available to the community, such as SaltStack's Code
:: Signing Certificate. It is placed here for version
:: control.
::
:: COPYRIGHT: (c) 2012-2018 by the SaltStack Team
::
:: LICENSE: Apache 2.0
:: ORGANIZATION: SaltStack, Inc (saltstack.com)
:: CREATED: 2017
::
:: ############################################################################
::
:: USAGE: The script must be located in a directory that has the installer
:: files in a sub-folder named with the major version, ie: `2018.3`.
:: Insert the key fob that contains the code signing certificate. Run
:: the script passing the full version: `.\sign.bat 2018.3.1`.
::
:: The script will sign the installers and generate the corresponding
:: hash files. These can then be uploaded to the salt repo.
::
:: The files must be in the following format:
:: <Series>\Salt-Minion-<Version>-<Python Version>-<System Architecture>-Setup.exe
:: So, for a Salt Minion installer for 2018.3.1 on AMD64 for Python 3
:: file would be placed in a subdirectory named `2018.3` and the file
:: would be named: `Salt-Minion-2018.3.1-Py3-AMD64-Setup.exe`. This
:: is how the file is created by the NSI Script anyway.
::
:: You can test the timestamp server with the following command:
:: curl -i timestamp.digicert.com/timestamp/health
::
:: REQUIREMENTS: This script requires the ``signtool.exe`` binary that is a part
:: of the Windows SDK. To install just the ``signtool.exe``:
::
:: OPTION 1:
:: 1. Download the Windows 10 SDK ISO:
:: https://developer.microsoft.com/en-us/windows/downloads/windows-sdk/
:: 2. Mount the ISO and browse to the ``Installers`` directory
:: 3. Run the ``Windows SDK Signing Tools-x86_en-us.msi``
::
:: OPTION 2:
:: 1. Download the Visual Studio BUild Tools:
:: https://aka.ms/vs/15/release/vs_buildtools.exe
:: 2. Run the following command:
:: vs_buildtools.exe --quiet --add Microsoft.Component.ClickOnce.MSBuild
::
:: ############################################################################
@ echo off
:: Entry point: validate the version argument, sign all installers for the
:: series, then generate MD5/SHA256 sidecar files for each one that exists.
if [%1]==[] (
    echo You must pass a version
    goto quit
) else (
    set "Version=%~1"
)
:: Series is the first four characters of the version, e.g. 3006.1 -> 3006.
:: NOTE(review): the header docs describe a "2018.3"-style series directory,
:: which a 4-character slice would not produce from "2018.3.1" -- confirm the
:: expected versioning scheme before changing this.
set Series=%Version:~0,4%
if not exist .\%Series%\ (
    echo - Series %Series% is not valid
    exit 1
)
:: Sign Installer Files
:: /a picks the best available signing cert automatically; /t timestamps via
:: DigiCert so signatures stay valid after the cert expires. Files that do not
:: exist cause signtool to report an error for that entry.
echo ===========================================================================
echo Signing...
echo ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
signtool.exe sign /a /t http://timestamp.digicert.com ^
    "%Series%\Salt-Minion-%Version%-AMD64-Setup.exe" ^
    "%Series%\Salt-Minion-%Version%-x86-Setup.exe" ^
    "%Series%\Salt-%Version%-AMD64-Setup.exe" ^
    "%Series%\Salt-%Version%-x86-Setup.exe" ^
    "%Series%\Salt-%Version%-Py2-AMD64-Setup.exe" ^
    "%Series%\Salt-%Version%-Py2-x86-Setup.exe" ^
    "%Series%\Salt-%Version%-Py3-AMD64-Setup.exe" ^
    "%Series%\Salt-%Version%-Py3-x86-Setup.exe" ^
    "%Series%\Salt-Minion-%Version%-Py2-AMD64-Setup.exe" ^
    "%Series%\Salt-Minion-%Version%-Py2-x86-Setup.exe" ^
    "%Series%\Salt-Minion-%Version%-Py3-AMD64-Setup.exe" ^
    "%Series%\Salt-Minion-%Version%-Py3-x86-Setup.exe" ^
    "%Series%\Salt-Minion-%Version%-Py3-AMD64.msi" ^
    "%Series%\Salt-Minion-%Version%-Py3-x86.msi"
:: The exit code is only reported, not acted on -- signing failures do not
:: abort the script (matches the original behavior).
echo %ERRORLEVEL%
echo ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
echo Signing Complete
echo ===========================================================================
:: Create Hash files
:: One MD5 and one SHA256 sidecar per installer that exists on disk. The
:: repeated per-file stanzas are folded into a single loop + subroutine.
echo ===========================================================================
echo Creating Hashes...
echo ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
for %%F in (
    "Salt-Minion-%Version%-AMD64-Setup.exe"
    "Salt-Minion-%Version%-x86-Setup.exe"
    "Salt-%Version%-AMD64-Setup.exe"
    "Salt-%Version%-x86-Setup.exe"
    "Salt-%Version%-Py2-AMD64-Setup.exe"
    "Salt-%Version%-Py2-x86-Setup.exe"
    "Salt-%Version%-Py3-AMD64-Setup.exe"
    "Salt-%Version%-Py3-x86-Setup.exe"
    "Salt-Minion-%Version%-Py2-AMD64-Setup.exe"
    "Salt-Minion-%Version%-Py2-x86-Setup.exe"
    "Salt-Minion-%Version%-Py3-AMD64-Setup.exe"
    "Salt-Minion-%Version%-Py3-x86-Setup.exe"
    "Salt-Minion-%Version%-Py3-AMD64.msi"
    "Salt-Minion-%Version%-Py3-x86.msi"
) do call :CreateHashes %%~F
echo ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
echo Hashing Complete
echo ===========================================================================
goto quit

:: Subroutine: %1 = installer file name (no path). Writes "<hash> <name>" to
:: <file>.md5 and <file>.sha256 next to the installer, ASCII, no trailing
:: newline. Silently skips files that are absent, as the original did. MD5 is
:: kept alongside SHA256 only for repo-format compatibility.
:CreateHashes
set "file_name=%~1"
set "file=.\%Series%\%file_name%"
if exist "%file%" (
    echo - %file_name%
    powershell -c "$hash = (Get-FileHash -Algorithm MD5 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.md5\" -NoNewLine -Encoding ASCII"
    powershell -c "$hash = (Get-FileHash -Algorithm SHA256 \"%file%\").Hash; Out-File -InputObject $hash\" %file_name%\" -FilePath \"%file%.sha256\" -NoNewLine -Encoding ASCII"
)
goto :eof

:quit

View file

@ -1,3 +1,4 @@
setuptools >= 65.6.3,< 69.0 setuptools >= 65.6.3,< 69.0
setuptools-scm < 8.0.0 setuptools-scm < 8.0.0
pip >= 23.3,< 24.0 pip >= 23.3,< 24.0 ; python_version < '3.12'
pip >24 ; python_version >= '3.12'

View file

@ -1,27 +0,0 @@
#!/bin/bash
# This legacy script pre-dates the salt-bootstrap project. In most cases, the
# bootstrap-salt.sh script is the recommended script for installing salt onto
# a new minion. However, that may not be appropriate for all situations. This
# script remains to help fill those needs, and to provide an example for users
# needing to write their own deploy scripts.
#
# NOTE(review): the {{ vm[...] }} and {{minion}} placeholders are template
# markers substituted before this script executes (presumably by salt-cloud's
# renderer, given the {{minion}} config placeholder) -- confirm before reuse.
# Enable the EPEL 5 repository so salt-minion and git are resolvable.
rpm -Uvh --force http://mirrors.kernel.org/fedora-epel/5/x86_64/epel-release-5-4.noarch.rpm
yum install -y salt-minion git
# Strip the packaged Salt Python files and entry points so the source install
# below fully replaces them (the package's init scripts are kept).
rm -rf /usr/lib/python2.6/site-packages/salt*
rm -rf /usr/bin/salt-*
# Install Salt from a fresh git checkout (repo default branch) with the
# EL5-era Python 2.6 interpreter.
mkdir -p /root/git
cd /root/git
git clone git://github.com/saltstack/salt.git
cd salt
python26 setup.py install
cd
# Write the pre-generated minion keypair and minion configuration.
mkdir -p /etc/salt/pki
echo '{{ vm['priv_key'] }}' > /etc/salt/pki/minion.pem
echo '{{ vm['pub_key'] }}' > /etc/salt/pki/minion.pub
cat > /etc/salt/minion <<EOF
{{minion}}
EOF
# Register with SysV init and start the minion.
/sbin/chkconfig salt-minion on
service salt-minion start

View file

@ -1,19 +0,0 @@
#!/bin/bash
# This legacy script pre-dates the salt-bootstrap project. In most cases, the
# bootstrap-salt.sh script is the recommended script for installing salt onto
# a new minion. However, that may not be appropriate for all situations. This
# script remains to help fill those needs, and to provide an example for users
# needing to write their own deploy scripts.
#
# NOTE(review): the {{ vm[...] }} and {{minion}} placeholders are template
# markers substituted before this script executes (presumably by salt-cloud's
# renderer) -- confirm before reuse.
# Enable the EPEL 5 repository and install the packaged salt-minion.
rpm -Uvh --force http://mirrors.kernel.org/fedora-epel/5/x86_64/epel-release-5-4.noarch.rpm
yum install -y salt-minion
# Write the pre-generated minion keypair and minion configuration.
mkdir -p /etc/salt/pki
echo '{{ vm['priv_key'] }}' > /etc/salt/pki/minion.pem
echo '{{ vm['pub_key'] }}' > /etc/salt/pki/minion.pub
cat > /etc/salt/minion <<EOF
{{minion}}
EOF
# Register with SysV init and start the minion.
/sbin/chkconfig salt-minion on
service salt-minion start

View file

@ -1,27 +0,0 @@
#!/bin/bash
# This legacy script pre-dates the salt-bootstrap project. In most cases, the
# bootstrap-salt.sh script is the recommended script for installing salt onto
# a new minion. However, that may not be appropriate for all situations. This
# script remains to help fill those needs, and to provide an example for users
# needing to write their own deploy scripts.
#
# NOTE(review): the {{ vm[...] }} and {{minion}} placeholders are template
# markers substituted before this script executes (presumably by salt-cloud's
# renderer) -- confirm before reuse.
# Enable EPEL 6 and install salt-minion plus git from epel-testing.
rpm -Uvh --force http://mirrors.kernel.org/fedora-epel/6/x86_64/epel-release-6-8.noarch.rpm
yum -y install salt-minion git --enablerepo epel-testing
# Strip the packaged Salt Python files and entry points so the source install
# below fully replaces them (the package's init scripts are kept).
rm -rf /usr/lib/python/site-packages/salt*
rm -rf /usr/bin/salt-*
# Install Salt from a fresh git checkout (repo default branch) with the
# system Python.
mkdir -p /root/git
cd /root/git
git clone git://github.com/saltstack/salt.git
cd salt
python setup.py install
cd
# Write the pre-generated minion keypair and minion configuration.
mkdir -p /etc/salt/pki
echo '{{ vm['priv_key'] }}' > /etc/salt/pki/minion.pem
echo '{{ vm['pub_key'] }}' > /etc/salt/pki/minion.pub
cat > /etc/salt/minion <<EOF
{{minion}}
EOF
# Register with SysV init and start the minion.
/sbin/chkconfig salt-minion on
service salt-minion start

View file

@ -1,19 +0,0 @@
#!/bin/bash
# This legacy script pre-dates the salt-bootstrap project. In most cases, the
# bootstrap-salt.sh script is the recommended script for installing salt onto
# a new minion. However, that may not be appropriate for all situations. This
# script remains to help fill those needs, and to provide an example for users
# needing to write their own deploy scripts.
#
# NOTE(review): the {{ vm[...] }} and {{minion}} placeholders are template
# markers substituted before this script executes (presumably by salt-cloud's
# renderer) -- confirm before reuse.
# Enable EPEL 6 and install the packaged salt-minion from epel-testing.
rpm -Uvh --force http://mirrors.kernel.org/fedora-epel/6/x86_64/epel-release-6-8.noarch.rpm
yum -y install salt-minion --enablerepo epel-testing
# Write the pre-generated minion keypair and minion configuration.
mkdir -p /etc/salt/pki
echo '{{ vm['priv_key'] }}' > /etc/salt/pki/minion.pem
echo '{{ vm['pub_key'] }}' > /etc/salt/pki/minion.pub
cat > /etc/salt/minion <<EOF
{{minion}}
EOF
# Register with SysV init and start the minion.
/sbin/chkconfig salt-minion on
service salt-minion start

File diff suppressed because it is too large Load diff

View file

@ -7,11 +7,11 @@
# #
# It has been designed as an example, to be customized for your own needs. # It has been designed as an example, to be customized for your own needs.
curl -L https://bootstrap.saltstack.com | sudo sh -s -- "$@" git develop curl -L https://github.com/saltstack/salt-bootstrap/releases/latest/download/bootstrap-salt.sh | sudo sh -s -- "$@" git develop
# By default, Salt Cloud now places the minion's keys and configuration in # By default, Salt Cloud now places the minion's keys and configuration in
# /tmp/.saltcloud/ before executing the deploy script. After it has executed, # /tmp/.saltcloud/ before executing the deploy script. After it has executed,
# these temporary files are removed. If you don't want salt-bootstrap to handle # these temporary files are removed. If you don't want salt-bootstrap to handle
# these files, comment out the above command, and uncomment the below command. # these files, comment out the above command, and uncomment the below command.
#curl -L https://bootstrap.saltstack.com | sudo sh -s git develop #curl -L https://github.com/saltstack/salt-bootstrap/releases/latest/download/bootstrap-salt.sh | sudo sh -s git develop

Some files were not shown because too many files have changed in this diff Show more