Merge branch 'master' into more-esxi-banners

David Murphy authored on 2023-02-15 12:45:40 -07:00; committed by GitHub
commit 3b6f233309
31 changed files with 669 additions and 524 deletions


@ -22,7 +22,7 @@ inputs:
env:
COLUMNS: 160
COLUMNS: 190
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple


@ -26,7 +26,7 @@ inputs:
env:
COLUMNS: 160
COLUMNS: 190
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple


@ -14,7 +14,7 @@ inputs:
env:
COLUMNS: 160
COLUMNS: 190
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple


@ -22,7 +22,7 @@ outputs:
description: The Salt version written to `salt/_version.txt`
env:
COLUMNS: 160
COLUMNS: 190
runs:
using: composite


@ -22,7 +22,7 @@ on:
required: true
env:
COLUMNS: 160
COLUMNS: 190
jobs:
build-repo:


@ -22,7 +22,7 @@ on:
required: true
env:
COLUMNS: 160
COLUMNS: 190
jobs:
build-repo:


@ -22,7 +22,7 @@ on:
required: true
env:
COLUMNS: 160
COLUMNS: 190
jobs:
build-repo:
@ -39,12 +39,6 @@ jobs:
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Download Windows Packages
uses: actions/download-artifact@v3
with:
name: salt-${{ inputs.salt-version }}-x86-windows-pkgs
path: artifacts/pkgs/incoming
- name: Download Linux x86_64 Onedir Archive
uses: actions/download-artifact@v3
with:


@ -22,9 +22,20 @@ on:
required: true
env:
COLUMNS: 160
COLUMNS: 190
jobs:
build-src:
name: Build Source Repository
uses: ./.github/workflows/build-src-repo.yml
with:
environment: ${{ inputs.environment }}
salt-version: "${{ inputs.salt-version }}"
nightly-build: ${{ inputs.nightly-build }}
rc-build: ${{ inputs.rc-build }}
secrets:
SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
build-deb:
name: Build DEB Repositories
uses: ./.github/workflows/build-deb-repo.yml


@ -10,7 +10,7 @@ on:
description: The Salt version to set prior to building packages.
env:
COLUMNS: 160
COLUMNS: 190
jobs:
build:


@ -22,7 +22,7 @@ on:
required: true
env:
COLUMNS: 160
COLUMNS: 190
jobs:
build-repo:

.github/workflows/build-src-repo.yml (vendored, new file, 93 lines)

@ -0,0 +1,93 @@
---
name: Build Source Repository
on:
workflow_call:
inputs:
salt-version:
type: string
required: true
description: The Salt version to set prior to building packages.
nightly-build:
type: boolean
default: false
rc-build:
type: boolean
default: false
environment:
type: string
description: On which GitHub Environment Context To Run
secrets:
SECRETS_KEY:
required: true
env:
COLUMNS: 190
jobs:
build-repo:
name: Source
environment: ${{ inputs.environment }}
runs-on:
- self-hosted
- linux
- repo-${{ inputs.environment }}
steps:
- uses: actions/checkout@v3
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Download Source Tarball
uses: actions/download-artifact@v3
with:
name: salt-${{ inputs.salt-version }}.tar.gz
path: artifacts/pkgs/incoming
- name: Setup GnuPG
run: |
sudo install -d -m 0700 -o "$(id -u)" -g "$(id -g)" /run/gpg
GNUPGHOME="$(mktemp -d -p /run/gpg)"
echo "GNUPGHOME=${GNUPGHOME}" >> "$GITHUB_ENV"
cat <<EOF > "${GNUPGHOME}/gpg.conf"
batch
no-tty
pinentry-mode loopback
EOF
- name: Get Secrets
env:
SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
run: |
SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX)
echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE"
aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
--query SecretString --output text | jq .default_key -r | base64 -d \
| gpg --passphrase-file "${SECRETS_KEY_FILE}" -d - \
| gpg --import -
aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
--query SecretString --output text| jq .default_passphrase -r | base64 -d \
| gpg --passphrase-file "${SECRETS_KEY_FILE}" -o "${GNUPGHOME}/passphrase" -d -
rm "$SECRETS_KEY_FILE"
echo "passphrase-file ${GNUPGHOME}/passphrase" >> "${GNUPGHOME}/gpg.conf"
- name: Create Repository Path
run: |
mkdir -p artifacts/pkgs/repo
- name: Create Repository
run: |
tools pkg repo create src --key-id=64CBBC8173D76B3F \
${{ inputs.nightly-build && '--nightly-build' || '' }} --salt-version=${{ inputs.salt-version }} \
${{ inputs.rc-build && '--rc-build' || '' }} --incoming=artifacts/pkgs/incoming \
--repo-path=artifacts/pkgs/repo
- name: Upload Repository As An Artifact
uses: ./.github/actions/upload-artifact
with:
name: salt-${{ inputs.salt-version }}-${{ inputs.environment }}-repo
path: artifacts/pkgs/repo/*
retention-days: 7
if-no-files-found: error
archive-name: src-repo


@ -22,7 +22,7 @@ on:
required: true
env:
COLUMNS: 160
COLUMNS: 190
jobs:
build-repo:


@ -7,7 +7,7 @@ on:
pull_request: {}
env:
COLUMNS: 160
COLUMNS: 190
CACHE_SEED: SEED-0 # Bump the number to invalidate all caches
RELENV_DATA: "${{ github.workspace }}/.relenv"


@ -9,7 +9,7 @@ on:
- cron: '0 3 * * *' # Every day at 3AM
env:
COLUMNS: 160
COLUMNS: 190
CACHE_SEED: SEED-0 # Bump the number to invalidate all caches
RELENV_DATA: "${{ github.workspace }}/.relenv"


@ -13,7 +13,7 @@ on:
description: The Salt version
env:
COLUMNS: 160
COLUMNS: 190
jobs:


@ -1,207 +0,0 @@
---
name: Generate Release Docs
on:
workflow_dispatch:
inputs:
saltVersion:
description: 'Salt Version'
required: true
manPages:
description: "Build Man Pages"
default: true
required: false
branch:
description: "Branch to build/push PR"
default: "master"
required: false
env:
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/
PIP_EXTRA_INDEX_URL: https://pypi.org/simple
permissions:
contents: read
jobs:
SaltChangelog:
permissions:
contents: read # for dorny/paths-filter to fetch a list of changed files
pull-requests: read # for dorny/paths-filter to read pull requests
name: Build Salt Changelog
runs-on: ubuntu-latest
container:
image: python:3.8-slim-buster
steps:
- name: Install System Deps
run: |
echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list
apt-get update
apt-get install -y enchant git gcc imagemagick make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev xz-utils
apt-get install -y git/buster-backports
- uses: actions/checkout@v3
with:
ref: ${{ github.event.inputs.branch }}
fetch-depth: 0
- name: Install Nox
run: |
python -m pip install --upgrade pip
pip install nox
- id: changed-files
name: Get Changed Files
uses: dorny/paths-filter@v2
with:
token: ${{ github.token }}
list-files: json
filters: |
docs:
- doc/**
- name: Install Python Requirements
run: |
nox --install-only --forcecolor -e 'changelog(force=True, draft=False)' -- ${{ github.event.inputs.saltVersion }}
nox --install-only --forcecolor -e 'changelog(force=False, draft=True)' -- ${{ github.event.inputs.saltVersion }}
- name: Build Changelog
env:
SKIP_REQUIREMENTS_INSTALL: YES
run: |
nox --forcecolor -e 'changelog(force=False, draft=True)' -- ${{ github.event.inputs.saltVersion }} > rn_changelog
nox --forcecolor -e 'changelog(force=True, draft=False)' -- ${{ github.event.inputs.saltVersion }}
git restore --staged changelog/
git diff --no-color > rm_changelog_patch.txt
- name: Store Generated Changelog
uses: actions/upload-artifact@v3
with:
name: salt-changelog
path: |
CHANGELOG.md
rm_changelog_patch.txt
rn_changelog
Manpages:
permissions:
contents: read # for dorny/paths-filter to fetch a list of changed files
pull-requests: read # for dorny/paths-filter to read pull requests
name: Build Salt man Pages
runs-on: ubuntu-latest
container:
image: python:3.8-slim-buster
steps:
- name: Install System Deps
if: github.event.inputs.manPages == 'true'
run: |
echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list
apt-get update
apt-get install -y enchant git gcc imagemagick make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev
apt-get install -y git/buster-backports
- uses: actions/checkout@v3
with:
ref: ${{ github.event.inputs.branch }}
- id: changed-files
if: github.event.inputs.manPages == 'true'
name: Get Changed Files
uses: dorny/paths-filter@v2
with:
token: ${{ github.token }}
list-files: json
filters: |
docs:
- doc/**
- name: Install Nox
if: github.event.inputs.manPages == 'true'
run: |
python -m pip install --upgrade pip
pip install nox
- name: Install Python Requirements
if: github.event.inputs.manPages == 'true'
run:
nox --install-only --forcecolor -e 'docs-man(compress=False, update=True, clean=True)'
- name: Build Manpages
if: github.event.inputs.manPages == 'true'
env:
SKIP_REQUIREMENTS_INSTALL: YES
run: |
nox --forcecolor -e 'docs-man(compress=False, update=True, clean=True)'
- name: Store Generated Documentation
if: github.event.inputs.manPages == 'true'
uses: actions/upload-artifact@v3
with:
name: salt-man-pages
path: doc/_build/man
PullRequest:
permissions:
contents: write # for peter-evans/create-pull-request to create branch
pull-requests: write # for peter-evans/create-pull-request to create a PR
needs: [SaltChangelog, Manpages]
name: Create Pull Request
runs-on: ubuntu-latest
container:
image: python:3.8-slim-buster
steps:
- name: Install System Deps
run: |
echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list
apt-get update
apt-get install -y enchant git gcc imagemagick make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev xz-utils
apt-get install -y git/buster-backports
- uses: actions/checkout@v3
with:
ref: ${{ github.event.inputs.branch }}
fetch-depth: 0
- name: Download salt-man-pages
if: github.event.inputs.manPages == 'true'
uses: actions/download-artifact@v3
with:
name: salt-man-pages
path: doc/man/
- name: Download salt changelog
uses: actions/download-artifact@v3
with:
name: salt-changelog
- name: Generate Release Notes and changelog
run: |
git apply rm_changelog_patch.txt
rm rm_changelog_patch.txt
sed -i '0,/^======/d' rn_changelog
cat rn_changelog
cat rn_changelog >> doc/topics/releases/${{ github.event.inputs.saltVersion }}.rst
rm rn_changelog
- name: Create Pull Request for Release
uses: peter-evans/create-pull-request@v3
with:
token: ${{ secrets.GITHUB_TOKEN }}
commit-message: Add changelog and docs for ${{ github.event.inputs.saltVersion }} release
title: 'Generate Salt Man Page and Changelog'
body: >
This PR is auto-generated by
[create-pull-request](https://github.com/peter-evans/create-pull-request).
branch: docs_${{ github.event.inputs.saltVersion }}
base: ${{ github.event.inputs.branch }}


@ -9,7 +9,7 @@ on:
- cron: '0 */8 * * *' # Run every 8 hours
env:
COLUMNS: 160
COLUMNS: 190
CACHE_SEED: SEED-0 # Bump the number to invalidate all caches
RELENV_DATA: "${{ github.workspace }}/.relenv"


@ -15,7 +15,7 @@ on:
<%- block env %>
env:
COLUMNS: 160
COLUMNS: 190
CACHE_SEED: SEED-0 # Bump the number to invalidate all caches
RELENV_DATA: "${{ github.workspace }}/.relenv"


@ -46,7 +46,7 @@ on:
env:
NOX_VERSION: "2022.8.7"
COLUMNS: 160
COLUMNS: 190
PIP_INDEX_URL: "https://pypi-proxy.saltstack.net/root/local/+simple/"
PIP_EXTRA_INDEX_URL: "https://pypi.org/simple"


@ -41,7 +41,7 @@ on:
env:
NOX_VERSION: "2022.8.7"
COLUMNS: 160
COLUMNS: 190
AWS_MAX_ATTEMPTS: "10"
AWS_RETRY_MODE: "adaptive"
PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/


@ -30,7 +30,7 @@ pygments==2.13.0
# via rich
python-dateutil==2.8.2
# via botocore
python-tools-scripts==0.10.2
python-tools-scripts==0.10.4
# via -r requirements/static/ci/tools.in
pyyaml==6.0
# via -r requirements/static/ci/tools.in


@ -30,7 +30,7 @@ pygments==2.13.0
# via rich
python-dateutil==2.8.2
# via botocore
python-tools-scripts==0.10.2
python-tools-scripts==0.10.4
# via -r requirements/static/ci/tools.in
pyyaml==6.0
# via -r requirements/static/ci/tools.in


@ -1,4 +1,4 @@
python-tools-scripts >= 0.10.2
python-tools-scripts >= 0.10.4
attrs
boto3
pyyaml


@ -7,16 +7,15 @@ from __future__ import annotations
import datetime
import logging
import os
import pathlib
import subprocess
import sys
import textwrap
from ptscripts import Context, command_group
log = logging.getLogger(__name__)
import tools.utils
REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
log = logging.getLogger(__name__)
# Define the command group
changelog = command_group(
@ -25,7 +24,7 @@ changelog = command_group(
description=__doc__,
venv_config={
"requirements_files": [
REPO_ROOT
tools.utils.REPO_ROOT
/ "requirements"
/ "static"
/ "ci"


@ -13,9 +13,9 @@ from typing import TYPE_CHECKING
from ptscripts import Context, command_group
log = logging.getLogger(__name__)
import tools.utils
REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
log = logging.getLogger(__name__)
# Define the command group
ci = command_group(name="ci", help="CI Related Commands", description=__doc__)
@ -107,7 +107,11 @@ def process_changed_files(ctx: Context, event_name: str, changed_files: pathlib.
if not entry:
loaded_data.remove(entry)
try:
entry = REPO_ROOT.joinpath(entry).resolve().relative_to(REPO_ROOT)
entry = (
tools.utils.REPO_ROOT.joinpath(entry)
.resolve()
.relative_to(tools.utils.REPO_ROOT)
)
except ValueError:
ctx.error(
f"While processing the changed files key {key!r}, the "
@ -261,7 +265,7 @@ def define_jobs(ctx: Context, event_name: str, changed_files: pathlib.Path):
with open(github_step_summary, "a", encoding="utf-8") as wfh:
wfh.write(
f"All defined jobs will run due to event type of {event_name!r}.\n"
f"All defined jobs will run due to event type of `{event_name}`.\n"
)
return
@ -368,7 +372,7 @@ def define_testrun(ctx: Context, event_name: str, changed_files: pathlib.Path):
wfh.write(f"testrun={json.dumps(testrun)}\n")
with open(github_step_summary, "a", encoding="utf-8") as wfh:
wfh.write(f"Full test run chosen due to event type of {event_name!r}.\n")
wfh.write(f"Full test run chosen due to event type of `{event_name}`.\n")
return
if not changed_files.exists():
@ -417,10 +421,12 @@ def define_testrun(ctx: Context, event_name: str, changed_files: pathlib.Path):
wfh.write("</pre>\n</details>\n")
testrun = {"type": "full"}
else:
testrun_changed_files_path = REPO_ROOT / "testrun-changed-files.txt"
testrun_changed_files_path = tools.utils.REPO_ROOT / "testrun-changed-files.txt"
testrun = {
"type": "changed",
"from-filenames": str(testrun_changed_files_path.relative_to(REPO_ROOT)),
"from-filenames": str(
testrun_changed_files_path.relative_to(tools.utils.REPO_ROOT)
),
}
ctx.info(f"Writing {testrun_changed_files_path.name} ...")
selected_changed_files = []


@ -12,9 +12,9 @@ import sys
from ptscripts import Context, command_group
log = logging.getLogger(__name__)
import tools.utils
REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
log = logging.getLogger(__name__)
# Define the command group
docs = command_group(
@ -23,7 +23,7 @@ docs = command_group(
description=__doc__,
venv_config={
"requirements_files": [
REPO_ROOT
tools.utils.REPO_ROOT
/ "requirements"
/ "static"
/ "ci"


@ -19,9 +19,9 @@ import tempfile
import yaml
from ptscripts import Context, command_group
log = logging.getLogger(__name__)
import tools.utils
REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
log = logging.getLogger(__name__)
# Define the command group
pkg = command_group(name="pkg", help="Packaging Related Commands", description=__doc__)
@ -112,14 +112,14 @@ def set_salt_version(
"""
Write the Salt version to 'salt/_version.txt'
"""
salt_version_file = REPO_ROOT / "salt" / "_version.txt"
salt_version_file = tools.utils.REPO_ROOT / "salt" / "_version.txt"
if salt_version_file.exists():
if not overwrite:
ctx.error("The 'salt/_version.txt' file already exists")
ctx.exit(1)
salt_version_file.unlink()
if salt_version is None:
if not REPO_ROOT.joinpath(".git").exists():
if not tools.utils.REPO_ROOT.joinpath(".git").exists():
ctx.error(
"Apparently not running from a Salt repository checkout. "
"Unable to discover the Salt version."
@ -133,7 +133,7 @@ def set_salt_version(
ctx.info(f"Validating and normalizing the salt version {salt_version!r}...")
with ctx.virtualenv(
name="set-salt-version",
requirements_files=[REPO_ROOT / "requirements" / "base.txt"],
requirements_files=[tools.utils.REPO_ROOT / "requirements" / "base.txt"],
) as venv:
code = f"""
import sys
@ -153,14 +153,14 @@ def set_salt_version(
ctx.exit(ctx.returncode)
salt_version = ret.stdout.strip().decode()
if not REPO_ROOT.joinpath("salt").is_dir():
if not tools.utils.REPO_ROOT.joinpath("salt").is_dir():
ctx.error(
"The path 'salt/' is not a directory. Unable to write 'salt/_version.txt'"
)
ctx.exit(1)
try:
REPO_ROOT.joinpath("salt/_version.txt").write_text(salt_version)
tools.utils.REPO_ROOT.joinpath("salt/_version.txt").write_text(salt_version)
except Exception as exc:
ctx.error(f"Unable to write 'salt/_version.txt': {exc}")
ctx.exit(1)
@ -211,7 +211,9 @@ def pre_archive_cleanup(ctx: Context, cleanup_path: str, pkg: bool = False):
When running on Windows and macOS, some additional cleanup is also done.
"""
with open(str(REPO_ROOT / "pkg" / "common" / "env-cleanup-rules.yml")) as rfh:
with open(
str(tools.utils.REPO_ROOT / "pkg" / "common" / "env-cleanup-rules.yml")
) as rfh:
patterns = yaml.safe_load(rfh.read())
if pkg:
@ -317,7 +319,7 @@ def generate_hashes(ctx: Context, files: list[pathlib.Path]):
name="source-tarball",
venv_config={
"requirements_files": [
REPO_ROOT / "requirements" / "build.txt",
tools.utils.REPO_ROOT / "requirements" / "build.txt",
]
},
)
@ -342,20 +344,20 @@ def source_tarball(ctx: Context):
"-m",
"build",
"--sdist",
str(REPO_ROOT),
str(tools.utils.REPO_ROOT),
env=env,
check=True,
)
# Recreate sdist to be reproducible
recompress = Recompress(timestamp)
for targz in REPO_ROOT.joinpath("dist").glob("*.tar.gz"):
ctx.info(f"Re-compressing {targz.relative_to(REPO_ROOT)} ...")
for targz in tools.utils.REPO_ROOT.joinpath("dist").glob("*.tar.gz"):
ctx.info(f"Re-compressing {targz.relative_to(tools.utils.REPO_ROOT)} ...")
recompress.recompress(targz)
sha256sum = shutil.which("sha256sum")
if sha256sum:
packages = [
str(pkg.relative_to(REPO_ROOT))
for pkg in REPO_ROOT.joinpath("dist").iterdir()
str(pkg.relative_to(tools.utils.REPO_ROOT))
for pkg in tools.utils.REPO_ROOT.joinpath("dist").iterdir()
]
ctx.run("sha256sum", *packages)
ctx.run("python3", "-m", "twine", "check", "dist/*", check=True)


@ -13,25 +13,17 @@ import shutil
import sys
import textwrap
from datetime import datetime
from typing import TYPE_CHECKING
from typing import TYPE_CHECKING, Any
import packaging.version
from ptscripts import Context, command_group
import tools.pkg
import tools.utils
try:
import boto3
from botocore.exceptions import ClientError
from rich.progress import (
BarColumn,
Column,
DownloadColumn,
Progress,
TextColumn,
TimeRemainingColumn,
TransferSpeedColumn,
)
except ImportError:
print(
"\nPlease run 'python -m pip install -r "
@ -43,9 +35,6 @@ except ImportError:
log = logging.getLogger(__name__)
REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
GPG_KEY_FILENAME = "SALT-PROJECT-GPG-PUBKEY-2023"
# Define the command group
repo = command_group(
name="repo",
@ -219,17 +208,14 @@ def debian(
rc_build=rc_build,
nightly_build=nightly_build,
)
ftp_archive_config_file = create_repo_path / "apt-ftparchive.conf"
ctx.info(f"Writing {ftp_archive_config_file} ...")
ftp_archive_config_file.write_text(textwrap.dedent(ftp_archive_config))
keyfile_gpg = create_repo_path / GPG_KEY_FILENAME
ctx.info(
f"Exporting GnuPG Key '{key_id}' to {keyfile_gpg.relative_to(repo_path)}.pub ..."
)
ctx.run(
"gpg", "--armor", "-o", str(keyfile_gpg.with_suffix(".pub")), "--export", key_id
)
# Export the GPG key in use
tools.utils.export_gpg_key(ctx, key_id, repo_path, create_repo_path)
pool_path = create_repo_path / "pool"
pool_path.mkdir(exist_ok=True)
for fpath in incoming.iterdir():
@ -309,15 +295,27 @@ def debian(
ctx.info(f"Running '{' '.join(cmdline)}' ...")
ctx.run(*cmdline, cwd=create_repo_path)
if nightly_build is False:
ctx.info("Creating '<major-version>' and 'latest' symlinks ...")
major_version = packaging.version.parse(salt_version).major
major_link = create_repo_path.parent.parent / str(major_version)
major_link.symlink_to(f"minor/{salt_version}")
latest_link = create_repo_path.parent.parent / "latest"
latest_link.symlink_to(f"minor/{salt_version}")
remote_versions = _get_remote_versions(
tools.utils.STAGING_BUCKET_NAME,
create_repo_path.parent.relative_to(repo_path),
)
major_version = Version(salt_version).major
matching_major = None
for version in remote_versions:
if version.major == major_version:
matching_major = version
break
if not matching_major or matching_major < salt_version:
major_link = create_repo_path.parent.parent / str(major_version)
ctx.info(f"Creating '{major_link.relative_to(repo_path)}' symlink ...")
major_link.symlink_to(f"minor/{salt_version}")
if not remote_versions or remote_versions[0] < salt_version:
latest_link = create_repo_path.parent.parent / "latest"
ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...")
latest_link.symlink_to(f"minor/{salt_version}")
else:
ctx.info("Creating 'latest' symlink ...")
latest_link = create_repo_path.parent / "latest"
ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...")
latest_link.symlink_to(create_repo_path.name)
ctx.info("Done")
@ -415,11 +413,8 @@ def rpm(
nightly_build=nightly_build,
)
keyfile_gpg = create_repo_path / GPG_KEY_FILENAME
ctx.info(
f"Exporting GnuPG Key '{key_id}' to {keyfile_gpg.relative_to(repo_path)}.gpg ..."
)
ctx.run("gpg", "-o", str(keyfile_gpg.with_suffix(".gpg")), "--export", key_id)
# Export the GPG key in use
tools.utils.export_gpg_key(ctx, key_id, repo_path, create_repo_path)
for fpath in incoming.iterdir():
if ".src" in fpath.suffixes:
@ -474,10 +469,6 @@ def rpm(
base_url = "py3/"
repo_file_contents = "[salt-repo]"
base_url += f"{distro}/{url_suffix}"
if distro_version == "9":
gpg_key = f"{base_url}/SALTSTACK-GPG-KEY2.pub"
else:
gpg_key = f"{base_url}/SALTSTACK-GPG-KEY.pub"
if distro == "amazon":
distro_name = "Amazon Linux"
else:
@ -496,7 +487,7 @@ def rpm(
enabled=1
enabled_metadata=1
gpgcheck=1
gpgkey={gpg_key}
gpgkey={base_url}/{tools.utils.GPG_KEY_FILENAME}.pub
"""
if nightly_build:
@ -509,18 +500,31 @@ def rpm(
_create_repo_file(repo_file_path, salt_version)
if nightly_build is False and rc_build is False:
ctx.info("Creating '<major-version>' and 'latest' symlinks ...")
major_version = packaging.version.parse(salt_version).major
major_link = create_repo_path.parent.parent / str(major_version)
major_link.symlink_to(f"minor/{salt_version}")
latest_link = create_repo_path.parent.parent / "latest"
latest_link.symlink_to(f"minor/{salt_version}")
for name in (major_version, "latest"):
repo_file_path = create_repo_path.parent.parent / f"{name}.repo"
_create_repo_file(repo_file_path, name)
remote_versions = _get_remote_versions(
tools.utils.STAGING_BUCKET_NAME,
create_repo_path.parent.relative_to(repo_path),
)
major_version = Version(salt_version).major
matching_major = None
for version in remote_versions:
if version.major == major_version:
matching_major = version
break
if not matching_major or matching_major < salt_version:
major_link = create_repo_path.parent.parent / str(major_version)
ctx.info(f"Creating '{major_link.relative_to(repo_path)}' symlink ...")
major_link.symlink_to(f"minor/{salt_version}")
repo_file_path = create_repo_path.parent.parent / f"{major_version}.repo"
_create_repo_file(repo_file_path, str(major_version))
if not remote_versions or remote_versions[0] < salt_version:
latest_link = create_repo_path.parent.parent / "latest"
ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...")
latest_link.symlink_to(f"minor/{salt_version}")
repo_file_path = create_repo_path.parent.parent / "latest.repo"
_create_repo_file(repo_file_path, "latest")
else:
ctx.info("Creating 'latest' symlink and 'latest.repo' file ...")
latest_link = create_repo_path.parent / "latest"
ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...")
latest_link.symlink_to(create_repo_path.name)
repo_file_path = create_repo_path.parent.parent / "latest.repo"
_create_repo_file(repo_file_path, "latest")
@ -711,187 +715,82 @@ def onedir(
ctx.info("Done")
def _create_onedir_based_repo(
@create.command(
name="src",
arguments={
"salt_version": {
"help": "The salt version for which to build the repository",
"required": True,
},
"repo_path": {
"help": "Path where the repository shall be created.",
"required": True,
},
"key_id": {
"help": "The GnuPG key ID used to sign.",
"required": True,
},
"incoming": {
"help": (
"The path to the directory containing the files that should added to "
"the repository."
),
"required": True,
},
"nightly_build": {
"help": "Developement repository target",
},
"rc_build": {
"help": "Release Candidate repository target",
},
},
)
def src(
ctx: Context,
salt_version: str,
nightly_build: bool,
rc_build: bool,
repo_path: pathlib.Path,
incoming: pathlib.Path,
key_id: str,
distro: str,
pkg_suffixes: tuple[str, ...],
salt_version: str = None,
incoming: pathlib.Path = None,
repo_path: pathlib.Path = None,
key_id: str = None,
nightly_build: bool = False,
rc_build: bool = False,
):
"""
Create the onedir repository.
"""
if TYPE_CHECKING:
assert salt_version is not None
assert incoming is not None
assert repo_path is not None
assert key_id is not None
ctx.info("Creating repository directory structure ...")
create_repo_path = _create_repo_path(
repo_path, salt_version, distro, rc_build=rc_build, nightly_build=nightly_build
)
if nightly_build is False:
repo_json_path = create_repo_path.parent.parent / "repo.json"
else:
repo_json_path = create_repo_path.parent / "repo.json"
if nightly_build:
bucket_name = "salt-project-prod-salt-artifacts-nightly"
else:
bucket_name = "salt-project-prod-salt-artifacts-staging"
s3 = boto3.client("s3")
try:
ret = s3.head_object(
Bucket=bucket_name, Key=str(repo_json_path.relative_to(repo_path))
)
ctx.info("Downloading existing 'repo.json' file")
size = ret["ContentLength"]
with repo_json_path.open("wb") as wfh:
with create_progress_bar(file_progress=True) as progress:
task = progress.add_task(description="Downloading...", total=size)
s3.download_fileobj(
Bucket=bucket_name,
Key=str(repo_json_path.relative_to(repo_path)),
Fileobj=wfh,
Callback=UpdateProgress(progress, task),
)
with repo_json_path.open() as rfh:
repo_json = json.load(rfh)
except ClientError as exc:
if "Error" not in exc.response:
raise
if exc.response["Error"]["Code"] != "404":
raise
repo_json = {}
if salt_version not in repo_json:
repo_json[salt_version] = {}
copy_exclusions = (
".blake2b",
".sha512",
".sha3_512",
".BLAKE2B",
".SHA512",
".SHA3_512",
".json",
repo_path,
salt_version,
"src",
rc_build=rc_build,
nightly_build=nightly_build,
)
hashes_base_path = create_repo_path / f"salt-{salt_version}"
for fpath in incoming.iterdir():
if fpath.suffix in copy_exclusions:
if fpath.suffix not in (".gz",):
continue
ctx.info(f"* Processing {fpath} ...")
dpath = create_repo_path / fpath.name
ctx.info(f"Copying {fpath} to {dpath} ...")
shutil.copyfile(fpath, dpath)
if "-amd64" in dpath.name.lower():
arch = "amd64"
elif "-x86_64" in dpath.name.lower():
arch = "x86_64"
elif "-x86" in dpath.name.lower():
arch = "x86"
elif "-aarch64" in dpath.name.lower():
arch = "aarch64"
else:
ctx.error(
f"Cannot pickup the right architecture from the filename '{dpath.name}'."
)
ctx.exit(1)
repo_json[salt_version][dpath.name] = {
"name": dpath.name,
"version": salt_version,
"os": distro,
"arch": arch,
}
for hash_name in ("blake2b", "sha512", "sha3_512"):
ctx.info(f" * Calculating {hash_name} ...")
hexdigest = _get_file_checksum(fpath, hash_name)
repo_json[salt_version][dpath.name][hash_name.upper()] = hexdigest
with open(f"{hashes_base_path}_{hash_name.upper()}", "a+") as wfh:
wfh.write(f"{hexdigest} {dpath.name}\n")
for fpath in create_repo_path.iterdir():
if fpath.suffix in pkg_suffixes:
continue
ctx.info(f"GPG Signing '{fpath.relative_to(repo_path)}' ...")
ctx.run("gpg", "-u", key_id, "-o", f"{fpath}.asc", "-a", "-b", "-s", str(fpath))
tools.utils.gpg_sign(ctx, key_id, fpath)
keyfile_gpg = create_repo_path / GPG_KEY_FILENAME
ctx.info(
f"Exporting GnuPG Key '{key_id}' to {keyfile_gpg.relative_to(repo_path)}.{{gpg,pub}} ..."
)
ctx.run("gpg", "-o", str(keyfile_gpg.with_suffix(".gpg")), "--export", key_id)
ctx.run(
"gpg", "--armor", "-o", str(keyfile_gpg.with_suffix(".pub")), "--export", key_id
)
if nightly_build is False:
versions_in_repo_json = {}
for version in repo_json:
if version == "latest":
continue
versions_in_repo_json[packaging.version.parse(version)] = version
latest_version = versions_in_repo_json[
sorted(versions_in_repo_json, reverse=True)[0]
]
if salt_version == latest_version:
repo_json["latest"] = repo_json[salt_version]
ctx.info("Creating '<major-version>' and 'latest' symlinks ...")
major_version = packaging.version.parse(salt_version).major
repo_json[str(major_version)] = repo_json[salt_version]
major_link = create_repo_path.parent.parent / str(major_version)
major_link.symlink_to(f"minor/{salt_version}")
latest_link = create_repo_path.parent.parent / "latest"
latest_link.symlink_to(f"minor/{salt_version}")
minor_repo_json_path = create_repo_path.parent / "repo.json"
try:
ret = s3.head_object(
Bucket=bucket_name, Key=str(minor_repo_json_path.relative_to(repo_path))
)
size = ret["ContentLength"]
ctx.info("Downloading existing 'minor/repo.json' file")
with minor_repo_json_path.open("wb") as wfh:
with create_progress_bar(file_progress=True) as progress:
task = progress.add_task(description="Downloading...", total=size)
s3.download_fileobj(
Bucket=bucket_name,
Key=str(minor_repo_json_path.relative_to(repo_path)),
Fileobj=wfh,
Callback=UpdateProgress(progress, task),
)
with minor_repo_json_path.open() as rfh:
minor_repo_json = json.load(rfh)
except ClientError as exc:
if "Error" not in exc.response:
raise
if exc.response["Error"]["Code"] != "404":
raise
minor_repo_json = {}
minor_repo_json[salt_version] = repo_json[salt_version]
minor_repo_json_path.write_text(json.dumps(minor_repo_json))
else:
ctx.info("Creating 'latest' symlink ...")
latest_link = create_repo_path.parent / "latest"
latest_link.symlink_to(create_repo_path.name)
repo_json_path.write_text(json.dumps(repo_json))
def _get_file_checksum(fpath: pathlib.Path, hash_name: str) -> str:
with fpath.open("rb") as rfh:
try:
digest = hashlib.file_digest(rfh, hash_name) # type: ignore[attr-defined]
except AttributeError:
# Python < 3.11
buf = bytearray(2**18) # Reusable buffer to reduce allocations.
view = memoryview(buf)
digest = getattr(hashlib, hash_name)()
while True:
size = rfh.readinto(buf)
if size == 0:
break # EOF
digest.update(view[:size])
hexdigest: str = digest.hexdigest()
return hexdigest
# Export the GPG key in use
tools.utils.export_gpg_key(ctx, key_id, repo_path, create_repo_path)
ctx.info("Done")
@publish.command(
@ -941,6 +840,253 @@ def release(ctx: Context, repo_path: pathlib.Path, rc_build: bool = False):
"""
def _get_remote_versions(bucket_name: str, remote_path: str):
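"""Return the versions already published under remote_path in the given S3 bucket, newest first; the 'latest' alias is skipped."""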
remote_path = str(remote_path)
if not remote_path.endswith("/"):
remote_path += "/"
s3 = boto3.client("s3")
ret = s3.list_objects(
Bucket=bucket_name,
Delimiter="/",
Prefix=remote_path,
)
if "CommonPrefixes" not in ret:
return []
versions = []
for entry in ret["CommonPrefixes"]:
_, version = entry["Prefix"].rstrip("/").rsplit("/", 1)
if version == "latest":
continue
versions.append(Version(version))
versions.sort(reverse=True)
return versions
def _create_onedir_based_repo(
ctx: Context,
salt_version: str,
nightly_build: bool,
rc_build: bool,
repo_path: pathlib.Path,
incoming: pathlib.Path,
key_id: str,
distro: str,
pkg_suffixes: tuple[str, ...],
):
ctx.info("Creating repository directory structure ...")
create_repo_path = _create_repo_path(
repo_path, salt_version, distro, rc_build=rc_build, nightly_build=nightly_build
)
if nightly_build is False:
repo_json_path = create_repo_path.parent.parent / "repo.json"
else:
repo_json_path = create_repo_path.parent / "repo.json"
if nightly_build:
bucket_name = tools.utils.NIGHTLY_BUCKET_NAME
else:
bucket_name = tools.utils.STAGING_BUCKET_NAME
release_json = {}
copy_exclusions = (
".blake2b",
".sha512",
".sha3_512",
".BLAKE2B",
".SHA512",
".SHA3_512",
".json",
)
hashes_base_path = create_repo_path / f"salt-{salt_version}"
for fpath in incoming.iterdir():
if fpath.suffix in copy_exclusions:
continue
ctx.info(f"* Processing {fpath} ...")
dpath = create_repo_path / fpath.name
ctx.info(f"Copying {fpath} to {dpath} ...")
shutil.copyfile(fpath, dpath)
if "-amd64" in dpath.name.lower():
arch = "amd64"
elif "-x86_64" in dpath.name.lower():
arch = "x86_64"
elif "-x86" in dpath.name.lower():
arch = "x86"
elif "-aarch64" in dpath.name.lower():
arch = "aarch64"
else:
ctx.error(
f"Cannot pickup the right architecture from the filename '{dpath.name}'."
)
ctx.exit(1)
if distro == "onedir":
if "-onedir-linux-" in dpath.name.lower():
release_os = "linux"
elif "-onedir-darwin-" in dpath.name.lower():
release_os = "macos"
elif "-onedir-windows-" in dpath.name.lower():
release_os = "windows"
else:
ctx.error(
f"Cannot pickup the right OS from the filename '{dpath.name}'."
)
ctx.exit(1)
else:
release_os = distro
release_json[dpath.name] = {
"name": dpath.name,
"version": salt_version,
"os": release_os,
"arch": arch,
}
for hash_name in ("blake2b", "sha512", "sha3_512"):
ctx.info(f" * Calculating {hash_name} ...")
hexdigest = _get_file_checksum(fpath, hash_name)
release_json[dpath.name][hash_name.upper()] = hexdigest
with open(f"{hashes_base_path}_{hash_name.upper()}", "a+") as wfh:
wfh.write(f"{hexdigest} {dpath.name}\n")
for fpath in create_repo_path.iterdir():
if fpath.suffix in pkg_suffixes:
continue
tools.utils.gpg_sign(ctx, key_id, fpath)
# Export the GPG key in use
tools.utils.export_gpg_key(ctx, key_id, repo_path, create_repo_path)
repo_json = _get_repo_json_file_contents(
ctx, bucket_name=bucket_name, repo_path=repo_path, repo_json_path=repo_json_path
)
if nightly_build is True:
latest_link = create_repo_path.parent / "latest"
ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...")
latest_link.symlink_to(create_repo_path.name)
ctx.info(f"Writing {repo_json_path} ...")
repo_json_path.write_text(json.dumps(repo_json, sort_keys=True))
return
major_version = Version(salt_version).major
minor_repo_json_path = create_repo_path.parent / "repo.json"
minor_repo_json = _get_repo_json_file_contents(
ctx,
bucket_name=bucket_name,
repo_path=repo_path,
repo_json_path=minor_repo_json_path,
)
minor_repo_json[salt_version] = release_json
versions = _parse_versions(*list(minor_repo_json))
ctx.info(
f"Collected versions from {minor_repo_json_path.relative_to(repo_path)}: "
f"{', '.join(str(vs) for vs in versions)}"
)
minor_versions = [v for v in versions if v.major == major_version]
ctx.info(
f"Collected versions(Matching major: {major_version}) from {minor_repo_json_path.relative_to(repo_path)}: "
f"{', '.join(str(vs) for vs in minor_versions)}"
)
if not versions:
latest_version = Version(salt_version)
else:
latest_version = versions[0]
if not minor_versions:
latest_minor_version = Version(salt_version)
else:
latest_minor_version = minor_versions[0]
ctx.info(f"Release Version: {salt_version}")
ctx.info(f"Latest Repo Version: {latest_version}")
ctx.info(f"Latest Release Minor Version: {latest_minor_version}")
latest_link = create_repo_path.parent.parent / "latest"
if latest_version <= salt_version:
repo_json["latest"] = release_json
ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...")
if latest_link.exists():
latest_link.unlink()
latest_link.symlink_to(f"minor/{salt_version}")
else:
ctx.info(
f"Not creating the '{latest_link.relative_to(repo_path)}' symlink "
f"since {latest_version} > {salt_version}"
)
major_link = create_repo_path.parent.parent / str(major_version)
if latest_minor_version <= salt_version:
minor_repo_json["latest"] = release_json
# This is the latest minor, update the major in the top level repo.json
# to this version
repo_json[str(major_version)] = release_json
ctx.info(f"Creating '{major_link.relative_to(repo_path)}' symlink ...")
if major_link.exists():
major_link.unlink()
major_link.symlink_to(f"minor/{salt_version}")
else:
ctx.info(
f"Not creating the '{major_link.relative_to(repo_path)}' symlink "
f"since {latest_minor_version} > {salt_version}"
)
ctx.info(f"Writing {minor_repo_json_path} ...")
minor_repo_json_path.write_text(json.dumps(minor_repo_json, sort_keys=True))
ctx.info(f"Writing {repo_json_path} ...")
repo_json_path.write_text(json.dumps(repo_json, sort_keys=True))
def _get_repo_json_file_contents(
ctx: Context,
bucket_name: str,
repo_path: pathlib.Path,
repo_json_path: pathlib.Path,
) -> dict[str, Any]:
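"""Download the previously published repo.json for this path from the bucket, if any; return an empty dict when it does not exist yet (404)."""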
s3 = boto3.client("s3")
repo_json: dict[str, Any] = {}
try:
ret = s3.head_object(
Bucket=bucket_name, Key=str(repo_json_path.relative_to(repo_path))
)
ctx.info(f"Downloading existing '{repo_json_path.relative_to(repo_path)}' file")
size = ret["ContentLength"]
with repo_json_path.open("wb") as wfh:
with tools.utils.create_progress_bar(file_progress=True) as progress:
task = progress.add_task(description="Downloading...", total=size)
s3.download_fileobj(
Bucket=bucket_name,
Key=str(repo_json_path.relative_to(repo_path)),
Fileobj=wfh,
Callback=tools.utils.UpdateProgress(progress, task),
)
with repo_json_path.open() as rfh:
repo_json = json.load(rfh)
except ClientError as exc:
if "Error" not in exc.response:
raise
if exc.response["Error"]["Code"] != "404":
raise
return repo_json
def _get_file_checksum(fpath: pathlib.Path, hash_name: str) -> str:
with fpath.open("rb") as rfh:
try:
digest = hashlib.file_digest(rfh, hash_name) # type: ignore[attr-defined]
except AttributeError:
# Python < 3.11
buf = bytearray(2**18) # Reusable buffer to reduce allocations.
view = memoryview(buf)
digest = getattr(hashlib, hash_name)()
while True:
size = rfh.readinto(buf)
if size == 0:
break # EOF
digest.update(view[:size])
hexdigest: str = digest.hexdigest()
return hexdigest
def _publish_repo(
ctx: Context,
repo_path: pathlib.Path,
@ -952,11 +1098,11 @@ def _publish_repo(
Publish packaging repositories.
"""
if nightly_build:
bucket_name = "salt-project-prod-salt-artifacts-nightly"
bucket_name = tools.utils.NIGHTLY_BUCKET_NAME
elif stage:
bucket_name = "salt-project-prod-salt-artifacts-staging"
bucket_name = tools.utils.STAGING_BUCKET_NAME
else:
bucket_name = "salt-project-prod-salt-artifacts-release"
bucket_name = tools.utils.RELEASE_BUCKET_NAME
ctx.info("Preparing upload ...")
s3 = boto3.client("s3")
@ -993,7 +1139,7 @@ def _publish_repo(
path = pathlib.Path(dirpath, fpath)
to_upload_paths.append(path)
with create_progress_bar() as progress:
with tools.utils.create_progress_bar() as progress:
task = progress.add_task(
"Deleting directories to override.", total=len(to_delete_paths)
)
@ -1011,57 +1157,24 @@ def _publish_repo(
finally:
progress.update(task, advance=1)
def update_progress(progress, task, chunk):
progress.update(task, completed=chunk)
try:
ctx.info("Uploading repository ...")
for upload_path in to_upload_paths:
relpath = upload_path.relative_to(repo_path)
size = upload_path.stat().st_size
ctx.info(f" {relpath}")
with create_progress_bar(file_progress=True) as progress:
with tools.utils.create_progress_bar(file_progress=True) as progress:
task = progress.add_task(description="Uploading...", total=size)
s3.upload_file(
str(upload_path),
bucket_name,
str(relpath),
Callback=UpdateProgress(progress, task),
Callback=tools.utils.UpdateProgress(progress, task),
)
except KeyboardInterrupt:
pass
class UpdateProgress:
def __init__(self, progress, task):
self.progress = progress
self.task = task
def __call__(self, chunk_size):
self.progress.update(self.task, advance=chunk_size)
def create_progress_bar(file_progress: bool = False, **kwargs):
if file_progress:
return Progress(
TextColumn("[progress.description]{task.description}"),
BarColumn(),
DownloadColumn(),
TransferSpeedColumn(),
TextColumn("eta"),
TimeRemainingColumn(),
**kwargs,
)
return Progress(
TextColumn(
"[progress.description]{task.description}", table_column=Column(ratio=3)
),
BarColumn(),
expand=True,
**kwargs,
)
def _create_repo_path(
repo_path: pathlib.Path,
salt_version: str,
@ -1082,8 +1195,53 @@ def _create_repo_path(
if distro_arch:
create_repo_path = create_repo_path / distro_arch
if nightly_build is False:
create_repo_path = create_repo_path / "minor" / salt_version
if distro != "src":
create_repo_path = create_repo_path / "minor"
create_repo_path = create_repo_path / salt_version
else:
create_repo_path = create_repo_path / datetime.utcnow().strftime("%Y-%m-%d")
create_repo_path.mkdir(exist_ok=True, parents=True)
return create_repo_path
def _parse_versions(*versions: str) -> list[Version]:
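"""Parse the given version strings into Version objects sorted newest first, ignoring the 'latest' alias."""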
_versions = []
for version in set(versions):
if version == "latest":
continue
_versions.append(Version(version))
if _versions:
_versions.sort(reverse=True)
return _versions
class Version(packaging.version.Version):
def __lt__(self, other):
if not isinstance(other, self.__class__):
other = self.__class__(other)
return super().__lt__(other)
def __le__(self, other):
if not isinstance(other, self.__class__):
other = self.__class__(other)
return super().__le__(other)
def __eq__(self, other):
if not isinstance(other, self.__class__):
other = self.__class__(other)
return super().__eq__(other)
def __ge__(self, other):
if not isinstance(other, self.__class__):
other = self.__class__(other)
return super().__ge__(other)
def __gt__(self, other):
if not isinstance(other, self.__class__):
other = self.__class__(other)
return super().__gt__(other)
def __ne__(self, other):
if not isinstance(other, self.__class__):
other = self.__class__(other)
return super().__ne__(other)
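
The comparison overrides above exist so the repo code can compare parsed versions directly against the plain salt_version string it receives (for example latest_version <= salt_version). A minimal sketch of that behaviour, trimmed to a single operator and assuming only that packaging is installed:

import packaging.version


class Version(packaging.version.Version):
    # Coerce the right-hand operand so comparisons against plain version
    # strings such as "3006.1" work instead of raising TypeError.
    def __le__(self, other):
        if not isinstance(other, self.__class__):
            other = self.__class__(other)
        return super().__le__(other)


# packaging.version.Version("3006.0") <= "3006.1" raises TypeError;
# the coercing subclass accepts the string form:
assert Version("3006.0") <= "3006.1"
assert not Version("3007.0") <= "3006.1"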


@ -5,17 +5,17 @@ These commands are used by pre-commit.
from __future__ import annotations
import logging
import pathlib
import shutil
from typing import TYPE_CHECKING, cast
from jinja2 import Environment, FileSystemLoader
from ptscripts import Context, command_group
import tools.utils
log = logging.getLogger(__name__)
REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
WORKFLOWS = REPO_ROOT / ".github" / "workflows"
WORKFLOWS = tools.utils.REPO_ROOT / ".github" / "workflows"
TEMPLATES = WORKFLOWS / "templates"
# Define the command group
@ -81,11 +81,11 @@ def generate_workflows(ctx: Context):
workflow_path = WORKFLOWS / template
template_path = TEMPLATES / f"{template}.jinja"
ctx.info(
f"Generating '{workflow_path.relative_to(REPO_ROOT)}' from "
f"template '{template_path.relative_to(REPO_ROOT)}' ..."
f"Generating '{workflow_path.relative_to(tools.utils.REPO_ROOT)}' from "
f"template '{template_path.relative_to(tools.utils.REPO_ROOT)}' ..."
)
context = {
"template": template_path.relative_to(REPO_ROOT),
"template": template_path.relative_to(tools.utils.REPO_ROOT),
"workflow_name": workflow_name,
"includes": includes,
"conclusion_needs": NeedsTracker(),

tools/utils.py (new file, 88 lines)

@ -0,0 +1,88 @@
# pylint: disable=resource-leakage,broad-except
from __future__ import annotations
import pathlib
from ptscripts import Context
from rich.progress import (
BarColumn,
Column,
DownloadColumn,
Progress,
TextColumn,
TimeRemainingColumn,
TransferSpeedColumn,
)
REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
GPG_KEY_FILENAME = "SALT-PROJECT-GPG-PUBKEY-2023"
NIGHTLY_BUCKET_NAME = "salt-project-prod-salt-artifacts-nightly"
STAGING_BUCKET_NAME = "salt-project-prod-salt-artifacts-staging"
RELEASE_BUCKET_NAME = "salt-project-prod-salt-artifacts-release"
class UpdateProgress:
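"""Transfer callback handed to boto3 uploads/downloads; advances the rich progress task by each chunk transferred."""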
def __init__(self, progress, task):
self.progress = progress
self.task = task
def __call__(self, chunk_size):
self.progress.update(self.task, advance=chunk_size)
def create_progress_bar(file_progress: bool = False, **kwargs):
if file_progress:
return Progress(
TextColumn("[progress.description]{task.description}"),
BarColumn(),
DownloadColumn(),
TransferSpeedColumn(),
TextColumn("eta"),
TimeRemainingColumn(),
**kwargs,
)
return Progress(
TextColumn(
"[progress.description]{task.description}", table_column=Column(ratio=3)
),
BarColumn(),
expand=True,
**kwargs,
)
def export_gpg_key(
ctx: Context, key_id: str, repo_path: pathlib.Path, create_repo_path: pathlib.Path
):
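"""Export key_id into create_repo_path as GPG_KEY_FILENAME.gpg (binary) and GPG_KEY_FILENAME.pub (ASCII-armored), replacing any existing copies."""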
keyfile_gpg = create_repo_path.joinpath(GPG_KEY_FILENAME).with_suffix(".gpg")
if keyfile_gpg.exists():
keyfile_gpg.unlink()
ctx.info(
f"Exporting GnuPG Key '{key_id}' to {keyfile_gpg.relative_to(repo_path)} ..."
)
ctx.run("gpg", "--output", str(keyfile_gpg), "--export", key_id)
keyfile_pub = create_repo_path.joinpath(GPG_KEY_FILENAME).with_suffix(".pub")
if keyfile_pub.exists():
keyfile_pub.unlink()
ctx.info(
f"Exporting GnuPG Key '{key_id}' to {keyfile_pub.relative_to(repo_path)} ..."
)
ctx.run("gpg", "--armor", "--output", str(keyfile_pub), "--export", key_id)
def gpg_sign(ctx: Context, key_id: str, path: pathlib.Path):
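"""Write a detached, ASCII-armored signature for path next to it as <name>.asc, replacing any previous signature."""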
ctx.info(f"GPG Signing '{path}' ...")
signature_fpath = path.parent / f"{path.name}.asc"
if signature_fpath.exists():
signature_fpath.unlink()
ctx.run(
"gpg",
"--local-user",
key_id,
"--output",
str(signature_fpath),
"--armor",
"--detach-sign",
"--sign",
str(path),
)


@ -22,6 +22,8 @@ from typing import TYPE_CHECKING, cast
from ptscripts import Context, command_group
import tools.utils
try:
import attr
import boto3
@ -52,12 +54,11 @@ if TYPE_CHECKING:
log = logging.getLogger(__name__)
REPO_ROOT = pathlib.Path(__file__).parent.parent
STATE_DIR = REPO_ROOT / ".vms-state"
with REPO_ROOT.joinpath("cicd", "golden-images.json").open() as rfh:
STATE_DIR = tools.utils.REPO_ROOT / ".vms-state"
with tools.utils.REPO_ROOT.joinpath("cicd", "golden-images.json").open() as rfh:
AMIS = json.load(rfh)
REPO_CHECKOUT_ID = hashlib.sha256(
"|".join(list(platform.uname()) + [str(REPO_ROOT)]).encode()
"|".join(list(platform.uname()) + [str(tools.utils.REPO_ROOT)]).encode()
).hexdigest()
AWS_REGION = (
os.environ.get("AWS_DEFAULT_REGION") or os.environ.get("AWS_REGION") or "us-west-2"
@ -982,7 +983,7 @@ class VM:
"--exclude",
".pytest_cache/",
"--exclude",
f"{STATE_DIR.relative_to(REPO_ROOT)}{os.path.sep}",
f"{STATE_DIR.relative_to(tools.utils.REPO_ROOT)}{os.path.sep}",
"--exclude",
"*.py~",
# We need to include artifacts/ to be able to include artifacts/salt
@ -999,7 +1000,7 @@ class VM:
# symlink with a copy of what's getting symlinked.
rsync_flags.append("--copy-links")
# Local repo path
source = f"{REPO_ROOT}{os.path.sep}"
source = f"{tools.utils.REPO_ROOT}{os.path.sep}"
# Remote repo path
remote_path = self.upload_path.as_posix()
if self.is_windows:
@ -1014,7 +1015,7 @@ class VM:
return
write_env = {k: str(v) for (k, v) in env.items()}
write_env_filename = ".ci-env"
write_env_filepath = REPO_ROOT / ".ci-env"
write_env_filepath = tools.utils.REPO_ROOT / ".ci-env"
write_env_filepath.write_text(json.dumps(write_env))
# Local path
@ -1241,7 +1242,7 @@ class VM:
_ssh_command_args = [
ssh,
"-F",
str(self.ssh_config_file.relative_to(REPO_ROOT)),
str(self.ssh_config_file.relative_to(tools.utils.REPO_ROOT)),
]
if ssh_options:
_ssh_command_args.extend(ssh_options)