Refactor the tools pkg repo commands into a subdirectory

MKLeb 2023-05-02 20:53:24 -04:00 committed by Pedro Algarvio
parent e87ef03605
commit 3cd21ceb88
6 changed files with 2001 additions and 1906 deletions


@@ -8,6 +8,8 @@ ptscripts.register_tools_module("tools.docs")
ptscripts.register_tools_module("tools.pkg")
ptscripts.register_tools_module("tools.pkg.repo")
ptscripts.register_tools_module("tools.pkg.build")
ptscripts.register_tools_module("tools.pkg.repo.create")
ptscripts.register_tools_module("tools.pkg.repo.publish")
ptscripts.register_tools_module("tools.pre_commit")
ptscripts.register_tools_module("tools.release")
ptscripts.register_tools_module("tools.vm")

File diff suppressed because it is too large

tools/pkg/repo/__init__.py (new file, 181 lines)

@@ -0,0 +1,181 @@
"""
These commands are used to build the package repository files.
"""
# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated
from __future__ import annotations
import logging
import os
import pathlib
import shutil
import sys
from typing import TYPE_CHECKING
from ptscripts import Context, command_group
import tools.pkg
import tools.utils
from tools.utils import Version, get_salt_releases
try:
import boto3
from botocore.exceptions import ClientError
except ImportError:
print(
"\nPlease run 'python -m pip install -r "
"requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info),
file=sys.stderr,
flush=True,
)
raise
log = logging.getLogger(__name__)
# Define the command group
repo = command_group(
name="repo",
help="Packaging Repository Related Commands",
description=__doc__,
parent="pkg",
)
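# With ptscripts' nested command groups (parent="pkg"), the commands below are
# invoked as `tools pkg repo <command>` (assuming the repo's usual `tools`
# entry point).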
@repo.command(name="backup-previous-releases")
def backup_previous_releases(ctx: Context):
"""
Backup release bucket.
"""
_rclone(ctx, tools.utils.RELEASE_BUCKET_NAME, tools.utils.BACKUP_BUCKET_NAME)
ctx.info("Done")
@repo.command(name="restore-previous-releases")
def restore_previous_releases(ctx: Context):
"""
Restore release bucket from backup.
"""
_rclone(ctx, tools.utils.BACKUP_BUCKET_NAME, tools.utils.RELEASE_BUCKET_NAME)
github_output = os.environ.get("GITHUB_OUTPUT")
if github_output is not None:
with open(github_output, "a", encoding="utf-8") as wfh:
wfh.write(f"backup-complete=true\n")
ctx.info("Done")
def _rclone(ctx: Context, src: str, dst: str):
rclone = shutil.which("rclone")
if not rclone:
ctx.error("Could not find the rclone binary")
ctx.exit(1)
if TYPE_CHECKING:
assert rclone
env = os.environ.copy()
env["RCLONE_CONFIG_S3_TYPE"] = "s3"
cmdline: list[str] = [
rclone,
"sync",
"--auto-confirm",
"--human-readable",
"--checksum",
"--color=always",
"--metadata",
"--s3-env-auth",
"--s3-location-constraint=us-west-2",
"--s3-provider=AWS",
"--s3-region=us-west-2",
"--stats-file-name-length=0",
"--stats-one-line",
"--stats=5s",
"--transfers=50",
"--fast-list",
"--verbose",
]
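# When syncing *from* the release bucket (i.e. a backup run), store the
# copied objects under the Intelligent-Tiering storage class.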
if src == tools.utils.RELEASE_BUCKET_NAME:
cmdline.append("--s3-storage-class=INTELLIGENT_TIERING")
cmdline.extend([f"s3://{src}", f"s3://{dst}"])
ctx.info(f"Running: {' '.join(cmdline)}")
ret = ctx.run(*cmdline, env=env, check=False)
if ret.returncode:
ctx.error(f"Failed to sync from s3://{src} to s3://{dst}")
ctx.exit(1)
@repo.command(
name="confirm-unreleased",
arguments={
"salt_version": {
"help": "The salt version to check",
},
"repository": {
"help": (
"The full repository name, ie, 'saltstack/salt' on GitHub "
"to run the checks against."
)
},
},
)
def confirm_unreleased(
ctx: Context, salt_version: str, repository: str = "saltstack/salt"
):
"""
Confirm that the passed version is not yet tagged and/or released.
"""
releases = get_salt_releases(ctx, repository)
if Version(salt_version) in releases:
ctx.error(f"There's already a '{salt_version}' tag or github release.")
ctx.exit(1)
ctx.info(f"Could not find a release for Salt Version '{salt_version}'")
ctx.exit(0)
@repo.command(
name="confirm-staged",
arguments={
"salt_version": {
"help": "The salt version to check",
},
"repository": {
"help": (
"The full repository name, ie, 'saltstack/salt' on GitHub "
"to run the checks against."
)
},
},
)
def confirm_staged(ctx: Context, salt_version: str, repository: str = "saltstack/salt"):
"""
Confirm that the passed version has been staged for release.
"""
s3 = boto3.client("s3")
repo_release_files_path = pathlib.Path(
f"release-artifacts/{salt_version}/.release-files.json"
)
repo_release_symlinks_path = pathlib.Path(
f"release-artifacts/{salt_version}/.release-symlinks.json"
)
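# These two marker files are uploaded by the staging publish step
# (_publish_repo in tools/pkg/repo/publish.py); their presence is what
# marks a version as staged.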
for remote_path in (repo_release_files_path, repo_release_symlinks_path):
try:
bucket_name = tools.utils.STAGING_BUCKET_NAME
ctx.info(
f"Checking for the presence of {remote_path} on bucket {bucket_name} ..."
)
s3.head_object(
Bucket=bucket_name,
Key=str(remote_path),
)
except ClientError as exc:
if "Error" not in exc.response:
log.exception(f"Could not get information about {remote_path}: {exc}")
ctx.exit(1)
if exc.response["Error"]["Code"] == "404":
ctx.error(f"Could not find {remote_path} in bucket.")
ctx.exit(1)
if exc.response["Error"]["Code"] == "400":
ctx.error(f"Could get information about {remote_path}: {exc}")
ctx.exit(1)
log.exception(f"Error getting information about {remote_path}: {exc}")
ctx.exit(1)
ctx.info(f"Version {salt_version} has been staged for release")
ctx.exit(0)

tools/pkg/repo/create.py (new file, 1038 lines)

File diff suppressed because it is too large

tools/pkg/repo/publish.py (new file, 653 lines)

@@ -0,0 +1,653 @@
"""
These commands are used to publish the package repository files.
"""
# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated
from __future__ import annotations
import fnmatch
import json
import logging
import os
import pathlib
import re
import sys
import tempfile
import textwrap
from typing import TYPE_CHECKING, Any
import packaging.version
from ptscripts import Context, command_group
import tools.pkg
import tools.utils
from tools.utils import (
Version,
create_full_repo_path,
get_repo_json_file_contents,
get_salt_releases,
parse_versions,
)
try:
import boto3
from botocore.exceptions import ClientError
except ImportError:
print(
"\nPlease run 'python -m pip install -r "
"requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info),
file=sys.stderr,
flush=True,
)
raise
log = logging.getLogger(__name__)
publish = command_group(
name="publish",
help="Packaging Repository Publication Related Commands",
parent=["pkg", "repo"],
)
@publish.command(
arguments={
"repo_path": {
"help": "Local path for the repository that shall be published.",
},
"salt_version": {
"help": "The salt version for which to build the repository",
"required": True,
},
}
)
def nightly(ctx: Context, repo_path: pathlib.Path, salt_version: str = None):
"""
Publish to the nightly bucket.
"""
if TYPE_CHECKING:
assert salt_version is not None
_publish_repo(
ctx, repo_path=repo_path, nightly_build=True, salt_version=salt_version
)
@publish.command(
arguments={
"repo_path": {
"help": "Local path for the repository that shall be published.",
},
"salt_version": {
"help": "The salt version for which to build the repository",
"required": True,
},
}
)
def staging(ctx: Context, repo_path: pathlib.Path, salt_version: str = None):
"""
Publish to the staging bucket.
"""
if TYPE_CHECKING:
assert salt_version is not None
_publish_repo(ctx, repo_path=repo_path, stage=True, salt_version=salt_version)
@publish.command(
arguments={
"salt_version": {
"help": "The salt version to release.",
},
}
)
def release(ctx: Context, salt_version: str):
"""
Publish to the release bucket.
"""
if "rc" in salt_version:
bucket_folder = "salt_rc/salt/py3"
else:
bucket_folder = "salt/py3"
files_to_copy: list[str]
directories_to_delete: list[str] = []
ctx.info("Grabbing remote file listing of files to copy...")
s3 = boto3.client("s3")
repo_release_files_path = pathlib.Path(
f"release-artifacts/{salt_version}/.release-files.json"
)
repo_release_symlinks_path = pathlib.Path(
f"release-artifacts/{salt_version}/.release-symlinks.json"
)
with tempfile.TemporaryDirectory(prefix=f"{salt_version}_release_") as tsd:
local_release_files_path = pathlib.Path(tsd) / repo_release_files_path.name
try:
bucket_name = tools.utils.STAGING_BUCKET_NAME
with local_release_files_path.open("wb") as wfh:
ctx.info(
f"Downloading {repo_release_files_path} from bucket {bucket_name} ..."
)
s3.download_fileobj(
Bucket=bucket_name,
Key=str(repo_release_files_path),
Fileobj=wfh,
)
files_to_copy = json.loads(local_release_files_path.read_text())
except ClientError as exc:
if "Error" not in exc.response:
log.exception(f"Error downloading {repo_release_files_path}: {exc}")
ctx.exit(1)
if exc.response["Error"]["Code"] == "404":
ctx.error(f"Could not find {repo_release_files_path} in bucket.")
ctx.exit(1)
if exc.response["Error"]["Code"] == "400":
ctx.error(
f"Could not download {repo_release_files_path} from bucket: {exc}"
)
ctx.exit(1)
log.exception(f"Error downloading {repo_release_files_path}: {exc}")
ctx.exit(1)
local_release_symlinks_path = (
pathlib.Path(tsd) / repo_release_symlinks_path.name
)
try:
with local_release_symlinks_path.open("wb") as wfh:
ctx.info(
f"Downloading {repo_release_symlinks_path} from bucket {bucket_name} ..."
)
s3.download_fileobj(
Bucket=bucket_name,
Key=str(repo_release_symlinks_path),
Fileobj=wfh,
)
directories_to_delete = json.loads(local_release_symlinks_path.read_text())
except ClientError as exc:
if "Error" not in exc.response:
log.exception(f"Error downloading {repo_release_symlinks_path}: {exc}")
ctx.exit(1)
if exc.response["Error"]["Code"] == "404":
ctx.error(f"Could not find {repo_release_symlinks_path} in bucket.")
ctx.exit(1)
if exc.response["Error"]["Code"] == "400":
ctx.error(
f"Could not download {repo_release_symlinks_path} from bucket: {exc}"
)
ctx.exit(1)
log.exception(f"Error downloading {repo_release_symlinks_path}: {exc}")
ctx.exit(1)
if directories_to_delete:
with tools.utils.create_progress_bar() as progress:
task = progress.add_task(
"Deleting directories to override.",
total=len(directories_to_delete),
)
for directory in directories_to_delete:
try:
objects_to_delete: list[dict[str, str]] = []
for path in _get_repo_file_list(
bucket_name=tools.utils.RELEASE_BUCKET_NAME,
bucket_folder=bucket_folder,
glob_match=f"{directory}/**",
):
objects_to_delete.append({"Key": path})
if objects_to_delete:
s3.delete_objects(
Bucket=tools.utils.RELEASE_BUCKET_NAME,
Delete={"Objects": objects_to_delete},
)
except ClientError:
log.exception("Failed to delete remote files")
finally:
progress.update(task, advance=1)
already_copied_files: list[str] = []
s3 = boto3.client("s3")
dot_repo_files = []
with tools.utils.create_progress_bar() as progress:
task = progress.add_task(
"Copying files between buckets", total=len(files_to_copy)
)
for fpath in files_to_copy:
if fpath in already_copied_files:
continue
if fpath.endswith(".repo"):
dot_repo_files.append(fpath)
ctx.info(f" * Copying {fpath}")
try:
s3.copy_object(
Bucket=tools.utils.RELEASE_BUCKET_NAME,
Key=fpath,
CopySource={
"Bucket": tools.utils.STAGING_BUCKET_NAME,
"Key": fpath,
},
MetadataDirective="COPY",
TaggingDirective="COPY",
ServerSideEncryption="AES256",
)
already_copied_files.append(fpath)
except ClientError:
log.exception(f"Failed to copy {fpath}")
finally:
progress.update(task, advance=1)
# Now let's handle the onedir-based repositories, where several repo.json files need updating
major_version = packaging.version.parse(salt_version).major
with tempfile.TemporaryDirectory(prefix=f"{salt_version}_release_") as tsd:
repo_path = pathlib.Path(tsd)
for distro in ("windows", "macos", "onedir"):
create_repo_path = create_full_repo_path(
ctx,
repo_path,
salt_version,
distro=distro,
)
repo_json_path = create_repo_path.parent.parent / "repo.json"
release_repo_json = get_repo_json_file_contents(
ctx,
bucket_name=tools.utils.RELEASE_BUCKET_NAME,
repo_path=repo_path,
repo_json_path=repo_json_path,
)
minor_repo_json_path = create_repo_path.parent / "repo.json"
staging_minor_repo_json = get_repo_json_file_contents(
ctx,
bucket_name=tools.utils.STAGING_BUCKET_NAME,
repo_path=repo_path,
repo_json_path=minor_repo_json_path,
)
release_minor_repo_json = get_repo_json_file_contents(
ctx,
bucket_name=tools.utils.RELEASE_BUCKET_NAME,
repo_path=repo_path,
repo_json_path=minor_repo_json_path,
)
release_json = staging_minor_repo_json[salt_version]
major_version = Version(salt_version).major
versions = parse_versions(*list(release_minor_repo_json))
ctx.info(
f"Collected versions from {minor_repo_json_path.relative_to(repo_path)}: "
f"{', '.join(str(vs) for vs in versions)}"
)
minor_versions = [v for v in versions if v.major == major_version]
ctx.info(
f"Collected versions(Matching major: {major_version}) from "
f"{minor_repo_json_path.relative_to(repo_path)}: "
f"{', '.join(str(vs) for vs in minor_versions)}"
)
if not versions:
latest_version = Version(salt_version)
else:
latest_version = versions[0]
if not minor_versions:
latest_minor_version = Version(salt_version)
else:
latest_minor_version = minor_versions[0]
ctx.info(f"Release Version: {salt_version}")
ctx.info(f"Latest Repo Version: {latest_version}")
ctx.info(f"Latest Release Minor Version: {latest_minor_version}")
# Add the minor version
release_minor_repo_json[salt_version] = release_json
if latest_version <= salt_version:
release_repo_json["latest"] = release_json
if latest_minor_version <= salt_version:
release_minor_repo_json["latest"] = release_json
ctx.info(f"Writing {minor_repo_json_path} ...")
minor_repo_json_path.write_text(
json.dumps(release_minor_repo_json, sort_keys=True)
)
ctx.info(f"Writing {repo_json_path} ...")
repo_json_path.write_text(json.dumps(release_repo_json, sort_keys=True))
# And now, let's update the several rpm "*.repo" files to point their base
# domain from staging to release
release_domain = os.environ.get(
"SALT_REPO_DOMAIN_RELEASE", "repo.saltproject.io"
)
for path in dot_repo_files:
repo_file_path = repo_path.joinpath(path)
repo_file_path.parent.mkdir(exist_ok=True, parents=True)
bucket_name = tools.utils.STAGING_BUCKET_NAME
try:
ret = s3.head_object(Bucket=bucket_name, Key=path)
ctx.info(
f"Downloading existing '{repo_file_path.relative_to(repo_path)}' "
f"file from bucket {bucket_name}"
)
size = ret["ContentLength"]
with repo_file_path.open("wb") as wfh:
with tools.utils.create_progress_bar(
file_progress=True
) as progress:
task = progress.add_task(
description="Downloading...", total=size
)
s3.download_fileobj(
Bucket=bucket_name,
Key=path,
Fileobj=wfh,
Callback=tools.utils.UpdateProgress(progress, task),
)
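# Rewrite the host in `baseurl=`/`gpgkey=` lines from the staging domain
# to the release domain; the path portion (\3) is kept as-is.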
updated_contents = re.sub(
r"^(baseurl|gpgkey)=https://([^/]+)/(.*)$",
rf"\1=https://{release_domain}/\3",
repo_file_path.read_text(),
flags=re.MULTILINE,
)
ctx.info(f"Updated '{repo_file_path.relative_to(repo_path)}:")
ctx.print(updated_contents)
repo_file_path.write_text(updated_contents)
except ClientError as exc:
if "Error" not in exc.response:
raise
if exc.response["Error"]["Code"] != "404":
raise
ctx.info(f"Could not find {repo_file_path} in bucket {bucket_name}")
for dirpath, dirnames, filenames in os.walk(repo_path, followlinks=True):
for path in filenames:
upload_path = pathlib.Path(dirpath, path)
relpath = upload_path.relative_to(repo_path)
size = upload_path.stat().st_size
ctx.info(f" {relpath}")
with tools.utils.create_progress_bar(file_progress=True) as progress:
task = progress.add_task(description="Uploading...", total=size)
s3.upload_file(
str(upload_path),
tools.utils.RELEASE_BUCKET_NAME,
str(relpath),
Callback=tools.utils.UpdateProgress(progress, task),
)
@publish.command(
arguments={
"salt_version": {
"help": "The salt version to release.",
},
"key_id": {
"help": "The GnuPG key ID used to sign.",
"required": True,
},
"repository": {
"help": (
"The full repository name, ie, 'saltstack/salt' on GitHub "
"to run the checks against."
)
},
}
)
def github(
ctx: Context,
salt_version: str,
key_id: str = None,
repository: str = "saltstack/salt",
):
"""
Publish the release on GitHub releases.
"""
if TYPE_CHECKING:
assert key_id is not None
s3 = boto3.client("s3")
# Let's download the release artifacts stored in staging
artifacts_path = pathlib.Path.cwd() / "release-artifacts"
artifacts_path.mkdir(exist_ok=True)
release_artifacts_listing: dict[pathlib.Path, int] = {}
continuation_token = None
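# list_objects_v2 returns at most 1000 keys per call; keep looping with the
# continuation token until the listing is no longer truncated.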
while True:
kwargs: dict[str, str] = {}
if continuation_token:
kwargs["ContinuationToken"] = continuation_token
ret = s3.list_objects_v2(
Bucket=tools.utils.STAGING_BUCKET_NAME,
Prefix=f"release-artifacts/{salt_version}",
FetchOwner=False,
**kwargs,
)
contents = ret.pop("Contents", None)
if contents is None:
break
for entry in contents:
entry_path = pathlib.Path(entry["Key"])
if entry_path.name.startswith("."):
continue
release_artifacts_listing[entry_path] = entry["Size"]
if not ret["IsTruncated"]:
break
continuation_token = ret["NextContinuationToken"]
for entry_path, size in release_artifacts_listing.items():
ctx.info(f" * {entry_path.name}")
local_path = artifacts_path / entry_path.name
with local_path.open("wb") as wfh:
with tools.utils.create_progress_bar(file_progress=True) as progress:
task = progress.add_task(description="Downloading...", total=size)
s3.download_fileobj(
Bucket=tools.utils.STAGING_BUCKET_NAME,
Key=str(entry_path),
Fileobj=wfh,
Callback=tools.utils.UpdateProgress(progress, task),
)
for artifact in artifacts_path.iterdir():
if artifact.suffix in (".patch", ".asc", ".gpg", ".pub"):
continue
tools.utils.gpg_sign(ctx, key_id, artifact)
# Export the GPG key in use
tools.utils.export_gpg_key(ctx, key_id, artifacts_path)
release_message = f"""\
# Welcome to Salt v{salt_version}
| :exclamation: ATTENTION |
|:-------------------------------------------------------------------------------------------------------------------------|
| The archives generated by GitHub (`Source code (zip)`, `Source code (tar.gz)`) will not report Salt's version properly. |
| Please use the tarball generated by The Salt Project Team (`salt-{salt_version}.tar.gz`). |
"""
release_message_path = artifacts_path / "gh-release-body.md"
release_message_path.write_text(textwrap.dedent(release_message).strip())
github_output = os.environ.get("GITHUB_OUTPUT")
if github_output is None:
ctx.warn("The 'GITHUB_OUTPUT' variable is not set. Stop processing.")
ctx.exit(0)
if TYPE_CHECKING:
assert github_output is not None
with open(github_output, "a", encoding="utf-8") as wfh:
wfh.write(f"release-messsage-file={release_message_path.resolve()}\n")
releases = get_salt_releases(ctx, repository)
if Version(salt_version) >= releases[-1]:
make_latest = True
else:
make_latest = False
with open(github_output, "a", encoding="utf-8") as wfh:
wfh.write(f"make-latest={json.dumps(make_latest)}\n")
artifacts_to_upload = []
for artifact in artifacts_path.iterdir():
if artifact.suffix == ".patch":
continue
if artifact.name == release_message_path.name:
continue
artifacts_to_upload.append(str(artifact.resolve()))
with open(github_output, "a", encoding="utf-8") as wfh:
wfh.write(f"release-artifacts={','.join(artifacts_to_upload)}\n")
ctx.exit(0)
def _get_repo_detailed_file_list(
bucket_name: str,
bucket_folder: str = "",
glob_match: str = "**",
) -> list[dict[str, Any]]:
s3 = boto3.client("s3")
listing: list[dict[str, Any]] = []
continuation_token = None
while True:
kwargs: dict[str, str] = {}
if continuation_token:
kwargs["ContinuationToken"] = continuation_token
ret = s3.list_objects_v2(
Bucket=bucket_name,
Prefix=bucket_folder,
FetchOwner=False,
**kwargs,
)
contents = ret.pop("Contents", None)
if contents is None:
break
for entry in contents:
if fnmatch.fnmatch(entry["Key"], glob_match):
listing.append(entry)
if not ret["IsTruncated"]:
break
continuation_token = ret["NextContinuationToken"]
return listing
def _get_repo_file_list(
bucket_name: str, bucket_folder: str, glob_match: str
) -> list[str]:
return [
entry["Key"]
for entry in _get_repo_detailed_file_list(
bucket_name, bucket_folder, glob_match=glob_match
)
]
def _publish_repo(
ctx: Context,
repo_path: pathlib.Path,
salt_version: str,
nightly_build: bool = False,
stage: bool = False,
):
"""
Publish packaging repositories.
"""
if nightly_build:
bucket_name = tools.utils.RELEASE_BUCKET_NAME
elif stage:
bucket_name = tools.utils.STAGING_BUCKET_NAME
else:
bucket_name = tools.utils.RELEASE_BUCKET_NAME
ctx.info("Preparing upload ...")
s3 = boto3.client("s3")
to_delete_paths: dict[pathlib.Path, list[dict[str, str]]] = {}
to_upload_paths: list[pathlib.Path] = []
symlink_paths: list[str] = []
uploaded_files: list[str] = []
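# uploaded_files and symlink_paths are collected so that, when staging, they
# can be written out below as the .release-files.json and
# .release-symlinks.json markers consumed later by the release step.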
for dirpath, dirnames, filenames in os.walk(repo_path, followlinks=True):
for dirname in dirnames:
path = pathlib.Path(dirpath, dirname)
if not path.is_symlink():
continue
# Since this is a symlink, we need to delete all files under that
# directory in S3, because S3 does not understand symlinks and we
# would otherwise end up adding files to that folder instead of
# replacing it.
try:
relpath = path.relative_to(repo_path)
ret = s3.list_objects(
Bucket=bucket_name,
Prefix=str(relpath),
)
if "Contents" not in ret:
continue
objects = []
for entry in ret["Contents"]:
objects.append({"Key": entry["Key"]})
to_delete_paths[path] = objects
symlink_paths.append(str(relpath))
except ClientError as exc:
if "Error" not in exc.response:
raise
if exc.response["Error"]["Code"] != "404":
raise
for fpath in filenames:
path = pathlib.Path(dirpath, fpath)
to_upload_paths.append(path)
with tools.utils.create_progress_bar() as progress:
task = progress.add_task(
"Deleting directories to override.", total=len(to_delete_paths)
)
for base, objects in to_delete_paths.items():
relpath = base.relative_to(repo_path)
bucket_uri = f"s3://{bucket_name}/{relpath}"
progress.update(task, description=f"Deleting {bucket_uri}")
try:
ret = s3.delete_objects(
Bucket=bucket_name,
Delete={"Objects": objects},
)
except ClientError:
log.exception(f"Failed to delete {bucket_uri}")
finally:
progress.update(task, advance=1)
try:
ctx.info("Uploading repository ...")
for upload_path in to_upload_paths:
relpath = upload_path.relative_to(repo_path)
size = upload_path.stat().st_size
ctx.info(f" {relpath}")
with tools.utils.create_progress_bar(file_progress=True) as progress:
task = progress.add_task(description="Uploading...", total=size)
s3.upload_file(
str(upload_path),
bucket_name,
str(relpath),
Callback=tools.utils.UpdateProgress(progress, task),
ExtraArgs={
"Metadata": {
"x-amz-meta-salt-release-version": salt_version,
}
},
)
uploaded_files.append(str(relpath))
if stage is True:
repo_files_path = f"release-artifacts/{salt_version}/.release-files.json"
ctx.info(f"Uploading {repo_files_path} ...")
s3.put_object(
Key=repo_files_path,
Bucket=bucket_name,
Body=json.dumps(uploaded_files).encode(),
Metadata={
"x-amz-meta-salt-release-version": salt_version,
},
)
repo_symlinks_path = (
f"release-artifacts/{salt_version}/.release-symlinks.json"
)
ctx.info(f"Uploading {repo_symlinks_path} ...")
s3.put_object(
Key=repo_symlinks_path,
Bucket=bucket_name,
Body=json.dumps(symlink_paths).encode(),
Metadata={
"x-amz-meta-salt-release-version": salt_version,
},
)
except KeyboardInterrupt:
pass


@@ -1,8 +1,12 @@
# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated
from __future__ import annotations
import json
import os
import pathlib
import sys
from datetime import datetime
from typing import Any
import packaging.version
from ptscripts import Context
@@ -16,6 +20,18 @@ from rich.progress import (
TransferSpeedColumn,
)
try:
import boto3
from botocore.exceptions import ClientError
except ImportError:
print(
"\nPlease run 'python -m pip install -r "
"requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info),
file=sys.stderr,
flush=True,
)
raise
REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
GPG_KEY_FILENAME = "SALT-PROJECT-GPG-PUBKEY-2023"
SPB_ENVIRONMENT = os.environ.get("SPB_ENVIRONMENT") or "prod"
@@ -169,3 +185,114 @@ def get_salt_releases(ctx: Context, repository: str) -> list[Version]:
# We're not going to parse dash or docs releases
versions.add(Version(name))
return sorted(versions)
def parse_versions(*versions: str) -> list[Version]:
_versions = []
for version in set(versions):
if version == "latest":
continue
_versions.append(Version(version))
if _versions:
_versions.sort(reverse=True)
return _versions
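# e.g. parse_versions("3006.0", "latest", "3005.1") -> [Version('3006.0'), Version('3005.1')]
# (newest first; the "latest" alias is a pointer, not a real version, so it is skipped)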
def get_repo_json_file_contents(
ctx: Context,
bucket_name: str,
repo_path: pathlib.Path,
repo_json_path: pathlib.Path,
) -> dict[str, Any]:
s3 = boto3.client("s3")
repo_json: dict[str, Any] = {}
try:
ret = s3.head_object(
Bucket=bucket_name, Key=str(repo_json_path.relative_to(repo_path))
)
ctx.info(
f"Downloading existing '{repo_json_path.relative_to(repo_path)}' file "
f"from bucket {bucket_name}"
)
size = ret["ContentLength"]
with repo_json_path.open("wb") as wfh:
with create_progress_bar(file_progress=True) as progress:
task = progress.add_task(description="Downloading...", total=size)
s3.download_fileobj(
Bucket=bucket_name,
Key=str(repo_json_path.relative_to(repo_path)),
Fileobj=wfh,
Callback=UpdateProgress(progress, task),
)
with repo_json_path.open() as rfh:
repo_json = json.load(rfh)
except ClientError as exc:
if "Error" not in exc.response:
raise
if exc.response["Error"]["Code"] != "404":
raise
ctx.info(f"Could not find {repo_json_path} in bucket {bucket_name}")
if repo_json:
ctx.print(repo_json, soft_wrap=True)
return repo_json
def create_top_level_repo_path(
ctx: Context,
repo_path: pathlib.Path,
salt_version: str,
distro: str,
distro_version: str | None = None, # pylint: disable=bad-whitespace
distro_arch: str | None = None, # pylint: disable=bad-whitespace
nightly_build_from: str | None = None, # pylint: disable=bad-whitespace
):
create_repo_path = repo_path
if nightly_build_from:
create_repo_path = (
create_repo_path
/ "salt-dev"
/ nightly_build_from
/ datetime.utcnow().strftime("%Y-%m-%d")
)
create_repo_path.mkdir(exist_ok=True, parents=True)
with ctx.chdir(create_repo_path.parent):
latest_nightly_symlink = pathlib.Path("latest")
if not latest_nightly_symlink.exists():
ctx.info(
f"Creating 'latest' symlink to '{create_repo_path.relative_to(repo_path)}' ..."
)
latest_nightly_symlink.symlink_to(
create_repo_path.name, target_is_directory=True
)
elif "rc" in salt_version:
create_repo_path = create_repo_path / "salt_rc"
create_repo_path = create_repo_path / "salt" / "py3" / distro
if distro_version:
create_repo_path = create_repo_path / distro_version
if distro_arch:
create_repo_path = create_repo_path / distro_arch
create_repo_path.mkdir(exist_ok=True, parents=True)
return create_repo_path
def create_full_repo_path(
ctx: Context,
repo_path: pathlib.Path,
salt_version: str,
distro: str,
distro_version: str | None = None, # pylint: disable=bad-whitespace
distro_arch: str | None = None, # pylint: disable=bad-whitespace
nightly_build_from: str | None = None, # pylint: disable=bad-whitespace
):
create_repo_path = create_top_level_repo_path(
ctx,
repo_path,
salt_version,
distro,
distro_version,
distro_arch,
nightly_build_from=nightly_build_from,
)
create_repo_path = create_repo_path / "minor" / salt_version
create_repo_path.mkdir(exist_ok=True, parents=True)
return create_repo_path
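# For example (hypothetical values), create_full_repo_path(ctx, repo_path,
# "3006.0", distro="ubuntu", distro_version="22.04", distro_arch="amd64")
# creates and returns <repo_path>/salt/py3/ubuntu/22.04/amd64/minor/3006.0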