Complete the release process

Signed-off-by: Pedro Algarvio <palgarvio@vmware.com>
Pedro Algarvio authored 2023-02-11 07:05:43 +00:00, committed by Pedro Algarvio
parent 2a5f76f38f
commit df1720d341
3 changed files with 269 additions and 10 deletions


@@ -51,19 +51,79 @@ jobs:
ssh-key: ${{ secrets.GHA_SSH_KEY }}
fetch-depth: 0 # Full clone to also get the tags
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Setup GnuPG
run: |
sudo install -d -m 0700 -o "$(id -u)" -g "$(id -g)" /run/gpg
GNUPGHOME="$(mktemp -d -p /run/gpg)"
echo "GNUPGHOME=${GNUPGHOME}" >> "$GITHUB_ENV"
cat <<EOF > "${GNUPGHOME}/gpg.conf"
batch
no-tty
pinentry-mode loopback
EOF
- name: Get Secrets
env:
SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
run: |
SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX)
echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE"
aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
--query SecretString --output text | jq .default_key -r | base64 -d \
| gpg --passphrase-file "${SECRETS_KEY_FILE}" -d - \
| gpg --import -
aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
--query SecretString --output text | jq .default_passphrase -r | base64 -d \
| gpg --passphrase-file "${SECRETS_KEY_FILE}" -o "${GNUPGHOME}/passphrase" -d -
rm "$SECRETS_KEY_FILE"
echo "passphrase-file ${GNUPGHOME}/passphrase" >> "${GNUPGHOME}/gpg.conf"
- name: Configure Git
shell: bash
run: |
git config --global user.name "Salt Project Packaging"
git config --global user.email saltproject-packaging@vmware.com
git config --global user.signingkey 64CBBC8173D76B3F
git config --global commit.gpgsign true
- name: Download The Release Artifacts
- name: Release Salt ${{ inputs.salt-version }}
id: release
run: |
aws --region us-west-2 s3 cp --recursive s3://salt-project-prod-salt-artifacts-staging/release-artifacts/${{ inputs.salt-version }}/ release-artifacts
- name: Apply Release Patch
shell: bash
tools pkg repo publish release ${{ inputs.salt-version }}
- name: Apply The Release Patch
run: |
git am --committer-date-is-author-date release-artifacts/salt-${{ inputs.salt-version }}.patch
rm release-artifacts/salt-${{ inputs.salt-version }}.patch
- name: Tag The v${{ inputs.salt-version }} Release
run: |
git tag -m "Release v${{ inputs.salt-version }}" -as v${{ inputs.salt-version }}
- name: Push Changes
uses: ad-m/github-push-action@v0.6.0
with:
ssh: true
tags: true
atomic: true
- name: Create Github Release
uses: ncipollo/release-action@v1.12.0
with:
artifactErrorsFailBuild: true
artifacts: ${{ steps.release.outputs.release-artifacts }}
bodyFile: ${{ steps.release.outputs.release-messsage-file }}
draft: false
generateReleaseNotes: false
makeLatest: ${{ steps.release.outputs.make-latest }}
name: v${{ inputs.salt-version }}
prerelease: ${{ contains(inputs.salt-version, 'rc') }}
removeArtifacts: true
replacesArtifacts: true
tag: v${{ inputs.salt-version }}
set-pipeline-exit-status:
# This step is just so we can make github require this step, to pass checks


@@ -64,19 +64,80 @@ permissions:
ssh-key: ${{ secrets.GHA_SSH_KEY }}
fetch-depth: 0 # Full clone to also get the tags
- name: Setup Python Tools Scripts
uses: ./.github/actions/setup-python-tools-scripts
- name: Setup GnuPG
run: |
sudo install -d -m 0700 -o "$(id -u)" -g "$(id -g)" /run/gpg
GNUPGHOME="$(mktemp -d -p /run/gpg)"
echo "GNUPGHOME=${GNUPGHOME}" >> "$GITHUB_ENV"
cat <<EOF > "${GNUPGHOME}/gpg.conf"
batch
no-tty
pinentry-mode loopback
EOF
- name: Get Secrets
env:
SECRETS_KEY: ${{ secrets.SECRETS_KEY }}
run: |
SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX)
echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE"
aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
--query SecretString --output text | jq .default_key -r | base64 -d \
| gpg --passphrase-file "${SECRETS_KEY_FILE}" -d - \
| gpg --import -
aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/signing/repo-signing-keys-sha256-2023 \
--query SecretString --output text | jq .default_passphrase -r | base64 -d \
| gpg --passphrase-file "${SECRETS_KEY_FILE}" -o "${GNUPGHOME}/passphrase" -d -
rm "$SECRETS_KEY_FILE"
echo "passphrase-file ${GNUPGHOME}/passphrase" >> "${GNUPGHOME}/gpg.conf"
- name: Configure Git
shell: bash
run: |
git config --global user.name "Salt Project Packaging"
git config --global user.email saltproject-packaging@vmware.com
git config --global user.signingkey 64CBBC8173D76B3F
git config --global commit.gpgsign true
- name: Download The Release Artifacts
- name: Release Salt ${{ inputs.salt-version }}
id: release
run: |
aws --region us-west-2 s3 cp --recursive s3://salt-project-prod-salt-artifacts-staging/release-artifacts/${{ inputs.salt-version }}/ release-artifacts
- name: Apply Release Patch
shell: bash
tools pkg repo publish release ${{ inputs.salt-version }}
- name: Apply The Release Patch
run: |
git am --committer-date-is-author-date release-artifacts/salt-${{ inputs.salt-version }}.patch
rm release-artifacts/salt-${{ inputs.salt-version }}.patch
- name: Tag The v${{ inputs.salt-version }} Release
run: |
git tag -m "Release v${{ inputs.salt-version }}" -as v${{ inputs.salt-version }}
- name: Push Changes
uses: ad-m/github-push-action@v0.6.0
with:
ssh: true
tags: true
atomic: true
- name: Create Github Release
uses: ncipollo/release-action@v1.12.0
with:
artifactErrorsFailBuild: true
artifacts: ${{ steps.release.outputs.release-artifacts }}
bodyFile: ${{ steps.release.outputs.release-messsage-file }}
draft: false
generateReleaseNotes: false
makeLatest: ${{ steps.release.outputs.make-latest }}
name: v${{ inputs.salt-version }}
prerelease: ${{ contains(inputs.salt-version, 'rc') }}
removeArtifacts: true
replacesArtifacts: true
tag: v${{ inputs.salt-version }}
<%- endblock pre_jobs %>


@@ -835,12 +835,21 @@ def staging(ctx: Context, repo_path: pathlib.Path, rc_build: bool = False):
"rc_build": {
"help": "Release Candidate repository target",
},
"key_id": {
"help": "The GnuPG key ID used to sign.",
"required": True,
},
}
)
def release(ctx: Context, salt_version: str, rc_build: bool = False):
def release(
ctx: Context, salt_version: str, key_id: str = None, rc_build: bool = False
):
"""
Publish to the release bucket.
"""
if TYPE_CHECKING:
assert key_id is not None
if rc_build:
bucket_folder = "salt_rc/py3"
else:
@@ -850,7 +859,7 @@ def release(ctx: Context, salt_version: str, rc_build: bool = False):
files_to_delete: list[str] = []
files_to_duplicate: list[tuple[str, str]] = []
ctx.info("Grabing remote file listing of files to copy...")
ctx.info("Grabbing remote file listing of files to copy...")
glob_match = f"{bucket_folder}/**/minor/{salt_version}/**"
files_to_copy = _get_repo_file_list(
@@ -890,6 +899,10 @@ def release(ctx: Context, salt_version: str, rc_build: bool = False):
if "onedir" not in onedir_listing:
onedir_listing["onedir"] = []
onedir_listing["onedir"].append(fpath)
else:
if "package" not in onedir_listing:
onedir_listing["package"] = []
onedir_listing["package"].append(fpath)
ctx.info(f" * Copying {fpath}")
try:
s3.copy_object(
@@ -909,6 +922,8 @@ def release(ctx: Context, salt_version: str, rc_build: bool = False):
progress.update(task, advance=1)
# Now let's get the onedir based repositories where we need to update several repo.json
update_latest = False
update_minor = False
major_version = packaging.version.parse(salt_version).major
with tempfile.TemporaryDirectory(prefix=f"{salt_version}_release_") as tsd:
repo_path = pathlib.Path(tsd)
@@ -973,6 +988,7 @@ def release(ctx: Context, salt_version: str, rc_build: bool = False):
release_minor_repo_json[salt_version] = release_json
if latest_version <= salt_version:
update_latest = True
release_repo_json["latest"] = release_json
glob_match = f"{bucket_folder}/{distro}/**/latest/**"
files_to_delete.extend(
@@ -988,6 +1004,7 @@ def release(ctx: Context, salt_version: str, rc_build: bool = False):
)
if latest_minor_version <= salt_version:
update_minor = True
release_minor_repo_json["latest"] = release_json
glob_match = f"{bucket_folder}/{distro}/**/{major_version}/**"
files_to_delete.extend(
@@ -1012,6 +1029,44 @@ def release(ctx: Context, salt_version: str, rc_build: bool = False):
ctx.info(f"Writing {repo_json_path} ...")
repo_json_path.write_text(json.dumps(release_repo_json, sort_keys=True))
# Now let's handle latest and minor updates for non-onedir based repositories
onedir_based_paths = (
f"{bucket_folder}/windows/",
f"{bucket_folder}/macos/",
f"{bucket_folder}/onedir/",
)
if update_latest:
glob_match = f"{bucket_folder}/**/latest/**"
for fpath in _get_repo_file_list(
bucket_name=tools.utils.RELEASE_BUCKET_NAME,
bucket_folder=bucket_folder,
glob_match=glob_match,
):
if fpath.startswith(onedir_based_paths):
continue
files_to_delete.append(fpath)
for fpath in onedir_listing["package"]:
files_to_duplicate.append(
(fpath, fpath.replace(f"minor/{salt_version}", "latest"))
)
if update_minor:
glob_match = f"{bucket_folder}/**/{major_version}/**"
for fpath in _get_repo_file_list(
bucket_name=tools.utils.RELEASE_BUCKET_NAME,
bucket_folder=bucket_folder,
glob_match=glob_match,
):
if fpath.startswith(onedir_based_paths):
continue
files_to_delete.append(fpath)
for fpath in onedir_listing["package"]:
files_to_duplicate.append(
(fpath, fpath.replace(f"minor/{salt_version}", str(major_version)))
)
if files_to_delete:
with tools.utils.create_progress_bar() as progress:
task = progress.add_task(
@@ -1069,6 +1124,89 @@ def release(ctx: Context, salt_version: str, rc_build: bool = False):
Callback=tools.utils.UpdateProgress(progress, task),
)
# Let's now download the release artifacts stored in staging
artifacts_path = pathlib.Path.cwd() / "release-artifacts"
artifacts_path.mkdir(exist_ok=True)
release_artifacts_listing: dict[pathlib.Path, int] = {}
continuation_token = None
while True:
kwargs: dict[str, str] = {}
if continuation_token:
kwargs["ContinuationToken"] = continuation_token
ret = s3.list_objects_v2(
Bucket=tools.utils.STAGING_BUCKET_NAME,
Prefix=f"release-artifacts/{salt_version}",
FetchOwner=False,
**kwargs,
)
contents = ret.pop("Contents", None)
if contents is None:
break
for entry in contents:
entry_path = pathlib.Path(entry["Key"])
release_artifacts_listing[entry_path] = entry["Size"]
if not ret["IsTruncated"]:
break
continuation_token = ret["NextContinuationToken"]
for entry_path, size in release_artifacts_listing.items():
ctx.info(f" * {entry_path.name}")
local_path = artifacts_path / entry_path.name
with local_path.open("wb") as wfh:
with tools.utils.create_progress_bar(file_progress=True) as progress:
task = progress.add_task(description="Downloading...", total=size)
s3.download_fileobj(
Bucket=tools.utils.STAGING_BUCKET_NAME,
Key=str(entry_path),
Fileobj=wfh,
Callback=tools.utils.UpdateProgress(progress, task),
)
for artifact in artifacts_path.iterdir():
if artifact.suffix == ".patch":
continue
tools.utils.gpg_sign(ctx, key_id, artifact)
# Export the GPG key in use
tools.utils.export_gpg_key(ctx, key_id, artifacts_path)
release_message = f"""\
# Welcome to Salt v{salt_version}
* For the latest release notes, see: [Release notes](https://docs.saltproject.io/en/latest/topics/releases/{salt_version}.html)
* For installation instructions, go to the [Salt install guide](https://docs.saltproject.io/salt/install-guide/en/latest/index.html)
* To access packages for the latest releases, go to the [Salt repository](https://repo.saltproject.io/)
The Salt Project Team.
"""
release_message_path = artifacts_path / "gh-release-body.md"
release_message_path.write_text(textwrap.dedent(release_message).strip())
github_output = os.environ.get("GITHUB_OUTPUT")
if github_output is None:
ctx.warn("The 'GITHUB_OUTPUT' variable is not set. Stop processing.")
ctx.exit(0)
if TYPE_CHECKING:
assert github_output is not None
with open(github_output, "a", encoding="utf-8") as wfh:
wfh.write(f"release-messsage-file={release_message_path.resolve()}\n")
with open(github_output, "a", encoding="utf-8") as wfh:
wfh.write(f"make-latest={json.dumps(update_latest)}\n")
artifacts_to_upload = []
for artifact in artifacts_path.iterdir():
if artifact.suffix == ".patch":
continue
if artifact.name == release_message_path.name:
continue
artifacts_to_upload.append(str(artifact.resolve()))
with open(github_output, "a", encoding="utf-8") as wfh:
wfh.write(f"release-artifacts={','.join(artifacts_to_upload)}\n")
ctx.exit(0)
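
The signing added above relies on tools.utils.gpg_sign and tools.utils.export_gpg_key, neither of which is part of this diff. Below is a minimal sketch of what such helpers might look like, assuming they simply shell out to the gpg CLI that the Setup GnuPG step configures; the flags, output file names, and error handling are assumptions, not the project's actual implementation.

```python
# Sketch only: the real tools.utils.gpg_sign / tools.utils.export_gpg_key are
# not shown in this diff; flags and file names below are assumptions.
import pathlib
import subprocess


def gpg_sign(ctx, key_id: str, path: pathlib.Path) -> None:
    """Write a detached, ASCII-armored signature next to ``path``."""
    sig_path = path.with_name(f"{path.name}.asc")
    ctx.info(f"Signing {path.name} with key {key_id} ...")
    subprocess.run(
        [
            "gpg", "--local-user", key_id,
            "--output", str(sig_path),
            "--armor", "--detach-sign", str(path),
        ],
        check=True,
    )


def export_gpg_key(ctx, key_id: str, dest_dir: pathlib.Path) -> None:
    """Export the public key so the signatures can be verified."""
    key_path = dest_dir / "SALT-PROJECT-GPG-PUBKEY.gpg"  # hypothetical file name
    ctx.info(f"Exporting public key {key_id} to {key_path} ...")
    subprocess.run(
        ["gpg", "--output", str(key_path), "--export", key_id],
        check=True,
    )
```

Because the Setup GnuPG step writes batch, no-tty, pinentry-mode loopback, and a passphrase-file entry into gpg.conf, invocations like these can run unattended on the CI runner.
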
def _get_repo_file_list(
bucket_name: str, bucket_folder: str, glob_match: str
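
_get_repo_file_list() is called throughout release(), but its body falls outside this excerpt. The sketch below shows one way such a helper could paginate the bucket listing, assuming it follows the same list_objects_v2 pattern used for the release artifacts above and filters keys with a glob match; the paginator and fnmatch usage are assumptions, not the actual implementation.

```python
# Sketch only: assumes the helper paginates list_objects_v2 and glob-filters keys.
import fnmatch

import boto3


def _get_repo_file_list(
    bucket_name: str, bucket_folder: str, glob_match: str
) -> list[str]:
    s3 = boto3.client("s3")
    paginator = s3.get_paginator("list_objects_v2")
    matches: list[str] = []
    for page in paginator.paginate(Bucket=bucket_name, Prefix=bucket_folder):
        for entry in page.get("Contents", []):
            if fnmatch.fnmatch(entry["Key"], glob_match):
                matches.append(entry["Key"])
    return matches
```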