From 82ba2a5da83206710b0f3de1dee771085f34a075 Mon Sep 17 00:00:00 2001 From: Tanmoy037 Date: Sun, 21 May 2023 11:45:59 +0530 Subject: [PATCH 01/25] remove the header comment about masterless --- doc/ref/configuration/minion.rst | 1 - 1 file changed, 1 deletion(-) diff --git a/doc/ref/configuration/minion.rst b/doc/ref/configuration/minion.rst index 69c7afbde84..256951d2849 100644 --- a/doc/ref/configuration/minion.rst +++ b/doc/ref/configuration/minion.rst @@ -2035,7 +2035,6 @@ Valid options: Top File Settings ================= -These parameters only have an effect if running a masterless minion. .. conf_minion:: state_top From b713c3441b45914a7ca8eefe64df5074eddf67c9 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Fri, 5 May 2023 22:08:19 +0100 Subject: [PATCH 02/25] Pass the `LATEST_SALT_RELEASE` environment variables through to the VM Signed-off-by: Pedro Algarvio --- .github/workflows/test-package-downloads-action-linux.yml | 4 ++-- .github/workflows/test-package-downloads-action-windows.yml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/test-package-downloads-action-linux.yml b/.github/workflows/test-package-downloads-action-linux.yml index ee67c4d4020..7df9ec1c8f3 100644 --- a/.github/workflows/test-package-downloads-action-linux.yml +++ b/.github/workflows/test-package-downloads-action-linux.yml @@ -224,7 +224,7 @@ jobs: run: | tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \ -E INSTALL_TYPE -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \ - -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING \ + -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE \ --nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }} -- download-pkgs - name: Run Package Download Tests @@ -241,7 +241,7 @@ jobs: run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ -E INSTALL_TYPE -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \ - -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING \ + -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE \ --nox-session=${{ inputs.nox-session }} --rerun-failures ${{ inputs.distro-slug }} -- download-pkgs - name: Combine Coverage Reports diff --git a/.github/workflows/test-package-downloads-action-windows.yml b/.github/workflows/test-package-downloads-action-windows.yml index 10d4462e451..963372925d2 100644 --- a/.github/workflows/test-package-downloads-action-windows.yml +++ b/.github/workflows/test-package-downloads-action-windows.yml @@ -234,7 +234,7 @@ jobs: run: | tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \ -E INSTALL_TYPE -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \ - -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING \ + -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE \ --nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }} -- download-pkgs - name: Run Package Download Tests @@ -252,7 +252,7 @@ jobs: run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ -E INSTALL_TYPE -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \ - -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING \ + -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE \ 
--nox-session=${{ inputs.nox-session }} --rerun-failures ${{ inputs.distro-slug }} -- download-pkgs - name: Combine Coverage Reports From 75a93eefc3d2b606ff00cc8764b1cbbbffe710c2 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Tue, 2 May 2023 20:53:24 -0400 Subject: [PATCH 03/25] Refactor the `tools pkg repo` commands into a subdirectory --- tools/__init__.py | 2 + tools/pkg/repo.py | 1906 ------------------------------------ tools/pkg/repo/__init__.py | 181 ++++ tools/pkg/repo/create.py | 1038 ++++++++++++++++++++ tools/pkg/repo/publish.py | 653 ++++++++++++ tools/utils.py | 127 +++ 6 files changed, 2001 insertions(+), 1906 deletions(-) delete mode 100644 tools/pkg/repo.py create mode 100644 tools/pkg/repo/__init__.py create mode 100644 tools/pkg/repo/create.py create mode 100644 tools/pkg/repo/publish.py diff --git a/tools/__init__.py b/tools/__init__.py index 419ec309c2f..02e6b8de903 100644 --- a/tools/__init__.py +++ b/tools/__init__.py @@ -8,6 +8,8 @@ ptscripts.register_tools_module("tools.docs") ptscripts.register_tools_module("tools.pkg") ptscripts.register_tools_module("tools.pkg.repo") ptscripts.register_tools_module("tools.pkg.build") +ptscripts.register_tools_module("tools.pkg.repo.create") +ptscripts.register_tools_module("tools.pkg.repo.publish") ptscripts.register_tools_module("tools.pre_commit") ptscripts.register_tools_module("tools.release") ptscripts.register_tools_module("tools.vm") diff --git a/tools/pkg/repo.py b/tools/pkg/repo.py deleted file mode 100644 index d781cf3c8ff..00000000000 --- a/tools/pkg/repo.py +++ /dev/null @@ -1,1906 +0,0 @@ -""" -These commands are used to build the pacakge repository files. -""" -# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated -from __future__ import annotations - -import fnmatch -import hashlib -import json -import logging -import os -import pathlib -import re -import shutil -import sys -import tempfile -import textwrap -from datetime import datetime -from typing import TYPE_CHECKING, Any - -import packaging.version -from ptscripts import Context, command_group - -import tools.pkg -import tools.utils -from tools.utils import Version, get_salt_releases - -try: - import boto3 - from botocore.exceptions import ClientError -except ImportError: - print( - "\nPlease run 'python -m pip install -r " - "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info), - file=sys.stderr, - flush=True, - ) - raise - -log = logging.getLogger(__name__) - -# Define the command group -repo = command_group( - name="repo", - help="Packaging Repository Related Commands", - description=__doc__, - parent="pkg", -) - -create = command_group( - name="create", help="Packaging Repository Creation Related Commands", parent=repo -) - -publish = command_group( - name="publish", - help="Packaging Repository Publication Related Commands", - parent=repo, -) - - -_deb_distro_info = { - "debian": { - "10": { - "label": "deb10ary", - "codename": "buster", - "suitename": "oldstable", - }, - "11": { - "label": "deb11ary", - "codename": "bullseye", - "suitename": "stable", - }, - }, - "ubuntu": { - "20.04": { - "label": "salt_ubuntu2004", - "codename": "focal", - }, - "22.04": { - "label": "salt_ubuntu2204", - "codename": "jammy", - }, - }, -} - - -@create.command( - name="deb", - arguments={ - "salt_version": { - "help": ( - "The salt version for which to build the repository configuration files. " - "If not passed, it will be discovered by running 'python3 salt/version.py'." 
- ), - "required": True, - }, - "distro": { - "help": "The debian based distribution to build the repository for", - "choices": list(_deb_distro_info), - "required": True, - }, - "distro_version": { - "help": "The distro version.", - "required": True, - }, - "distro_arch": { - "help": "The distribution architecture", - "choices": ("x86_64", "amd64", "aarch64", "arm64"), - }, - "repo_path": { - "help": "Path where the repository shall be created.", - "required": True, - }, - "key_id": { - "help": "The GnuPG key ID used to sign.", - "required": True, - }, - "incoming": { - "help": ( - "The path to the directory containing the files that should added to " - "the repository." - ), - "required": True, - }, - "nightly_build_from": { - "help": "Developement repository target", - }, - }, -) -def debian( - ctx: Context, - salt_version: str = None, - distro: str = None, - distro_version: str = None, - incoming: pathlib.Path = None, - repo_path: pathlib.Path = None, - key_id: str = None, - distro_arch: str = "amd64", - nightly_build_from: str = None, -): - """ - Create the debian repository. - """ - if TYPE_CHECKING: - assert salt_version is not None - assert distro is not None - assert distro_version is not None - assert incoming is not None - assert repo_path is not None - assert key_id is not None - display_name = f"{distro.capitalize()} {distro_version}" - if distro_version not in _deb_distro_info[distro]: - ctx.error(f"Support for {display_name} is missing.") - ctx.exit(1) - - if distro_arch == "x86_64": - ctx.info(f"The {distro_arch} arch is an alias for 'amd64'. Adjusting.") - distro_arch = "amd64" - - if distro_arch == "aarch64": - ctx.info(f"The {distro_arch} arch is an alias for 'arm64'. Adjusting.") - distro_arch = "arm64" - - distro_details = _deb_distro_info[distro][distro_version] - - ctx.info("Distribution Details:") - ctx.info(distro_details) - if TYPE_CHECKING: - assert isinstance(distro_details["label"], str) - assert isinstance(distro_details["codename"], str) - assert isinstance(distro_details["suitename"], str) - label: str = distro_details["label"] - codename: str = distro_details["codename"] - - ftp_archive_config_suite = "" - if distro == "debian": - suitename: str = distro_details["suitename"] - ftp_archive_config_suite = ( - f"""\n APT::FTPArchive::Release::Suite "{suitename}";\n""" - ) - archive_description = f"SaltProject {display_name} Python 3{'' if not nightly_build_from else ' development'} Salt package repo" - ftp_archive_config = f"""\ - APT::FTPArchive::Release::Origin "SaltProject"; - APT::FTPArchive::Release::Label "{label}";{ftp_archive_config_suite} - APT::FTPArchive::Release::Codename "{codename}"; - APT::FTPArchive::Release::Architectures "{distro_arch}"; - APT::FTPArchive::Release::Components "main"; - APT::FTPArchive::Release::Description "{archive_description}"; - APT::FTPArchive::Release::Acquire-By-Hash "yes"; - Dir {{ - ArchiveDir "."; - }}; - BinDirectory "pool" {{ - Packages "dists/{codename}/main/binary-{distro_arch}/Packages"; - Sources "dists/{codename}/main/source/Sources"; - Contents "dists/{codename}/main/Contents-{distro_arch}"; - }} - """ - ctx.info("Creating repository directory structure ...") - create_repo_path = _create_top_level_repo_path( - ctx, - repo_path, - salt_version, - distro, - distro_version=distro_version, - distro_arch=distro_arch, - nightly_build_from=nightly_build_from, - ) - # Export the GPG key in use - tools.utils.export_gpg_key(ctx, key_id, create_repo_path) - - create_repo_path = _create_repo_path( - ctx, - repo_path, - 
salt_version, - distro, - distro_version=distro_version, - distro_arch=distro_arch, - nightly_build_from=nightly_build_from, - ) - ftp_archive_config_file = create_repo_path / "apt-ftparchive.conf" - ctx.info(f"Writing {ftp_archive_config_file} ...") - ftp_archive_config_file.write_text(textwrap.dedent(ftp_archive_config)) - - # Export the GPG key in use - tools.utils.export_gpg_key(ctx, key_id, create_repo_path) - - pool_path = create_repo_path / "pool" - pool_path.mkdir(exist_ok=True) - for fpath in incoming.iterdir(): - dpath = pool_path / fpath.name - ctx.info(f"Copying {fpath} to {dpath} ...") - shutil.copyfile(fpath, dpath) - if fpath.suffix == ".dsc": - ctx.info(f"Running 'debsign' on {dpath} ...") - ctx.run("debsign", "--re-sign", "-k", key_id, str(dpath), interactive=True) - - dists_path = create_repo_path / "dists" - symlink_parent_path = dists_path / codename / "main" - symlink_paths = ( - symlink_parent_path / "by-hash" / "SHA256", - symlink_parent_path / "source" / "by-hash" / "SHA256", - symlink_parent_path / f"binary-{distro_arch}" / "by-hash" / "SHA256", - ) - - for path in symlink_paths: - path.mkdir(exist_ok=True, parents=True) - - cmdline = ["apt-ftparchive", "generate", "apt-ftparchive.conf"] - ctx.info(f"Running '{' '.join(cmdline)}' ...") - ctx.run(*cmdline, cwd=create_repo_path) - - ctx.info("Creating by-hash symlinks ...") - for path in symlink_paths: - for fpath in path.parent.parent.iterdir(): - if not fpath.is_file(): - continue - sha256sum = ctx.run("sha256sum", str(fpath), capture=True) - link = path / sha256sum.stdout.decode().split()[0] - link.symlink_to(f"../../{fpath.name}") - - cmdline = [ - "apt-ftparchive", - "--no-md5", - "--no-sha1", - "--no-sha512", - "release", - "-c", - "apt-ftparchive.conf", - f"dists/{codename}/", - ] - ctx.info(f"Running '{' '.join(cmdline)}' ...") - ret = ctx.run(*cmdline, capture=True, cwd=create_repo_path) - release_file = dists_path / codename / "Release" - ctx.info(f"Writing {release_file} with the output of the previous command...") - release_file.write_bytes(ret.stdout) - - cmdline = [ - "gpg", - "-u", - key_id, - "-o", - f"dists/{codename}/InRelease", - "-a", - "-s", - "--clearsign", - f"dists/{codename}/Release", - ] - ctx.info(f"Running '{' '.join(cmdline)}' ...") - ctx.run(*cmdline, cwd=create_repo_path) - - cmdline = [ - "gpg", - "-u", - key_id, - "-o", - f"dists/{codename}/Release.gpg", - "-a", - "-b", - "-s", - f"dists/{codename}/Release", - ] - - ctx.info(f"Running '{' '.join(cmdline)}' ...") - ctx.run(*cmdline, cwd=create_repo_path) - if not nightly_build_from: - remote_versions = _get_remote_versions( - tools.utils.STAGING_BUCKET_NAME, - create_repo_path.parent.relative_to(repo_path), - ) - major_version = Version(salt_version).major - matching_major = None - for version in remote_versions: - if version.major == major_version: - matching_major = version - break - if not matching_major or matching_major <= salt_version: - major_link = create_repo_path.parent.parent / str(major_version) - ctx.info(f"Creating '{major_link.relative_to(repo_path)}' symlink ...") - major_link.symlink_to(f"minor/{salt_version}") - if not remote_versions or remote_versions[0] <= salt_version: - latest_link = create_repo_path.parent.parent / "latest" - ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...") - latest_link.symlink_to(f"minor/{salt_version}") - - ctx.info("Done") - - -_rpm_distro_info = { - "amazon": ["2"], - "redhat": ["7", "8", "9"], - "fedora": ["36", "37", "38"], - "photon": ["3", "4"], -} - - 
-@create.command( - name="rpm", - arguments={ - "salt_version": { - "help": ( - "The salt version for which to build the repository configuration files. " - "If not passed, it will be discovered by running 'python3 salt/version.py'." - ), - "required": True, - }, - "distro": { - "help": "The debian based distribution to build the repository for", - "choices": list(_rpm_distro_info), - "required": True, - }, - "distro_version": { - "help": "The distro version.", - "required": True, - }, - "distro_arch": { - "help": "The distribution architecture", - "choices": ("x86_64", "aarch64", "arm64"), - }, - "repo_path": { - "help": "Path where the repository shall be created.", - "required": True, - }, - "key_id": { - "help": "The GnuPG key ID used to sign.", - "required": True, - }, - "incoming": { - "help": ( - "The path to the directory containing the files that should added to " - "the repository." - ), - "required": True, - }, - "nightly_build_from": { - "help": "Developement repository target", - }, - }, -) -def rpm( - ctx: Context, - salt_version: str = None, - distro: str = None, - distro_version: str = None, - incoming: pathlib.Path = None, - repo_path: pathlib.Path = None, - key_id: str = None, - distro_arch: str = "amd64", - nightly_build_from: str = None, -): - """ - Create the redhat repository. - """ - if TYPE_CHECKING: - assert salt_version is not None - assert distro is not None - assert distro_version is not None - assert incoming is not None - assert repo_path is not None - assert key_id is not None - display_name = f"{distro.capitalize()} {distro_version}" - if distro_version not in _rpm_distro_info[distro]: - ctx.error(f"Support for {display_name} is missing.") - ctx.exit(1) - - if distro_arch == "aarch64": - ctx.info(f"The {distro_arch} arch is an alias for 'arm64'. 
Adjusting.") - distro_arch = "arm64" - - ctx.info("Creating repository directory structure ...") - create_repo_path = _create_top_level_repo_path( - ctx, - repo_path, - salt_version, - distro, - distro_version=distro_version, - distro_arch=distro_arch, - nightly_build_from=nightly_build_from, - ) - # Export the GPG key in use - tools.utils.export_gpg_key(ctx, key_id, create_repo_path) - - create_repo_path = _create_repo_path( - ctx, - repo_path, - salt_version, - distro, - distro_version=distro_version, - distro_arch=distro_arch, - nightly_build_from=nightly_build_from, - ) - - # Export the GPG key in use - tools.utils.export_gpg_key(ctx, key_id, create_repo_path) - - for fpath in incoming.iterdir(): - if ".src" in fpath.suffixes: - dpath = create_repo_path / "SRPMS" / fpath.name - else: - dpath = create_repo_path / fpath.name - ctx.info(f"Copying {fpath} to {dpath} ...") - shutil.copyfile(fpath, dpath) - if fpath.suffix == ".rpm": - ctx.info(f"Running 'rpmsign' on {dpath} ...") - ctx.run( - "rpmsign", - "--key-id", - key_id, - "--addsign", - "--digest-algo=sha256", - str(dpath), - ) - - createrepo = shutil.which("createrepo") - if createrepo is None: - container = "ghcr.io/saltstack/salt-ci-containers/packaging:centosstream-9" - ctx.info(f"Using docker container '{container}' to call 'createrepo'...") - uid = ctx.run("id", "-u", capture=True).stdout.strip().decode() - gid = ctx.run("id", "-g", capture=True).stdout.strip().decode() - ctx.run( - "docker", - "run", - "--rm", - "-v", - f"{create_repo_path.resolve()}:/code", - "-u", - f"{uid}:{gid}", - "-w", - "/code", - container, - "createrepo", - ".", - ) - else: - ctx.run("createrepo", ".", cwd=create_repo_path) - - if nightly_build_from: - repo_domain = os.environ.get("SALT_REPO_DOMAIN_RELEASE", "repo.saltproject.io") - else: - repo_domain = os.environ.get( - "SALT_REPO_DOMAIN_STAGING", "staging.repo.saltproject.io" - ) - - salt_repo_user = os.environ.get("SALT_REPO_USER") - if salt_repo_user: - log.info( - "SALT_REPO_USER: %s", - salt_repo_user[0] + "*" * (len(salt_repo_user) - 2) + salt_repo_user[-1], - ) - salt_repo_pass = os.environ.get("SALT_REPO_PASS") - if salt_repo_pass: - log.info( - "SALT_REPO_PASS: %s", - salt_repo_pass[0] + "*" * (len(salt_repo_pass) - 2) + salt_repo_pass[-1], - ) - if salt_repo_user and salt_repo_pass: - repo_domain = f"{salt_repo_user}:{salt_repo_pass}@{repo_domain}" - - def _create_repo_file(create_repo_path, url_suffix): - ctx.info(f"Creating '{repo_file_path.relative_to(repo_path)}' file ...") - if nightly_build_from: - base_url = f"salt-dev/{nightly_build_from}/" - repo_file_contents = "[salt-nightly-repo]" - elif "rc" in salt_version: - base_url = "salt_rc/" - repo_file_contents = "[salt-rc-repo]" - else: - base_url = "" - repo_file_contents = "[salt-repo]" - base_url += f"salt/py3/{distro}/{distro_version}/{distro_arch}/{url_suffix}" - if distro == "amazon": - distro_name = "Amazon Linux" - elif distro == "redhat": - distro_name = "RHEL/CentOS" - else: - distro_name = distro.capitalize() - - if distro != "photon" and int(distro_version) < 8: - failovermethod = "\n failovermethod=priority" - else: - failovermethod = "" - - repo_file_contents += textwrap.dedent( - f""" - name=Salt repo for {distro_name} {distro_version} PY3 - baseurl=https://{repo_domain}/{base_url} - skip_if_unavailable=True{failovermethod} - priority=10 - enabled=1 - enabled_metadata=1 - gpgcheck=1 - gpgkey=https://{repo_domain}/{base_url}/{tools.utils.GPG_KEY_FILENAME}.pub - """ - ) - create_repo_path.write_text(repo_file_contents) - 
- if nightly_build_from: - repo_file_path = create_repo_path.parent / "nightly.repo" - else: - repo_file_path = create_repo_path.parent / f"{create_repo_path.name}.repo" - - _create_repo_file(repo_file_path, f"minor/{salt_version}") - - if not nightly_build_from: - remote_versions = _get_remote_versions( - tools.utils.STAGING_BUCKET_NAME, - create_repo_path.parent.relative_to(repo_path), - ) - major_version = Version(salt_version).major - matching_major = None - for version in remote_versions: - if version.major == major_version: - matching_major = version - break - if not matching_major or matching_major <= salt_version: - major_link = create_repo_path.parent.parent / str(major_version) - ctx.info(f"Creating '{major_link.relative_to(repo_path)}' symlink ...") - major_link.symlink_to(f"minor/{salt_version}") - repo_file_path = create_repo_path.parent.parent / f"{major_version}.repo" - _create_repo_file(repo_file_path, str(major_version)) - if not remote_versions or remote_versions[0] <= salt_version: - latest_link = create_repo_path.parent.parent / "latest" - ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...") - latest_link.symlink_to(f"minor/{salt_version}") - repo_file_path = create_repo_path.parent.parent / "latest.repo" - _create_repo_file(repo_file_path, "latest") - - ctx.info("Done") - - -@create.command( - name="windows", - arguments={ - "salt_version": { - "help": "The salt version for which to build the repository", - "required": True, - }, - "repo_path": { - "help": "Path where the repository shall be created.", - "required": True, - }, - "key_id": { - "help": "The GnuPG key ID used to sign.", - "required": True, - }, - "incoming": { - "help": ( - "The path to the directory containing the files that should added to " - "the repository." - ), - "required": True, - }, - "nightly_build_from": { - "help": "Developement repository target", - }, - }, -) -def windows( - ctx: Context, - salt_version: str = None, - incoming: pathlib.Path = None, - repo_path: pathlib.Path = None, - key_id: str = None, - nightly_build_from: str = None, -): - """ - Create the windows repository. - """ - if TYPE_CHECKING: - assert salt_version is not None - assert incoming is not None - assert repo_path is not None - assert key_id is not None - _create_onedir_based_repo( - ctx, - salt_version=salt_version, - nightly_build_from=nightly_build_from, - repo_path=repo_path, - incoming=incoming, - key_id=key_id, - distro="windows", - pkg_suffixes=(".msi", ".exe"), - ) - ctx.info("Done") - - -@create.command( - name="macos", - arguments={ - "salt_version": { - "help": "The salt version for which to build the repository", - "required": True, - }, - "repo_path": { - "help": "Path where the repository shall be created.", - "required": True, - }, - "key_id": { - "help": "The GnuPG key ID used to sign.", - "required": True, - }, - "incoming": { - "help": ( - "The path to the directory containing the files that should added to " - "the repository." - ), - "required": True, - }, - "nightly_build_from": { - "help": "Developement repository target", - }, - }, -) -def macos( - ctx: Context, - salt_version: str = None, - incoming: pathlib.Path = None, - repo_path: pathlib.Path = None, - key_id: str = None, - nightly_build_from: str = None, -): - """ - Create the windows repository. 
- """ - if TYPE_CHECKING: - assert salt_version is not None - assert incoming is not None - assert repo_path is not None - assert key_id is not None - _create_onedir_based_repo( - ctx, - salt_version=salt_version, - nightly_build_from=nightly_build_from, - repo_path=repo_path, - incoming=incoming, - key_id=key_id, - distro="macos", - pkg_suffixes=(".pkg",), - ) - ctx.info("Done") - - -@create.command( - name="onedir", - arguments={ - "salt_version": { - "help": "The salt version for which to build the repository", - "required": True, - }, - "repo_path": { - "help": "Path where the repository shall be created.", - "required": True, - }, - "key_id": { - "help": "The GnuPG key ID used to sign.", - "required": True, - }, - "incoming": { - "help": ( - "The path to the directory containing the files that should added to " - "the repository." - ), - "required": True, - }, - "nightly_build_from": { - "help": "Developement repository target", - }, - }, -) -def onedir( - ctx: Context, - salt_version: str = None, - incoming: pathlib.Path = None, - repo_path: pathlib.Path = None, - key_id: str = None, - nightly_build_from: str = None, -): - """ - Create the onedir repository. - """ - if TYPE_CHECKING: - assert salt_version is not None - assert incoming is not None - assert repo_path is not None - assert key_id is not None - _create_onedir_based_repo( - ctx, - salt_version=salt_version, - nightly_build_from=nightly_build_from, - repo_path=repo_path, - incoming=incoming, - key_id=key_id, - distro="onedir", - pkg_suffixes=(".xz", ".zip"), - ) - ctx.info("Done") - - -@create.command( - name="src", - arguments={ - "salt_version": { - "help": "The salt version for which to build the repository", - "required": True, - }, - "repo_path": { - "help": "Path where the repository shall be created.", - "required": True, - }, - "key_id": { - "help": "The GnuPG key ID used to sign.", - "required": True, - }, - "incoming": { - "help": ( - "The path to the directory containing the files that should added to " - "the repository." - ), - "required": True, - }, - "nightly_build_from": { - "help": "Developement repository target", - }, - }, -) -def src( - ctx: Context, - salt_version: str = None, - incoming: pathlib.Path = None, - repo_path: pathlib.Path = None, - key_id: str = None, - nightly_build_from: str = None, -): - """ - Create the onedir repository. 
- """ - if TYPE_CHECKING: - assert salt_version is not None - assert incoming is not None - assert repo_path is not None - assert key_id is not None - - ctx.info("Creating repository directory structure ...") - create_repo_path = _create_top_level_repo_path( - ctx, - repo_path, - salt_version, - distro="src", - nightly_build_from=nightly_build_from, - ) - # Export the GPG key in use - tools.utils.export_gpg_key(ctx, key_id, create_repo_path) - create_repo_path = create_repo_path / salt_version - create_repo_path.mkdir(exist_ok=True, parents=True) - hashes_base_path = create_repo_path / f"salt-{salt_version}" - for fpath in incoming.iterdir(): - if fpath.suffix not in (".gz",): - continue - ctx.info(f"* Processing {fpath} ...") - dpath = create_repo_path / fpath.name - ctx.info(f"Copying {fpath} to {dpath} ...") - shutil.copyfile(fpath, dpath) - for hash_name in ("blake2b", "sha512", "sha3_512"): - ctx.info(f" * Calculating {hash_name} ...") - hexdigest = _get_file_checksum(fpath, hash_name) - with open(f"{hashes_base_path}_{hash_name.upper()}", "a+") as wfh: - wfh.write(f"{hexdigest} {dpath.name}\n") - with open(f"{dpath}.{hash_name}", "a+") as wfh: - wfh.write(f"{hexdigest} {dpath.name}\n") - - for fpath in create_repo_path.iterdir(): - if fpath.suffix in (".pub", ".gpg"): - continue - tools.utils.gpg_sign(ctx, key_id, fpath) - - # Export the GPG key in use - tools.utils.export_gpg_key(ctx, key_id, create_repo_path) - ctx.info("Done") - - -@publish.command( - arguments={ - "repo_path": { - "help": "Local path for the repository that shall be published.", - }, - "salt_version": { - "help": "The salt version for which to build the repository", - "required": True, - }, - } -) -def nightly(ctx: Context, repo_path: pathlib.Path, salt_version: str = None): - """ - Publish to the nightly bucket. - """ - if TYPE_CHECKING: - assert salt_version is not None - _publish_repo( - ctx, repo_path=repo_path, nightly_build=True, salt_version=salt_version - ) - - -@publish.command( - arguments={ - "repo_path": { - "help": "Local path for the repository that shall be published.", - }, - "salt_version": { - "help": "The salt version for which to build the repository", - "required": True, - }, - } -) -def staging(ctx: Context, repo_path: pathlib.Path, salt_version: str = None): - """ - Publish to the staging bucket. - """ - if TYPE_CHECKING: - assert salt_version is not None - _publish_repo(ctx, repo_path=repo_path, stage=True, salt_version=salt_version) - - -@repo.command(name="backup-previous-releases") -def backup_previous_releases(ctx: Context): - """ - Backup release bucket. - """ - _rclone(ctx, tools.utils.RELEASE_BUCKET_NAME, tools.utils.BACKUP_BUCKET_NAME) - ctx.info("Done") - - -@repo.command(name="restore-previous-releases") -def restore_previous_releases(ctx: Context): - """ - Restore release bucket from backup. 
- """ - _rclone(ctx, tools.utils.BACKUP_BUCKET_NAME, tools.utils.RELEASE_BUCKET_NAME) - github_output = os.environ.get("GITHUB_OUTPUT") - if github_output is not None: - with open(github_output, "a", encoding="utf-8") as wfh: - wfh.write(f"backup-complete=true\n") - ctx.info("Done") - - -def _rclone(ctx: Context, src: str, dst: str): - rclone = shutil.which("rclone") - if not rclone: - ctx.error("Could not find the rclone binary") - ctx.exit(1) - - if TYPE_CHECKING: - assert rclone - - env = os.environ.copy() - env["RCLONE_CONFIG_S3_TYPE"] = "s3" - cmdline: list[str] = [ - rclone, - "sync", - "--auto-confirm", - "--human-readable", - "--checksum", - "--color=always", - "--metadata", - "--s3-env-auth", - "--s3-location-constraint=us-west-2", - "--s3-provider=AWS", - "--s3-region=us-west-2", - "--stats-file-name-length=0", - "--stats-one-line", - "--stats=5s", - "--transfers=50", - "--fast-list", - "--verbose", - ] - if src == tools.utils.RELEASE_BUCKET_NAME: - cmdline.append("--s3-storage-class=INTELLIGENT_TIERING") - cmdline.extend([f"s3://{src}", f"s3://{dst}"]) - ctx.info(f"Running: {' '.join(cmdline)}") - ret = ctx.run(*cmdline, env=env, check=False) - if ret.returncode: - ctx.error(f"Failed to sync from s3://{src} to s3://{dst}") - ctx.exit(1) - - -@publish.command( - arguments={ - "salt_version": { - "help": "The salt version to release.", - }, - } -) -def release(ctx: Context, salt_version: str): - """ - Publish to the release bucket. - """ - if "rc" in salt_version: - bucket_folder = "salt_rc/salt/py3" - else: - bucket_folder = "salt/py3" - - files_to_copy: list[str] - directories_to_delete: list[str] = [] - - ctx.info("Grabbing remote file listing of files to copy...") - s3 = boto3.client("s3") - repo_release_files_path = pathlib.Path( - f"release-artifacts/{salt_version}/.release-files.json" - ) - repo_release_symlinks_path = pathlib.Path( - f"release-artifacts/{salt_version}/.release-symlinks.json" - ) - with tempfile.TemporaryDirectory(prefix=f"{salt_version}_release_") as tsd: - local_release_files_path = pathlib.Path(tsd) / repo_release_files_path.name - try: - bucket_name = tools.utils.STAGING_BUCKET_NAME - with local_release_files_path.open("wb") as wfh: - ctx.info( - f"Downloading {repo_release_files_path} from bucket {bucket_name} ..." - ) - s3.download_fileobj( - Bucket=bucket_name, - Key=str(repo_release_files_path), - Fileobj=wfh, - ) - files_to_copy = json.loads(local_release_files_path.read_text()) - except ClientError as exc: - if "Error" not in exc.response: - log.exception(f"Error downloading {repo_release_files_path}: {exc}") - ctx.exit(1) - if exc.response["Error"]["Code"] == "404": - ctx.error(f"Could not find {repo_release_files_path} in bucket.") - ctx.exit(1) - if exc.response["Error"]["Code"] == "400": - ctx.error( - f"Could not download {repo_release_files_path} from bucket: {exc}" - ) - ctx.exit(1) - log.exception(f"Error downloading {repo_release_files_path}: {exc}") - ctx.exit(1) - local_release_symlinks_path = ( - pathlib.Path(tsd) / repo_release_symlinks_path.name - ) - try: - with local_release_symlinks_path.open("wb") as wfh: - ctx.info( - f"Downloading {repo_release_symlinks_path} from bucket {bucket_name} ..." 
- ) - s3.download_fileobj( - Bucket=bucket_name, - Key=str(repo_release_symlinks_path), - Fileobj=wfh, - ) - directories_to_delete = json.loads(local_release_symlinks_path.read_text()) - except ClientError as exc: - if "Error" not in exc.response: - log.exception(f"Error downloading {repo_release_symlinks_path}: {exc}") - ctx.exit(1) - if exc.response["Error"]["Code"] == "404": - ctx.error(f"Could not find {repo_release_symlinks_path} in bucket.") - ctx.exit(1) - if exc.response["Error"]["Code"] == "400": - ctx.error( - f"Could not download {repo_release_symlinks_path} from bucket: {exc}" - ) - ctx.exit(1) - log.exception(f"Error downloading {repo_release_symlinks_path}: {exc}") - ctx.exit(1) - - if directories_to_delete: - with tools.utils.create_progress_bar() as progress: - task = progress.add_task( - "Deleting directories to override.", - total=len(directories_to_delete), - ) - for directory in directories_to_delete: - try: - objects_to_delete: list[dict[str, str]] = [] - for path in _get_repo_file_list( - bucket_name=tools.utils.RELEASE_BUCKET_NAME, - bucket_folder=bucket_folder, - glob_match=f"{directory}/**", - ): - objects_to_delete.append({"Key": path}) - if objects_to_delete: - s3.delete_objects( - Bucket=tools.utils.RELEASE_BUCKET_NAME, - Delete={"Objects": objects_to_delete}, - ) - except ClientError: - log.exception("Failed to delete remote files") - finally: - progress.update(task, advance=1) - - already_copied_files: list[str] = [] - s3 = boto3.client("s3") - dot_repo_files = [] - with tools.utils.create_progress_bar() as progress: - task = progress.add_task( - "Copying files between buckets", total=len(files_to_copy) - ) - for fpath in files_to_copy: - if fpath in already_copied_files: - continue - if fpath.endswith(".repo"): - dot_repo_files.append(fpath) - ctx.info(f" * Copying {fpath}") - try: - s3.copy_object( - Bucket=tools.utils.RELEASE_BUCKET_NAME, - Key=fpath, - CopySource={ - "Bucket": tools.utils.STAGING_BUCKET_NAME, - "Key": fpath, - }, - MetadataDirective="COPY", - TaggingDirective="COPY", - ServerSideEncryption="AES256", - ) - already_copied_files.append(fpath) - except ClientError: - log.exception(f"Failed to copy {fpath}") - finally: - progress.update(task, advance=1) - - # Now let's get the onedir based repositories where we need to update several repo.json - major_version = packaging.version.parse(salt_version).major - with tempfile.TemporaryDirectory(prefix=f"{salt_version}_release_") as tsd: - repo_path = pathlib.Path(tsd) - for distro in ("windows", "macos", "onedir"): - - create_repo_path = _create_repo_path( - ctx, - repo_path, - salt_version, - distro=distro, - ) - repo_json_path = create_repo_path.parent.parent / "repo.json" - - release_repo_json = _get_repo_json_file_contents( - ctx, - bucket_name=tools.utils.RELEASE_BUCKET_NAME, - repo_path=repo_path, - repo_json_path=repo_json_path, - ) - minor_repo_json_path = create_repo_path.parent / "repo.json" - - staging_minor_repo_json = _get_repo_json_file_contents( - ctx, - bucket_name=tools.utils.STAGING_BUCKET_NAME, - repo_path=repo_path, - repo_json_path=minor_repo_json_path, - ) - release_minor_repo_json = _get_repo_json_file_contents( - ctx, - bucket_name=tools.utils.RELEASE_BUCKET_NAME, - repo_path=repo_path, - repo_json_path=minor_repo_json_path, - ) - - release_json = staging_minor_repo_json[salt_version] - - major_version = Version(salt_version).major - versions = _parse_versions(*list(release_minor_repo_json)) - ctx.info( - f"Collected versions from {minor_repo_json_path.relative_to(repo_path)}: 
" - f"{', '.join(str(vs) for vs in versions)}" - ) - minor_versions = [v for v in versions if v.major == major_version] - ctx.info( - f"Collected versions(Matching major: {major_version}) from " - f"{minor_repo_json_path.relative_to(repo_path)}: " - f"{', '.join(str(vs) for vs in minor_versions)}" - ) - if not versions: - latest_version = Version(salt_version) - else: - latest_version = versions[0] - if not minor_versions: - latest_minor_version = Version(salt_version) - else: - latest_minor_version = minor_versions[0] - - ctx.info(f"Release Version: {salt_version}") - ctx.info(f"Latest Repo Version: {latest_version}") - ctx.info(f"Latest Release Minor Version: {latest_minor_version}") - - # Add the minor version - release_minor_repo_json[salt_version] = release_json - - if latest_version <= salt_version: - release_repo_json["latest"] = release_json - - if latest_minor_version <= salt_version: - release_minor_repo_json["latest"] = release_json - - ctx.info(f"Writing {minor_repo_json_path} ...") - minor_repo_json_path.write_text( - json.dumps(release_minor_repo_json, sort_keys=True) - ) - ctx.info(f"Writing {repo_json_path} ...") - repo_json_path.write_text(json.dumps(release_repo_json, sort_keys=True)) - - # And now, let's get the several rpm "*.repo" files to update the base - # domain from staging to release - release_domain = os.environ.get( - "SALT_REPO_DOMAIN_RELEASE", "repo.saltproject.io" - ) - for path in dot_repo_files: - repo_file_path = repo_path.joinpath(path) - repo_file_path.parent.mkdir(exist_ok=True, parents=True) - bucket_name = tools.utils.STAGING_BUCKET_NAME - try: - ret = s3.head_object(Bucket=bucket_name, Key=path) - ctx.info( - f"Downloading existing '{repo_file_path.relative_to(repo_path)}' " - f"file from bucket {bucket_name}" - ) - size = ret["ContentLength"] - with repo_file_path.open("wb") as wfh: - with tools.utils.create_progress_bar( - file_progress=True - ) as progress: - task = progress.add_task( - description="Downloading...", total=size - ) - s3.download_fileobj( - Bucket=bucket_name, - Key=path, - Fileobj=wfh, - Callback=tools.utils.UpdateProgress(progress, task), - ) - updated_contents = re.sub( - r"^(baseurl|gpgkey)=https://([^/]+)/(.*)$", - rf"\1=https://{release_domain}/\3", - repo_file_path.read_text(), - flags=re.MULTILINE, - ) - ctx.info(f"Updated '{repo_file_path.relative_to(repo_path)}:") - ctx.print(updated_contents) - repo_file_path.write_text(updated_contents) - except ClientError as exc: - if "Error" not in exc.response: - raise - if exc.response["Error"]["Code"] != "404": - raise - ctx.info(f"Could not find {repo_file_path} in bucket {bucket_name}") - - for dirpath, dirnames, filenames in os.walk(repo_path, followlinks=True): - for path in filenames: - upload_path = pathlib.Path(dirpath, path) - relpath = upload_path.relative_to(repo_path) - size = upload_path.stat().st_size - ctx.info(f" {relpath}") - with tools.utils.create_progress_bar(file_progress=True) as progress: - task = progress.add_task(description="Uploading...", total=size) - s3.upload_file( - str(upload_path), - tools.utils.RELEASE_BUCKET_NAME, - str(relpath), - Callback=tools.utils.UpdateProgress(progress, task), - ) - - -@publish.command( - arguments={ - "salt_version": { - "help": "The salt version to release.", - }, - "key_id": { - "help": "The GnuPG key ID used to sign.", - "required": True, - }, - "repository": { - "help": ( - "The full repository name, ie, 'saltstack/salt' on GitHub " - "to run the checks against." 
- ) - }, - } -) -def github( - ctx: Context, - salt_version: str, - key_id: str = None, - repository: str = "saltstack/salt", -): - """ - Publish the release on GitHub releases. - """ - if TYPE_CHECKING: - assert key_id is not None - - s3 = boto3.client("s3") - - # Let's download the release artifacts stored in staging - artifacts_path = pathlib.Path.cwd() / "release-artifacts" - artifacts_path.mkdir(exist_ok=True) - release_artifacts_listing: dict[pathlib.Path, int] = {} - continuation_token = None - while True: - kwargs: dict[str, str] = {} - if continuation_token: - kwargs["ContinuationToken"] = continuation_token - ret = s3.list_objects_v2( - Bucket=tools.utils.STAGING_BUCKET_NAME, - Prefix=f"release-artifacts/{salt_version}", - FetchOwner=False, - **kwargs, - ) - contents = ret.pop("Contents", None) - if contents is None: - break - for entry in contents: - entry_path = pathlib.Path(entry["Key"]) - if entry_path.name.startswith("."): - continue - release_artifacts_listing[entry_path] = entry["Size"] - if not ret["IsTruncated"]: - break - continuation_token = ret["NextContinuationToken"] - - for entry_path, size in release_artifacts_listing.items(): - ctx.info(f" * {entry_path.name}") - local_path = artifacts_path / entry_path.name - with local_path.open("wb") as wfh: - with tools.utils.create_progress_bar(file_progress=True) as progress: - task = progress.add_task(description="Downloading...", total=size) - s3.download_fileobj( - Bucket=tools.utils.STAGING_BUCKET_NAME, - Key=str(entry_path), - Fileobj=wfh, - Callback=tools.utils.UpdateProgress(progress, task), - ) - - for artifact in artifacts_path.iterdir(): - if artifact.suffix in (".patch", ".asc", ".gpg", ".pub"): - continue - tools.utils.gpg_sign(ctx, key_id, artifact) - - # Export the GPG key in use - tools.utils.export_gpg_key(ctx, key_id, artifacts_path) - - release_message = f"""\ - # Welcome to Salt v{salt_version} - - | :exclamation: ATTENTION | - |:-------------------------------------------------------------------------------------------------------------------------| - | The archives generated by GitHub(`Source code(zip)`, `Source code(tar.gz)`) will not report Salt's version properly. | - | Please use the tarball generated by The Salt Project Team(`salt-{salt_version}.tar.gz`). - """ - release_message_path = artifacts_path / "gh-release-body.md" - release_message_path.write_text(textwrap.dedent(release_message).strip()) - - github_output = os.environ.get("GITHUB_OUTPUT") - if github_output is None: - ctx.warn("The 'GITHUB_OUTPUT' variable is not set. 
Stop processing.") - ctx.exit(0) - - if TYPE_CHECKING: - assert github_output is not None - - with open(github_output, "a", encoding="utf-8") as wfh: - wfh.write(f"release-messsage-file={release_message_path.resolve()}\n") - - releases = get_salt_releases(ctx, repository) - if Version(salt_version) >= releases[-1]: - make_latest = True - else: - make_latest = False - with open(github_output, "a", encoding="utf-8") as wfh: - wfh.write(f"make-latest={json.dumps(make_latest)}\n") - - artifacts_to_upload = [] - for artifact in artifacts_path.iterdir(): - if artifact.suffix == ".patch": - continue - if artifact.name == release_message_path.name: - continue - artifacts_to_upload.append(str(artifact.resolve())) - - with open(github_output, "a", encoding="utf-8") as wfh: - wfh.write(f"release-artifacts={','.join(artifacts_to_upload)}\n") - ctx.exit(0) - - -@repo.command( - name="confirm-unreleased", - arguments={ - "salt_version": { - "help": "The salt version to check", - }, - "repository": { - "help": ( - "The full repository name, ie, 'saltstack/salt' on GitHub " - "to run the checks against." - ) - }, - }, -) -def confirm_unreleased( - ctx: Context, salt_version: str, repository: str = "saltstack/salt" -): - """ - Confirm that the passed version is not yet tagged and/or released. - """ - releases = get_salt_releases(ctx, repository) - if Version(salt_version) in releases: - ctx.error(f"There's already a '{salt_version}' tag or github release.") - ctx.exit(1) - ctx.info(f"Could not find a release for Salt Version '{salt_version}'") - ctx.exit(0) - - -@repo.command( - name="confirm-staged", - arguments={ - "salt_version": { - "help": "The salt version to check", - }, - "repository": { - "help": ( - "The full repository name, ie, 'saltstack/salt' on GitHub " - "to run the checks against." - ) - }, - }, -) -def confirm_staged(ctx: Context, salt_version: str, repository: str = "saltstack/salt"): - """ - Confirm that the passed version has been staged for release. - """ - s3 = boto3.client("s3") - repo_release_files_path = pathlib.Path( - f"release-artifacts/{salt_version}/.release-files.json" - ) - repo_release_symlinks_path = pathlib.Path( - f"release-artifacts/{salt_version}/.release-symlinks.json" - ) - for remote_path in (repo_release_files_path, repo_release_symlinks_path): - try: - bucket_name = tools.utils.STAGING_BUCKET_NAME - ctx.info( - f"Checking for the presence of {remote_path} on bucket {bucket_name} ..." 
- ) - s3.head_object( - Bucket=bucket_name, - Key=str(remote_path), - ) - except ClientError as exc: - if "Error" not in exc.response: - log.exception(f"Could not get information about {remote_path}: {exc}") - ctx.exit(1) - if exc.response["Error"]["Code"] == "404": - ctx.error(f"Could not find {remote_path} in bucket.") - ctx.exit(1) - if exc.response["Error"]["Code"] == "400": - ctx.error(f"Could get information about {remote_path}: {exc}") - ctx.exit(1) - log.exception(f"Error getting information about {remote_path}: {exc}") - ctx.exit(1) - ctx.info(f"Version {salt_version} has been staged for release") - ctx.exit(0) - - -def _get_repo_detailed_file_list( - bucket_name: str, - bucket_folder: str = "", - glob_match: str = "**", -) -> list[dict[str, Any]]: - s3 = boto3.client("s3") - listing: list[dict[str, Any]] = [] - continuation_token = None - while True: - kwargs: dict[str, str] = {} - if continuation_token: - kwargs["ContinuationToken"] = continuation_token - ret = s3.list_objects_v2( - Bucket=bucket_name, - Prefix=bucket_folder, - FetchOwner=False, - **kwargs, - ) - contents = ret.pop("Contents", None) - if contents is None: - break - for entry in contents: - if fnmatch.fnmatch(entry["Key"], glob_match): - listing.append(entry) - if not ret["IsTruncated"]: - break - continuation_token = ret["NextContinuationToken"] - return listing - - -def _get_repo_file_list( - bucket_name: str, bucket_folder: str, glob_match: str -) -> list[str]: - return [ - entry["Key"] - for entry in _get_repo_detailed_file_list( - bucket_name, bucket_folder, glob_match=glob_match - ) - ] - - -def _get_remote_versions(bucket_name: str, remote_path: str): - log.info( - "Getting remote versions from bucket %r under path: %s", - bucket_name, - remote_path, - ) - remote_path = str(remote_path) - if not remote_path.endswith("/"): - remote_path += "/" - - s3 = boto3.client("s3") - ret = s3.list_objects( - Bucket=bucket_name, - Delimiter="/", - Prefix=remote_path, - ) - if "CommonPrefixes" not in ret: - return [] - versions = [] - for entry in ret["CommonPrefixes"]: - _, version = entry["Prefix"].rstrip("/").rsplit("/", 1) - if version == "latest": - continue - versions.append(Version(version)) - versions.sort(reverse=True) - log.info("Remote versions collected: %s", versions) - return versions - - -def _create_onedir_based_repo( - ctx: Context, - salt_version: str, - nightly_build_from: str | None, - repo_path: pathlib.Path, - incoming: pathlib.Path, - key_id: str, - distro: str, - pkg_suffixes: tuple[str, ...], -): - ctx.info("Creating repository directory structure ...") - create_repo_path = _create_top_level_repo_path( - ctx, - repo_path, - salt_version, - distro, - nightly_build_from=nightly_build_from, - ) - # Export the GPG key in use - tools.utils.export_gpg_key(ctx, key_id, create_repo_path) - - create_repo_path = _create_repo_path( - ctx, - repo_path, - salt_version, - distro, - nightly_build_from=nightly_build_from, - ) - if not nightly_build_from: - repo_json_path = create_repo_path.parent.parent / "repo.json" - else: - repo_json_path = create_repo_path.parent / "repo.json" - - if nightly_build_from: - bucket_name = tools.utils.RELEASE_BUCKET_NAME - else: - bucket_name = tools.utils.STAGING_BUCKET_NAME - - release_json = {} - - copy_exclusions = ( - ".blake2b", - ".sha512", - ".sha3_512", - ".BLAKE2B", - ".SHA512", - ".SHA3_512", - ".json", - ) - hashes_base_path = create_repo_path / f"salt-{salt_version}" - for fpath in incoming.iterdir(): - if fpath.suffix in copy_exclusions: - continue - ctx.info(f"* 
Processing {fpath} ...") - dpath = create_repo_path / fpath.name - ctx.info(f"Copying {fpath} to {dpath} ...") - shutil.copyfile(fpath, dpath) - if "-amd64" in dpath.name.lower(): - arch = "amd64" - elif "-x86_64" in dpath.name.lower(): - arch = "x86_64" - elif "-x86" in dpath.name.lower(): - arch = "x86" - elif "-aarch64" in dpath.name.lower(): - arch = "aarch64" - else: - ctx.error( - f"Cannot pickup the right architecture from the filename '{dpath.name}'." - ) - ctx.exit(1) - if distro == "onedir": - if "-onedir-linux-" in dpath.name.lower(): - release_os = "linux" - elif "-onedir-darwin-" in dpath.name.lower(): - release_os = "macos" - elif "-onedir-windows-" in dpath.name.lower(): - release_os = "windows" - else: - ctx.error( - f"Cannot pickup the right OS from the filename '{dpath.name}'." - ) - ctx.exit(1) - else: - release_os = distro - release_json[dpath.name] = { - "name": dpath.name, - "version": salt_version, - "os": release_os, - "arch": arch, - } - for hash_name in ("blake2b", "sha512", "sha3_512"): - ctx.info(f" * Calculating {hash_name} ...") - hexdigest = _get_file_checksum(fpath, hash_name) - release_json[dpath.name][hash_name.upper()] = hexdigest - with open(f"{hashes_base_path}_{hash_name.upper()}", "a+") as wfh: - wfh.write(f"{hexdigest} {dpath.name}\n") - with open(f"{dpath}.{hash_name}", "a+") as wfh: - wfh.write(f"{hexdigest} {dpath.name}\n") - - for fpath in create_repo_path.iterdir(): - if fpath.suffix in pkg_suffixes: - continue - tools.utils.gpg_sign(ctx, key_id, fpath) - - # Export the GPG key in use - tools.utils.export_gpg_key(ctx, key_id, create_repo_path) - - repo_json = _get_repo_json_file_contents( - ctx, bucket_name=bucket_name, repo_path=repo_path, repo_json_path=repo_json_path - ) - if nightly_build_from: - ctx.info(f"Writing {repo_json_path} ...") - repo_json_path.write_text(json.dumps(repo_json, sort_keys=True)) - return - - major_version = Version(salt_version).major - minor_repo_json_path = create_repo_path.parent / "repo.json" - minor_repo_json = _get_repo_json_file_contents( - ctx, - bucket_name=bucket_name, - repo_path=repo_path, - repo_json_path=minor_repo_json_path, - ) - minor_repo_json[salt_version] = release_json - versions = _parse_versions(*list(minor_repo_json)) - ctx.info( - f"Collected versions from {minor_repo_json_path.relative_to(repo_path)}: " - f"{', '.join(str(vs) for vs in versions)}" - ) - minor_versions = [v for v in versions if v.major == major_version] - ctx.info( - f"Collected versions(Matching major: {major_version}) from " - f"{minor_repo_json_path.relative_to(repo_path)}: " - f"{', '.join(str(vs) for vs in minor_versions)}" - ) - if not versions: - latest_version = Version(salt_version) - else: - latest_version = versions[0] - if not minor_versions: - latest_minor_version = Version(salt_version) - else: - latest_minor_version = minor_versions[0] - - ctx.info(f"Release Version: {salt_version}") - ctx.info(f"Latest Repo Version: {latest_version}") - ctx.info(f"Latest Release Minor Version: {latest_minor_version}") - - latest_link = create_repo_path.parent.parent / "latest" - if latest_version <= salt_version: - repo_json["latest"] = release_json - ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...") - if latest_link.exists(): - latest_link.unlink() - latest_link.symlink_to(f"minor/{salt_version}") - else: - ctx.info( - f"Not creating the '{latest_link.relative_to(repo_path)}' symlink " - f"since {latest_version} > {salt_version}" - ) - - major_link = create_repo_path.parent.parent / str(major_version) - 
if latest_minor_version <= salt_version: - minor_repo_json["latest"] = release_json - # This is the latest minor, update the major in the top level repo.json - # to this version - repo_json[str(major_version)] = release_json - ctx.info(f"Creating '{major_link.relative_to(repo_path)}' symlink ...") - if major_link.exists(): - major_link.unlink() - major_link.symlink_to(f"minor/{salt_version}") - else: - ctx.info( - f"Not creating the '{major_link.relative_to(repo_path)}' symlink " - f"since {latest_minor_version} > {salt_version}" - ) - - ctx.info(f"Writing {minor_repo_json_path} ...") - minor_repo_json_path.write_text(json.dumps(minor_repo_json, sort_keys=True)) - - ctx.info(f"Writing {repo_json_path} ...") - repo_json_path.write_text(json.dumps(repo_json, sort_keys=True)) - - -def _get_repo_json_file_contents( - ctx: Context, - bucket_name: str, - repo_path: pathlib.Path, - repo_json_path: pathlib.Path, -) -> dict[str, Any]: - s3 = boto3.client("s3") - repo_json: dict[str, Any] = {} - try: - ret = s3.head_object( - Bucket=bucket_name, Key=str(repo_json_path.relative_to(repo_path)) - ) - ctx.info( - f"Downloading existing '{repo_json_path.relative_to(repo_path)}' file " - f"from bucket {bucket_name}" - ) - size = ret["ContentLength"] - with repo_json_path.open("wb") as wfh: - with tools.utils.create_progress_bar(file_progress=True) as progress: - task = progress.add_task(description="Downloading...", total=size) - s3.download_fileobj( - Bucket=bucket_name, - Key=str(repo_json_path.relative_to(repo_path)), - Fileobj=wfh, - Callback=tools.utils.UpdateProgress(progress, task), - ) - with repo_json_path.open() as rfh: - repo_json = json.load(rfh) - except ClientError as exc: - if "Error" not in exc.response: - raise - if exc.response["Error"]["Code"] != "404": - raise - ctx.info(f"Could not find {repo_json_path} in bucket {bucket_name}") - if repo_json: - ctx.print(repo_json, soft_wrap=True) - return repo_json - - -def _get_file_checksum(fpath: pathlib.Path, hash_name: str) -> str: - - with fpath.open("rb") as rfh: - try: - digest = hashlib.file_digest(rfh, hash_name) # type: ignore[attr-defined] - except AttributeError: - # Python < 3.11 - buf = bytearray(2**18) # Reusable buffer to reduce allocations. - view = memoryview(buf) - digest = getattr(hashlib, hash_name)() - while True: - size = rfh.readinto(buf) - if size == 0: - break # EOF - digest.update(view[:size]) - hexdigest: str = digest.hexdigest() - return hexdigest - - -def _publish_repo( - ctx: Context, - repo_path: pathlib.Path, - salt_version: str, - nightly_build: bool = False, - stage: bool = False, -): - """ - Publish packaging repositories. - """ - if nightly_build: - bucket_name = tools.utils.RELEASE_BUCKET_NAME - elif stage: - bucket_name = tools.utils.STAGING_BUCKET_NAME - else: - bucket_name = tools.utils.RELEASE_BUCKET_NAME - - ctx.info("Preparing upload ...") - s3 = boto3.client("s3") - to_delete_paths: dict[pathlib.Path, list[dict[str, str]]] = {} - to_upload_paths: list[pathlib.Path] = [] - symlink_paths: list[str] = [] - uploaded_files: list[str] = [] - for dirpath, dirnames, filenames in os.walk(repo_path, followlinks=True): - for dirname in dirnames: - path = pathlib.Path(dirpath, dirname) - if not path.is_symlink(): - continue - # This is a symlink, then we need to delete all files under - # that directory in S3 because S3 does not understand symlinks - # and we would end up adding files to that folder instead of - # replacing it. 
- try: - relpath = path.relative_to(repo_path) - ret = s3.list_objects( - Bucket=bucket_name, - Prefix=str(relpath), - ) - if "Contents" not in ret: - continue - objects = [] - for entry in ret["Contents"]: - objects.append({"Key": entry["Key"]}) - to_delete_paths[path] = objects - symlink_paths.append(str(relpath)) - except ClientError as exc: - if "Error" not in exc.response: - raise - if exc.response["Error"]["Code"] != "404": - raise - - for fpath in filenames: - path = pathlib.Path(dirpath, fpath) - to_upload_paths.append(path) - - with tools.utils.create_progress_bar() as progress: - task = progress.add_task( - "Deleting directories to override.", total=len(to_delete_paths) - ) - for base, objects in to_delete_paths.items(): - relpath = base.relative_to(repo_path) - bucket_uri = f"s3://{bucket_name}/{relpath}" - progress.update(task, description=f"Deleting {bucket_uri}") - try: - ret = s3.delete_objects( - Bucket=bucket_name, - Delete={"Objects": objects}, - ) - except ClientError: - log.exception(f"Failed to delete {bucket_uri}") - finally: - progress.update(task, advance=1) - - try: - ctx.info("Uploading repository ...") - for upload_path in to_upload_paths: - relpath = upload_path.relative_to(repo_path) - size = upload_path.stat().st_size - ctx.info(f" {relpath}") - with tools.utils.create_progress_bar(file_progress=True) as progress: - task = progress.add_task(description="Uploading...", total=size) - s3.upload_file( - str(upload_path), - bucket_name, - str(relpath), - Callback=tools.utils.UpdateProgress(progress, task), - ExtraArgs={ - "Metadata": { - "x-amz-meta-salt-release-version": salt_version, - } - }, - ) - uploaded_files.append(str(relpath)) - if stage is True: - repo_files_path = f"release-artifacts/{salt_version}/.release-files.json" - ctx.info(f"Uploading {repo_files_path} ...") - s3.put_object( - Key=repo_files_path, - Bucket=bucket_name, - Body=json.dumps(uploaded_files).encode(), - Metadata={ - "x-amz-meta-salt-release-version": salt_version, - }, - ) - repo_symlinks_path = ( - f"release-artifacts/{salt_version}/.release-symlinks.json" - ) - ctx.info(f"Uploading {repo_symlinks_path} ...") - s3.put_object( - Key=repo_symlinks_path, - Bucket=bucket_name, - Body=json.dumps(symlink_paths).encode(), - Metadata={ - "x-amz-meta-salt-release-version": salt_version, - }, - ) - except KeyboardInterrupt: - pass - - -def _create_top_level_repo_path( - ctx: Context, - repo_path: pathlib.Path, - salt_version: str, - distro: str, - distro_version: str | None = None, # pylint: disable=bad-whitespace - distro_arch: str | None = None, # pylint: disable=bad-whitespace - nightly_build_from: str | None = None, # pylint: disable=bad-whitespace -): - create_repo_path = repo_path - if nightly_build_from: - create_repo_path = ( - create_repo_path - / "salt-dev" - / nightly_build_from - / datetime.utcnow().strftime("%Y-%m-%d") - ) - create_repo_path.mkdir(exist_ok=True, parents=True) - with ctx.chdir(create_repo_path.parent): - latest_nightly_symlink = pathlib.Path("latest") - if not latest_nightly_symlink.exists(): - ctx.info( - f"Creating 'latest' symlink to '{create_repo_path.relative_to(repo_path)}' ..." 
- ) - latest_nightly_symlink.symlink_to( - create_repo_path.name, target_is_directory=True - ) - elif "rc" in salt_version: - create_repo_path = create_repo_path / "salt_rc" - create_repo_path = create_repo_path / "salt" / "py3" / distro - if distro_version: - create_repo_path = create_repo_path / distro_version - if distro_arch: - create_repo_path = create_repo_path / distro_arch - create_repo_path.mkdir(exist_ok=True, parents=True) - return create_repo_path - - -def _create_repo_path( - ctx: Context, - repo_path: pathlib.Path, - salt_version: str, - distro: str, - distro_version: str | None = None, # pylint: disable=bad-whitespace - distro_arch: str | None = None, # pylint: disable=bad-whitespace - nightly_build_from: str | None = None, # pylint: disable=bad-whitespace -): - create_repo_path = _create_top_level_repo_path( - ctx, - repo_path, - salt_version, - distro, - distro_version, - distro_arch, - nightly_build_from=nightly_build_from, - ) - create_repo_path = create_repo_path / "minor" / salt_version - create_repo_path.mkdir(exist_ok=True, parents=True) - return create_repo_path - - -def _parse_versions(*versions: str) -> list[Version]: - _versions = [] - for version in set(versions): - if version == "latest": - continue - _versions.append(Version(version)) - if _versions: - _versions.sort(reverse=True) - return _versions diff --git a/tools/pkg/repo/__init__.py b/tools/pkg/repo/__init__.py new file mode 100644 index 00000000000..8a3cbd9c81f --- /dev/null +++ b/tools/pkg/repo/__init__.py @@ -0,0 +1,181 @@ +""" +These commands are used to build the pacakge repository files. +""" +# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated +from __future__ import annotations + +import logging +import os +import pathlib +import shutil +import sys +from typing import TYPE_CHECKING + +from ptscripts import Context, command_group + +import tools.pkg +import tools.utils +from tools.utils import Version, get_salt_releases + +try: + import boto3 + from botocore.exceptions import ClientError +except ImportError: + print( + "\nPlease run 'python -m pip install -r " + "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info), + file=sys.stderr, + flush=True, + ) + raise + +log = logging.getLogger(__name__) + +# Define the command group +repo = command_group( + name="repo", + help="Packaging Repository Related Commands", + description=__doc__, + parent="pkg", +) + + +@repo.command(name="backup-previous-releases") +def backup_previous_releases(ctx: Context): + """ + Backup release bucket. + """ + _rclone(ctx, tools.utils.RELEASE_BUCKET_NAME, tools.utils.BACKUP_BUCKET_NAME) + ctx.info("Done") + + +@repo.command(name="restore-previous-releases") +def restore_previous_releases(ctx: Context): + """ + Restore release bucket from backup. 
+ """ + _rclone(ctx, tools.utils.BACKUP_BUCKET_NAME, tools.utils.RELEASE_BUCKET_NAME) + github_output = os.environ.get("GITHUB_OUTPUT") + if github_output is not None: + with open(github_output, "a", encoding="utf-8") as wfh: + wfh.write(f"backup-complete=true\n") + ctx.info("Done") + + +def _rclone(ctx: Context, src: str, dst: str): + rclone = shutil.which("rclone") + if not rclone: + ctx.error("Could not find the rclone binary") + ctx.exit(1) + + if TYPE_CHECKING: + assert rclone + + env = os.environ.copy() + env["RCLONE_CONFIG_S3_TYPE"] = "s3" + cmdline: list[str] = [ + rclone, + "sync", + "--auto-confirm", + "--human-readable", + "--checksum", + "--color=always", + "--metadata", + "--s3-env-auth", + "--s3-location-constraint=us-west-2", + "--s3-provider=AWS", + "--s3-region=us-west-2", + "--stats-file-name-length=0", + "--stats-one-line", + "--stats=5s", + "--transfers=50", + "--fast-list", + "--verbose", + ] + if src == tools.utils.RELEASE_BUCKET_NAME: + cmdline.append("--s3-storage-class=INTELLIGENT_TIERING") + cmdline.extend([f"s3://{src}", f"s3://{dst}"]) + ctx.info(f"Running: {' '.join(cmdline)}") + ret = ctx.run(*cmdline, env=env, check=False) + if ret.returncode: + ctx.error(f"Failed to sync from s3://{src} to s3://{dst}") + ctx.exit(1) + + +@repo.command( + name="confirm-unreleased", + arguments={ + "salt_version": { + "help": "The salt version to check", + }, + "repository": { + "help": ( + "The full repository name, ie, 'saltstack/salt' on GitHub " + "to run the checks against." + ) + }, + }, +) +def confirm_unreleased( + ctx: Context, salt_version: str, repository: str = "saltstack/salt" +): + """ + Confirm that the passed version is not yet tagged and/or released. + """ + releases = get_salt_releases(ctx, repository) + if Version(salt_version) in releases: + ctx.error(f"There's already a '{salt_version}' tag or github release.") + ctx.exit(1) + ctx.info(f"Could not find a release for Salt Version '{salt_version}'") + ctx.exit(0) + + +@repo.command( + name="confirm-staged", + arguments={ + "salt_version": { + "help": "The salt version to check", + }, + "repository": { + "help": ( + "The full repository name, ie, 'saltstack/salt' on GitHub " + "to run the checks against." + ) + }, + }, +) +def confirm_staged(ctx: Context, salt_version: str, repository: str = "saltstack/salt"): + """ + Confirm that the passed version has been staged for release. + """ + s3 = boto3.client("s3") + repo_release_files_path = pathlib.Path( + f"release-artifacts/{salt_version}/.release-files.json" + ) + repo_release_symlinks_path = pathlib.Path( + f"release-artifacts/{salt_version}/.release-symlinks.json" + ) + for remote_path in (repo_release_files_path, repo_release_symlinks_path): + try: + bucket_name = tools.utils.STAGING_BUCKET_NAME + ctx.info( + f"Checking for the presence of {remote_path} on bucket {bucket_name} ..." 
+ ) + s3.head_object( + Bucket=bucket_name, + Key=str(remote_path), + ) + except ClientError as exc: + if "Error" not in exc.response: + log.exception(f"Could not get information about {remote_path}: {exc}") + ctx.exit(1) + if exc.response["Error"]["Code"] == "404": + ctx.error(f"Could not find {remote_path} in bucket.") + ctx.exit(1) + if exc.response["Error"]["Code"] == "400": + ctx.error(f"Could get information about {remote_path}: {exc}") + ctx.exit(1) + log.exception(f"Error getting information about {remote_path}: {exc}") + ctx.exit(1) + ctx.info(f"Version {salt_version} has been staged for release") + ctx.exit(0) diff --git a/tools/pkg/repo/create.py b/tools/pkg/repo/create.py new file mode 100644 index 00000000000..ec4b3331c42 --- /dev/null +++ b/tools/pkg/repo/create.py @@ -0,0 +1,1038 @@ +""" +These commands are used to build the pacakge repository files. +""" +# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated +from __future__ import annotations + +import hashlib +import json +import logging +import os +import pathlib +import shutil +import sys +import textwrap +from typing import TYPE_CHECKING + +from ptscripts import Context, command_group + +import tools.pkg +import tools.utils +from tools.utils import ( + Version, + create_full_repo_path, + create_top_level_repo_path, + get_repo_json_file_contents, + parse_versions, +) + +try: + import boto3 +except ImportError: + print( + "\nPlease run 'python -m pip install -r " + "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info), + file=sys.stderr, + flush=True, + ) + raise + +log = logging.getLogger(__name__) + +create = command_group( + name="create", + help="Packaging Repository Creation Related Commands", + parent=["pkg", "repo"], +) + + +_deb_distro_info = { + "debian": { + "10": { + "label": "deb10ary", + "codename": "buster", + "suitename": "oldstable", + }, + "11": { + "label": "deb11ary", + "codename": "bullseye", + "suitename": "stable", + }, + }, + "ubuntu": { + "20.04": { + "label": "salt_ubuntu2004", + "codename": "focal", + }, + "22.04": { + "label": "salt_ubuntu2204", + "codename": "jammy", + }, + }, +} + + +@create.command( + name="deb", + arguments={ + "salt_version": { + "help": ( + "The salt version for which to build the repository configuration files. " + "If not passed, it will be discovered by running 'python3 salt/version.py'." + ), + "required": True, + }, + "distro": { + "help": "The debian based distribution to build the repository for", + "choices": list(_deb_distro_info), + "required": True, + }, + "distro_version": { + "help": "The distro version.", + "required": True, + }, + "distro_arch": { + "help": "The distribution architecture", + "choices": ("x86_64", "amd64", "aarch64", "arm64"), + }, + "repo_path": { + "help": "Path where the repository shall be created.", + "required": True, + }, + "key_id": { + "help": "The GnuPG key ID used to sign.", + "required": True, + }, + "incoming": { + "help": ( + "The path to the directory containing the files that should added to " + "the repository." + ), + "required": True, + }, + "nightly_build_from": { + "help": "Developement repository target", + }, + }, +) +def debian( + ctx: Context, + salt_version: str = None, + distro: str = None, + distro_version: str = None, + incoming: pathlib.Path = None, + repo_path: pathlib.Path = None, + key_id: str = None, + distro_arch: str = "amd64", + nightly_build_from: str = None, +): + """ + Create the debian repository. 
+ """ + if TYPE_CHECKING: + assert salt_version is not None + assert distro is not None + assert distro_version is not None + assert incoming is not None + assert repo_path is not None + assert key_id is not None + display_name = f"{distro.capitalize()} {distro_version}" + if distro_version not in _deb_distro_info[distro]: + ctx.error(f"Support for {display_name} is missing.") + ctx.exit(1) + + if distro_arch == "x86_64": + ctx.info(f"The {distro_arch} arch is an alias for 'amd64'. Adjusting.") + distro_arch = "amd64" + + if distro_arch == "aarch64": + ctx.info(f"The {distro_arch} arch is an alias for 'arm64'. Adjusting.") + distro_arch = "arm64" + + distro_details = _deb_distro_info[distro][distro_version] + + ctx.info("Distribution Details:") + ctx.info(distro_details) + if TYPE_CHECKING: + assert isinstance(distro_details["label"], str) + assert isinstance(distro_details["codename"], str) + assert isinstance(distro_details["suitename"], str) + label: str = distro_details["label"] + codename: str = distro_details["codename"] + + ftp_archive_config_suite = "" + if distro == "debian": + suitename: str = distro_details["suitename"] + ftp_archive_config_suite = ( + f"""\n APT::FTPArchive::Release::Suite "{suitename}";\n""" + ) + archive_description = f"SaltProject {display_name} Python 3{'' if not nightly_build_from else ' development'} Salt package repo" + ftp_archive_config = f"""\ + APT::FTPArchive::Release::Origin "SaltProject"; + APT::FTPArchive::Release::Label "{label}";{ftp_archive_config_suite} + APT::FTPArchive::Release::Codename "{codename}"; + APT::FTPArchive::Release::Architectures "{distro_arch}"; + APT::FTPArchive::Release::Components "main"; + APT::FTPArchive::Release::Description "{archive_description}"; + APT::FTPArchive::Release::Acquire-By-Hash "yes"; + Dir {{ + ArchiveDir "."; + }}; + BinDirectory "pool" {{ + Packages "dists/{codename}/main/binary-{distro_arch}/Packages"; + Sources "dists/{codename}/main/source/Sources"; + Contents "dists/{codename}/main/Contents-{distro_arch}"; + }} + """ + ctx.info("Creating repository directory structure ...") + create_repo_path = create_top_level_repo_path( + ctx, + repo_path, + salt_version, + distro, + distro_version=distro_version, + distro_arch=distro_arch, + nightly_build_from=nightly_build_from, + ) + # Export the GPG key in use + tools.utils.export_gpg_key(ctx, key_id, create_repo_path) + + create_repo_path = create_full_repo_path( + ctx, + repo_path, + salt_version, + distro, + distro_version=distro_version, + distro_arch=distro_arch, + nightly_build_from=nightly_build_from, + ) + ftp_archive_config_file = create_repo_path / "apt-ftparchive.conf" + ctx.info(f"Writing {ftp_archive_config_file} ...") + ftp_archive_config_file.write_text(textwrap.dedent(ftp_archive_config)) + + # Export the GPG key in use + tools.utils.export_gpg_key(ctx, key_id, create_repo_path) + + pool_path = create_repo_path / "pool" + pool_path.mkdir(exist_ok=True) + for fpath in incoming.iterdir(): + dpath = pool_path / fpath.name + ctx.info(f"Copying {fpath} to {dpath} ...") + shutil.copyfile(fpath, dpath) + if fpath.suffix == ".dsc": + ctx.info(f"Running 'debsign' on {dpath} ...") + ctx.run("debsign", "--re-sign", "-k", key_id, str(dpath), interactive=True) + + dists_path = create_repo_path / "dists" + symlink_parent_path = dists_path / codename / "main" + symlink_paths = ( + symlink_parent_path / "by-hash" / "SHA256", + symlink_parent_path / "source" / "by-hash" / "SHA256", + symlink_parent_path / f"binary-{distro_arch}" / "by-hash" / "SHA256", + ) + + 
for path in symlink_paths: + path.mkdir(exist_ok=True, parents=True) + + cmdline = ["apt-ftparchive", "generate", "apt-ftparchive.conf"] + ctx.info(f"Running '{' '.join(cmdline)}' ...") + ctx.run(*cmdline, cwd=create_repo_path) + + ctx.info("Creating by-hash symlinks ...") + for path in symlink_paths: + for fpath in path.parent.parent.iterdir(): + if not fpath.is_file(): + continue + sha256sum = ctx.run("sha256sum", str(fpath), capture=True) + link = path / sha256sum.stdout.decode().split()[0] + link.symlink_to(f"../../{fpath.name}") + + cmdline = [ + "apt-ftparchive", + "--no-md5", + "--no-sha1", + "--no-sha512", + "release", + "-c", + "apt-ftparchive.conf", + f"dists/{codename}/", + ] + ctx.info(f"Running '{' '.join(cmdline)}' ...") + ret = ctx.run(*cmdline, capture=True, cwd=create_repo_path) + release_file = dists_path / codename / "Release" + ctx.info(f"Writing {release_file} with the output of the previous command...") + release_file.write_bytes(ret.stdout) + + cmdline = [ + "gpg", + "-u", + key_id, + "-o", + f"dists/{codename}/InRelease", + "-a", + "-s", + "--clearsign", + f"dists/{codename}/Release", + ] + ctx.info(f"Running '{' '.join(cmdline)}' ...") + ctx.run(*cmdline, cwd=create_repo_path) + + cmdline = [ + "gpg", + "-u", + key_id, + "-o", + f"dists/{codename}/Release.gpg", + "-a", + "-b", + "-s", + f"dists/{codename}/Release", + ] + + ctx.info(f"Running '{' '.join(cmdline)}' ...") + ctx.run(*cmdline, cwd=create_repo_path) + if not nightly_build_from: + remote_versions = _get_remote_versions( + tools.utils.STAGING_BUCKET_NAME, + create_repo_path.parent.relative_to(repo_path), + ) + major_version = Version(salt_version).major + matching_major = None + for version in remote_versions: + if version.major == major_version: + matching_major = version + break + if not matching_major or matching_major <= salt_version: + major_link = create_repo_path.parent.parent / str(major_version) + ctx.info(f"Creating '{major_link.relative_to(repo_path)}' symlink ...") + major_link.symlink_to(f"minor/{salt_version}") + if not remote_versions or remote_versions[0] <= salt_version: + latest_link = create_repo_path.parent.parent / "latest" + ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...") + latest_link.symlink_to(f"minor/{salt_version}") + + ctx.info("Done") + + +_rpm_distro_info = { + "amazon": ["2"], + "redhat": ["7", "8", "9"], + "fedora": ["36", "37", "38"], + "photon": ["3", "4"], +} + + +@create.command( + name="rpm", + arguments={ + "salt_version": { + "help": ( + "The salt version for which to build the repository configuration files. " + "If not passed, it will be discovered by running 'python3 salt/version.py'." + ), + "required": True, + }, + "distro": { + "help": "The debian based distribution to build the repository for", + "choices": list(_rpm_distro_info), + "required": True, + }, + "distro_version": { + "help": "The distro version.", + "required": True, + }, + "distro_arch": { + "help": "The distribution architecture", + "choices": ("x86_64", "aarch64", "arm64"), + }, + "repo_path": { + "help": "Path where the repository shall be created.", + "required": True, + }, + "key_id": { + "help": "The GnuPG key ID used to sign.", + "required": True, + }, + "incoming": { + "help": ( + "The path to the directory containing the files that should added to " + "the repository." 
+ ), + "required": True, + }, + "nightly_build_from": { + "help": "Developement repository target", + }, + }, +) +def rpm( + ctx: Context, + salt_version: str = None, + distro: str = None, + distro_version: str = None, + incoming: pathlib.Path = None, + repo_path: pathlib.Path = None, + key_id: str = None, + distro_arch: str = "amd64", + nightly_build_from: str = None, +): + """ + Create the redhat repository. + """ + if TYPE_CHECKING: + assert salt_version is not None + assert distro is not None + assert distro_version is not None + assert incoming is not None + assert repo_path is not None + assert key_id is not None + display_name = f"{distro.capitalize()} {distro_version}" + if distro_version not in _rpm_distro_info[distro]: + ctx.error(f"Support for {display_name} is missing.") + ctx.exit(1) + + if distro_arch == "aarch64": + ctx.info(f"The {distro_arch} arch is an alias for 'arm64'. Adjusting.") + distro_arch = "arm64" + + ctx.info("Creating repository directory structure ...") + create_repo_path = create_top_level_repo_path( + ctx, + repo_path, + salt_version, + distro, + distro_version=distro_version, + distro_arch=distro_arch, + nightly_build_from=nightly_build_from, + ) + # Export the GPG key in use + tools.utils.export_gpg_key(ctx, key_id, create_repo_path) + + create_repo_path = create_full_repo_path( + ctx, + repo_path, + salt_version, + distro, + distro_version=distro_version, + distro_arch=distro_arch, + nightly_build_from=nightly_build_from, + ) + + # Export the GPG key in use + tools.utils.export_gpg_key(ctx, key_id, create_repo_path) + + for fpath in incoming.iterdir(): + if ".src" in fpath.suffixes: + dpath = create_repo_path / "SRPMS" / fpath.name + else: + dpath = create_repo_path / fpath.name + ctx.info(f"Copying {fpath} to {dpath} ...") + shutil.copyfile(fpath, dpath) + if fpath.suffix == ".rpm": + ctx.info(f"Running 'rpmsign' on {dpath} ...") + ctx.run( + "rpmsign", + "--key-id", + key_id, + "--addsign", + "--digest-algo=sha256", + str(dpath), + ) + + createrepo = shutil.which("createrepo") + if createrepo is None: + container = "ghcr.io/saltstack/salt-ci-containers/packaging:centosstream-9" + ctx.info(f"Using docker container '{container}' to call 'createrepo'...") + uid = ctx.run("id", "-u", capture=True).stdout.strip().decode() + gid = ctx.run("id", "-g", capture=True).stdout.strip().decode() + ctx.run( + "docker", + "run", + "--rm", + "-v", + f"{create_repo_path.resolve()}:/code", + "-u", + f"{uid}:{gid}", + "-w", + "/code", + container, + "createrepo", + ".", + ) + else: + ctx.run("createrepo", ".", cwd=create_repo_path) + + if nightly_build_from: + repo_domain = os.environ.get("SALT_REPO_DOMAIN_RELEASE", "repo.saltproject.io") + else: + repo_domain = os.environ.get( + "SALT_REPO_DOMAIN_STAGING", "staging.repo.saltproject.io" + ) + + salt_repo_user = os.environ.get("SALT_REPO_USER") + if salt_repo_user: + log.info( + "SALT_REPO_USER: %s", + salt_repo_user[0] + "*" * (len(salt_repo_user) - 2) + salt_repo_user[-1], + ) + salt_repo_pass = os.environ.get("SALT_REPO_PASS") + if salt_repo_pass: + log.info( + "SALT_REPO_PASS: %s", + salt_repo_pass[0] + "*" * (len(salt_repo_pass) - 2) + salt_repo_pass[-1], + ) + if salt_repo_user and salt_repo_pass: + repo_domain = f"{salt_repo_user}:{salt_repo_pass}@{repo_domain}" + + def _create_repo_file(create_repo_path, url_suffix): + ctx.info(f"Creating '{repo_file_path.relative_to(repo_path)}' file ...") + if nightly_build_from: + base_url = f"salt-dev/{nightly_build_from}/" + repo_file_contents = "[salt-nightly-repo]" + elif 
"rc" in salt_version: + base_url = "salt_rc/" + repo_file_contents = "[salt-rc-repo]" + else: + base_url = "" + repo_file_contents = "[salt-repo]" + base_url += f"salt/py3/{distro}/{distro_version}/{distro_arch}/{url_suffix}" + if distro == "amazon": + distro_name = "Amazon Linux" + elif distro == "redhat": + distro_name = "RHEL/CentOS" + else: + distro_name = distro.capitalize() + + if distro != "photon" and int(distro_version) < 8: + failovermethod = "\n failovermethod=priority" + else: + failovermethod = "" + + repo_file_contents += textwrap.dedent( + f""" + name=Salt repo for {distro_name} {distro_version} PY3 + baseurl=https://{repo_domain}/{base_url} + skip_if_unavailable=True{failovermethod} + priority=10 + enabled=1 + enabled_metadata=1 + gpgcheck=1 + gpgkey=https://{repo_domain}/{base_url}/{tools.utils.GPG_KEY_FILENAME}.pub + """ + ) + create_repo_path.write_text(repo_file_contents) + + if nightly_build_from: + repo_file_path = create_repo_path.parent / "nightly.repo" + else: + repo_file_path = create_repo_path.parent / f"{create_repo_path.name}.repo" + + _create_repo_file(repo_file_path, f"minor/{salt_version}") + + if not nightly_build_from: + remote_versions = _get_remote_versions( + tools.utils.STAGING_BUCKET_NAME, + create_repo_path.parent.relative_to(repo_path), + ) + major_version = Version(salt_version).major + matching_major = None + for version in remote_versions: + if version.major == major_version: + matching_major = version + break + if not matching_major or matching_major <= salt_version: + major_link = create_repo_path.parent.parent / str(major_version) + ctx.info(f"Creating '{major_link.relative_to(repo_path)}' symlink ...") + major_link.symlink_to(f"minor/{salt_version}") + repo_file_path = create_repo_path.parent.parent / f"{major_version}.repo" + _create_repo_file(repo_file_path, str(major_version)) + if not remote_versions or remote_versions[0] <= salt_version: + latest_link = create_repo_path.parent.parent / "latest" + ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...") + latest_link.symlink_to(f"minor/{salt_version}") + repo_file_path = create_repo_path.parent.parent / "latest.repo" + _create_repo_file(repo_file_path, "latest") + + ctx.info("Done") + + +@create.command( + name="windows", + arguments={ + "salt_version": { + "help": "The salt version for which to build the repository", + "required": True, + }, + "repo_path": { + "help": "Path where the repository shall be created.", + "required": True, + }, + "key_id": { + "help": "The GnuPG key ID used to sign.", + "required": True, + }, + "incoming": { + "help": ( + "The path to the directory containing the files that should added to " + "the repository." + ), + "required": True, + }, + "nightly_build_from": { + "help": "Developement repository target", + }, + }, +) +def windows( + ctx: Context, + salt_version: str = None, + incoming: pathlib.Path = None, + repo_path: pathlib.Path = None, + key_id: str = None, + nightly_build_from: str = None, +): + """ + Create the windows repository. 
+ """ + if TYPE_CHECKING: + assert salt_version is not None + assert incoming is not None + assert repo_path is not None + assert key_id is not None + _create_onedir_based_repo( + ctx, + salt_version=salt_version, + nightly_build_from=nightly_build_from, + repo_path=repo_path, + incoming=incoming, + key_id=key_id, + distro="windows", + pkg_suffixes=(".msi", ".exe"), + ) + ctx.info("Done") + + +@create.command( + name="macos", + arguments={ + "salt_version": { + "help": "The salt version for which to build the repository", + "required": True, + }, + "repo_path": { + "help": "Path where the repository shall be created.", + "required": True, + }, + "key_id": { + "help": "The GnuPG key ID used to sign.", + "required": True, + }, + "incoming": { + "help": ( + "The path to the directory containing the files that should added to " + "the repository." + ), + "required": True, + }, + "nightly_build_from": { + "help": "Developement repository target", + }, + }, +) +def macos( + ctx: Context, + salt_version: str = None, + incoming: pathlib.Path = None, + repo_path: pathlib.Path = None, + key_id: str = None, + nightly_build_from: str = None, +): + """ + Create the windows repository. + """ + if TYPE_CHECKING: + assert salt_version is not None + assert incoming is not None + assert repo_path is not None + assert key_id is not None + _create_onedir_based_repo( + ctx, + salt_version=salt_version, + nightly_build_from=nightly_build_from, + repo_path=repo_path, + incoming=incoming, + key_id=key_id, + distro="macos", + pkg_suffixes=(".pkg",), + ) + ctx.info("Done") + + +@create.command( + name="onedir", + arguments={ + "salt_version": { + "help": "The salt version for which to build the repository", + "required": True, + }, + "repo_path": { + "help": "Path where the repository shall be created.", + "required": True, + }, + "key_id": { + "help": "The GnuPG key ID used to sign.", + "required": True, + }, + "incoming": { + "help": ( + "The path to the directory containing the files that should added to " + "the repository." + ), + "required": True, + }, + "nightly_build_from": { + "help": "Developement repository target", + }, + }, +) +def onedir( + ctx: Context, + salt_version: str = None, + incoming: pathlib.Path = None, + repo_path: pathlib.Path = None, + key_id: str = None, + nightly_build_from: str = None, +): + """ + Create the onedir repository. + """ + if TYPE_CHECKING: + assert salt_version is not None + assert incoming is not None + assert repo_path is not None + assert key_id is not None + _create_onedir_based_repo( + ctx, + salt_version=salt_version, + nightly_build_from=nightly_build_from, + repo_path=repo_path, + incoming=incoming, + key_id=key_id, + distro="onedir", + pkg_suffixes=(".xz", ".zip"), + ) + ctx.info("Done") + + +@create.command( + name="src", + arguments={ + "salt_version": { + "help": "The salt version for which to build the repository", + "required": True, + }, + "repo_path": { + "help": "Path where the repository shall be created.", + "required": True, + }, + "key_id": { + "help": "The GnuPG key ID used to sign.", + "required": True, + }, + "incoming": { + "help": ( + "The path to the directory containing the files that should added to " + "the repository." 
+ ), + "required": True, + }, + "nightly_build_from": { + "help": "Developement repository target", + }, + }, +) +def src( + ctx: Context, + salt_version: str = None, + incoming: pathlib.Path = None, + repo_path: pathlib.Path = None, + key_id: str = None, + nightly_build_from: str = None, +): + """ + Create the onedir repository. + """ + if TYPE_CHECKING: + assert salt_version is not None + assert incoming is not None + assert repo_path is not None + assert key_id is not None + + ctx.info("Creating repository directory structure ...") + create_repo_path = create_top_level_repo_path( + ctx, + repo_path, + salt_version, + distro="src", + nightly_build_from=nightly_build_from, + ) + # Export the GPG key in use + tools.utils.export_gpg_key(ctx, key_id, create_repo_path) + create_repo_path = create_repo_path / salt_version + create_repo_path.mkdir(exist_ok=True, parents=True) + hashes_base_path = create_repo_path / f"salt-{salt_version}" + for fpath in incoming.iterdir(): + if fpath.suffix not in (".gz",): + continue + ctx.info(f"* Processing {fpath} ...") + dpath = create_repo_path / fpath.name + ctx.info(f"Copying {fpath} to {dpath} ...") + shutil.copyfile(fpath, dpath) + for hash_name in ("blake2b", "sha512", "sha3_512"): + ctx.info(f" * Calculating {hash_name} ...") + hexdigest = _get_file_checksum(fpath, hash_name) + with open(f"{hashes_base_path}_{hash_name.upper()}", "a+") as wfh: + wfh.write(f"{hexdigest} {dpath.name}\n") + with open(f"{dpath}.{hash_name}", "a+") as wfh: + wfh.write(f"{hexdigest} {dpath.name}\n") + + for fpath in create_repo_path.iterdir(): + if fpath.suffix in (".pub", ".gpg"): + continue + tools.utils.gpg_sign(ctx, key_id, fpath) + + # Export the GPG key in use + tools.utils.export_gpg_key(ctx, key_id, create_repo_path) + ctx.info("Done") + + +def _get_remote_versions(bucket_name: str, remote_path: str): + log.info( + "Getting remote versions from bucket %r under path: %s", + bucket_name, + remote_path, + ) + remote_path = str(remote_path) + if not remote_path.endswith("/"): + remote_path += "/" + + s3 = boto3.client("s3") + ret = s3.list_objects( + Bucket=bucket_name, + Delimiter="/", + Prefix=remote_path, + ) + if "CommonPrefixes" not in ret: + return [] + versions = [] + for entry in ret["CommonPrefixes"]: + _, version = entry["Prefix"].rstrip("/").rsplit("/", 1) + if version == "latest": + continue + versions.append(Version(version)) + versions.sort(reverse=True) + log.info("Remote versions collected: %s", versions) + return versions + + +def _create_onedir_based_repo( + ctx: Context, + salt_version: str, + nightly_build_from: str | None, + repo_path: pathlib.Path, + incoming: pathlib.Path, + key_id: str, + distro: str, + pkg_suffixes: tuple[str, ...], +): + ctx.info("Creating repository directory structure ...") + create_repo_path = create_top_level_repo_path( + ctx, + repo_path, + salt_version, + distro, + nightly_build_from=nightly_build_from, + ) + # Export the GPG key in use + tools.utils.export_gpg_key(ctx, key_id, create_repo_path) + + create_repo_path = create_full_repo_path( + ctx, + repo_path, + salt_version, + distro, + nightly_build_from=nightly_build_from, + ) + if not nightly_build_from: + repo_json_path = create_repo_path.parent.parent / "repo.json" + else: + repo_json_path = create_repo_path.parent / "repo.json" + + if nightly_build_from: + bucket_name = tools.utils.RELEASE_BUCKET_NAME + else: + bucket_name = tools.utils.STAGING_BUCKET_NAME + + release_json = {} + + copy_exclusions = ( + ".blake2b", + ".sha512", + ".sha3_512", + ".BLAKE2B", + 
".SHA512", + ".SHA3_512", + ".json", + ) + hashes_base_path = create_repo_path / f"salt-{salt_version}" + for fpath in incoming.iterdir(): + if fpath.suffix in copy_exclusions: + continue + ctx.info(f"* Processing {fpath} ...") + dpath = create_repo_path / fpath.name + ctx.info(f"Copying {fpath} to {dpath} ...") + shutil.copyfile(fpath, dpath) + if "-amd64" in dpath.name.lower(): + arch = "amd64" + elif "-x86_64" in dpath.name.lower(): + arch = "x86_64" + elif "-x86" in dpath.name.lower(): + arch = "x86" + elif "-aarch64" in dpath.name.lower(): + arch = "aarch64" + else: + ctx.error( + f"Cannot pickup the right architecture from the filename '{dpath.name}'." + ) + ctx.exit(1) + if distro == "onedir": + if "-onedir-linux-" in dpath.name.lower(): + release_os = "linux" + elif "-onedir-darwin-" in dpath.name.lower(): + release_os = "macos" + elif "-onedir-windows-" in dpath.name.lower(): + release_os = "windows" + else: + ctx.error( + f"Cannot pickup the right OS from the filename '{dpath.name}'." + ) + ctx.exit(1) + else: + release_os = distro + release_json[dpath.name] = { + "name": dpath.name, + "version": salt_version, + "os": release_os, + "arch": arch, + } + for hash_name in ("blake2b", "sha512", "sha3_512"): + ctx.info(f" * Calculating {hash_name} ...") + hexdigest = _get_file_checksum(fpath, hash_name) + release_json[dpath.name][hash_name.upper()] = hexdigest + with open(f"{hashes_base_path}_{hash_name.upper()}", "a+") as wfh: + wfh.write(f"{hexdigest} {dpath.name}\n") + with open(f"{dpath}.{hash_name}", "a+") as wfh: + wfh.write(f"{hexdigest} {dpath.name}\n") + + for fpath in create_repo_path.iterdir(): + if fpath.suffix in pkg_suffixes: + continue + tools.utils.gpg_sign(ctx, key_id, fpath) + + # Export the GPG key in use + tools.utils.export_gpg_key(ctx, key_id, create_repo_path) + + repo_json = get_repo_json_file_contents( + ctx, bucket_name=bucket_name, repo_path=repo_path, repo_json_path=repo_json_path + ) + if nightly_build_from: + ctx.info(f"Writing {repo_json_path} ...") + repo_json_path.write_text(json.dumps(repo_json, sort_keys=True)) + return + + major_version = Version(salt_version).major + minor_repo_json_path = create_repo_path.parent / "repo.json" + minor_repo_json = get_repo_json_file_contents( + ctx, + bucket_name=bucket_name, + repo_path=repo_path, + repo_json_path=minor_repo_json_path, + ) + minor_repo_json[salt_version] = release_json + versions = parse_versions(*list(minor_repo_json)) + ctx.info( + f"Collected versions from {minor_repo_json_path.relative_to(repo_path)}: " + f"{', '.join(str(vs) for vs in versions)}" + ) + minor_versions = [v for v in versions if v.major == major_version] + ctx.info( + f"Collected versions(Matching major: {major_version}) from " + f"{minor_repo_json_path.relative_to(repo_path)}: " + f"{', '.join(str(vs) for vs in minor_versions)}" + ) + if not versions: + latest_version = Version(salt_version) + else: + latest_version = versions[0] + if not minor_versions: + latest_minor_version = Version(salt_version) + else: + latest_minor_version = minor_versions[0] + + ctx.info(f"Release Version: {salt_version}") + ctx.info(f"Latest Repo Version: {latest_version}") + ctx.info(f"Latest Release Minor Version: {latest_minor_version}") + + latest_link = create_repo_path.parent.parent / "latest" + if latest_version <= salt_version: + repo_json["latest"] = release_json + ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...") + if latest_link.exists(): + latest_link.unlink() + latest_link.symlink_to(f"minor/{salt_version}") + else: + 
ctx.info( + f"Not creating the '{latest_link.relative_to(repo_path)}' symlink " + f"since {latest_version} > {salt_version}" + ) + + major_link = create_repo_path.parent.parent / str(major_version) + if latest_minor_version <= salt_version: + minor_repo_json["latest"] = release_json + # This is the latest minor, update the major in the top level repo.json + # to this version + repo_json[str(major_version)] = release_json + ctx.info(f"Creating '{major_link.relative_to(repo_path)}' symlink ...") + if major_link.exists(): + major_link.unlink() + major_link.symlink_to(f"minor/{salt_version}") + else: + ctx.info( + f"Not creating the '{major_link.relative_to(repo_path)}' symlink " + f"since {latest_minor_version} > {salt_version}" + ) + + ctx.info(f"Writing {minor_repo_json_path} ...") + minor_repo_json_path.write_text(json.dumps(minor_repo_json, sort_keys=True)) + + ctx.info(f"Writing {repo_json_path} ...") + repo_json_path.write_text(json.dumps(repo_json, sort_keys=True)) + + +def _get_file_checksum(fpath: pathlib.Path, hash_name: str) -> str: + + with fpath.open("rb") as rfh: + try: + digest = hashlib.file_digest(rfh, hash_name) # type: ignore[attr-defined] + except AttributeError: + # Python < 3.11 + buf = bytearray(2**18) # Reusable buffer to reduce allocations. + view = memoryview(buf) + digest = getattr(hashlib, hash_name)() + while True: + size = rfh.readinto(buf) + if size == 0: + break # EOF + digest.update(view[:size]) + hexdigest: str = digest.hexdigest() + return hexdigest diff --git a/tools/pkg/repo/publish.py b/tools/pkg/repo/publish.py new file mode 100644 index 00000000000..cc6a92235c4 --- /dev/null +++ b/tools/pkg/repo/publish.py @@ -0,0 +1,653 @@ +""" +These commands are used to build the pacakge repository files. +""" +# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated +from __future__ import annotations + +import fnmatch +import json +import logging +import os +import pathlib +import re +import sys +import tempfile +import textwrap +from typing import TYPE_CHECKING, Any + +import packaging.version +from ptscripts import Context, command_group + +import tools.pkg +import tools.utils +from tools.utils import ( + Version, + create_full_repo_path, + get_repo_json_file_contents, + get_salt_releases, + parse_versions, +) + +try: + import boto3 + from botocore.exceptions import ClientError +except ImportError: + print( + "\nPlease run 'python -m pip install -r " + "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info), + file=sys.stderr, + flush=True, + ) + raise + +log = logging.getLogger(__name__) + +publish = command_group( + name="publish", + help="Packaging Repository Publication Related Commands", + parent=["pkg", "repo"], +) + + +@publish.command( + arguments={ + "repo_path": { + "help": "Local path for the repository that shall be published.", + }, + "salt_version": { + "help": "The salt version for which to build the repository", + "required": True, + }, + } +) +def nightly(ctx: Context, repo_path: pathlib.Path, salt_version: str = None): + """ + Publish to the nightly bucket. 
+ """ + if TYPE_CHECKING: + assert salt_version is not None + _publish_repo( + ctx, repo_path=repo_path, nightly_build=True, salt_version=salt_version + ) + + +@publish.command( + arguments={ + "repo_path": { + "help": "Local path for the repository that shall be published.", + }, + "salt_version": { + "help": "The salt version for which to build the repository", + "required": True, + }, + } +) +def staging(ctx: Context, repo_path: pathlib.Path, salt_version: str = None): + """ + Publish to the staging bucket. + """ + if TYPE_CHECKING: + assert salt_version is not None + _publish_repo(ctx, repo_path=repo_path, stage=True, salt_version=salt_version) + + +@publish.command( + arguments={ + "salt_version": { + "help": "The salt version to release.", + }, + } +) +def release(ctx: Context, salt_version: str): + """ + Publish to the release bucket. + """ + if "rc" in salt_version: + bucket_folder = "salt_rc/salt/py3" + else: + bucket_folder = "salt/py3" + + files_to_copy: list[str] + directories_to_delete: list[str] = [] + + ctx.info("Grabbing remote file listing of files to copy...") + s3 = boto3.client("s3") + repo_release_files_path = pathlib.Path( + f"release-artifacts/{salt_version}/.release-files.json" + ) + repo_release_symlinks_path = pathlib.Path( + f"release-artifacts/{salt_version}/.release-symlinks.json" + ) + with tempfile.TemporaryDirectory(prefix=f"{salt_version}_release_") as tsd: + local_release_files_path = pathlib.Path(tsd) / repo_release_files_path.name + try: + bucket_name = tools.utils.STAGING_BUCKET_NAME + with local_release_files_path.open("wb") as wfh: + ctx.info( + f"Downloading {repo_release_files_path} from bucket {bucket_name} ..." + ) + s3.download_fileobj( + Bucket=bucket_name, + Key=str(repo_release_files_path), + Fileobj=wfh, + ) + files_to_copy = json.loads(local_release_files_path.read_text()) + except ClientError as exc: + if "Error" not in exc.response: + log.exception(f"Error downloading {repo_release_files_path}: {exc}") + ctx.exit(1) + if exc.response["Error"]["Code"] == "404": + ctx.error(f"Could not find {repo_release_files_path} in bucket.") + ctx.exit(1) + if exc.response["Error"]["Code"] == "400": + ctx.error( + f"Could not download {repo_release_files_path} from bucket: {exc}" + ) + ctx.exit(1) + log.exception(f"Error downloading {repo_release_files_path}: {exc}") + ctx.exit(1) + local_release_symlinks_path = ( + pathlib.Path(tsd) / repo_release_symlinks_path.name + ) + try: + with local_release_symlinks_path.open("wb") as wfh: + ctx.info( + f"Downloading {repo_release_symlinks_path} from bucket {bucket_name} ..." 
+ ) + s3.download_fileobj( + Bucket=bucket_name, + Key=str(repo_release_symlinks_path), + Fileobj=wfh, + ) + directories_to_delete = json.loads(local_release_symlinks_path.read_text()) + except ClientError as exc: + if "Error" not in exc.response: + log.exception(f"Error downloading {repo_release_symlinks_path}: {exc}") + ctx.exit(1) + if exc.response["Error"]["Code"] == "404": + ctx.error(f"Could not find {repo_release_symlinks_path} in bucket.") + ctx.exit(1) + if exc.response["Error"]["Code"] == "400": + ctx.error( + f"Could not download {repo_release_symlinks_path} from bucket: {exc}" + ) + ctx.exit(1) + log.exception(f"Error downloading {repo_release_symlinks_path}: {exc}") + ctx.exit(1) + + if directories_to_delete: + with tools.utils.create_progress_bar() as progress: + task = progress.add_task( + "Deleting directories to override.", + total=len(directories_to_delete), + ) + for directory in directories_to_delete: + try: + objects_to_delete: list[dict[str, str]] = [] + for path in _get_repo_file_list( + bucket_name=tools.utils.RELEASE_BUCKET_NAME, + bucket_folder=bucket_folder, + glob_match=f"{directory}/**", + ): + objects_to_delete.append({"Key": path}) + if objects_to_delete: + s3.delete_objects( + Bucket=tools.utils.RELEASE_BUCKET_NAME, + Delete={"Objects": objects_to_delete}, + ) + except ClientError: + log.exception("Failed to delete remote files") + finally: + progress.update(task, advance=1) + + already_copied_files: list[str] = [] + s3 = boto3.client("s3") + dot_repo_files = [] + with tools.utils.create_progress_bar() as progress: + task = progress.add_task( + "Copying files between buckets", total=len(files_to_copy) + ) + for fpath in files_to_copy: + if fpath in already_copied_files: + continue + if fpath.endswith(".repo"): + dot_repo_files.append(fpath) + ctx.info(f" * Copying {fpath}") + try: + s3.copy_object( + Bucket=tools.utils.RELEASE_BUCKET_NAME, + Key=fpath, + CopySource={ + "Bucket": tools.utils.STAGING_BUCKET_NAME, + "Key": fpath, + }, + MetadataDirective="COPY", + TaggingDirective="COPY", + ServerSideEncryption="AES256", + ) + already_copied_files.append(fpath) + except ClientError: + log.exception(f"Failed to copy {fpath}") + finally: + progress.update(task, advance=1) + + # Now let's get the onedir based repositories where we need to update several repo.json + major_version = packaging.version.parse(salt_version).major + with tempfile.TemporaryDirectory(prefix=f"{salt_version}_release_") as tsd: + repo_path = pathlib.Path(tsd) + for distro in ("windows", "macos", "onedir"): + + create_repo_path = create_full_repo_path( + ctx, + repo_path, + salt_version, + distro=distro, + ) + repo_json_path = create_repo_path.parent.parent / "repo.json" + + release_repo_json = get_repo_json_file_contents( + ctx, + bucket_name=tools.utils.RELEASE_BUCKET_NAME, + repo_path=repo_path, + repo_json_path=repo_json_path, + ) + minor_repo_json_path = create_repo_path.parent / "repo.json" + + staging_minor_repo_json = get_repo_json_file_contents( + ctx, + bucket_name=tools.utils.STAGING_BUCKET_NAME, + repo_path=repo_path, + repo_json_path=minor_repo_json_path, + ) + release_minor_repo_json = get_repo_json_file_contents( + ctx, + bucket_name=tools.utils.RELEASE_BUCKET_NAME, + repo_path=repo_path, + repo_json_path=minor_repo_json_path, + ) + + release_json = staging_minor_repo_json[salt_version] + + major_version = Version(salt_version).major + versions = parse_versions(*list(release_minor_repo_json)) + ctx.info( + f"Collected versions from {minor_repo_json_path.relative_to(repo_path)}: 
" + f"{', '.join(str(vs) for vs in versions)}" + ) + minor_versions = [v for v in versions if v.major == major_version] + ctx.info( + f"Collected versions(Matching major: {major_version}) from " + f"{minor_repo_json_path.relative_to(repo_path)}: " + f"{', '.join(str(vs) for vs in minor_versions)}" + ) + if not versions: + latest_version = Version(salt_version) + else: + latest_version = versions[0] + if not minor_versions: + latest_minor_version = Version(salt_version) + else: + latest_minor_version = minor_versions[0] + + ctx.info(f"Release Version: {salt_version}") + ctx.info(f"Latest Repo Version: {latest_version}") + ctx.info(f"Latest Release Minor Version: {latest_minor_version}") + + # Add the minor version + release_minor_repo_json[salt_version] = release_json + + if latest_version <= salt_version: + release_repo_json["latest"] = release_json + + if latest_minor_version <= salt_version: + release_minor_repo_json["latest"] = release_json + + ctx.info(f"Writing {minor_repo_json_path} ...") + minor_repo_json_path.write_text( + json.dumps(release_minor_repo_json, sort_keys=True) + ) + ctx.info(f"Writing {repo_json_path} ...") + repo_json_path.write_text(json.dumps(release_repo_json, sort_keys=True)) + + # And now, let's get the several rpm "*.repo" files to update the base + # domain from staging to release + release_domain = os.environ.get( + "SALT_REPO_DOMAIN_RELEASE", "repo.saltproject.io" + ) + for path in dot_repo_files: + repo_file_path = repo_path.joinpath(path) + repo_file_path.parent.mkdir(exist_ok=True, parents=True) + bucket_name = tools.utils.STAGING_BUCKET_NAME + try: + ret = s3.head_object(Bucket=bucket_name, Key=path) + ctx.info( + f"Downloading existing '{repo_file_path.relative_to(repo_path)}' " + f"file from bucket {bucket_name}" + ) + size = ret["ContentLength"] + with repo_file_path.open("wb") as wfh: + with tools.utils.create_progress_bar( + file_progress=True + ) as progress: + task = progress.add_task( + description="Downloading...", total=size + ) + s3.download_fileobj( + Bucket=bucket_name, + Key=path, + Fileobj=wfh, + Callback=tools.utils.UpdateProgress(progress, task), + ) + updated_contents = re.sub( + r"^(baseurl|gpgkey)=https://([^/]+)/(.*)$", + rf"\1=https://{release_domain}/\3", + repo_file_path.read_text(), + flags=re.MULTILINE, + ) + ctx.info(f"Updated '{repo_file_path.relative_to(repo_path)}:") + ctx.print(updated_contents) + repo_file_path.write_text(updated_contents) + except ClientError as exc: + if "Error" not in exc.response: + raise + if exc.response["Error"]["Code"] != "404": + raise + ctx.info(f"Could not find {repo_file_path} in bucket {bucket_name}") + + for dirpath, dirnames, filenames in os.walk(repo_path, followlinks=True): + for path in filenames: + upload_path = pathlib.Path(dirpath, path) + relpath = upload_path.relative_to(repo_path) + size = upload_path.stat().st_size + ctx.info(f" {relpath}") + with tools.utils.create_progress_bar(file_progress=True) as progress: + task = progress.add_task(description="Uploading...", total=size) + s3.upload_file( + str(upload_path), + tools.utils.RELEASE_BUCKET_NAME, + str(relpath), + Callback=tools.utils.UpdateProgress(progress, task), + ) + + +@publish.command( + arguments={ + "salt_version": { + "help": "The salt version to release.", + }, + "key_id": { + "help": "The GnuPG key ID used to sign.", + "required": True, + }, + "repository": { + "help": ( + "The full repository name, ie, 'saltstack/salt' on GitHub " + "to run the checks against." 
+ ) + }, + } +) +def github( + ctx: Context, + salt_version: str, + key_id: str = None, + repository: str = "saltstack/salt", +): + """ + Publish the release on GitHub releases. + """ + if TYPE_CHECKING: + assert key_id is not None + + s3 = boto3.client("s3") + + # Let's download the release artifacts stored in staging + artifacts_path = pathlib.Path.cwd() / "release-artifacts" + artifacts_path.mkdir(exist_ok=True) + release_artifacts_listing: dict[pathlib.Path, int] = {} + continuation_token = None + while True: + kwargs: dict[str, str] = {} + if continuation_token: + kwargs["ContinuationToken"] = continuation_token + ret = s3.list_objects_v2( + Bucket=tools.utils.STAGING_BUCKET_NAME, + Prefix=f"release-artifacts/{salt_version}", + FetchOwner=False, + **kwargs, + ) + contents = ret.pop("Contents", None) + if contents is None: + break + for entry in contents: + entry_path = pathlib.Path(entry["Key"]) + if entry_path.name.startswith("."): + continue + release_artifacts_listing[entry_path] = entry["Size"] + if not ret["IsTruncated"]: + break + continuation_token = ret["NextContinuationToken"] + + for entry_path, size in release_artifacts_listing.items(): + ctx.info(f" * {entry_path.name}") + local_path = artifacts_path / entry_path.name + with local_path.open("wb") as wfh: + with tools.utils.create_progress_bar(file_progress=True) as progress: + task = progress.add_task(description="Downloading...", total=size) + s3.download_fileobj( + Bucket=tools.utils.STAGING_BUCKET_NAME, + Key=str(entry_path), + Fileobj=wfh, + Callback=tools.utils.UpdateProgress(progress, task), + ) + + for artifact in artifacts_path.iterdir(): + if artifact.suffix in (".patch", ".asc", ".gpg", ".pub"): + continue + tools.utils.gpg_sign(ctx, key_id, artifact) + + # Export the GPG key in use + tools.utils.export_gpg_key(ctx, key_id, artifacts_path) + + release_message = f"""\ + # Welcome to Salt v{salt_version} + + | :exclamation: ATTENTION | + |:-------------------------------------------------------------------------------------------------------------------------| + | The archives generated by GitHub(`Source code(zip)`, `Source code(tar.gz)`) will not report Salt's version properly. | + | Please use the tarball generated by The Salt Project Team(`salt-{salt_version}.tar.gz`). + """ + release_message_path = artifacts_path / "gh-release-body.md" + release_message_path.write_text(textwrap.dedent(release_message).strip()) + + github_output = os.environ.get("GITHUB_OUTPUT") + if github_output is None: + ctx.warn("The 'GITHUB_OUTPUT' variable is not set. 
Stop processing.") + ctx.exit(0) + + if TYPE_CHECKING: + assert github_output is not None + + with open(github_output, "a", encoding="utf-8") as wfh: + wfh.write(f"release-messsage-file={release_message_path.resolve()}\n") + + releases = get_salt_releases(ctx, repository) + if Version(salt_version) >= releases[-1]: + make_latest = True + else: + make_latest = False + with open(github_output, "a", encoding="utf-8") as wfh: + wfh.write(f"make-latest={json.dumps(make_latest)}\n") + + artifacts_to_upload = [] + for artifact in artifacts_path.iterdir(): + if artifact.suffix == ".patch": + continue + if artifact.name == release_message_path.name: + continue + artifacts_to_upload.append(str(artifact.resolve())) + + with open(github_output, "a", encoding="utf-8") as wfh: + wfh.write(f"release-artifacts={','.join(artifacts_to_upload)}\n") + ctx.exit(0) + + +def _get_repo_detailed_file_list( + bucket_name: str, + bucket_folder: str = "", + glob_match: str = "**", +) -> list[dict[str, Any]]: + s3 = boto3.client("s3") + listing: list[dict[str, Any]] = [] + continuation_token = None + while True: + kwargs: dict[str, str] = {} + if continuation_token: + kwargs["ContinuationToken"] = continuation_token + ret = s3.list_objects_v2( + Bucket=bucket_name, + Prefix=bucket_folder, + FetchOwner=False, + **kwargs, + ) + contents = ret.pop("Contents", None) + if contents is None: + break + for entry in contents: + if fnmatch.fnmatch(entry["Key"], glob_match): + listing.append(entry) + if not ret["IsTruncated"]: + break + continuation_token = ret["NextContinuationToken"] + return listing + + +def _get_repo_file_list( + bucket_name: str, bucket_folder: str, glob_match: str +) -> list[str]: + return [ + entry["Key"] + for entry in _get_repo_detailed_file_list( + bucket_name, bucket_folder, glob_match=glob_match + ) + ] + + +def _publish_repo( + ctx: Context, + repo_path: pathlib.Path, + salt_version: str, + nightly_build: bool = False, + stage: bool = False, +): + """ + Publish packaging repositories. + """ + if nightly_build: + bucket_name = tools.utils.RELEASE_BUCKET_NAME + elif stage: + bucket_name = tools.utils.STAGING_BUCKET_NAME + else: + bucket_name = tools.utils.RELEASE_BUCKET_NAME + + ctx.info("Preparing upload ...") + s3 = boto3.client("s3") + to_delete_paths: dict[pathlib.Path, list[dict[str, str]]] = {} + to_upload_paths: list[pathlib.Path] = [] + symlink_paths: list[str] = [] + uploaded_files: list[str] = [] + for dirpath, dirnames, filenames in os.walk(repo_path, followlinks=True): + for dirname in dirnames: + path = pathlib.Path(dirpath, dirname) + if not path.is_symlink(): + continue + # This is a symlink, then we need to delete all files under + # that directory in S3 because S3 does not understand symlinks + # and we would end up adding files to that folder instead of + # replacing it. 
+ try: + relpath = path.relative_to(repo_path) + ret = s3.list_objects( + Bucket=bucket_name, + Prefix=str(relpath), + ) + if "Contents" not in ret: + continue + objects = [] + for entry in ret["Contents"]: + objects.append({"Key": entry["Key"]}) + to_delete_paths[path] = objects + symlink_paths.append(str(relpath)) + except ClientError as exc: + if "Error" not in exc.response: + raise + if exc.response["Error"]["Code"] != "404": + raise + + for fpath in filenames: + path = pathlib.Path(dirpath, fpath) + to_upload_paths.append(path) + + with tools.utils.create_progress_bar() as progress: + task = progress.add_task( + "Deleting directories to override.", total=len(to_delete_paths) + ) + for base, objects in to_delete_paths.items(): + relpath = base.relative_to(repo_path) + bucket_uri = f"s3://{bucket_name}/{relpath}" + progress.update(task, description=f"Deleting {bucket_uri}") + try: + ret = s3.delete_objects( + Bucket=bucket_name, + Delete={"Objects": objects}, + ) + except ClientError: + log.exception(f"Failed to delete {bucket_uri}") + finally: + progress.update(task, advance=1) + + try: + ctx.info("Uploading repository ...") + for upload_path in to_upload_paths: + relpath = upload_path.relative_to(repo_path) + size = upload_path.stat().st_size + ctx.info(f" {relpath}") + with tools.utils.create_progress_bar(file_progress=True) as progress: + task = progress.add_task(description="Uploading...", total=size) + s3.upload_file( + str(upload_path), + bucket_name, + str(relpath), + Callback=tools.utils.UpdateProgress(progress, task), + ExtraArgs={ + "Metadata": { + "x-amz-meta-salt-release-version": salt_version, + } + }, + ) + uploaded_files.append(str(relpath)) + if stage is True: + repo_files_path = f"release-artifacts/{salt_version}/.release-files.json" + ctx.info(f"Uploading {repo_files_path} ...") + s3.put_object( + Key=repo_files_path, + Bucket=bucket_name, + Body=json.dumps(uploaded_files).encode(), + Metadata={ + "x-amz-meta-salt-release-version": salt_version, + }, + ) + repo_symlinks_path = ( + f"release-artifacts/{salt_version}/.release-symlinks.json" + ) + ctx.info(f"Uploading {repo_symlinks_path} ...") + s3.put_object( + Key=repo_symlinks_path, + Bucket=bucket_name, + Body=json.dumps(symlink_paths).encode(), + Metadata={ + "x-amz-meta-salt-release-version": salt_version, + }, + ) + except KeyboardInterrupt: + pass diff --git a/tools/utils.py b/tools/utils.py index cb4379c61e0..28a79745844 100644 --- a/tools/utils.py +++ b/tools/utils.py @@ -1,8 +1,12 @@ # pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated from __future__ import annotations +import json import os import pathlib +import sys +from datetime import datetime +from typing import Any import packaging.version from ptscripts import Context @@ -16,6 +20,18 @@ from rich.progress import ( TransferSpeedColumn, ) +try: + import boto3 + from botocore.exceptions import ClientError +except ImportError: + print( + "\nPlease run 'python -m pip install -r " + "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info), + file=sys.stderr, + flush=True, + ) + raise + REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent GPG_KEY_FILENAME = "SALT-PROJECT-GPG-PUBKEY-2023" SPB_ENVIRONMENT = os.environ.get("SPB_ENVIRONMENT") or "prod" @@ -169,3 +185,114 @@ def get_salt_releases(ctx: Context, repository: str) -> list[Version]: # We're not going to parse dash or docs releases versions.add(Version(name)) return sorted(versions) + + +def parse_versions(*versions: str) -> list[Version]: + _versions = [] + 
for version in set(versions): + if version == "latest": + continue + _versions.append(Version(version)) + if _versions: + _versions.sort(reverse=True) + return _versions + + +def get_repo_json_file_contents( + ctx: Context, + bucket_name: str, + repo_path: pathlib.Path, + repo_json_path: pathlib.Path, +) -> dict[str, Any]: + s3 = boto3.client("s3") + repo_json: dict[str, Any] = {} + try: + ret = s3.head_object( + Bucket=bucket_name, Key=str(repo_json_path.relative_to(repo_path)) + ) + ctx.info( + f"Downloading existing '{repo_json_path.relative_to(repo_path)}' file " + f"from bucket {bucket_name}" + ) + size = ret["ContentLength"] + with repo_json_path.open("wb") as wfh: + with create_progress_bar(file_progress=True) as progress: + task = progress.add_task(description="Downloading...", total=size) + s3.download_fileobj( + Bucket=bucket_name, + Key=str(repo_json_path.relative_to(repo_path)), + Fileobj=wfh, + Callback=UpdateProgress(progress, task), + ) + with repo_json_path.open() as rfh: + repo_json = json.load(rfh) + except ClientError as exc: + if "Error" not in exc.response: + raise + if exc.response["Error"]["Code"] != "404": + raise + ctx.info(f"Could not find {repo_json_path} in bucket {bucket_name}") + if repo_json: + ctx.print(repo_json, soft_wrap=True) + return repo_json + + +def create_top_level_repo_path( + ctx: Context, + repo_path: pathlib.Path, + salt_version: str, + distro: str, + distro_version: str | None = None, # pylint: disable=bad-whitespace + distro_arch: str | None = None, # pylint: disable=bad-whitespace + nightly_build_from: str | None = None, # pylint: disable=bad-whitespace +): + create_repo_path = repo_path + if nightly_build_from: + create_repo_path = ( + create_repo_path + / "salt-dev" + / nightly_build_from + / datetime.utcnow().strftime("%Y-%m-%d") + ) + create_repo_path.mkdir(exist_ok=True, parents=True) + with ctx.chdir(create_repo_path.parent): + latest_nightly_symlink = pathlib.Path("latest") + if not latest_nightly_symlink.exists(): + ctx.info( + f"Creating 'latest' symlink to '{create_repo_path.relative_to(repo_path)}' ..." 
+ ) + latest_nightly_symlink.symlink_to( + create_repo_path.name, target_is_directory=True + ) + elif "rc" in salt_version: + create_repo_path = create_repo_path / "salt_rc" + create_repo_path = create_repo_path / "salt" / "py3" / distro + if distro_version: + create_repo_path = create_repo_path / distro_version + if distro_arch: + create_repo_path = create_repo_path / distro_arch + create_repo_path.mkdir(exist_ok=True, parents=True) + return create_repo_path + + +def create_full_repo_path( + ctx: Context, + repo_path: pathlib.Path, + salt_version: str, + distro: str, + distro_version: str | None = None, # pylint: disable=bad-whitespace + distro_arch: str | None = None, # pylint: disable=bad-whitespace + nightly_build_from: str | None = None, # pylint: disable=bad-whitespace +): + create_repo_path = create_top_level_repo_path( + ctx, + repo_path, + salt_version, + distro, + distro_version, + distro_arch, + nightly_build_from=nightly_build_from, + ) + create_repo_path = create_repo_path / "minor" / salt_version + create_repo_path.mkdir(exist_ok=True, parents=True) + return create_repo_path From 4896c90684a955111ef7a8cdcd9da9ca5d475c99 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Mon, 8 May 2023 15:30:25 -0400 Subject: [PATCH 04/25] Address review comments (typos, docs) --- tools/pkg/repo/__init__.py | 2 +- tools/pkg/repo/create.py | 2 +- tools/pkg/repo/publish.py | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/tools/pkg/repo/__init__.py b/tools/pkg/repo/__init__.py index 8a3cbd9c81f..d965fcfd923 100644 --- a/tools/pkg/repo/__init__.py +++ b/tools/pkg/repo/__init__.py @@ -1,5 +1,5 @@ """ -These commands are used to build the pacakge repository files. +These commands are used to build the package repository files. """ # pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated from __future__ import annotations diff --git a/tools/pkg/repo/create.py b/tools/pkg/repo/create.py index ec4b3331c42..60ed8ad0570 100644 --- a/tools/pkg/repo/create.py +++ b/tools/pkg/repo/create.py @@ -1,5 +1,5 @@ """ -These commands are used to build the pacakge repository files. +These commands are used to build the package repository files. """ # pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated from __future__ import annotations diff --git a/tools/pkg/repo/publish.py b/tools/pkg/repo/publish.py index cc6a92235c4..1c87d20b490 100644 --- a/tools/pkg/repo/publish.py +++ b/tools/pkg/repo/publish.py @@ -1,5 +1,5 @@ """ -These commands are used to build the pacakge repository files. +These commands are used to build the package repository files. 
""" # pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated from __future__ import annotations @@ -55,7 +55,7 @@ publish = command_group( "help": "Local path for the repository that shall be published.", }, "salt_version": { - "help": "The salt version for which to build the repository", + "help": "The salt version of the repository to publish", "required": True, }, } @@ -77,7 +77,7 @@ def nightly(ctx: Context, repo_path: pathlib.Path, salt_version: str = None): "help": "Local path for the repository that shall be published.", }, "salt_version": { - "help": "The salt version for which to build the repository", + "help": "The salt version of the repository to publish", "required": True, }, } From 33e2538aa873c6269777ddd05bb7293ff7bbcc87 Mon Sep 17 00:00:00 2001 From: Frode Gundersen Date: Thu, 23 Feb 2023 22:49:52 +0000 Subject: [PATCH 05/25] migrate unit_states_test_linux_acl to pytest --- tests/pytests/unit/states/test_linux_acl.py | 539 ++++++++++++++++++ tests/unit/states/test_linux_acl.py | 589 -------------------- 2 files changed, 539 insertions(+), 589 deletions(-) create mode 100644 tests/pytests/unit/states/test_linux_acl.py delete mode 100644 tests/unit/states/test_linux_acl.py diff --git a/tests/pytests/unit/states/test_linux_acl.py b/tests/pytests/unit/states/test_linux_acl.py new file mode 100644 index 00000000000..976a57b8c4b --- /dev/null +++ b/tests/pytests/unit/states/test_linux_acl.py @@ -0,0 +1,539 @@ +""" + :codeauthor: Jayesh Kariya + + Test cases for salt.states.linux_acl +""" + +import pytest + +import salt.states.linux_acl as linux_acl +from salt.exceptions import CommandExecutionError +from tests.support.mock import MagicMock, patch + +pytestmark = [ + pytest.mark.skip_unless_on_linux( + reason="Only run on Linux", + ) +] + + +@pytest.fixture +def configure_loader_modules(): + return {linux_acl: {}} + + +def test_present(): + """ + Test to ensure a Linux ACL is present + """ + maxDiff = None + name = "/root" + acl_type = "users" + acl_name = "damian" + perms = "rwx" + + mock = MagicMock( + side_effect=[ + {name: {acl_type: [{acl_name: {"octal": 5}}]}}, + {name: {acl_type: [{acl_name: {"octal": 5}}]}}, + {name: {acl_type: [{acl_name: {"octal": 5}}]}}, + {name: {acl_type: [{}]}}, + {name: {acl_type: [{}]}}, + {name: {acl_type: [{}]}}, + { + name: {acl_type: [{acl_name: {"octal": 7}}]}, + name + "/foo": {acl_type: [{acl_name: {"octal": 5}}]}, + }, + { + name: {acl_type: [{acl_name: {"octal": 7}}]}, + name + "/foo": {acl_type: [{acl_name: {"octal": 7}}]}, + }, + {name: {acl_type: ""}}, + { + name: {"defaults": {"users": [{acl_name: {"octal": 7}}]}}, + name + "/foo": {"defaults": {"users": [{acl_name: {"octal": 7}}]}}, + }, + { + name: {"defaults": {"users": [{acl_name: {"octal": 7}}]}}, + name + "/foo": {"defaults": {"users": [{acl_name: {"octal": 7}}]}}, + }, + { + name: {"defaults": {"users": [{acl_name: {"octal": 7}}]}}, + name + "/foo": {"defaults": {"users": [{acl_name: {"octal": 7}}]}}, + }, + ] + ) + mock_modfacl = MagicMock(return_value=True) + + with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): + # Update - test=True + with patch.dict(linux_acl.__opts__, {"test": True}): + comt = "Updated permissions will be applied for {}: r-x -> {}".format( + acl_name, perms + ) + ret = { + "name": name, + "comment": comt, + "changes": { + "new": { + "acl_name": acl_name, + "acl_type": acl_type, + "perms": perms, + }, + "old": { + "acl_name": acl_name, + "acl_type": acl_type, + "perms": "r-x", + }, + }, + "result": None, + } + + assert 
linux_acl.present(name, acl_type, acl_name, perms) == ret + # Update - test=False + with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}): + with patch.dict(linux_acl.__opts__, {"test": False}): + comt = "Updated permissions for {}".format(acl_name) + ret = { + "name": name, + "comment": comt, + "changes": { + "new": { + "acl_name": acl_name, + "acl_type": acl_type, + "perms": perms, + }, + "old": { + "acl_name": acl_name, + "acl_type": acl_type, + "perms": "r-x", + }, + }, + "result": True, + } + assert linux_acl.present(name, acl_type, acl_name, perms) == ret + # Update - modfacl error + with patch.dict( + linux_acl.__salt__, + {"acl.modfacl": MagicMock(side_effect=CommandExecutionError("Custom err"))}, + ): + with patch.dict(linux_acl.__opts__, {"test": False}): + comt = "Error updating permissions for {}: Custom err".format(acl_name) + ret = { + "name": name, + "comment": comt, + "changes": {}, + "result": False, + } + assert linux_acl.present(name, acl_type, acl_name, perms) == ret + # New - test=True + with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}): + with patch.dict(linux_acl.__opts__, {"test": True}): + comt = "New permissions will be applied for {}: {}".format( + acl_name, perms + ) + ret = { + "name": name, + "comment": comt, + "changes": { + "new": { + "acl_name": acl_name, + "acl_type": acl_type, + "perms": perms, + } + }, + "result": None, + } + assert linux_acl.present(name, acl_type, acl_name, perms) == ret + # New - test=False + with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}): + with patch.dict(linux_acl.__opts__, {"test": False}): + comt = "Applied new permissions for {}".format(acl_name) + ret = { + "name": name, + "comment": comt, + "changes": { + "new": { + "acl_name": acl_name, + "acl_type": acl_type, + "perms": perms, + } + }, + "result": True, + } + assert linux_acl.present(name, acl_type, acl_name, perms) == ret + # New - modfacl error + with patch.dict( + linux_acl.__salt__, + {"acl.modfacl": MagicMock(side_effect=CommandExecutionError("Custom err"))}, + ): + with patch.dict(linux_acl.__opts__, {"test": False}): + comt = "Error updating permissions for {}: Custom err".format(acl_name) + ret = { + "name": name, + "comment": comt, + "changes": {}, + "result": False, + } + assert linux_acl.present(name, acl_type, acl_name, perms) == ret + + # New - recurse true + with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): + # Update - test=True + with patch.dict(linux_acl.__opts__, {"test": True}): + comt = "Updated permissions will be applied for {}: rwx -> {}".format( + acl_name, perms + ) + ret = { + "name": name, + "comment": comt, + "changes": { + "new": { + "acl_name": acl_name, + "acl_type": acl_type, + "perms": perms, + }, + "old": { + "acl_name": acl_name, + "acl_type": acl_type, + "perms": "rwx", + }, + }, + "result": None, + } + + assert ( + linux_acl.present(name, acl_type, acl_name, perms, recurse=True) + == ret + ) + + # New - recurse true - nothing to do + with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): + # Update - test=True + with patch.dict(linux_acl.__opts__, {"test": True}): + comt = "Permissions are in the desired state" + ret = {"name": name, "comment": comt, "changes": {}, "result": True} + + assert ( + linux_acl.present(name, acl_type, acl_name, perms, recurse=True) + == ret + ) + + # No acl type + comt = "ACL Type does not exist" + ret = {"name": name, "comment": comt, "result": False, "changes": {}} + assert linux_acl.present(name, acl_type, acl_name, perms) == ret + + # default 
recurse false - nothing to do + with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): + # Update - test=True + with patch.dict(linux_acl.__opts__, {"test": True}): + comt = "Permissions are in the desired state" + ret = {"name": name, "comment": comt, "changes": {}, "result": True} + + assert ( + linux_acl.present( + name, "d:" + acl_type, acl_name, perms, recurse=False + ) + == ret + ) + + # default recurse false - nothing to do + with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): + # Update - test=True + with patch.dict(linux_acl.__opts__, {"test": True}): + comt = "Permissions are in the desired state" + ret = {"name": name, "comment": comt, "changes": {}, "result": True} + + assert ( + linux_acl.present( + name, "d:" + acl_type, acl_name, perms, recurse=False + ) + == ret + ) + + # default recurse true - nothing to do + with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): + # Update - test=True + with patch.dict(linux_acl.__opts__, {"test": True}): + comt = "Permissions are in the desired state" + ret = {"name": name, "comment": comt, "changes": {}, "result": True} + + assert ( + linux_acl.present( + name, "d:" + acl_type, acl_name, perms, recurse=True + ) + == ret + ) + + +def test_absent(): + """ + Test to ensure a Linux ACL does not exist + """ + name = "/root" + acl_type = "users" + acl_name = "damian" + perms = "rwx" + + ret = {"name": name, "result": None, "comment": "", "changes": {}} + + mock = MagicMock( + side_effect=[ + {name: {acl_type: [{acl_name: {"octal": "A"}}]}}, + {name: {acl_type: ""}}, + ] + ) + with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): + with patch.dict(linux_acl.__opts__, {"test": True}): + comt = "Removing permissions" + ret.update({"comment": comt}) + assert linux_acl.absent(name, acl_type, acl_name, perms) == ret + + comt = "ACL Type does not exist" + ret.update({"comment": comt, "result": False}) + assert linux_acl.absent(name, acl_type, acl_name, perms) == ret + + +def test_list_present(): + """ + Test to ensure a Linux ACL is present + """ + maxDiff = None + name = "/root" + acl_type = "user" + acl_names = ["root", "damian", "homer"] + acl_comment = {"owner": "root", "group": "root", "file": "/root"} + perms = "rwx" + + mock = MagicMock( + side_effect=[ + { + name: { + acl_type: [ + {acl_names[0]: {"octal": "A"}}, + {acl_names[1]: {"octal": "A"}}, + {acl_names[2]: {"octal": "A"}}, + ], + "comment": acl_comment, + } + }, + { + name: { + acl_type: [ + {acl_names[0]: {"octal": "A"}}, + {acl_names[1]: {"octal": "A"}}, + ], + "comment": acl_comment, + } + }, + { + name: { + acl_type: [ + {acl_names[0]: {"octal": "A"}}, + {acl_names[1]: {"octal": "A"}}, + ] + } + }, + {name: {acl_type: [{}]}}, + {name: {acl_type: [{}]}}, + {name: {acl_type: [{}]}}, + {name: {acl_type: ""}}, + ] + ) + mock_modfacl = MagicMock(return_value=True) + + with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): + # Update - test=True + with patch.dict(linux_acl.__opts__, {"test": True}): + comt = "Updated permissions will be applied for {}: A -> {}".format( + acl_names, perms + ) + expected = { + "name": name, + "comment": comt, + "changes": { + "new": { + "acl_name": ", ".join(acl_names), + "acl_type": acl_type, + "perms": 7, + }, + "old": { + "acl_name": ", ".join(acl_names), + "acl_type": acl_type, + "perms": "A", + }, + }, + "result": None, + } + + ret = linux_acl.list_present(name, acl_type, acl_names, perms) + assert ret == expected + + # Update - test=False + with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}): + with 
patch.dict(linux_acl.__opts__, {"test": False}): + comt = "Applied new permissions for {}".format(", ".join(acl_names)) + expected = { + "name": name, + "comment": comt, + "changes": { + "new": { + "acl_name": ", ".join(acl_names), + "acl_type": acl_type, + "perms": "rwx", + } + }, + "result": True, + } + + ret = linux_acl.list_present(name, acl_type, acl_names, perms) + assert expected == ret + + # Update - modfacl error + with patch.dict( + linux_acl.__salt__, + {"acl.modfacl": MagicMock(side_effect=CommandExecutionError("Custom err"))}, + ): + with patch.dict(linux_acl.__opts__, {"test": False}): + comt = "Error updating permissions for {}: Custom err".format(acl_names) + expected = { + "name": name, + "comment": comt, + "changes": {}, + "result": False, + } + + ret = linux_acl.list_present(name, acl_type, acl_names, perms) + assert expected == ret + + # New - test=True + with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}): + with patch.dict(linux_acl.__opts__, {"test": True}): + comt = "New permissions will be applied for {}: {}".format( + acl_names, perms + ) + expected = { + "name": name, + "comment": comt, + "changes": { + "new": { + "acl_name": ", ".join(acl_names), + "acl_type": acl_type, + "perms": perms, + } + }, + "result": None, + } + + ret = linux_acl.list_present(name, acl_type, acl_names, perms) + assert expected == ret + + # New - test=False + with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}): + with patch.dict(linux_acl.__opts__, {"test": False}): + comt = "Applied new permissions for {}".format(", ".join(acl_names)) + expected = { + "name": name, + "comment": comt, + "changes": { + "new": { + "acl_name": ", ".join(acl_names), + "acl_type": acl_type, + "perms": perms, + } + }, + "result": True, + } + ret = linux_acl.list_present(name, acl_type, acl_names, perms) + assert expected == ret + + # New - modfacl error + with patch.dict( + linux_acl.__salt__, + {"acl.modfacl": MagicMock(side_effect=CommandExecutionError("Custom err"))}, + ): + with patch.dict(linux_acl.__opts__, {"test": False}): + comt = "Error updating permissions for {}: Custom err".format(acl_names) + expected = { + "name": name, + "comment": comt, + "changes": {}, + "result": False, + } + + ret = linux_acl.list_present(name, acl_type, acl_names, perms) + assert expected == ret + + # No acl type + comt = "ACL Type does not exist" + expected = { + "name": name, + "comment": comt, + "result": False, + "changes": {}, + } + ret = linux_acl.list_present(name, acl_type, acl_names, perms) + assert expected == ret + + +def test_list_absent(): + """ + Test to ensure a Linux ACL does not exist + """ + name = "/root" + acl_type = "users" + acl_names = ["damian", "homer"] + perms = "rwx" + + ret = {"name": name, "result": None, "comment": "", "changes": {}} + + mock = MagicMock( + side_effect=[ + { + name: { + acl_type: [ + {acl_names[0]: {"octal": "A"}, acl_names[1]: {"octal": "A"}} + ] + } + }, + {name: {acl_type: ""}}, + ] + ) + with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): + with patch.dict(linux_acl.__opts__, {"test": True}): + comt = "Removing permissions" + ret.update({"comment": comt}) + assert linux_acl.list_absent(name, acl_type, acl_names, perms) == ret + + comt = "ACL Type does not exist" + ret.update({"comment": comt, "result": False}) + assert linux_acl.list_absent(name, acl_type, acl_names) == ret + + +def test_absent_recursive(): + """ + Test to ensure a Linux ACL does not exist + """ + name = "/root" + acl_type = "users" + acl_name = "damian" + perms = "rwx" 
+ + ret = {"name": name, "result": None, "comment": "", "changes": {}} + + mock = MagicMock( + side_effect=[ + { + name: {acl_type: [{acl_name: {"octal": 7}}]}, + name + "/foo": {acl_type: [{acl_name: {"octal": "A"}}]}, + } + ] + ) + with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): + with patch.dict(linux_acl.__opts__, {"test": True}): + comt = "Removing permissions" + ret.update({"comment": comt}) + assert ( + linux_acl.absent(name, acl_type, acl_name, perms, recurse=True) == ret + ) diff --git a/tests/unit/states/test_linux_acl.py b/tests/unit/states/test_linux_acl.py deleted file mode 100644 index 2961fbad53a..00000000000 --- a/tests/unit/states/test_linux_acl.py +++ /dev/null @@ -1,589 +0,0 @@ -""" - :codeauthor: Jayesh Kariya -""" - -import pytest - -import salt.states.linux_acl as linux_acl -from salt.exceptions import CommandExecutionError -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - - -@pytest.mark.skip_unless_on_linux -class LinuxAclTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.states.linux_acl - """ - - def setup_loader_modules(self): - return {linux_acl: {}} - - # 'present' function tests: 1 - - def test_present(self): - """ - Test to ensure a Linux ACL is present - """ - self.maxDiff = None - name = "/root" - acl_type = "users" - acl_name = "damian" - perms = "rwx" - - mock = MagicMock( - side_effect=[ - {name: {acl_type: [{acl_name: {"octal": 5}}]}}, - {name: {acl_type: [{acl_name: {"octal": 5}}]}}, - {name: {acl_type: [{acl_name: {"octal": 5}}]}}, - {name: {acl_type: [{}]}}, - {name: {acl_type: [{}]}}, - {name: {acl_type: [{}]}}, - { - name: {acl_type: [{acl_name: {"octal": 7}}]}, - name + "/foo": {acl_type: [{acl_name: {"octal": 5}}]}, - }, - { - name: {acl_type: [{acl_name: {"octal": 7}}]}, - name + "/foo": {acl_type: [{acl_name: {"octal": 7}}]}, - }, - {name: {acl_type: ""}}, - { - name: {"defaults": {"users": [{acl_name: {"octal": 7}}]}}, - name + "/foo": {"defaults": {"users": [{acl_name: {"octal": 7}}]}}, - }, - { - name: {"defaults": {"users": [{acl_name: {"octal": 7}}]}}, - name + "/foo": {"defaults": {"users": [{acl_name: {"octal": 7}}]}}, - }, - { - name: {"defaults": {"users": [{acl_name: {"octal": 7}}]}}, - name + "/foo": {"defaults": {"users": [{acl_name: {"octal": 7}}]}}, - }, - ] - ) - mock_modfacl = MagicMock(return_value=True) - - with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): - # Update - test=True - with patch.dict(linux_acl.__opts__, {"test": True}): - comt = "Updated permissions will be applied for {}: r-x -> {}".format( - acl_name, perms - ) - ret = { - "name": name, - "comment": comt, - "changes": { - "new": { - "acl_name": acl_name, - "acl_type": acl_type, - "perms": perms, - }, - "old": { - "acl_name": acl_name, - "acl_type": acl_type, - "perms": "r-x", - }, - }, - "result": None, - } - - self.assertDictEqual( - linux_acl.present(name, acl_type, acl_name, perms), ret - ) - # Update - test=False - with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}): - with patch.dict(linux_acl.__opts__, {"test": False}): - comt = "Updated permissions for {}".format(acl_name) - ret = { - "name": name, - "comment": comt, - "changes": { - "new": { - "acl_name": acl_name, - "acl_type": acl_type, - "perms": perms, - }, - "old": { - "acl_name": acl_name, - "acl_type": acl_type, - "perms": "r-x", - }, - }, - "result": True, - } - self.assertDictEqual( - linux_acl.present(name, acl_type, acl_name, perms), ret - ) 
- # Update - modfacl error - with patch.dict( - linux_acl.__salt__, - { - "acl.modfacl": MagicMock( - side_effect=CommandExecutionError("Custom err") - ) - }, - ): - with patch.dict(linux_acl.__opts__, {"test": False}): - comt = "Error updating permissions for {}: Custom err".format( - acl_name - ) - ret = { - "name": name, - "comment": comt, - "changes": {}, - "result": False, - } - self.assertDictEqual( - linux_acl.present(name, acl_type, acl_name, perms), ret - ) - # New - test=True - with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}): - with patch.dict(linux_acl.__opts__, {"test": True}): - comt = "New permissions will be applied for {}: {}".format( - acl_name, perms - ) - ret = { - "name": name, - "comment": comt, - "changes": { - "new": { - "acl_name": acl_name, - "acl_type": acl_type, - "perms": perms, - } - }, - "result": None, - } - self.assertDictEqual( - linux_acl.present(name, acl_type, acl_name, perms), ret - ) - # New - test=False - with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}): - with patch.dict(linux_acl.__opts__, {"test": False}): - comt = "Applied new permissions for {}".format(acl_name) - ret = { - "name": name, - "comment": comt, - "changes": { - "new": { - "acl_name": acl_name, - "acl_type": acl_type, - "perms": perms, - } - }, - "result": True, - } - self.assertDictEqual( - linux_acl.present(name, acl_type, acl_name, perms), ret - ) - # New - modfacl error - with patch.dict( - linux_acl.__salt__, - { - "acl.modfacl": MagicMock( - side_effect=CommandExecutionError("Custom err") - ) - }, - ): - with patch.dict(linux_acl.__opts__, {"test": False}): - comt = "Error updating permissions for {}: Custom err".format( - acl_name - ) - ret = { - "name": name, - "comment": comt, - "changes": {}, - "result": False, - } - self.assertDictEqual( - linux_acl.present(name, acl_type, acl_name, perms), ret - ) - - # New - recurse true - with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): - # Update - test=True - with patch.dict(linux_acl.__opts__, {"test": True}): - comt = ( - "Updated permissions will be applied for {}: rwx -> {}".format( - acl_name, perms - ) - ) - ret = { - "name": name, - "comment": comt, - "changes": { - "new": { - "acl_name": acl_name, - "acl_type": acl_type, - "perms": perms, - }, - "old": { - "acl_name": acl_name, - "acl_type": acl_type, - "perms": "rwx", - }, - }, - "result": None, - } - - self.assertDictEqual( - linux_acl.present( - name, acl_type, acl_name, perms, recurse=True - ), - ret, - ) - - # New - recurse true - nothing to do - with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): - # Update - test=True - with patch.dict(linux_acl.__opts__, {"test": True}): - comt = "Permissions are in the desired state" - ret = {"name": name, "comment": comt, "changes": {}, "result": True} - - self.assertDictEqual( - linux_acl.present( - name, acl_type, acl_name, perms, recurse=True - ), - ret, - ) - - # No acl type - comt = "ACL Type does not exist" - ret = {"name": name, "comment": comt, "result": False, "changes": {}} - self.assertDictEqual( - linux_acl.present(name, acl_type, acl_name, perms), ret - ) - - # default recurse false - nothing to do - with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): - # Update - test=True - with patch.dict(linux_acl.__opts__, {"test": True}): - comt = "Permissions are in the desired state" - ret = {"name": name, "comment": comt, "changes": {}, "result": True} - - self.assertDictEqual( - linux_acl.present( - name, "d:" + acl_type, acl_name, perms, recurse=False - ), - ret, - 
) - - # default recurse false - nothing to do - with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): - # Update - test=True - with patch.dict(linux_acl.__opts__, {"test": True}): - comt = "Permissions are in the desired state" - ret = {"name": name, "comment": comt, "changes": {}, "result": True} - - self.assertDictEqual( - linux_acl.present( - name, "d:" + acl_type, acl_name, perms, recurse=False - ), - ret, - ) - - # default recurse true - nothing to do - with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): - # Update - test=True - with patch.dict(linux_acl.__opts__, {"test": True}): - comt = "Permissions are in the desired state" - ret = {"name": name, "comment": comt, "changes": {}, "result": True} - - self.assertDictEqual( - linux_acl.present( - name, "d:" + acl_type, acl_name, perms, recurse=True - ), - ret, - ) - - # 'absent' function tests: 2 - - def test_absent(self): - """ - Test to ensure a Linux ACL does not exist - """ - name = "/root" - acl_type = "users" - acl_name = "damian" - perms = "rwx" - - ret = {"name": name, "result": None, "comment": "", "changes": {}} - - mock = MagicMock( - side_effect=[ - {name: {acl_type: [{acl_name: {"octal": "A"}}]}}, - {name: {acl_type: ""}}, - ] - ) - with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): - with patch.dict(linux_acl.__opts__, {"test": True}): - comt = "Removing permissions" - ret.update({"comment": comt}) - self.assertDictEqual( - linux_acl.absent(name, acl_type, acl_name, perms), ret - ) - - comt = "ACL Type does not exist" - ret.update({"comment": comt, "result": False}) - self.assertDictEqual(linux_acl.absent(name, acl_type, acl_name, perms), ret) - - # 'list_present' function tests: 1 - - def test_list_present(self): - """ - Test to ensure a Linux ACL is present - """ - self.maxDiff = None - name = "/root" - acl_type = "user" - acl_names = ["root", "damian", "homer"] - acl_comment = {"owner": "root", "group": "root", "file": "/root"} - perms = "rwx" - - mock = MagicMock( - side_effect=[ - { - name: { - acl_type: [ - {acl_names[0]: {"octal": "A"}}, - {acl_names[1]: {"octal": "A"}}, - {acl_names[2]: {"octal": "A"}}, - ], - "comment": acl_comment, - } - }, - { - name: { - acl_type: [ - {acl_names[0]: {"octal": "A"}}, - {acl_names[1]: {"octal": "A"}}, - ], - "comment": acl_comment, - } - }, - { - name: { - acl_type: [ - {acl_names[0]: {"octal": "A"}}, - {acl_names[1]: {"octal": "A"}}, - ] - } - }, - {name: {acl_type: [{}]}}, - {name: {acl_type: [{}]}}, - {name: {acl_type: [{}]}}, - {name: {acl_type: ""}}, - ] - ) - mock_modfacl = MagicMock(return_value=True) - - with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): - # Update - test=True - with patch.dict(linux_acl.__opts__, {"test": True}): - comt = "Updated permissions will be applied for {}: A -> {}".format( - acl_names, perms - ) - expected = { - "name": name, - "comment": comt, - "changes": { - "new": { - "acl_name": ", ".join(acl_names), - "acl_type": acl_type, - "perms": 7, - }, - "old": { - "acl_name": ", ".join(acl_names), - "acl_type": acl_type, - "perms": "A", - }, - }, - "result": None, - } - - ret = linux_acl.list_present(name, acl_type, acl_names, perms) - self.assertDictEqual(ret, expected) - - # Update - test=False - with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}): - with patch.dict(linux_acl.__opts__, {"test": False}): - comt = "Applied new permissions for {}".format(", ".join(acl_names)) - expected = { - "name": name, - "comment": comt, - "changes": { - "new": { - "acl_name": ", ".join(acl_names), - "acl_type": 
acl_type, - "perms": "rwx", - } - }, - "result": True, - } - - ret = linux_acl.list_present(name, acl_type, acl_names, perms) - self.assertDictEqual(expected, ret) - - # Update - modfacl error - with patch.dict( - linux_acl.__salt__, - { - "acl.modfacl": MagicMock( - side_effect=CommandExecutionError("Custom err") - ) - }, - ): - with patch.dict(linux_acl.__opts__, {"test": False}): - comt = "Error updating permissions for {}: Custom err".format( - acl_names - ) - expected = { - "name": name, - "comment": comt, - "changes": {}, - "result": False, - } - - ret = linux_acl.list_present(name, acl_type, acl_names, perms) - self.assertDictEqual(expected, ret) - - # New - test=True - with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}): - with patch.dict(linux_acl.__opts__, {"test": True}): - comt = "New permissions will be applied for {}: {}".format( - acl_names, perms - ) - expected = { - "name": name, - "comment": comt, - "changes": { - "new": { - "acl_name": ", ".join(acl_names), - "acl_type": acl_type, - "perms": perms, - } - }, - "result": None, - } - - ret = linux_acl.list_present(name, acl_type, acl_names, perms) - self.assertDictEqual(expected, ret) - - # New - test=False - with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}): - with patch.dict(linux_acl.__opts__, {"test": False}): - comt = "Applied new permissions for {}".format(", ".join(acl_names)) - expected = { - "name": name, - "comment": comt, - "changes": { - "new": { - "acl_name": ", ".join(acl_names), - "acl_type": acl_type, - "perms": perms, - } - }, - "result": True, - } - ret = linux_acl.list_present(name, acl_type, acl_names, perms) - self.assertDictEqual(expected, ret) - - # New - modfacl error - with patch.dict( - linux_acl.__salt__, - { - "acl.modfacl": MagicMock( - side_effect=CommandExecutionError("Custom err") - ) - }, - ): - with patch.dict(linux_acl.__opts__, {"test": False}): - comt = "Error updating permissions for {}: Custom err".format( - acl_names - ) - expected = { - "name": name, - "comment": comt, - "changes": {}, - "result": False, - } - - ret = linux_acl.list_present(name, acl_type, acl_names, perms) - self.assertDictEqual(expected, ret) - - # No acl type - comt = "ACL Type does not exist" - expected = { - "name": name, - "comment": comt, - "result": False, - "changes": {}, - } - ret = linux_acl.list_present(name, acl_type, acl_names, perms) - self.assertDictEqual(expected, ret) - - # 'list_absent' function tests: 2 - - def test_list_absent(self): - """ - Test to ensure a Linux ACL does not exist - """ - name = "/root" - acl_type = "users" - acl_names = ["damian", "homer"] - perms = "rwx" - - ret = {"name": name, "result": None, "comment": "", "changes": {}} - - mock = MagicMock( - side_effect=[ - { - name: { - acl_type: [ - {acl_names[0]: {"octal": "A"}, acl_names[1]: {"octal": "A"}} - ] - } - }, - {name: {acl_type: ""}}, - ] - ) - with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): - with patch.dict(linux_acl.__opts__, {"test": True}): - comt = "Removing permissions" - ret.update({"comment": comt}) - self.assertDictEqual( - linux_acl.list_absent(name, acl_type, acl_names, perms), ret - ) - - comt = "ACL Type does not exist" - ret.update({"comment": comt, "result": False}) - self.assertDictEqual(linux_acl.list_absent(name, acl_type, acl_names), ret) - - def test_absent_recursive(self): - """ - Test to ensure a Linux ACL does not exist - """ - name = "/root" - acl_type = "users" - acl_name = "damian" - perms = "rwx" - - ret = {"name": name, "result": None, "comment": "", 
"changes": {}} - - mock = MagicMock( - side_effect=[ - { - name: {acl_type: [{acl_name: {"octal": 7}}]}, - name + "/foo": {acl_type: [{acl_name: {"octal": "A"}}]}, - } - ] - ) - with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): - with patch.dict(linux_acl.__opts__, {"test": True}): - comt = "Removing permissions" - ret.update({"comment": comt}) - self.assertDictEqual( - linux_acl.absent(name, acl_type, acl_name, perms, recurse=True), ret - ) From b91f363951195cbb832cf18a726d51468663003f Mon Sep 17 00:00:00 2001 From: Frode Gundersen Date: Mon, 10 Apr 2023 11:38:53 -0600 Subject: [PATCH 06/25] Update tests/pytests/unit/states/test_linux_acl.py Co-authored-by: Pedro Algarvio --- tests/pytests/unit/states/test_linux_acl.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/pytests/unit/states/test_linux_acl.py b/tests/pytests/unit/states/test_linux_acl.py index 976a57b8c4b..60bbe55f51c 100644 --- a/tests/pytests/unit/states/test_linux_acl.py +++ b/tests/pytests/unit/states/test_linux_acl.py @@ -299,7 +299,6 @@ def test_list_present(): """ Test to ensure a Linux ACL is present """ - maxDiff = None name = "/root" acl_type = "user" acl_names = ["root", "damian", "homer"] From 6503765b3fcbe222db2830ca4d3aa92cfd0a5d02 Mon Sep 17 00:00:00 2001 From: jeanluc Date: Sat, 6 May 2023 23:57:52 +0200 Subject: [PATCH 07/25] Add test for issue 64232 --- .../integration/modules/test_x509_v2.py | 29 +++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/tests/pytests/integration/modules/test_x509_v2.py b/tests/pytests/integration/modules/test_x509_v2.py index 2fd005778c5..99d0d213397 100644 --- a/tests/pytests/integration/modules/test_x509_v2.py +++ b/tests/pytests/integration/modules/test_x509_v2.py @@ -673,6 +673,35 @@ def test_sign_remote_certificate_copypath(x509_salt_call_cli, cert_args, tmp_pat assert (tmp_path / f"{cert.serial_number:x}.crt").exists() +def test_create_private_key(x509_salt_call_cli): + """ + Ensure calling from the CLI works as expected and does not complain + about unknown internal kwargs (__pub_fun etc). + """ + ret = x509_salt_call_cli.run("x509.create_private_key") + assert ret.returncode == 0 + assert ret.data + assert ret.data.startswith("-----BEGIN PRIVATE KEY-----") + + +def test_create_crl(x509_salt_call_cli, ca_key, ca_cert, x509_pkidir): + """ + Ensure calling from the CLI works as expected and does not complain + about unknown internal kwargs (__pub_fun etc). 
+ """ + with pytest.helpers.temp_file("key", ca_key, x509_pkidir) as ca_keyfile: + with pytest.helpers.temp_file("cert", ca_cert, x509_pkidir) as ca_certfile: + ret = x509_salt_call_cli.run( + "x509.create_crl", + revoked=[], + signing_private_key=str(ca_keyfile), + signing_cert=str(ca_certfile), + ) + assert ret.returncode == 0 + assert ret.data + assert ret.data.startswith("-----BEGIN X509 CRL-----") + + def _belongs_to(cert_or_pubkey, privkey): if isinstance(cert_or_pubkey, cx509.Certificate): cert_or_pubkey = cert_or_pubkey.public_key() From 57608c00679ef7acd328e664fd54eba141f9ed9d Mon Sep 17 00:00:00 2001 From: jeanluc Date: Sat, 6 May 2023 23:58:56 +0200 Subject: [PATCH 08/25] Fix x509_v2 unknown salt-internal kwargs --- changelog/64232.fixed.md | 1 + salt/modules/x509_v2.py | 12 ++++++++---- 2 files changed, 9 insertions(+), 4 deletions(-) create mode 100644 changelog/64232.fixed.md diff --git a/changelog/64232.fixed.md b/changelog/64232.fixed.md new file mode 100644 index 00000000000..45a5ccb90ea --- /dev/null +++ b/changelog/64232.fixed.md @@ -0,0 +1 @@ +Fixed x509_v2 `create_private_key`/`create_crl` unknown kwargs: __pub_fun... diff --git a/salt/modules/x509_v2.py b/salt/modules/x509_v2.py index b46d4cf57d7..0725b1b5624 100644 --- a/salt/modules/x509_v2.py +++ b/salt/modules/x509_v2.py @@ -901,8 +901,11 @@ def create_crl( salt.utils.versions.kwargs_warn_until(["text"], "Potassium") kwargs.pop("text") - if kwargs: - raise SaltInvocationError(f"Unrecognized keyword arguments: {list(kwargs)}") + unknown = [kwarg for kwarg in kwargs if not kwarg.startswith("_")] + if unknown: + raise SaltInvocationError( + f"Unrecognized keyword arguments: {list(unknown)}" + ) if days_valid is None: try: @@ -1235,8 +1238,9 @@ def create_private_key( for x in ignored_params: kwargs.pop(x) - if kwargs: - raise SaltInvocationError(f"Unrecognized keyword arguments: {list(kwargs)}") + unknown = [kwarg for kwarg in kwargs if not kwarg.startswith("_")] + if unknown: + raise SaltInvocationError(f"Unrecognized keyword arguments: {list(unknown)}") if encoding not in ["der", "pem", "pkcs12"]: raise CommandExecutionError( From aeaf55815ad09082ab1d9f9925b5732e0bce097b Mon Sep 17 00:00:00 2001 From: Eric Graham Date: Wed, 26 Apr 2023 14:26:55 -0500 Subject: [PATCH 09/25] Call global logger when catching pip.list exceptions in states.pip.installed --- changelog/64169.fixed.md | 1 + salt/states/pip_state.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog/64169.fixed.md diff --git a/changelog/64169.fixed.md b/changelog/64169.fixed.md new file mode 100644 index 00000000000..499b94b693b --- /dev/null +++ b/changelog/64169.fixed.md @@ -0,0 +1 @@ +Call global logger when catching pip.list exceptions in states.pip.installed diff --git a/salt/states/pip_state.py b/salt/states/pip_state.py index 542a7f6c751..fd99d6bd626 100644 --- a/salt/states/pip_state.py +++ b/salt/states/pip_state.py @@ -852,7 +852,7 @@ def installed( ) # If we fail, then just send False, and we'll try again in the next function call except Exception as exc: # pylint: disable=broad-except - log.exception(exc) + globals().get("log").exception(exc) pip_list = False for prefix, state_pkg_name, version_spec in pkgs_details: From 3c552ecb907ee956250b0fd09bab96a34b3420af Mon Sep 17 00:00:00 2001 From: Eric Graham Date: Wed, 26 Apr 2023 18:27:11 -0500 Subject: [PATCH 10/25] Add unit test for #64169 --- tests/pytests/unit/states/test_pip.py | 69 +++++++++++++++++++++++++++ 1 file changed, 69 insertions(+) create mode 100644 
tests/pytests/unit/states/test_pip.py diff --git a/tests/pytests/unit/states/test_pip.py b/tests/pytests/unit/states/test_pip.py new file mode 100644 index 00000000000..7e04602ce44 --- /dev/null +++ b/tests/pytests/unit/states/test_pip.py @@ -0,0 +1,69 @@ +""" + :codeauthor: Eric Graham +""" +import logging + +import pytest + +import salt.states.pip_state as pip_state +from salt.exceptions import CommandExecutionError +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def configure_loader_modules(): + return { + pip_state: { + '__env__': 'base', + '__opts__': { + 'test': False + } + } + } + + +def test_issue_64169(caplog): + pkg_to_install = 'nonexistent_package' + exception_message = 'Invalid JSON (test_issue_64169)' + + mock_pip_list = MagicMock(side_effect=[ + CommandExecutionError(exception_message), # pre-cache the pip list (preinstall) + {}, # Checking if the pkg is already installed + {pkg_to_install: '100.10.1'} # Confirming successful installation + ]) + mock_pip_version = MagicMock(return_value='100.10.1') + mock_pip_install = MagicMock(return_value={"retcode": 0, "stdout": ""}) + + with patch.dict(pip_state.__salt__, { + "pip.list": mock_pip_list, + "pip.version": mock_pip_version, + "pip.install": mock_pip_install + }): + with caplog.at_level(logging.WARNING): + # Call pip.installed with a specifically 'broken' pip.list. + # pip.installed should continue, but log the exception from pip.list. + # pip.installed should NOT raise an exception itself. + # noinspection PyBroadException + try: + pip_state.installed( + name=pkg_to_install, + use_wheel=False, # Set False to simplify testing + no_use_wheel=False, # ' + no_binary=False, # ' + log=None # Regression will cause this function call to throw + # an AttributeError + ) + except AttributeError: + # Observed behavior in #64169 + assert False + except: + # Something went wrong, but it isn't what's being tested for here. + return + + # Take 64169 further and actually confirm that the targeted exception from pip.list got logged. + assert exception_message in caplog.messages + + # Confirm that the state continued to install the package as expected. + # Only check the 'pkgs' parameter of pip.install + mock_install_call_args, mock_install_call_kwargs = mock_pip_install.call_args + assert mock_install_call_kwargs['pkgs'] == pkg_to_install From 071a65fb10a72e23b9e22d11f7da6957b2c05f7c Mon Sep 17 00:00:00 2001 From: Eric Graham Date: Mon, 1 May 2023 10:55:29 -0500 Subject: [PATCH 11/25] Rename Global Logger log to logger in pip_state.py --- changelog/64169.fixed.md | 1 + salt/states/pip_state.py | 14 ++++++++------ tests/pytests/unit/states/test_pip.py | 11 +++++++++-- 3 files changed, 18 insertions(+), 8 deletions(-) diff --git a/changelog/64169.fixed.md b/changelog/64169.fixed.md index 499b94b693b..fe80eff1e94 100644 --- a/changelog/64169.fixed.md +++ b/changelog/64169.fixed.md @@ -1 +1,2 @@ Call global logger when catching pip.list exceptions in states.pip.installed +Rename gloabl logger `log` to `logger` inside pip_state \ No newline at end of file diff --git a/salt/states/pip_state.py b/salt/states/pip_state.py index fd99d6bd626..cc5d877c06e 100644 --- a/salt/states/pip_state.py +++ b/salt/states/pip_state.py @@ -114,7 +114,7 @@ if HAS_PIP is True: # pylint: enable=import-error -log = logging.getLogger(__name__) +logger = logging.getLogger(__name__) # Define the module's virtual name __virtualname__ = "pip" @@ -189,10 +189,10 @@ def _check_pkg_version_format(pkg): # vcs+URL urls are not properly parsed. 
# The next line is meant to trigger an AttributeError and # handle lower pip versions - log.debug("Installed pip version: %s", pip.__version__) + logger.debug("Installed pip version: %s", pip.__version__) install_req = _from_line(pkg) except AttributeError: - log.debug("Installed pip version is lower than 1.2") + logger.debug("Installed pip version is lower than 1.2") supported_vcs = ("git", "svn", "hg", "bzr") if pkg.startswith(supported_vcs): for vcs in supported_vcs: @@ -351,7 +351,7 @@ def _pep440_version_cmp(pkg1, pkg2, ignore_epoch=False): making the comparison. """ if HAS_PKG_RESOURCES is False: - log.warning( + logger.warning( "The pkg_resources packages was not loaded. Please install setuptools." ) return None @@ -367,7 +367,7 @@ def _pep440_version_cmp(pkg1, pkg2, ignore_epoch=False): if pkg_resources.parse_version(pkg1) > pkg_resources.parse_version(pkg2): return 1 except Exception as exc: # pylint: disable=broad-except - log.exception(exc) + logger.exception(f'Comparison of package versions "{pkg1}" and "{pkg2}" failed: {exc}') return None @@ -852,7 +852,9 @@ def installed( ) # If we fail, then just send False, and we'll try again in the next function call except Exception as exc: # pylint: disable=broad-except - globals().get("log").exception(exc) + logger.exception( + f'Pre-caching of PIP packages during states.pip.installed failed by exception from pip.list: {exc}' + ) pip_list = False for prefix, state_pkg_name, version_spec in pkgs_details: diff --git a/tests/pytests/unit/states/test_pip.py b/tests/pytests/unit/states/test_pip.py index 7e04602ce44..1b6d8afb364 100644 --- a/tests/pytests/unit/states/test_pip.py +++ b/tests/pytests/unit/states/test_pip.py @@ -60,8 +60,15 @@ def test_issue_64169(caplog): # Something went wrong, but it isn't what's being tested for here. return - # Take 64169 further and actually confirm that the targeted exception from pip.list got logged. - assert exception_message in caplog.messages + # Take 64169 further and actually confirm that the exception from pip.list got logged. + exc_msg_present = False + for log_line in caplog.messages: + # The exception must be somewhere in the log, but may optionally not be on a line by itself. + if exception_message in log_line: + exc_msg_present = True + break + + assert exc_msg_present # Confirm that the state continued to install the package as expected. # Only check the 'pkgs' parameter of pip.install From a467c04d04695b7f61b530668736d84e6a3e1da8 Mon Sep 17 00:00:00 2001 From: Eric Graham Date: Mon, 1 May 2023 15:51:25 -0500 Subject: [PATCH 12/25] Clarify Failing Test Message; Search for Entire Log Line in caplog --- tests/pytests/unit/states/test_pip.py | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/tests/pytests/unit/states/test_pip.py b/tests/pytests/unit/states/test_pip.py index 1b6d8afb364..7d93faa3eb8 100644 --- a/tests/pytests/unit/states/test_pip.py +++ b/tests/pytests/unit/states/test_pip.py @@ -53,22 +53,19 @@ def test_issue_64169(caplog): log=None # Regression will cause this function call to throw # an AttributeError ) - except AttributeError: + except AttributeError as exc: # Observed behavior in #64169 - assert False + pytest.fail( + 'Regression on #64169: pip_state.installed seems to be throwing an unexpected AttributeException: ' + f'{exc}' + ) except: # Something went wrong, but it isn't what's being tested for here. return # Take 64169 further and actually confirm that the exception from pip.list got logged. 
- exc_msg_present = False - for log_line in caplog.messages: - # The exception must be somewhere in the log, but may optionally not be on a line by itself. - if exception_message in log_line: - exc_msg_present = True - break - - assert exc_msg_present + assert 'Pre-caching of PIP packages during states.pip.installed failed by exception ' \ + f'from pip.list: {exception_message}' in caplog.messages # Confirm that the state continued to install the package as expected. # Only check the 'pkgs' parameter of pip.install From db1406a85fca0925f67a7989a1f58d8e928beb2b Mon Sep 17 00:00:00 2001 From: Eric Graham Date: Mon, 1 May 2023 15:53:52 -0500 Subject: [PATCH 13/25] Fix Changelog Typo --- changelog/64169.fixed.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelog/64169.fixed.md b/changelog/64169.fixed.md index fe80eff1e94..d6ce2bf1937 100644 --- a/changelog/64169.fixed.md +++ b/changelog/64169.fixed.md @@ -1,2 +1,2 @@ Call global logger when catching pip.list exceptions in states.pip.installed -Rename gloabl logger `log` to `logger` inside pip_state \ No newline at end of file +Rename global logger `log` to `logger` inside pip_state \ No newline at end of file From 926270054d7b8694a2b17f18d2a924a65650832b Mon Sep 17 00:00:00 2001 From: Eric Graham Date: Mon, 1 May 2023 16:08:55 -0500 Subject: [PATCH 14/25] Remove Silent Catch --- tests/pytests/unit/states/test_pip.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/tests/pytests/unit/states/test_pip.py b/tests/pytests/unit/states/test_pip.py index 7d93faa3eb8..a7cdc106e62 100644 --- a/tests/pytests/unit/states/test_pip.py +++ b/tests/pytests/unit/states/test_pip.py @@ -59,9 +59,6 @@ def test_issue_64169(caplog): 'Regression on #64169: pip_state.installed seems to be throwing an unexpected AttributeException: ' f'{exc}' ) - except: - # Something went wrong, but it isn't what's being tested for here. - return # Take 64169 further and actually confirm that the exception from pip.list got logged. 
assert 'Pre-caching of PIP packages during states.pip.installed failed by exception ' \ From 724fc208248c705dd472bdf5ce27992bed9f08cd Mon Sep 17 00:00:00 2001 From: Eric Graham Date: Tue, 2 May 2023 13:22:13 -0500 Subject: [PATCH 15/25] Run Black Pre-Commit Step --- salt/states/pip_state.py | 16 ++++--- tests/pytests/unit/states/test_pip.py | 61 ++++++++++++++------------- 2 files changed, 40 insertions(+), 37 deletions(-) diff --git a/salt/states/pip_state.py b/salt/states/pip_state.py index cc5d877c06e..de75057adf4 100644 --- a/salt/states/pip_state.py +++ b/salt/states/pip_state.py @@ -251,7 +251,7 @@ def _check_if_installed( index_url, extra_index_url, pip_list=False, - **kwargs + **kwargs, ): """ Takes a package name and version specification (if any) and checks it is @@ -367,7 +367,9 @@ def _pep440_version_cmp(pkg1, pkg2, ignore_epoch=False): if pkg_resources.parse_version(pkg1) > pkg_resources.parse_version(pkg2): return 1 except Exception as exc: # pylint: disable=broad-except - logger.exception(f'Comparison of package versions "{pkg1}" and "{pkg2}" failed: {exc}') + logger.exception( + f'Comparison of package versions "{pkg1}" and "{pkg2}" failed: {exc}' + ) return None @@ -418,7 +420,7 @@ def installed( cache_dir=None, no_binary=None, extra_args=None, - **kwargs + **kwargs, ): """ Make sure the package is installed @@ -853,7 +855,7 @@ def installed( # If we fail, then just send False, and we'll try again in the next function call except Exception as exc: # pylint: disable=broad-except logger.exception( - f'Pre-caching of PIP packages during states.pip.installed failed by exception from pip.list: {exc}' + f"Pre-caching of PIP packages during states.pip.installed failed by exception from pip.list: {exc}" ) pip_list = False @@ -874,7 +876,7 @@ def installed( index_url, extra_index_url, pip_list, - **kwargs + **kwargs, ) # If _check_if_installed result is None, something went wrong with # the command running. This way we keep stateful output. 
@@ -980,7 +982,7 @@ def installed( no_cache_dir=no_cache_dir, extra_args=extra_args, disable_version_check=True, - **kwargs + **kwargs, ) if pip_install_call and pip_install_call.get("retcode", 1) == 0: @@ -1045,7 +1047,7 @@ def installed( user=user, cwd=cwd, env_vars=env_vars, - **kwargs + **kwargs, ) ) diff --git a/tests/pytests/unit/states/test_pip.py b/tests/pytests/unit/states/test_pip.py index a7cdc106e62..307ba5e1e65 100644 --- a/tests/pytests/unit/states/test_pip.py +++ b/tests/pytests/unit/states/test_pip.py @@ -12,33 +12,33 @@ from tests.support.mock import MagicMock, patch @pytest.fixture def configure_loader_modules(): - return { - pip_state: { - '__env__': 'base', - '__opts__': { - 'test': False - } - } - } + return {pip_state: {"__env__": "base", "__opts__": {"test": False}}} def test_issue_64169(caplog): - pkg_to_install = 'nonexistent_package' - exception_message = 'Invalid JSON (test_issue_64169)' + pkg_to_install = "nonexistent_package" + exception_message = "Invalid JSON (test_issue_64169)" - mock_pip_list = MagicMock(side_effect=[ - CommandExecutionError(exception_message), # pre-cache the pip list (preinstall) - {}, # Checking if the pkg is already installed - {pkg_to_install: '100.10.1'} # Confirming successful installation - ]) - mock_pip_version = MagicMock(return_value='100.10.1') + mock_pip_list = MagicMock( + side_effect=[ + CommandExecutionError( + exception_message + ), # pre-cache the pip list (preinstall) + {}, # Checking if the pkg is already installed + {pkg_to_install: "100.10.1"}, # Confirming successful installation + ] + ) + mock_pip_version = MagicMock(return_value="100.10.1") mock_pip_install = MagicMock(return_value={"retcode": 0, "stdout": ""}) - with patch.dict(pip_state.__salt__, { - "pip.list": mock_pip_list, - "pip.version": mock_pip_version, - "pip.install": mock_pip_install - }): + with patch.dict( + pip_state.__salt__, + { + "pip.list": mock_pip_list, + "pip.version": mock_pip_version, + "pip.install": mock_pip_install, + }, + ): with caplog.at_level(logging.WARNING): # Call pip.installed with a specifically 'broken' pip.list. # pip.installed should continue, but log the exception from pip.list. @@ -47,24 +47,25 @@ def test_issue_64169(caplog): try: pip_state.installed( name=pkg_to_install, - use_wheel=False, # Set False to simplify testing + use_wheel=False, # Set False to simplify testing no_use_wheel=False, # ' - no_binary=False, # ' - log=None # Regression will cause this function call to throw - # an AttributeError + no_binary=False, # ' + log=None, # Regression will cause this function call to throw an AttributeError ) except AttributeError as exc: # Observed behavior in #64169 pytest.fail( - 'Regression on #64169: pip_state.installed seems to be throwing an unexpected AttributeException: ' - f'{exc}' + "Regression on #64169: pip_state.installed seems to be throwing an unexpected AttributeException: " + f"{exc}" ) # Take 64169 further and actually confirm that the exception from pip.list got logged. - assert 'Pre-caching of PIP packages during states.pip.installed failed by exception ' \ - f'from pip.list: {exception_message}' in caplog.messages + assert ( + "Pre-caching of PIP packages during states.pip.installed failed by exception " + f"from pip.list: {exception_message}" in caplog.messages + ) # Confirm that the state continued to install the package as expected. 
# Only check the 'pkgs' parameter of pip.install mock_install_call_args, mock_install_call_kwargs = mock_pip_install.call_args - assert mock_install_call_kwargs['pkgs'] == pkg_to_install + assert mock_install_call_kwargs["pkgs"] == pkg_to_install From 83cadc12f560b0c839fcb7dbbe8bcae46eb67c6c Mon Sep 17 00:00:00 2001 From: Eric Graham Date: Wed, 3 May 2023 09:39:26 -0500 Subject: [PATCH 16/25] Add New Line to Changelog --- changelog/64169.fixed.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelog/64169.fixed.md b/changelog/64169.fixed.md index d6ce2bf1937..e8631285aaa 100644 --- a/changelog/64169.fixed.md +++ b/changelog/64169.fixed.md @@ -1,2 +1,2 @@ Call global logger when catching pip.list exceptions in states.pip.installed -Rename global logger `log` to `logger` inside pip_state \ No newline at end of file +Rename global logger `log` to `logger` inside pip_state From bd57d085ad8b0d92e78ed15d701464179ce598ed Mon Sep 17 00:00:00 2001 From: ScriptAutomate Date: Wed, 10 May 2023 14:02:43 -0500 Subject: [PATCH 17/25] Update banners and links --- doc/_themes/saltstack2/layout.html | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/doc/_themes/saltstack2/layout.html b/doc/_themes/saltstack2/layout.html index 04bff89e1fb..001844f7cd2 100644 --- a/doc/_themes/saltstack2/layout.html +++ b/doc/_themes/saltstack2/layout.html @@ -152,7 +152,7 @@ - +
- +
{% endif %} @@ -295,7 +295,7 @@ {% else %} {% endif %} #}--> - + {% if build_type=="next" %} From cb396fe805f31a779f8f4d47dd3e4e72a20ae9fc Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Fri, 19 May 2023 02:04:24 -0700 Subject: [PATCH 18/25] Ubuntu pkg tests run apt non-interactive mode. Issue #64307 --- pkg/tests/support/helpers.py | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/pkg/tests/support/helpers.py b/pkg/tests/support/helpers.py index 57b6ccd4d00..f4f26f0781a 100644 --- a/pkg/tests/support/helpers.py +++ b/pkg/tests/support/helpers.py @@ -596,8 +596,26 @@ class SaltPkgInstall: self.proc.run("launchctl", "disable", f"system/{service_name}") self.proc.run("launchctl", "bootout", "system", str(plist_file)) elif upgrade: + env = os.environ.copy() + extra_args = [] + if self.distro_id in ("ubuntu", "debian"): + env["DEBIAN_FRONTEND"] = "noninteractive" + extra_args = [ + "-o", + "DPkg::Options::=--force-confdef", + "-o", + "DPkg::Options::=--force-confold", + ] log.info("Installing packages:\n%s", pprint.pformat(self.pkgs)) - ret = self.proc.run(self.pkg_mngr, "upgrade", "-y", *self.pkgs) + args = extra_args + self.pkgs + ret = self.proc.run( + self.pkg_mngr, + "upgrade", + "-y", + *args, + _timeout=120, + env=env, + ) else: log.info("Installing packages:\n%s", pprint.pformat(self.pkgs)) ret = self.proc.run(self.pkg_mngr, "install", "-y", *self.pkgs) From 9dffea3178a0c183aafd322058e8bed8826441bd Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Mon, 22 May 2023 03:15:56 -0700 Subject: [PATCH 19/25] Check return code instead of stdout --- pkg/tests/support/helpers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/tests/support/helpers.py b/pkg/tests/support/helpers.py index f4f26f0781a..9853c441870 100644 --- a/pkg/tests/support/helpers.py +++ b/pkg/tests/support/helpers.py @@ -621,7 +621,7 @@ class SaltPkgInstall: ret = self.proc.run(self.pkg_mngr, "install", "-y", *self.pkgs) if not platform.is_darwin() and not platform.is_windows(): # Make sure we don't have any trailing references to old package file locations - assert "No such file or directory" not in ret.stdout + ret.returncode == 0 assert "/saltstack/salt/run" not in ret.stdout log.info(ret) self._check_retcode(ret) From eb71862449b93f496c678d892e0c0ad827278136 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 22 May 2023 16:35:31 +0100 Subject: [PATCH 20/25] Sometimes the first page does not have any results. Try next page if there's a next token. 
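This is the standard manual-pagination loop for boto3 describe_* calls. A
minimal sketch of the pattern being introduced (illustrative only; `client`
is the EC2 client created just above in tools/vm.py and `filters` stands in
for the existing tag filters):

    launch_template_name = None
    next_token = None
    while launch_template_name is None:
        kwargs = {"Filters": filters}
        if next_token:
            # Only pass NextToken once the previous page returned one
            kwargs["NextToken"] = next_token
        response = client.describe_launch_templates(**kwargs)
        for details in response.get("LaunchTemplates", []):
            # Take the first matching template
            launch_template_name = details["LaunchTemplateName"]
            break
        next_token = response.get("NextToken")
        if launch_template_name is None and not next_token:
            # No match on this page and no further pages
            break

The actual change below keeps the existing filters, debug logging and the
warning about multiple matching templates.
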
Signed-off-by: Pedro Algarvio --- tools/vm.py | 75 ++++++++++++++++++++++++++++++----------------------- 1 file changed, 42 insertions(+), 33 deletions(-) diff --git a/tools/vm.py b/tools/vm.py index 944f2fe6cc2..f7b2837ae1b 100644 --- a/tools/vm.py +++ b/tools/vm.py @@ -720,41 +720,50 @@ class VM: client = boto3.client("ec2", region_name=self.region_name) # Let's search for the launch template corresponding to this AMI launch_template_name = None + next_token = "" try: - response = response = client.describe_launch_templates( - Filters=[ - { - "Name": "tag:spb:is-golden-image-template", - "Values": ["true"], - }, - { - "Name": "tag:spb:project", - "Values": ["salt-project"], - }, - { - "Name": "tag:spb:environment", - "Values": [environment], - }, - { - "Name": "tag:spb:image-id", - "Values": [self.config.ami], - }, - ] - ) - log.debug( - "Search for launch template response:\n%s", pprint.pformat(response) - ) - for details in response.get("LaunchTemplates"): - if launch_template_name is not None: - log.warning( - "Multiple launch templates for the same AMI. This is not " - "supposed to happen. Picked the first one listed: %s", - response, - ) - break - launch_template_name = details["LaunchTemplateName"] + while True: + response = response = client.describe_launch_templates( + Filters=[ + { + "Name": "tag:spb:is-golden-image-template", + "Values": ["true"], + }, + { + "Name": "tag:spb:project", + "Values": ["salt-project"], + }, + { + "Name": "tag:spb:environment", + "Values": [environment], + }, + { + "Name": "tag:spb:image-id", + "Values": [self.config.ami], + }, + ], + NextToken=next_token, + ) + log.debug( + "Search for launch template response:\n%s", + pprint.pformat(response), + ) + for details in response.get("LaunchTemplates"): + if launch_template_name is not None: + log.warning( + "Multiple launch templates for the same AMI. This is not " + "supposed to happen. Picked the first one listed: %s", + response, + ) + break + launch_template_name = details["LaunchTemplateName"] - if launch_template_name is None: + if launch_template_name is not None: + break + + next_token = response.get("NextToken") + if next_token: + continue self.ctx.error(f"Could not find a launch template for {self.name!r}") self.ctx.exit(1) except ClientError as exc: From d933bec9892c6cfc729218d3b11a4e1d169e385f Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 22 May 2023 11:34:49 +0100 Subject: [PATCH 21/25] Try harder to detect what is the target release for changelog generation. 
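The workflows used to decide between next-minor and next-major release notes
by checking whether `github.base_ref || github.ref_name` was `master`. That
single string comparison is fragile across the different events these
workflows run on (pull_request, push, schedule, tags), so the decision now
lives in a dedicated `tools ci get-release-changelog-target` command and the
workflows consume its `release-changelog-target` output instead.

Roughly, the intent is the following (illustrative sketch only; the exact
output values and environment handling live in tools/ci.py, `event_name` is
the command's argument, and the branch list comes from the new
`release-branches` key in cicd/shared-gh-workflows-context.yml):

    import os
    import pathlib

    import yaml

    shared_context = yaml.safe_load(
        pathlib.Path("cicd/shared-gh-workflows-context.yml").read_text()
    )
    release_branches = shared_context["release-branches"]
    # Pull requests target their base branch; other events run on a branch or tag
    if event_name == "pull_request":
        target_branch = os.environ["GITHUB_BASE_REF"]
    else:
        target_branch = os.environ.get("GITHUB_REF_NAME", "")
    if target_branch in release_branches:
        # Changelog targets the next minor release of this branch
        release_changelog_target = "latest-release"  # exact value illustrative
    else:
        release_changelog_target = "next-major-release"
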
Signed-off-by: Pedro Algarvio --- .github/workflows/ci.yml | 10 +++- .github/workflows/nightly.yml | 10 +++- .github/workflows/scheduled.yml | 10 +++- .github/workflows/staging.yml | 10 +++- .github/workflows/templates/ci.yml.jinja | 4 +- .github/workflows/templates/layout.yml.jinja | 7 +++ cicd/shared-gh-workflows-context.yml | 2 + tools/ci.py | 56 ++++++++++++++++++++ 8 files changed, 99 insertions(+), 10 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d4afb4f49df..a286234889a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -39,6 +39,7 @@ jobs: cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} latest-release: ${{ steps.get-salt-releases.outputs.latest-release }} releases: ${{ steps.get-salt-releases.outputs.releases }} + release-changelog-target: ${{ steps.get-release-changelog-target.outputs.release-changelog-target }} steps: - uses: actions/checkout@v3 with: @@ -243,6 +244,11 @@ jobs: id: set-cache-seed run: | echo "cache-seed=${{ env.CACHE_SEED }}" >> "$GITHUB_OUTPUT" + + - name: Get Release Changelog Target + id: get-release-changelog-target + run: | + tools ci get-release-changelog-target ${{ github.event_name }} pre-commit: name: Pre-Commit if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} @@ -320,7 +326,7 @@ jobs: shell: bash if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | - if [ "${{ github.base_ref || github.ref_name }}" == "master" ]; then + if [ "${{ needs.prepare-workflow.outputs.release-changelog-target }}" == "next-major-release" ]; then tools changelog update-release-notes --next-release --template-only else tools changelog update-release-notes --template-only @@ -330,7 +336,7 @@ jobs: shell: bash if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | - if [ "${{ github.base_ref || github.ref_name }}" == "master" ]; then + if [ "${{ needs.prepare-workflow.outputs.release-changelog-target }}" == "next-major-release" ]; then tools changelog update-release-notes --draft --next-release tools changelog update-release-notes --next-release else diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 69edd3bc2ae..1bdcfe65086 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -92,6 +92,7 @@ jobs: cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} latest-release: ${{ steps.get-salt-releases.outputs.latest-release }} releases: ${{ steps.get-salt-releases.outputs.releases }} + release-changelog-target: ${{ steps.get-release-changelog-target.outputs.release-changelog-target }} steps: - uses: actions/checkout@v3 with: @@ -296,6 +297,11 @@ jobs: id: set-cache-seed run: | echo "cache-seed=${{ env.CACHE_SEED }}" >> "$GITHUB_OUTPUT" + + - name: Get Release Changelog Target + id: get-release-changelog-target + run: | + tools ci get-release-changelog-target ${{ github.event_name }} pre-commit: name: Pre-Commit if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} @@ -373,7 +379,7 @@ jobs: shell: bash if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | - if [ "${{ github.base_ref || github.ref_name }}" == "master" ]; then + if [ "${{ needs.prepare-workflow.outputs.release-changelog-target }}" == "next-major-release" ]; then tools changelog update-release-notes --next-release --template-only else tools changelog update-release-notes --template-only @@ -383,7 +389,7 @@ jobs: shell: bash if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | - if [ 
"${{ github.base_ref || github.ref_name }}" == "master" ]; then + if [ "${{ needs.prepare-workflow.outputs.release-changelog-target }}" == "next-major-release" ]; then tools changelog update-release-notes --draft --next-release tools changelog update-release-notes --next-release else diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index 7e45d9a095b..368089af2df 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -82,6 +82,7 @@ jobs: cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} latest-release: ${{ steps.get-salt-releases.outputs.latest-release }} releases: ${{ steps.get-salt-releases.outputs.releases }} + release-changelog-target: ${{ steps.get-release-changelog-target.outputs.release-changelog-target }} steps: - uses: actions/checkout@v3 with: @@ -286,6 +287,11 @@ jobs: id: set-cache-seed run: | echo "cache-seed=${{ env.CACHE_SEED }}" >> "$GITHUB_OUTPUT" + + - name: Get Release Changelog Target + id: get-release-changelog-target + run: | + tools ci get-release-changelog-target ${{ github.event_name }} pre-commit: name: Pre-Commit if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} @@ -363,7 +369,7 @@ jobs: shell: bash if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | - if [ "${{ github.base_ref || github.ref_name }}" == "master" ]; then + if [ "${{ needs.prepare-workflow.outputs.release-changelog-target }}" == "next-major-release" ]; then tools changelog update-release-notes --next-release --template-only else tools changelog update-release-notes --template-only @@ -373,7 +379,7 @@ jobs: shell: bash if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | - if [ "${{ github.base_ref || github.ref_name }}" == "master" ]; then + if [ "${{ needs.prepare-workflow.outputs.release-changelog-target }}" == "next-major-release" ]; then tools changelog update-release-notes --draft --next-release tools changelog update-release-notes --next-release else diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index dba7145bc1e..424f47c363e 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -72,6 +72,7 @@ jobs: cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} latest-release: ${{ steps.get-salt-releases.outputs.latest-release }} releases: ${{ steps.get-salt-releases.outputs.releases }} + release-changelog-target: ${{ steps.get-release-changelog-target.outputs.release-changelog-target }} steps: - uses: actions/checkout@v3 with: @@ -282,6 +283,11 @@ jobs: id: set-cache-seed run: | echo "cache-seed=${{ env.CACHE_SEED }}" >> "$GITHUB_OUTPUT" + + - name: Get Release Changelog Target + id: get-release-changelog-target + run: | + tools ci get-release-changelog-target ${{ github.event_name }} pre-commit: name: Pre-Commit if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} @@ -363,7 +369,7 @@ jobs: shell: bash if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | - if [ "${{ github.base_ref || github.ref_name }}" == "master" ]; then + if [ "${{ needs.prepare-workflow.outputs.release-changelog-target }}" == "next-major-release" ]; then tools changelog update-release-notes --next-release --template-only else tools changelog update-release-notes --template-only @@ -373,7 +379,7 @@ jobs: shell: bash if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | - if [ "${{ github.base_ref || github.ref_name }}" == "master" ]; then + if [ "${{ 
needs.prepare-workflow.outputs.release-changelog-target }}" == "next-major-release" ]; then tools changelog update-release-notes --draft --release --next-release tools changelog update-release-notes --release --next-release else diff --git a/.github/workflows/templates/ci.yml.jinja b/.github/workflows/templates/ci.yml.jinja index 080967fa583..2ed95a9218f 100644 --- a/.github/workflows/templates/ci.yml.jinja +++ b/.github/workflows/templates/ci.yml.jinja @@ -116,7 +116,7 @@ on: shell: bash if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | - if [ "${{ github.base_ref || github.ref_name }}" == "master" ]; then + if [ "${{ needs.prepare-workflow.outputs.release-changelog-target }}" == "next-major-release" ]; then tools changelog update-release-notes --next-release --template-only else tools changelog update-release-notes --template-only @@ -126,7 +126,7 @@ on: shell: bash if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | - if [ "${{ github.base_ref || github.ref_name }}" == "master" ]; then + if [ "${{ needs.prepare-workflow.outputs.release-changelog-target }}" == "next-major-release" ]; then tools changelog update-release-notes --draft <%- if prepare_actual_release %> --release <%- endif %> --next-release tools changelog update-release-notes <%- if prepare_actual_release %> --release <%- endif %> --next-release else diff --git a/.github/workflows/templates/layout.yml.jinja b/.github/workflows/templates/layout.yml.jinja index 59c2493b485..8fa64b89ad9 100644 --- a/.github/workflows/templates/layout.yml.jinja +++ b/.github/workflows/templates/layout.yml.jinja @@ -91,6 +91,7 @@ jobs: cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} latest-release: ${{ steps.get-salt-releases.outputs.latest-release }} releases: ${{ steps.get-salt-releases.outputs.releases }} + release-changelog-target: ${{ steps.get-release-changelog-target.outputs.release-changelog-target }} steps: - uses: actions/checkout@v3 with: @@ -305,6 +306,12 @@ jobs: id: set-cache-seed run: | echo "cache-seed=${{ env.CACHE_SEED }}" >> "$GITHUB_OUTPUT" + + - name: Get Release Changelog Target + id: get-release-changelog-target + run: | + tools ci get-release-changelog-target ${{ github.event_name }} + <%- endblock prepare_workflow_job %> <%- endif %> diff --git a/cicd/shared-gh-workflows-context.yml b/cicd/shared-gh-workflows-context.yml index ec3d939fe03..f304a534af8 100644 --- a/cicd/shared-gh-workflows-context.yml +++ b/cicd/shared-gh-workflows-context.yml @@ -2,3 +2,5 @@ python_version_linux: "3.10.11" python_version_macos: "3.10.11" python_version_windows: "3.10.11" relenv_version: "0.12.3" +release-branches: + - "3006.x" diff --git a/tools/ci.py b/tools/ci.py index ba7a7c2f849..db83c4e776f 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -11,6 +11,7 @@ import pathlib import time from typing import TYPE_CHECKING +import yaml from ptscripts import Context, command_group import tools.utils @@ -672,3 +673,58 @@ def get_releases(ctx: Context, repository: str = "saltstack/salt"): wfh.write(f"latest-release={latest}\n") wfh.write(f"releases={json.dumps(str_releases)}\n") ctx.exit(0) + + +@ci.command( + name="get-release-changelog-target", + arguments={ + "event_name": { + "help": "The name of the GitHub event being processed.", + }, + }, +) +def get_release_changelog_target(ctx: Context, event_name: str): + """ + Define which kind of release notes should be generated, next minor or major. 
+ """ + gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None + if gh_event_path is None: + ctx.warn("The 'GITHUB_EVENT_PATH' variable is not set.") + ctx.exit(1) + + if TYPE_CHECKING: + assert gh_event_path is not None + + try: + gh_event = json.loads(open(gh_event_path).read()) + except Exception as exc: + ctx.error(f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc) + ctx.exit(1) + + github_output = os.environ.get("GITHUB_OUTPUT") + if github_output is None: + ctx.warn("The 'GITHUB_OUTPUT' variable is not set.") + ctx.exit(1) + + if TYPE_CHECKING: + assert github_output is not None + + shared_context_file = ( + tools.utils.REPO_ROOT / "cicd" / "shared-gh-workflows-context.yml" + ) + shared_context = yaml.safe_load(shared_context_file.read_text()) + release_branches = shared_context["release-branches"] + + release_changelog_target = "next-major-release" + if event_name == "pull_request": + if gh_event["pull_request"]["base"]["ref"] in release_branches: + release_changelog_target = "next-minor-release" + + else: + for branch_name in release_branches: + if branch_name in gh_event["ref"]: + release_changelog_target = "next-minor-release" + break + with open(github_output, "a", encoding="utf-8") as wfh: + wfh.write(f"release-changelog-target={release_changelog_target}\n") + ctx.exit(0) From a81f58f37d21861567dc3d7feb22eaca2c03efd0 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 22 May 2023 11:41:06 +0100 Subject: [PATCH 22/25] Define a variable for the path to `shared-gh-workflows-context.yml` Signed-off-by: Pedro Algarvio --- tools/ci.py | 5 ++--- tools/pkg/build.py | 5 +---- tools/pre_commit.py | 5 ++--- tools/utils.py | 3 +++ 4 files changed, 8 insertions(+), 10 deletions(-) diff --git a/tools/ci.py b/tools/ci.py index db83c4e776f..a554d798ec5 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -709,10 +709,9 @@ def get_release_changelog_target(ctx: Context, event_name: str): if TYPE_CHECKING: assert github_output is not None - shared_context_file = ( - tools.utils.REPO_ROOT / "cicd" / "shared-gh-workflows-context.yml" + shared_context = yaml.safe_load( + tools.utils.SHARED_WORKFLOW_CONTEXT_FILEPATH.read_text() ) - shared_context = yaml.safe_load(shared_context_file.read_text()) release_branches = shared_context["release-branches"] release_changelog_target = "next-major-release" diff --git a/tools/pkg/build.py b/tools/pkg/build.py index b373338a99e..8a99ba5eca4 100644 --- a/tools/pkg/build.py +++ b/tools/pkg/build.py @@ -30,10 +30,7 @@ build = command_group( def _get_shared_constants(): - shared_constants = ( - tools.utils.REPO_ROOT / "cicd" / "shared-gh-workflows-context.yml" - ) - return yaml.safe_load(shared_constants.read_text()) + return yaml.safe_load(tools.utils.SHARED_WORKFLOW_CONTEXT_FILEPATH.read_text()) @build.command( diff --git a/tools/pre_commit.py b/tools/pre_commit.py index af054876d80..c272d26821f 100644 --- a/tools/pre_commit.py +++ b/tools/pre_commit.py @@ -116,10 +116,9 @@ def generate_workflows(ctx: Context): "prepare_workflow_needs": NeedsTracker(), "build_repo_needs": NeedsTracker(), } - shared_context_file = ( - tools.utils.REPO_ROOT / "cicd" / "shared-gh-workflows-context.yml" + shared_context = yaml.safe_load( + tools.utils.SHARED_WORKFLOW_CONTEXT_FILEPATH.read_text() ) - shared_context = yaml.safe_load(shared_context_file.read_text()) for key, value in shared_context.items(): context[key] = value loaded_template = env.get_template(template_path.name) diff --git a/tools/utils.py b/tools/utils.py index 28a79745844..8369d25eafe 100644 
--- a/tools/utils.py +++ b/tools/utils.py @@ -38,6 +38,9 @@ SPB_ENVIRONMENT = os.environ.get("SPB_ENVIRONMENT") or "prod" STAGING_BUCKET_NAME = f"salt-project-{SPB_ENVIRONMENT}-salt-artifacts-staging" RELEASE_BUCKET_NAME = f"salt-project-{SPB_ENVIRONMENT}-salt-artifacts-release" BACKUP_BUCKET_NAME = f"salt-project-{SPB_ENVIRONMENT}-salt-artifacts-backup" +SHARED_WORKFLOW_CONTEXT_FILEPATH = ( + REPO_ROOT / "cicd" / "shared-gh-workflows-context.yml" +) class UpdateProgress: From f3edefc93a1f5c5ef927a2e97f9461b591701e28 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 22 May 2023 11:05:12 +0100 Subject: [PATCH 23/25] Update to latest golden images Signed-off-by: Pedro Algarvio --- cicd/amis.yml | 2 +- cicd/golden-images.json | 132 ++++++++++++++++++++-------------------- 2 files changed, 67 insertions(+), 67 deletions(-) diff --git a/cicd/amis.yml b/cicd/amis.yml index 47edcf0184b..8fb4513180f 100644 --- a/cicd/amis.yml +++ b/cicd/amis.yml @@ -1 +1 @@ -centosstream-9-x86_64: ami-044545f7a74d46acc +centosstream-9-x86_64: ami-0bd92f4dca5d74017 diff --git a/cicd/golden-images.json b/cicd/golden-images.json index 75341e64aeb..02c3ee0977c 100644 --- a/cicd/golden-images.json +++ b/cicd/golden-images.json @@ -1,8 +1,8 @@ { "almalinux-8-arm64": { - "ami": "ami-0fc1e14bf9ff422aa", + "ami": "ami-05c1d3dbdeeb94bc6", "ami_description": "CI Image of AlmaLinux 8 arm64", - "ami_name": "salt-project/ci/almalinux/8/arm64/20230418.1731", + "ami_name": "salt-project/ci/almalinux/8/arm64/20230522.0606", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -10,9 +10,9 @@ "ssh_username": "ec2-user" }, "almalinux-8": { - "ami": "ami-0bae4158c1f126271", + "ami": "ami-0ec1cbc531f10105b", "ami_description": "CI Image of AlmaLinux 8 x86_64", - "ami_name": "salt-project/ci/almalinux/8/x86_64/20230418.1732", + "ami_name": "salt-project/ci/almalinux/8/x86_64/20230522.0606", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -20,9 +20,9 @@ "ssh_username": "ec2-user" }, "almalinux-9-arm64": { - "ami": "ami-08f4d0fbf5d53c3ab", + "ami": "ami-036c495af9dfcf852", "ami_description": "CI Image of AlmaLinux 9 arm64", - "ami_name": "salt-project/ci/almalinux/9/arm64/20230418.1732", + "ami_name": "salt-project/ci/almalinux/9/arm64/20230522.0606", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -30,9 +30,9 @@ "ssh_username": "ec2-user" }, "almalinux-9": { - "ami": "ami-00404c1cc5c5a08bd", + "ami": "ami-0dbc7030666419671", "ami_description": "CI Image of AlmaLinux 9 x86_64", - "ami_name": "salt-project/ci/almalinux/9/x86_64/20230418.1738", + "ami_name": "salt-project/ci/almalinux/9/x86_64/20230522.0606", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -40,9 +40,9 @@ "ssh_username": "ec2-user" }, "amazonlinux-2-arm64": { - "ami": "ami-05fbdb644d06c27b6", + "ami": "ami-022232915c2a5f2d0", "ami_description": "CI Image of AmazonLinux 2 arm64", - "ami_name": "salt-project/ci/amazonlinux/2/arm64/20230418.1717", + "ami_name": "salt-project/ci/amazonlinux/2/arm64/20230522.0621", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -50,9 +50,9 @@ "ssh_username": "ec2-user" }, "amazonlinux-2": { - "ami": "ami-014171e6c30ec8387", + "ami": "ami-0695f87baa5b5ce15", "ami_description": "CI Image of AmazonLinux 2 x86_64", - "ami_name": "salt-project/ci/amazonlinux/2/x86_64/20230418.1718", + "ami_name": "salt-project/ci/amazonlinux/2/x86_64/20230522.0620", 
"arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -60,9 +60,9 @@ "ssh_username": "ec2-user" }, "archlinux-lts": { - "ami": "ami-00cff81ed2e2fb0f4", + "ami": "ami-0f6424847f98afc04", "ami_description": "CI Image of ArchLinux lts x86_64", - "ami_name": "salt-project/ci/archlinux/lts/x86_64/20230418.1717", + "ami_name": "salt-project/ci/archlinux/lts/x86_64/20230522.0606", "arch": "x86_64", "cloudwatch-agent-available": "false", "instance_type": "t3a.large", @@ -70,9 +70,9 @@ "ssh_username": "arch" }, "centos-7-arm64": { - "ami": "ami-051cef43c13fcc0c9", + "ami": "ami-0908831c364e33a37", "ami_description": "CI Image of CentOS 7 arm64", - "ami_name": "salt-project/ci/centos/7/arm64/20230418.1743", + "ami_name": "salt-project/ci/centos/7/arm64/20230522.0606", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -80,9 +80,9 @@ "ssh_username": "centos" }, "centos-7": { - "ami": "ami-0dcc94e1bea829149", + "ami": "ami-0ace33028ada62ddb", "ami_description": "CI Image of CentOS 7 x86_64", - "ami_name": "salt-project/ci/centos/7/x86_64/20230418.1743", + "ami_name": "salt-project/ci/centos/7/x86_64/20230522.0606", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -90,9 +90,9 @@ "ssh_username": "centos" }, "centosstream-8-arm64": { - "ami": "ami-02783136c1080c782", + "ami": "ami-0b30827dc592b2695", "ami_description": "CI Image of CentOSStream 8 arm64", - "ami_name": "salt-project/ci/centosstream/8/arm64/20230418.1717", + "ami_name": "salt-project/ci/centosstream/8/arm64/20230522.0618", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -100,9 +100,9 @@ "ssh_username": "centos" }, "centosstream-8": { - "ami": "ami-055e35dc7180defad", + "ami": "ami-0929882a7e5cfba5f", "ami_description": "CI Image of CentOSStream 8 x86_64", - "ami_name": "salt-project/ci/centosstream/8/x86_64/20230418.1717", + "ami_name": "salt-project/ci/centosstream/8/x86_64/20230522.0618", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -110,9 +110,9 @@ "ssh_username": "centos" }, "centosstream-9-arm64": { - "ami": "ami-06fd13f7c7c702fc4", + "ami": "ami-00700fb8821b8b8c7", "ami_description": "CI Image of CentOSStream 9 arm64", - "ami_name": "salt-project/ci/centosstream/9/arm64/20230418.1717", + "ami_name": "salt-project/ci/centosstream/9/arm64/20230522.0619", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -120,9 +120,9 @@ "ssh_username": "ec2-user" }, "centosstream-9": { - "ami": "ami-044545f7a74d46acc", + "ami": "ami-0bd92f4dca5d74017", "ami_description": "CI Image of CentOSStream 9 x86_64", - "ami_name": "salt-project/ci/centosstream/9/x86_64/20230418.1717", + "ami_name": "salt-project/ci/centosstream/9/x86_64/20230522.0619", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -130,9 +130,9 @@ "ssh_username": "ec2-user" }, "debian-10-arm64": { - "ami": "ami-045aedc47e7ddfbf1", + "ami": "ami-0f681fc9d5de0c3df", "ami_description": "CI Image of Debian 10 arm64", - "ami_name": "salt-project/ci/debian/10/arm64/20230418.1739", + "ami_name": "salt-project/ci/debian/10/arm64/20230522.0606", "arch": "arm64", "cloudwatch-agent-available": "false", "instance_type": "m6g.large", @@ -140,9 +140,9 @@ "ssh_username": "admin" }, "debian-10": { - "ami": "ami-0a205a9361210b291", + "ami": "ami-0dcf5610590139238", "ami_description": "CI Image of Debian 10 x86_64", - "ami_name": 
"salt-project/ci/debian/10/x86_64/20230418.1739", + "ami_name": "salt-project/ci/debian/10/x86_64/20230522.0606", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -150,9 +150,9 @@ "ssh_username": "admin" }, "debian-11-arm64": { - "ami": "ami-0be71acc27a8efa60", + "ami": "ami-062b4bf11a864825c", "ami_description": "CI Image of Debian 11 arm64", - "ami_name": "salt-project/ci/debian/11/arm64/20230418.1739", + "ami_name": "salt-project/ci/debian/11/arm64/20230522.0606", "arch": "arm64", "cloudwatch-agent-available": "false", "instance_type": "m6g.large", @@ -160,9 +160,9 @@ "ssh_username": "admin" }, "debian-11": { - "ami": "ami-0ad354da27b34289b", + "ami": "ami-0f400e5fa6806bbca", "ami_description": "CI Image of Debian 11 x86_64", - "ami_name": "salt-project/ci/debian/11/x86_64/20230418.1742", + "ami_name": "salt-project/ci/debian/11/x86_64/20230522.0606", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -170,9 +170,9 @@ "ssh_username": "admin" }, "fedora-36-arm64": { - "ami": "ami-00c0ab2829c887922", + "ami": "ami-06dbaabd32b4c2502", "ami_description": "CI Image of Fedora 36 arm64", - "ami_name": "salt-project/ci/fedora/36/arm64/20230418.1726", + "ami_name": "salt-project/ci/fedora/36/arm64/20230522.0606", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -180,9 +180,9 @@ "ssh_username": "fedora" }, "fedora-36": { - "ami": "ami-0185a1189bff7c771", + "ami": "ami-0b55732c36731876f", "ami_description": "CI Image of Fedora 36 x86_64", - "ami_name": "salt-project/ci/fedora/36/x86_64/20230418.1726", + "ami_name": "salt-project/ci/fedora/36/x86_64/20230522.0606", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -190,9 +190,9 @@ "ssh_username": "fedora" }, "fedora-37-arm64": { - "ami": "ami-075c52fda843ace1b", + "ami": "ami-0d71d6f2b0869842f", "ami_description": "CI Image of Fedora 37 arm64", - "ami_name": "salt-project/ci/fedora/37/arm64/20230418.1726", + "ami_name": "salt-project/ci/fedora/37/arm64/20230522.0606", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -200,9 +200,9 @@ "ssh_username": "fedora" }, "fedora-37": { - "ami": "ami-099a68403d6c65733", + "ami": "ami-026f494dd4b9d40e8", "ami_description": "CI Image of Fedora 37 x86_64", - "ami_name": "salt-project/ci/fedora/37/x86_64/20230418.1726", + "ami_name": "salt-project/ci/fedora/37/x86_64/20230522.0606", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -210,9 +210,9 @@ "ssh_username": "fedora" }, "fedora-38-arm64": { - "ami": "ami-02fa22d081a9be052", + "ami": "ami-01ba8a7951daf68fb", "ami_description": "CI Image of Fedora 38 arm64", - "ami_name": "salt-project/ci/fedora/38/arm64/20230418.1727", + "ami_name": "salt-project/ci/fedora/38/arm64/20230522.0606", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -220,9 +220,9 @@ "ssh_username": "fedora" }, "fedora-38": { - "ami": "ami-0a8d949d0bb15bbc0", + "ami": "ami-0699dbe70b69e96aa", "ami_description": "CI Image of Fedora 38 x86_64", - "ami_name": "salt-project/ci/fedora/38/x86_64/20230418.1727", + "ami_name": "salt-project/ci/fedora/38/x86_64/20230522.0606", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -230,9 +230,9 @@ "ssh_username": "fedora" }, "opensuse-15": { - "ami": "ami-089ac311f924f131f", + "ami": "ami-0c594da84f6e1cd96", "ami_description": "CI Image of Opensuse 15 
x86_64", - "ami_name": "salt-project/ci/opensuse/15/x86_64/20230418.1744", + "ami_name": "salt-project/ci/opensuse/15/x86_64/20230522.0619", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -240,9 +240,9 @@ "ssh_username": "ec2-user" }, "photonos-3": { - "ami": "ami-03ce6db789f90957b", + "ami": "ami-0db2ebdb9bc3400ef", "ami_description": "CI Image of PhotonOS 3 x86_64", - "ami_name": "salt-project/ci/photonos/3/x86_64/20230418.1717", + "ami_name": "salt-project/ci/photonos/3/x86_64/20230522.0617", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -250,9 +250,9 @@ "ssh_username": "root" }, "photonos-4": { - "ami": "ami-0ef9996c398479d65", + "ami": "ami-08a6b6bbf6779a538", "ami_description": "CI Image of PhotonOS 4 x86_64", - "ami_name": "salt-project/ci/photonos/4/x86_64/20230418.1717", + "ami_name": "salt-project/ci/photonos/4/x86_64/20230522.0606", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -260,9 +260,9 @@ "ssh_username": "root" }, "ubuntu-20.04-arm64": { - "ami": "ami-0c4d21e0772489c0d", + "ami": "ami-0dccc0de7a38cca90", "ami_description": "CI Image of Ubuntu 20.04 arm64", - "ami_name": "salt-project/ci/ubuntu/20.04/arm64/20230418.1728", + "ami_name": "salt-project/ci/ubuntu/20.04/arm64/20230522.0606", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -270,9 +270,9 @@ "ssh_username": "ubuntu" }, "ubuntu-20.04": { - "ami": "ami-09ae6200865b29b9b", + "ami": "ami-05e51f893a626b579", "ami_description": "CI Image of Ubuntu 20.04 x86_64", - "ami_name": "salt-project/ci/ubuntu/20.04/x86_64/20230418.1728", + "ami_name": "salt-project/ci/ubuntu/20.04/x86_64/20230522.0606", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -280,9 +280,9 @@ "ssh_username": "ubuntu" }, "ubuntu-22.04-arm64": { - "ami": "ami-024fe5d0b838f88f7", + "ami": "ami-0c958272da6c09ca6", "ami_description": "CI Image of Ubuntu 22.04 arm64", - "ami_name": "salt-project/ci/ubuntu/22.04/arm64/20230418.1731", + "ami_name": "salt-project/ci/ubuntu/22.04/arm64/20230522.0606", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -290,9 +290,9 @@ "ssh_username": "ubuntu" }, "ubuntu-22.04": { - "ami": "ami-0d83f00f084d91451", + "ami": "ami-09e45f31ccafcdcec", "ami_description": "CI Image of Ubuntu 22.04 x86_64", - "ami_name": "salt-project/ci/ubuntu/22.04/x86_64/20230418.1732", + "ami_name": "salt-project/ci/ubuntu/22.04/x86_64/20230522.0606", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -300,9 +300,9 @@ "ssh_username": "ubuntu" }, "windows-2016": { - "ami": "ami-078d9229cfaf24d1b", + "ami": "ami-099db55543619f54a", "ami_description": "CI Image of Windows 2016 x86_64", - "ami_name": "salt-project/ci/windows/2016/x86_64/20230418.1717", + "ami_name": "salt-project/ci/windows/2016/x86_64/20230522.0606", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.xlarge", @@ -310,9 +310,9 @@ "ssh_username": "Administrator" }, "windows-2019": { - "ami": "ami-0ab20823965e1aa7a", + "ami": "ami-0860ee5bc9ee93e13", "ami_description": "CI Image of Windows 2019 x86_64", - "ami_name": "salt-project/ci/windows/2019/x86_64/20230418.1717", + "ami_name": "salt-project/ci/windows/2019/x86_64/20230522.0606", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.xlarge", @@ -320,9 +320,9 @@ "ssh_username": "Administrator" }, 
"windows-2022": { - "ami": "ami-054c4cf04c0f31eb1", + "ami": "ami-032e3abce2aa98da7", "ami_description": "CI Image of Windows 2022 x86_64", - "ami_name": "salt-project/ci/windows/2022/x86_64/20230418.1717", + "ami_name": "salt-project/ci/windows/2022/x86_64/20230522.0606", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.xlarge", From 80ff2f662dc038f56b650e07ea0fb45a5db6bb0d Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 22 May 2023 19:19:37 +0100 Subject: [PATCH 24/25] Skip `tests/unit/{modules,states}/test_zcbuildout.py` on windows. It needs special work on the golden images to get SSL to properly work. These steps are required because the code being tested is using `easy_install` which does not know how to get certificates from `certifi`. Since `easy_install` is too old, and deprecated, the extra work is not worth it, plus, they are still being tested on other platforms. Signed-off-by: Pedro Algarvio --- tests/unit/modules/test_zcbuildout.py | 9 +++++++++ tests/unit/states/test_zcbuildout.py | 9 +++++++++ 2 files changed, 18 insertions(+) diff --git a/tests/unit/modules/test_zcbuildout.py b/tests/unit/modules/test_zcbuildout.py index f793e3fc3f8..ac98435ffa0 100644 --- a/tests/unit/modules/test_zcbuildout.py +++ b/tests/unit/modules/test_zcbuildout.py @@ -19,6 +19,15 @@ from tests.support.mixins import LoaderModuleMockMixin from tests.support.runtests import RUNTIME_VARS from tests.support.unit import TestCase +pytestmark = [ + pytest.mark.skip_on_windows( + reason=( + "Special steps are required for proper SSL validation because " + "`easy_install` is too old(and deprecated)." + ) + ) +] + KNOWN_VIRTUALENV_BINARY_NAMES = ( "virtualenv", "virtualenv2", diff --git a/tests/unit/states/test_zcbuildout.py b/tests/unit/states/test_zcbuildout.py index db6013076d1..b5f919ac6b2 100644 --- a/tests/unit/states/test_zcbuildout.py +++ b/tests/unit/states/test_zcbuildout.py @@ -10,6 +10,15 @@ import salt.utils.path from tests.support.runtests import RUNTIME_VARS from tests.unit.modules.test_zcbuildout import KNOWN_VIRTUALENV_BINARY_NAMES, Base +pytestmark = [ + pytest.mark.skip_on_windows( + reason=( + "Special steps are required for proper SSL validation because " + "`easy_install` is too old(and deprecated)." 
+ ) + ) +] + @pytest.mark.skip_if_binaries_missing(*KNOWN_VIRTUALENV_BINARY_NAMES, check_all=False) @pytest.mark.requires_network From 6d918e15a33e9282f9dbb68760e1932dce7f26de Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 22 May 2023 19:24:59 +0100 Subject: [PATCH 25/25] Drop Fedora 36 which has reached EOL Signed-off-by: Pedro Algarvio --- .github/workflows/ci.yml | 20 ------- .github/workflows/nightly.yml | 20 ------- .github/workflows/release.yml | 40 ------------- .github/workflows/scheduled.yml | 20 ------- .github/workflows/staging.yml | 58 ------------------- .../test-pkg-repo-downloads.yml.jinja | 2 - .../templates/test-salt-pkg.yml.jinja | 1 - .../workflows/templates/test-salt.yml.jinja | 1 - cicd/golden-images.json | 20 ------- 9 files changed, 182 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a286234889a..11214d88d51 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1138,25 +1138,6 @@ jobs: skip-code-coverage: ${{ github.event_name == 'pull_request' }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} - fedora-36: - name: Fedora 36 - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-salt-onedir - uses: ./.github/workflows/test-action.yml - with: - distro-slug: fedora-36 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: ${{ github.event_name == 'pull_request' }} - skip-junit-reports: ${{ github.event_name == 'pull_request' }} - fedora-37: name: Fedora 37 if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -1355,7 +1336,6 @@ jobs: - debian-10 - debian-11 - debian-11-arm64 - - fedora-36 - fedora-37 - fedora-38 - opensuse-15 diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 1bdcfe65086..cf41d2e358e 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -1197,25 +1197,6 @@ jobs: skip-code-coverage: false skip-junit-reports: false - fedora-36: - name: Fedora 36 - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-salt-onedir - uses: ./.github/workflows/test-action.yml - with: - distro-slug: fedora-36 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: false - skip-junit-reports: false - fedora-37: name: Fedora 37 if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -2046,7 +2027,6 @@ jobs: - debian-10 - debian-11 - debian-11-arm64 - - fedora-36 - fedora-37 - fedora-38 - opensuse-15 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 61fc1f5783e..f121d380e0a 100644 --- a/.github/workflows/release.yml +++ 
b/.github/workflows/release.yml @@ -505,44 +505,6 @@ jobs: latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit - fedora-36-pkg-download-tests: - name: Test Fedora 36 Package Downloads - if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} - needs: - - prepare-workflow - - publish-repositories - - download-onedir-artifact - uses: ./.github/workflows/test-package-downloads-action-linux.yml - with: - distro-slug: fedora-36 - platform: linux - arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - environment: release - skip-code-coverage: true - latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" - secrets: inherit - - fedora-36-arm64-pkg-download-tests: - name: Test Fedora 36 Arm64 Package Downloads - if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} - needs: - - prepare-workflow - - publish-repositories - - download-onedir-artifact - uses: ./.github/workflows/test-package-downloads-action-linux.yml - with: - distro-slug: fedora-36-arm64 - platform: linux - arch: aarch64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - environment: release - skip-code-coverage: true - latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" - secrets: inherit - fedora-37-pkg-download-tests: name: Test Fedora 37 Package Downloads if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} @@ -818,8 +780,6 @@ jobs: - debian-10-pkg-download-tests - debian-11-pkg-download-tests - debian-11-arm64-pkg-download-tests - - fedora-36-pkg-download-tests - - fedora-36-arm64-pkg-download-tests - fedora-37-pkg-download-tests - fedora-37-arm64-pkg-download-tests - fedora-38-pkg-download-tests diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index 368089af2df..8e725fcd9b2 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -1181,25 +1181,6 @@ jobs: skip-code-coverage: false skip-junit-reports: false - fedora-36: - name: Fedora 36 - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-salt-onedir - uses: ./.github/workflows/test-action.yml - with: - distro-slug: fedora-36 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: false - skip-junit-reports: false - fedora-37: name: Fedora 37 if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -1400,7 +1381,6 @@ jobs: - debian-10 - debian-11 - debian-11-arm64 - - fedora-36 - fedora-37 - fedora-38 - opensuse-15 diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 424f47c363e..7e7a492dfd4 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -1187,25 +1187,6 @@ jobs: skip-code-coverage: true skip-junit-reports: true - fedora-36: - name: Fedora 36 - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - 
prepare-workflow - - build-salt-onedir - uses: ./.github/workflows/test-action.yml - with: - distro-slug: fedora-36 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - pull-labels: ${{ needs.prepare-workflow.outputs.pull-labels }} - skip-code-coverage: true - skip-junit-reports: true - fedora-37: name: Fedora 37 if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -2394,42 +2375,6 @@ jobs: latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" secrets: inherit - fedora-36-pkg-download-tests: - name: Test Fedora 36 Package Downloads - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - publish-repositories - uses: ./.github/workflows/test-package-downloads-action-linux.yml - with: - distro-slug: fedora-36 - platform: linux - arch: x86_64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - environment: staging - skip-code-coverage: true - latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" - secrets: inherit - - fedora-36-arm64-pkg-download-tests: - name: Test Fedora 36 Arm64 Package Downloads - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - publish-repositories - uses: ./.github/workflows/test-package-downloads-action-linux.yml - with: - distro-slug: fedora-36-arm64 - platform: linux - arch: aarch64 - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.11 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - environment: staging - skip-code-coverage: true - latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" - secrets: inherit - fedora-37-pkg-download-tests: name: Test Fedora 37 Package Downloads if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -2686,7 +2631,6 @@ jobs: - debian-10 - debian-11 - debian-11-arm64 - - fedora-36 - fedora-37 - fedora-38 - opensuse-15 @@ -2731,8 +2675,6 @@ jobs: - debian-10-pkg-download-tests - debian-11-pkg-download-tests - debian-11-arm64-pkg-download-tests - - fedora-36-pkg-download-tests - - fedora-36-arm64-pkg-download-tests - fedora-37-pkg-download-tests - fedora-37-arm64-pkg-download-tests - fedora-38-pkg-download-tests diff --git a/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja b/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja index ac826f6e9fe..8ea9bfed3b7 100644 --- a/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja +++ b/.github/workflows/templates/test-pkg-repo-downloads.yml.jinja @@ -15,8 +15,6 @@ ("debian-10", "Debian 10", "x86_64"), ("debian-11", "Debian 11", "x86_64"), ("debian-11-arm64", "Debian 11 Arm64", "aarch64"), - ("fedora-36", "Fedora 36", "x86_64"), - ("fedora-36-arm64", "Fedora 36 Arm64", "aarch64"), ("fedora-37", "Fedora 37", "x86_64"), ("fedora-37-arm64", "Fedora 37 Arm64", "aarch64"), ("fedora-38", "Fedora 38", "x86_64"), diff --git 
a/.github/workflows/templates/test-salt-pkg.yml.jinja b/.github/workflows/templates/test-salt-pkg.yml.jinja index 99fc85db4fb..3970ac3d167 100644 --- a/.github/workflows/templates/test-salt-pkg.yml.jinja +++ b/.github/workflows/templates/test-salt-pkg.yml.jinja @@ -9,7 +9,6 @@ ("debian-10", "Debian 10", "x86_64", "deb"), ("debian-11", "Debian 11", "x86_64", "deb"), ("debian-11-arm64", "Debian 11 Arm64", "aarch64", "deb"), - ("fedora-36", "Fedora 36", "x86_64", "rpm"), ("fedora-37", "Fedora 37", "x86_64", "rpm"), ("fedora-38", "Fedora 38", "x86_64", "rpm"), ("ubuntu-20.04", "Ubuntu 20.04", "x86_64", "deb"), diff --git a/.github/workflows/templates/test-salt.yml.jinja b/.github/workflows/templates/test-salt.yml.jinja index 2eb0fb5e50e..6ae89e0cb16 100644 --- a/.github/workflows/templates/test-salt.yml.jinja +++ b/.github/workflows/templates/test-salt.yml.jinja @@ -59,7 +59,6 @@ ("debian-10", "Debian 10", "x86_64"), ("debian-11", "Debian 11", "x86_64"), ("debian-11-arm64", "Debian 11 Arm64", "aarch64"), - ("fedora-36", "Fedora 36", "x86_64"), ("fedora-37", "Fedora 37", "x86_64"), ("fedora-38", "Fedora 38", "x86_64"), ("opensuse-15", "Opensuse 15", "x86_64"), diff --git a/cicd/golden-images.json b/cicd/golden-images.json index 02c3ee0977c..21c702ca732 100644 --- a/cicd/golden-images.json +++ b/cicd/golden-images.json @@ -169,26 +169,6 @@ "is_windows": "false", "ssh_username": "admin" }, - "fedora-36-arm64": { - "ami": "ami-06dbaabd32b4c2502", - "ami_description": "CI Image of Fedora 36 arm64", - "ami_name": "salt-project/ci/fedora/36/arm64/20230522.0606", - "arch": "arm64", - "cloudwatch-agent-available": "true", - "instance_type": "m6g.large", - "is_windows": "false", - "ssh_username": "fedora" - }, - "fedora-36": { - "ami": "ami-0b55732c36731876f", - "ami_description": "CI Image of Fedora 36 x86_64", - "ami_name": "salt-project/ci/fedora/36/x86_64/20230522.0606", - "arch": "x86_64", - "cloudwatch-agent-available": "true", - "instance_type": "t3a.large", - "is_windows": "false", - "ssh_username": "fedora" - }, "fedora-37-arm64": { "ami": "ami-0d71d6f2b0869842f", "ami_description": "CI Image of Fedora 37 arm64",