mirror of
https://github.com/saltstack/salt.git
synced 2025-04-17 10:10:20 +00:00
We now also download package artifacts
Signed-off-by: Pedro Algarvio <palgarvio@vmware.com>
This commit is contained in:
parent
4f30001ca5
commit
5017a8e8ab
6 changed files with 662 additions and 406 deletions
|
@ -12,7 +12,8 @@ ptscripts.register_tools_module("tools.pkg.repo.create")
|
|||
ptscripts.register_tools_module("tools.pkg.repo.publish")
|
||||
ptscripts.register_tools_module("tools.pre_commit")
|
||||
ptscripts.register_tools_module("tools.release")
|
||||
ptscripts.register_tools_module("tools.ts")
|
||||
ptscripts.register_tools_module("tools.testsuite")
|
||||
ptscripts.register_tools_module("tools.testsuite.download")
|
||||
ptscripts.register_tools_module("tools.vm")
|
||||
|
||||
for name in ("boto3", "botocore", "urllib3"):
|
||||
|
|
143
tools/testsuite/__init__.py
Normal file
143
tools/testsuite/__init__.py
Normal file
|
@ -0,0 +1,143 @@
|
|||
"""
|
||||
These commands are related to the test suite.
|
||||
"""
|
||||
# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated
|
||||
from __future__ import annotations
|
||||
|
||||
import contextlib
|
||||
import json
|
||||
import logging
|
||||
import shutil
|
||||
import sys
|
||||
import zipfile
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from ptscripts import Context, command_group
|
||||
|
||||
import tools.utils
|
||||
import tools.utils.gh
|
||||
from tools.utils import ExitCode
|
||||
|
||||
with tools.utils.REPO_ROOT.joinpath("cicd", "golden-images.json").open() as rfh:
|
||||
OS_SLUGS = sorted(json.load(rfh))
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# Define the command group
|
||||
ts = command_group(name="ts", help="Test Suite Related Commands", description=__doc__)
|
||||
|
||||
|
||||
@ts.command(
    name="setup",
    arguments={
        "run_id": {
            "help": "The workflow run ID from where to download artifacts from",
            "metavar": "RUN_ID_NUMBER",
        },
        "branch": {
            "help": "The branch from where to look for artifacts.",
            "metavar": "BRANCH_NAME",
        },
        "pr": {
            "help": "The pull-request from where to look for artifacts.",
            "metavar": "PR_NUMBER",
        },
        "nightly": {
            "help": "The nightly build branch from where to look for artifacts.",
            "metavar": "BRANCH_NAME",
        },
        "platform": {
            "help": "The onedir platform artifact to download",
            "choices": ("linux", "darwin", "windows"),
            "required": True,
        },
        "arch": {
            "help": "The onedir artifact architecture",
            "choices": ("x86_64", "aarch64", "amd64", "x86"),
        },
        "slug": {
            "help": "The OS slug",
            "required": True,
            "choices": OS_SLUGS,
        },
        "pkg": {
            "help": "Also download package test artifacts",
        },
        "repository": {
            "help": "The repository to query, e.g. saltstack/salt",
        },
    },
)
def setup_testsuite(
    ctx: Context,
    run_id: int = None,
    branch: str = None,
    nightly: str = None,
    pr: int = None,
    platform: str = None,
    arch="x86_64",
    slug: str = None,
    pkg: bool = False,
    repository: str = "saltstack/salt",
):
    """
    Setup the local test suite.

    Resolves a workflow run ID (passed explicitly or discovered from a branch,
    PR or nightly branch) and downloads the onedir and nox artifacts for it,
    plus the package test artifacts when ``--pkg`` is passed.
    """
    if TYPE_CHECKING:
        assert platform is not None
        assert slug is not None

    # Exactly one selector for the workflow run must be provided.
    mutually_exclusive_flags = [
        run_id is not None,
        branch is not None,
        pr is not None,
        nightly is not None,
    ]
    if not any(mutually_exclusive_flags):
        ctx.error("Pass one of '--run-id', '--branch', '--pr' or '--nightly'")
        ctx.exit(1)
    if sum(mutually_exclusive_flags) > 1:
        ctx.error("Pass only one of '--run-id', '--branch', '--pr' or '--nightly'")
        ctx.exit(1)

    # ARM slugs always imply an aarch64 onedir artifact.
    if "arm64" in slug:
        arch = "aarch64"

    if run_id is None:
        # BUG FIX: forward 'repository' so run discovery queries the same repo
        # the artifacts are downloaded from; it previously always queried the
        # default repository even when '--repository' was passed.
        run_id = tools.utils.gh.discover_run_id(
            ctx, branch=branch, nightly=nightly, pr=pr, repository=repository
        )

    if run_id is None:
        ctx.error("Unable to find the appropriate workflow run ID")
        ctx.exit(1)

    exitcode = tools.utils.gh.download_onedir_artifact(
        ctx, run_id=run_id, platform=platform, arch=arch, repository=repository
    )
    # SOFT_FAIL means the artifact is already in place; keep going.
    if exitcode and exitcode != ExitCode.SOFT_FAIL:
        ctx.exit(exitcode)
    exitcode = tools.utils.gh.download_nox_artifact(
        ctx, run_id=run_id, slug=slug, nox_env="ci-test-onedir", repository=repository
    )
    if exitcode and exitcode != ExitCode.SOFT_FAIL:
        ctx.exit(exitcode)
    if pkg:
        exitcode = tools.utils.gh.download_nox_artifact(
            ctx,
            run_id=run_id,
            slug=slug,
            nox_env=f"test-pkgs-onedir-{arch}",
            repository=repository,
        )
        if exitcode and exitcode != ExitCode.SOFT_FAIL:
            ctx.exit(exitcode)
        exitcode = tools.utils.gh.download_pkgs_artifact(
            ctx,
            run_id=run_id,
            slug=slug,
            arch=arch,
            repository=repository,
        )
        if exitcode and exitcode != ExitCode.SOFT_FAIL:
            ctx.exit(exitcode)
|
149
tools/testsuite/download.py
Normal file
149
tools/testsuite/download.py
Normal file
|
@ -0,0 +1,149 @@
|
|||
"""
|
||||
These commands are related to downloading test suite CI artifacts.
|
||||
"""
|
||||
# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from ptscripts import Context, command_group
|
||||
|
||||
import tools.utils
|
||||
import tools.utils.gh
|
||||
|
||||
with tools.utils.REPO_ROOT.joinpath("cicd", "golden-images.json").open() as rfh:
|
||||
OS_SLUGS = sorted(json.load(rfh))
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# Define the command group
|
||||
download = command_group(
|
||||
name="download",
|
||||
help="Test Suite CI Artifacts Related Commands",
|
||||
description=__doc__,
|
||||
parent="ts",
|
||||
)
|
||||
|
||||
|
||||
@download.command(
    name="onedir-artifact",
    arguments={
        "run_id": {
            "help": "The workflow run ID from where to download artifacts from",
            "required": True,
        },
        "platform": {
            "help": "The onedir platform artifact to download",
            "choices": ("linux", "darwin", "windows"),
            "required": True,
        },
        "arch": {
            "help": "The onedir artifact architecture",
            "choices": ("x86_64", "aarch64", "amd64", "x86"),
        },
        "repository": {
            "help": "The repository to query, e.g. saltstack/salt",
        },
    },
)
def download_onedir_artifact(
    ctx: Context,
    run_id: int = None,
    platform: str = None,
    arch: str = "x86_64",
    repository: str = "saltstack/salt",
):
    """
    Download CI onedir artifacts.
    """
    if TYPE_CHECKING:
        assert run_id is not None
        assert platform is not None

    # Thin CLI wrapper: delegate all the work to the shared GitHub helper and
    # exit with whatever code it reports.
    exitcode = tools.utils.gh.download_onedir_artifact(
        ctx=ctx,
        run_id=run_id,
        platform=platform,
        arch=arch,
        repository=repository,
    )
    ctx.exit(exitcode)
|
||||
|
||||
|
||||
@download.command(
    name="nox-artifact",
    arguments={
        "run_id": {
            "help": "The workflow run ID from where to download artifacts from",
            "required": True,
        },
        "slug": {
            "help": "The OS slug",
            "required": True,
            "choices": OS_SLUGS,
        },
        "nox_env": {
            "help": "The nox environment name.",
        },
        "repository": {
            "help": "The repository to query, e.g. saltstack/salt",
        },
    },
)
def download_nox_artifact(
    ctx: Context,
    run_id: int = None,
    slug: str = None,
    nox_env: str = "ci-test-onedir",
    repository: str = "saltstack/salt",
):
    """
    Download CI nox artifacts.
    """
    if TYPE_CHECKING:
        assert run_id is not None
        assert slug is not None

    # ARM artifacts are uploaded under the base slug with an aarch64-suffixed
    # nox environment name; translate before delegating.
    if slug.endswith("arm64"):
        slug = slug.replace("-arm64", "")
        nox_env = f"{nox_env}-aarch64"

    exitcode = tools.utils.gh.download_nox_artifact(
        ctx=ctx,
        run_id=run_id,
        slug=slug,
        nox_env=nox_env,
        repository=repository,
    )
    ctx.exit(exitcode)
|
||||
|
||||
|
||||
@download.command(
    name="pkgs-artifact",
    arguments={
        "run_id": {
            "help": "The workflow run ID from where to download artifacts from",
            "required": True,
        },
        "slug": {
            "help": "The OS slug",
            "required": True,
            "choices": OS_SLUGS,
        },
        "arch": {
            "help": "The onedir artifact architecture",
            "choices": ("x86_64", "aarch64", "amd64", "x86"),
        },
        "repository": {
            "help": "The repository to query, e.g. saltstack/salt",
        },
    },
)
def download_pkgs_artifact(
    ctx: Context,
    run_id: int = None,
    slug: str = None,
    arch: str = "x86_64",
    repository: str = "saltstack/salt",
):
    """
    Download CI built packages artifacts.
    """
    if TYPE_CHECKING:
        assert run_id is not None
        assert slug is not None

    # GENERALIZATION: expose the 'arch' selection that the underlying helper
    # already supports (it was previously stuck at its 'x86_64' default, making
    # non-x86_64 package artifacts unreachable from this command). The default
    # keeps existing invocations behaving exactly as before.
    exitcode = tools.utils.gh.download_pkgs_artifact(
        ctx=ctx, run_id=run_id, slug=slug, arch=arch, repository=repository
    )
    ctx.exit(exitcode)
|
327
tools/ts.py
327
tools/ts.py
|
@ -1,327 +0,0 @@
|
|||
"""
|
||||
These commands are related to the test suite.
|
||||
"""
|
||||
# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
import shutil
|
||||
import sys
|
||||
import zipfile
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from ptscripts import Context, command_group
|
||||
|
||||
import tools.utils
|
||||
|
||||
with tools.utils.REPO_ROOT.joinpath("cicd", "golden-images.json").open() as rfh:
|
||||
OS_SLUGS = sorted(json.load(rfh))
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
# Define the command group
|
||||
ts = command_group(name="ts", help="Test Suite Related Commands", description=__doc__)
|
||||
|
||||
|
||||
@ts.command(
    name="download-onedir-artifact",
    arguments={
        "run_id": {
            "help": "The workflow run ID from where to download artifacts from",
            "required": True,
        },
        "platform": {
            "help": "The onedir platform artifact to download",
            "choices": ("linux", "darwin", "windows"),
            "required": True,
        },
        "arch": {
            "help": "The onedir artifact architecture",
            "choices": ("x86_64", "aarch64", "amd64", "x86"),
        },
        "repository": {
            "help": "The repository to query, e.g. saltstack/salt",
        },
    },
)
def download_onedir_artifact(
    ctx: Context,
    run_id: int = None,
    platform: str = None,
    arch: str = "x86_64",
    repository: str = "saltstack/salt",
):
    """
    Download CI onedir artifacts.
    """
    if TYPE_CHECKING:
        assert run_id is not None
        assert platform is not None

    # Normalize the requested architecture for the target platform.
    if platform == "windows":
        if arch in ("x64", "x86_64"):
            ctx.info(f"Turning passed arch {arch!r} into 'amd64'")
            arch = "amd64"
        if arch not in ("amd64", "x86"):
            ctx.error(
                "The allowed values for '--arch' on Windows are 'amd64' and 'x86'"
            )
            ctx.exit(1)
    else:
        if arch == "arm64":
            ctx.info(f"Turning passed arch {arch!r} into 'aarch64'")
            arch = "aarch64"
        elif arch == "x64":
            ctx.info(f"Turning passed arch {arch!r} into 'x86_64'")
            arch = "x86_64"
        if arch not in ("x86_64", "aarch64"):
            ctx.error(
                f"The allowed values for '--arch' on {platform.title()} are 'x86_64', 'aarch64' or 'arm64'"
            )
            ctx.exit(1)

    dest_dir = tools.utils.REPO_ROOT / "artifacts"
    dest_dir.mkdir(exist_ok=True)
    if dest_dir.joinpath("salt").exists():
        ctx.error("The 'artifacts/salt' directory already exists ...")
        ctx.exit(1)

    # The artifact is zipped on Windows hosts, xz-tarred everywhere else.
    suffix = ".zip" if sys.platform.startswith("win") else ".tar.xz"
    found_name = tools.utils.download_artifact(
        ctx,
        dest=dest_dir,
        run_id=run_id,
        artifact_name=f"salt-*-onedir-{platform}-{arch}" + suffix,
        repository=repository,
    )
    artifact_file = dest_dir / found_name

    # Verify the download against the published SHA512 checksum.
    expected_sha512 = dest_dir.joinpath(f"{found_name}.SHA512").read_text().strip()
    actual_sha512 = tools.utils.get_file_checksum(artifact_file, "sha512")
    if expected_sha512 != actual_sha512:
        ctx.error("The 'sha512' checksum does not match")
        ctx.error(f"{actual_sha512!r} != {expected_sha512!r}")
        ctx.exit(1)

    if artifact_file.suffix == ".zip":
        with zipfile.ZipFile(artifact_file) as zfile:
            zfile.extractall(path=dest_dir)
    else:
        ctx.run("tar", "xf", found_name, cwd=dest_dir)
|
||||
|
||||
|
||||
@ts.command(
    name="download-nox-artifact",
    arguments={
        "run_id": {
            "help": "The workflow run ID from where to download artifacts from",
            "required": True,
        },
        "slug": {
            "help": "The OS slug",
            "required": True,
            "choices": OS_SLUGS,
        },
        "nox_env": {
            "help": "The nox environment name.",
        },
        "repository": {
            "help": "The repository to query, e.g. saltstack/salt",
        },
    },
)
def download_nox_artifact(
    ctx: Context,
    run_id: int = None,
    slug: str = None,
    nox_env: str = "ci-test-onedir",
    repository: str = "saltstack/salt",
):
    """
    Download CI nox artifacts.
    """
    if TYPE_CHECKING:
        assert run_id is not None
        assert slug is not None

    # Refuse to clobber an existing decompressed nox environment.
    env_dir = tools.utils.REPO_ROOT / ".nox" / nox_env
    if env_dir.exists():
        ctx.error("The '.nox/' directory already exists ...")
        ctx.exit(1)

    found_name = tools.utils.download_artifact(
        ctx,
        dest=tools.utils.REPO_ROOT,
        run_id=run_id,
        artifact_name=f"nox-{slug}-{nox_env}",
        repository=repository,
    )
    nox = shutil.which("nox")
    if nox is None:
        ctx.error("Could not find the 'nox' binary in $PATH")
        ctx.exit(1)
    ctx.run(nox, "-e", "decompress-dependencies", "--", slug)
|
||||
|
||||
|
||||
@ts.command(
    name="setup",
    arguments={
        "run_id": {
            "help": "The workflow run ID from where to download artifacts from",
            "metavar": "RUN_ID_NUMBER",
        },
        "branch": {
            "help": "The branch from where to look for artifacts.",
            "metavar": "BRANCH_NAME",
        },
        "pr": {
            "help": "The pull-request from where to look for artifacts.",
            "metavar": "PR_NUMBER",
        },
        "nightly": {
            "help": "The nightly build branch from where to look for artifacts.",
            "metavar": "BRANCH_NAME",
        },
        "platform": {
            "help": "The onedir platform artifact to download",
            "choices": ("linux", "darwin", "windows"),
            "required": True,
        },
        "arch": {
            "help": "The onedir artifact architecture",
            "choices": ("x86_64", "aarch64", "amd64", "x86"),
        },
        "slug": {
            "help": "The OS slug",
            "required": True,
            "choices": OS_SLUGS,
        },
        "repository": {
            "help": "The repository to query, e.g. saltstack/salt",
        },
        "nox_env": {
            "help": "The nox environment name.",
        },
    },
)
def setup_testsuite(
    ctx: Context,
    run_id: int = None,
    branch: str = None,
    nightly: str = None,
    pr: int = None,
    platform: str = None,
    arch="x86_64",
    slug: str = None,
    repository: str = "saltstack/salt",
    nox_env: str = "ci-test-onedir",
):
    """
    Setup the local test suite.
    """
    if TYPE_CHECKING:
        assert platform is not None
        assert slug is not None

    # Exactly one of these selectors may be used to pick the workflow run.
    selectors = [
        run_id is not None,
        branch is not None,
        pr is not None,
        nightly is not None,
    ]
    if not any(selectors):
        ctx.error("Pass one of '--run-id', '--branch', '--pr' or '--nightly'")
        ctx.exit(1)
    if sum(selectors) > 1:
        ctx.error("Pass only one of '--run-id', '--branch', '--pr' or '--nightly'")
        ctx.exit(1)

    if run_id is None:
        run_id = _discover_run_id(ctx, branch=branch, nightly=nightly, pr=pr)
    if run_id is None:
        ctx.error("Unable to find the appropriate workflow run ID")
        ctx.exit(1)

    download_onedir_artifact(
        ctx, run_id=run_id, platform=platform, arch=arch, repository=repository
    )
    download_nox_artifact(
        ctx, run_id=run_id, slug=slug, nox_env=nox_env, repository=repository
    )
|
||||
|
||||
|
||||
def _discover_run_id(
    ctx: Context,
    branch: str = None,
    nightly: str = None,
    pr: int = None,
    repository: str = "saltstack/salt",
):
    """
    Discover the most recent workflow run ID for a branch, PR or nightly branch.

    Returns the run ID, or ``None`` when no matching workflow run exists.
    """
    run_id: int | None = None
    with ctx.web as web:
        headers = {
            "Accept": "application/vnd.github+json",
        }
        # Authentication is optional here, but raises the API rate limits.
        github_token = tools.utils.get_github_token(ctx)
        if github_token is not None:
            headers["Authorization"] = f"Bearer {github_token}"
        web.headers.update(headers)

        if branch is not None:
            event = "push"
            ret = web.get(
                f"https://api.github.com/repos/{repository}/git/ref/heads/{branch}"
            )
            data = ret.json()
            if "message" in data:
                ctx.error(f"Could not find HEAD commit for branch {branch}")
                ctx.exit(1)
            head_sha = data["object"]["sha"]
        elif pr is not None:
            event = "pull_request"
            ret = web.get(f"https://api.github.com/repos/{repository}/pulls/{pr}")
            data = ret.json()
            head_sha = data["head"]["sha"]
        else:
            # Nightly master builds run on a cron schedule; nightlies of other
            # branches are triggered manually via workflow_dispatch. These two
            # branches were previously duplicated verbatim — only the event
            # name differed.
            event = "schedule" if nightly == "master" else "workflow_dispatch"
            ret = web.get(
                f"https://api.github.com/repos/{repository}/git/ref/heads/{nightly}"
            )
            data = ret.json()
            if "message" in data:
                ctx.error(f"Could not find HEAD commit for branch {nightly}")
                ctx.exit(1)
            head_sha = data["object"]["sha"]

        page = 0
        while True:
            # BUG FIX: stop paginating once a run has been found; previously
            # the loop kept requesting further pages after a match.
            if run_id is not None:
                break
            page += 1
            ret = web.get(
                f"https://api.github.com/repos/{repository}/actions/runs?per_page=100&page={page}&event={event}&head_sha={head_sha}"
            )
            data = ret.json()
            if not data["workflow_runs"]:
                break
            # Runs are returned most recent first; take the first one.
            run_id = data["workflow_runs"][0]["id"]

    if run_id:
        ctx.info(f"Discovered run_id: {run_id}")
    return run_id
|
|
@ -11,6 +11,7 @@ import sys
|
|||
import tempfile
|
||||
import zipfile
|
||||
from datetime import datetime
|
||||
from enum import IntEnum
|
||||
from typing import Any
|
||||
|
||||
import packaging.version
|
||||
|
@ -33,6 +34,12 @@ RELEASE_BUCKET_NAME = f"salt-project-{SPB_ENVIRONMENT}-salt-artifacts-release"
|
|||
BACKUP_BUCKET_NAME = f"salt-project-{SPB_ENVIRONMENT}-salt-artifacts-backup"
|
||||
|
||||
|
||||
class ExitCode(IntEnum):
    """Well-known process exit codes shared by the tools commands.

    ``SOFT_FAIL`` signals a recoverable condition that callers may choose to
    treat as success.
    """

    OK = 0
    FAIL = 1
    SOFT_FAIL = 2
|
||||
|
||||
|
||||
def create_progress_bar(file_progress: bool = False, **kwargs):
|
||||
if file_progress:
|
||||
return Progress(
|
||||
|
@ -200,84 +207,7 @@ def get_file_checksum(fpath: pathlib.Path, hash_name: str) -> str:
|
|||
return hexdigest
|
||||
|
||||
|
||||
def get_github_token(ctx: Context) -> str | None:
    """
    Get the GITHUB_TOKEN to be able to authenticate to the API.

    Prefers the ``GITHUB_TOKEN`` environment variable; falls back to asking the
    ``gh`` CLI for its stored token. Returns ``None`` when neither is available.
    """
    github_token = os.environ.get("GITHUB_TOKEN")
    if github_token is None:
        gh = shutil.which("gh")
        # BUG FIX: 'gh' may not be installed, in which case shutil.which()
        # returns None and ctx.run(None, ...) would blow up.
        if gh is not None:
            ret = ctx.run(gh, "auth", "token", check=False, capture=True)
            if ret.returncode == 0:
                github_token = ret.stdout.decode().strip() or None
    return github_token
|
||||
|
||||
|
||||
def download_artifact(
    ctx: Context,
    dest: pathlib.Path,
    run_id: int,
    repository: str = "saltstack/salt",
    artifact_name: str | None = None,
):
    """
    Download CI artifacts.

    Scans the workflow run's artifacts for one matching *artifact_name* (an
    fnmatch pattern), downloads it and extracts it into *dest*. Exits the
    process when authentication is missing or no artifact matches.
    """
    github_token = get_github_token(ctx)
    if github_token is None:
        ctx.error("Downloading artifacts requires being authenticated to GitHub.")
        ctx.info(
            "Either set 'GITHUB_TOKEN' to a valid token, or configure the 'gh' tool such that "
            "'gh auth token' returns a token."
        )
        ctx.exit(1)
    with ctx.web as web:
        headers = {
            "Accept": "application/vnd.github+json",
            "Authorization": f"Bearer {github_token}",
        }
        web.headers.update(headers)
        found_artifact = False
        page = 0
        # Walk the paginated artifact listing until a match is found or the
        # listing is exhausted.
        while not found_artifact:
            page += 1
            ret = web.get(
                f"https://api.github.com/repos/{repository}/actions/runs/{run_id}/artifacts?per_page=100&page={page}"
            )
            if ret.status_code != 200:
                ctx.error(
                    f"Failed to get the artifacts for the run ID {run_id} for repository {repository!r}: {ret.reason}"
                )
                ctx.exit(1)
            data = ret.json()
            if not data["artifacts"]:
                break
            for artifact in data["artifacts"]:
                if not fnmatch.fnmatch(artifact["name"], artifact_name):
                    continue
                found_artifact = artifact["name"]
                download_url = artifact["archive_download_url"]
                ctx.info(f"Downloading {download_url}")
                downloaded_artifact = _download_file(
                    ctx,
                    download_url,
                    pathlib.Path(tempfile.gettempdir()) / f"{artifact['name']}.zip",
                    headers=headers,
                )
                ctx.info("Downloaded", downloaded_artifact)
                with zipfile.ZipFile(downloaded_artifact) as zfile:
                    zfile.extractall(path=dest)
                break
        if found_artifact is False:
            ctx.error(f"Failed to find an artifact by the name of {artifact_name!r}")
            ctx.exit(1)

    return found_artifact
|
||||
|
||||
|
||||
def _download_file(
|
||||
def download_file(
|
||||
ctx: Context,
|
||||
url: str,
|
||||
dest: pathlib.Path,
|
||||
|
|
360
tools/utils/gh.py
Normal file
360
tools/utils/gh.py
Normal file
|
@ -0,0 +1,360 @@
|
|||
# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated,bad-whitespace
|
||||
from __future__ import annotations
|
||||
|
||||
import fnmatch
|
||||
import os
|
||||
import pathlib
|
||||
import shutil
|
||||
import sys
|
||||
import tempfile
|
||||
import zipfile
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from ptscripts import Context
|
||||
|
||||
import tools.utils
|
||||
from tools.utils import ExitCode
|
||||
|
||||
|
||||
def download_onedir_artifact(
    ctx: Context,
    run_id: int = None,
    platform: str = None,
    arch: str = "x86_64",
    repository: str = "saltstack/salt",
) -> int:
    """
    Download CI onedir artifacts.
    """
    if TYPE_CHECKING:
        assert run_id is not None
        assert platform is not None

    # Normalize the requested architecture for the target platform.
    if platform == "windows":
        if arch in ("x64", "x86_64"):
            ctx.info(f"Turning passed arch {arch!r} into 'amd64'")
            arch = "amd64"
        if arch not in ("amd64", "x86"):
            ctx.error(
                "The allowed values for '--arch' on Windows are 'amd64' and 'x86'"
            )
            return ExitCode.FAIL
    else:
        if arch == "arm64":
            ctx.info(f"Turning passed arch {arch!r} into 'aarch64'")
            arch = "aarch64"
        elif arch == "x64":
            ctx.info(f"Turning passed arch {arch!r} into 'x86_64'")
            arch = "x86_64"
        if arch not in ("x86_64", "aarch64"):
            ctx.error(
                f"The allowed values for '--arch' on {platform.title()} are 'x86_64', 'aarch64' or 'arm64'"
            )
            return ExitCode.FAIL

    dest_dir = tools.utils.REPO_ROOT / "artifacts"
    dest_dir.mkdir(exist_ok=True)
    if dest_dir.joinpath("salt").exists():
        # An existing extraction is a soft failure so callers may skip it.
        ctx.warn(
            "The 'artifacts/salt' directory already exists ... Stopped processing."
        )
        return ExitCode.SOFT_FAIL

    # The artifact is zipped on Windows hosts, xz-tarred everywhere else.
    suffix = ".zip" if sys.platform.startswith("win") else ".tar.xz"
    found_name = download_artifact(
        ctx,
        dest=dest_dir,
        run_id=run_id,
        artifact_name=f"salt-*-onedir-{platform}-{arch}" + suffix,
        repository=repository,
    )
    if found_name is None:
        return ExitCode.FAIL

    artifact_file = dest_dir / found_name
    # Verify the download against the published SHA512 checksum.
    expected_sha512 = dest_dir.joinpath(f"{found_name}.SHA512").read_text().strip()
    actual_sha512 = tools.utils.get_file_checksum(artifact_file, "sha512")
    if expected_sha512 != actual_sha512:
        ctx.error("The 'sha512' checksum does not match")
        ctx.error(f"{actual_sha512!r} != {expected_sha512!r}")
        return ExitCode.FAIL

    if artifact_file.suffix == ".zip":
        with zipfile.ZipFile(artifact_file) as zfile:
            zfile.extractall(path=dest_dir)
    else:
        ctx.run("tar", "xf", found_name, cwd=dest_dir)

    return ExitCode.OK
|
||||
|
||||
|
||||
def download_nox_artifact(
    ctx: Context,
    run_id: int = None,
    slug: str = None,
    nox_env: str = "ci-test-onedir",
    repository: str = "saltstack/salt",
) -> ExitCode:
    """
    Download CI nox artifacts.

    Downloads the ``nox-<slug>-<nox_env>`` artifact into the repository root
    and decompresses its dependencies with nox. Returns ``ExitCode.OK`` on
    success, ``ExitCode.SOFT_FAIL`` when the environment is already in place,
    and ``ExitCode.FAIL`` otherwise.
    """
    if TYPE_CHECKING:
        assert run_id is not None
        assert slug is not None

    artifacts_path = tools.utils.REPO_ROOT / ".nox" / nox_env
    if artifacts_path.exists():
        ctx.error(
            f"The '.nox/{nox_env}' directory already exists ... Stopped processing."
        )
        return ExitCode.SOFT_FAIL
    artifact_name = f"nox-{slug}-{nox_env}"
    found_artifact_name = download_artifact(
        ctx,
        dest=tools.utils.REPO_ROOT,
        run_id=run_id,
        artifact_name=artifact_name,
        repository=repository,
    )
    # BUG FIX: bail out when the artifact could not be found/downloaded instead
    # of invoking nox against a missing archive. The onedir and pkgs helpers
    # already perform this check.
    if found_artifact_name is None:
        return ExitCode.FAIL
    nox = shutil.which("nox")
    if nox is None:
        ctx.error("Could not find the 'nox' binary in $PATH")
        return ExitCode.FAIL
    ret = ctx.run(nox, "-e", "decompress-dependencies", "--", slug, check=False)
    if ret.returncode:
        ctx.error("Failed to decompress the nox dependencies")
        return ExitCode.FAIL
    return ExitCode.OK
|
||||
|
||||
|
||||
def download_pkgs_artifact(
    ctx: Context,
    run_id: int = None,
    slug: str = None,
    arch: str = "x86_64",
    repository: str = "saltstack/salt",
) -> ExitCode:
    """
    Download CI built packages artifacts.

    Resolves the package artifact name for the given OS slug and architecture
    (MSI on Windows, deb/rpm on Linux) and downloads it into
    ``pkg/artifacts``. Returns ``ExitCode.OK`` on success, ``ExitCode.FAIL``
    otherwise.
    """
    # NOTE: the docstring previously said "Download CI nox artifacts." — a
    # copy/paste leftover from download_nox_artifact.
    if TYPE_CHECKING:
        assert run_id is not None
        assert slug is not None

    artifact_name = "salt-*-"
    if "windows" in slug:
        if arch in ("x64", "x86_64"):
            ctx.info(f"Turning passed arch {arch!r} into 'amd64'")
            arch = "amd64"
        if arch not in ("amd64", "x86"):
            ctx.error(
                "The allowed values for '--arch' on Windows are 'amd64' and 'x86'"
            )
            return ExitCode.FAIL
        artifact_name += f"{arch}-MSI"
    else:
        if arch == "arm64":
            ctx.info(f"Turning passed arch {arch!r} into 'aarch64'")
            arch = "aarch64"
        elif arch == "x64":
            ctx.info(f"Turning passed arch {arch!r} into 'x86_64'")
            arch = "x86_64"
        if arch not in ("x86_64", "aarch64"):
            ctx.error(
                f"The allowed values for '--arch' for {slug} are 'x86_64', 'aarch64' or 'arm64'"
            )
            return ExitCode.FAIL

        # Map the distribution family to its package format.
        if slug.startswith(("debian", "ubuntu")):
            artifact_name += f"{arch}-deb"
        elif slug.startswith(
            ("almalinux", "amazonlinux", "centos", "fedora", "opensuse", "photonos")
        ):
            artifact_name += f"{arch}-rpm"
        else:
            ctx.error(f"We do not build packages for {slug}")
            return ExitCode.FAIL

    artifacts_path = tools.utils.REPO_ROOT / "pkg" / "artifacts"
    artifacts_path.mkdir(exist_ok=True)

    found_artifact_name = download_artifact(
        ctx,
        dest=artifacts_path,
        run_id=run_id,
        artifact_name=artifact_name,
        repository=repository,
    )
    if found_artifact_name is None:
        return ExitCode.FAIL
    return ExitCode.OK
|
||||
|
||||
|
||||
def get_github_token(ctx: Context) -> str | None:
    """
    Get the GITHUB_TOKEN to be able to authenticate to the API.

    Prefers the ``GITHUB_TOKEN`` environment variable; falls back to asking the
    ``gh`` CLI for its stored token. Returns ``None`` when neither is available.
    """
    github_token = os.environ.get("GITHUB_TOKEN")
    if github_token is None:
        gh = shutil.which("gh")
        # BUG FIX: 'gh' may not be installed, in which case shutil.which()
        # returns None and ctx.run(None, ...) would blow up.
        if gh is not None:
            ret = ctx.run(gh, "auth", "token", check=False, capture=True)
            if ret.returncode == 0:
                github_token = ret.stdout.decode().strip() or None
    return github_token
|
||||
|
||||
|
||||
def download_artifact(
    ctx: Context,
    dest: pathlib.Path,
    run_id: int,
    repository: str = "saltstack/salt",
    artifact_name: str | None = None,
) -> str | None:
    """
    Download CI artifacts.

    Scans the workflow run's artifacts for one matching *artifact_name* (an
    fnmatch pattern), downloads it and extracts it into *dest*.

    Returns the matched artifact name, or ``None`` when authentication is
    missing or no artifact matched.
    """
    found_artifact: str | None = None
    github_token = get_github_token(ctx)
    if github_token is None:
        ctx.error("Downloading artifacts requires being authenticated to GitHub.")
        ctx.info(
            "Either set 'GITHUB_TOKEN' to a valid token, or configure the 'gh' tool such that "
            "'gh auth token' returns a token."
        )
        return found_artifact
    with ctx.web as web:
        headers = {
            "Accept": "application/vnd.github+json",
            "Authorization": f"Bearer {github_token}",
        }
        web.headers.update(headers)
        page = 0
        while True:
            if found_artifact is not None:
                break
            page += 1
            params = {
                "per_page": 100,
                # BUG FIX: request the *current* page. This was hard-coded to 1,
                # so the loop kept re-fetching the first page forever whenever
                # the artifact was not among the first 100 results.
                "page": page,
            }
            ret = web.get(
                f"https://api.github.com/repos/{repository}/actions/runs/{run_id}/artifacts",
                params=params,
            )
            if ret.status_code != 200:
                ctx.error(
                    f"Failed to get the artifacts for the run ID {run_id} for repository {repository!r}: {ret.reason}"
                )
                ctx.exit(1)
            data = ret.json()
            if not data["artifacts"]:
                break
            for artifact in data["artifacts"]:
                if fnmatch.fnmatch(artifact["name"], artifact_name):
                    found_artifact = artifact["name"]
                    tempdir_path = pathlib.Path(tempfile.gettempdir())
                    download_url = artifact["archive_download_url"]
                    ctx.info(f"Downloading {download_url}")
                    downloaded_artifact = tools.utils.download_file(
                        ctx,
                        download_url,
                        tempdir_path / f"{artifact['name']}.zip",
                        headers=headers,
                    )
                    ctx.info("Downloaded", downloaded_artifact)
                    with zipfile.ZipFile(downloaded_artifact) as zfile:
                        zfile.extractall(path=dest)
                    break
    if found_artifact is None:
        ctx.error(f"Failed to find an artifact by the name of {artifact_name!r}")
    return found_artifact
|
||||
|
||||
|
||||
def discover_run_id(
    ctx: Context,
    branch: str = None,
    nightly: str = None,
    pr: int = None,
    repository: str = "saltstack/salt",
) -> int | None:
    """
    Discover the newest completed workflow run ID for a branch, PR or nightly.

    Returns the run ID, or ``None`` when no matching workflow run is found.
    """
    ctx.info(f"Discovering the run_id({branch=}, {nightly=}, {pr=}, {repository=})")
    run_id: int | None = None
    with ctx.web as web:
        headers = {
            "Accept": "application/vnd.github+json",
        }
        # Authentication is optional here, but raises the API rate limits.
        github_token = get_github_token(ctx)
        if github_token is not None:
            headers["Authorization"] = f"Bearer {github_token}"
        web.headers.update(headers)

        params = {
            "per_page": 100,
            "status": "completed",
        }
        if branch is not None:
            ret = web.get(
                f"https://api.github.com/repos/{repository}/git/ref/heads/{branch}"
            )
            data = ret.json()
            if "message" in data:
                ctx.error(f"Could not find HEAD commit for branch {branch}")
                ctx.exit(1)
            params["event"] = "push"
            head_sha = data["object"]["sha"]
        elif pr is not None:
            ret = web.get(f"https://api.github.com/repos/{repository}/pulls/{pr}")
            data = ret.json()
            params["event"] = "pull_request"
            head_sha = data["head"]["sha"]
        else:
            # Nightly master builds run on a cron schedule; nightlies of other
            # branches are triggered manually via workflow_dispatch. These two
            # branches were previously duplicated verbatim — only the event
            # name differed.
            ret = web.get(
                f"https://api.github.com/repos/{repository}/git/ref/heads/{nightly}"
            )
            data = ret.json()
            if "message" in data:
                ctx.error(f"Could not find HEAD commit for branch {nightly}")
                ctx.exit(1)
            params["event"] = "schedule" if nightly == "master" else "workflow_dispatch"
            head_sha = data["object"]["sha"]

        params["head_sha"] = head_sha
        ctx.info(f"Searching for workflow runs for HEAD SHA: {head_sha}")
        page = 0
        while run_id is None:
            page += 1
            params["page"] = page
            ret = web.get(
                f"https://api.github.com/repos/{repository}/actions/runs", params=params
            )
            data = ret.json()
            ctx.info(
                f"Discovered {data['total_count']} workflow runs for HEAD SHA {head_sha}"
            )
            if not data["workflow_runs"]:
                break
            # Runs are returned most recent first; take the first one.
            run_id = data["workflow_runs"][0]["id"]

    if run_id:
        ctx.info(f"Discovered run_id: {run_id}")
    return run_id
|
Loading…
Add table
Reference in a new issue