Mirror of https://github.com/saltstack/salt.git (synced 2025-04-16 09:40:20 +00:00)
Add tools/utils.py and move some common functionality there.

Signed-off-by: Pedro Algarvio <palgarvio@vmware.com>

parent f57bf390a3
commit 4c8cc60629

8 changed files with 151 additions and 132 deletions
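Every module touched below follows the same pattern: the per-module REPO_ROOT constant (and, in tools/pkgrepo.py, the GPG and progress-bar helpers) is dropped, tools.utils is imported, and the shared constants and helpers are referenced through it. The following minimal sketch is not part of the commit and uses a hypothetical command group name purely to illustrate the new import style:

    import logging

    from ptscripts import command_group

    import tools.utils

    log = logging.getLogger(__name__)

    # Hypothetical command group, shown only to illustrate the shared-module imports.
    example = command_group(name="example", help="Example Commands", description=__doc__)

    # Paths and bucket names now come from tools.utils instead of per-file constants.
    version_file = tools.utils.REPO_ROOT / "salt" / "_version.txt"
    staging_bucket = tools.utils.STAGING_BUCKET_NAME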
tools/changelog.py

@@ -7,16 +7,15 @@ from __future__ import annotations
 import datetime
 import logging
 import os
 import pathlib
 import subprocess
 import sys
 import textwrap

 from ptscripts import Context, command_group

-log = logging.getLogger(__name__)
+import tools.utils

-REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
+log = logging.getLogger(__name__)

 # Define the command group
 changelog = command_group(
@@ -25,7 +24,7 @@ changelog = command_group(
     description=__doc__,
     venv_config={
         "requirements_files": [
-            REPO_ROOT
+            tools.utils.REPO_ROOT
             / "requirements"
             / "static"
             / "ci"
tools/ci.py (16 lines changed)

@@ -13,9 +13,9 @@ from typing import TYPE_CHECKING

 from ptscripts import Context, command_group

-log = logging.getLogger(__name__)
+import tools.utils

-REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
+log = logging.getLogger(__name__)

 # Define the command group
 ci = command_group(name="ci", help="CI Related Commands", description=__doc__)
@@ -107,7 +107,11 @@ def process_changed_files(ctx: Context, event_name: str, changed_files: pathlib.
                 if not entry:
                     loaded_data.remove(entry)
                 try:
-                    entry = REPO_ROOT.joinpath(entry).resolve().relative_to(REPO_ROOT)
+                    entry = (
+                        tools.utils.REPO_ROOT.joinpath(entry)
+                        .resolve()
+                        .relative_to(tools.utils.REPO_ROOT)
+                    )
                 except ValueError:
                     ctx.error(
                         f"While processing the changed files key {key!r}, the "
@@ -417,10 +421,12 @@ def define_testrun(ctx: Context, event_name: str, changed_files: pathlib.Path):
             wfh.write("</pre>\n</details>\n")
         testrun = {"type": "full"}
     else:
-        testrun_changed_files_path = REPO_ROOT / "testrun-changed-files.txt"
+        testrun_changed_files_path = tools.utils.REPO_ROOT / "testrun-changed-files.txt"
         testrun = {
             "type": "changed",
-            "from-filenames": str(testrun_changed_files_path.relative_to(REPO_ROOT)),
+            "from-filenames": str(
+                testrun_changed_files_path.relative_to(tools.utils.REPO_ROOT)
+            ),
         }
         ctx.info(f"Writing {testrun_changed_files_path.name} ...")
         selected_changed_files = []
tools/docs.py

@@ -12,9 +12,9 @@ import sys

 from ptscripts import Context, command_group

-log = logging.getLogger(__name__)
+import tools.utils

-REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
+log = logging.getLogger(__name__)

 # Define the command group
 docs = command_group(
@@ -23,7 +23,7 @@ docs = command_group(
     description=__doc__,
     venv_config={
         "requirements_files": [
-            REPO_ROOT
+            tools.utils.REPO_ROOT
             / "requirements"
             / "static"
             / "ci"
tools/pkg.py (30 lines changed)

@@ -19,9 +19,9 @@ import tempfile
 import yaml
 from ptscripts import Context, command_group

-log = logging.getLogger(__name__)
+import tools.utils

-REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
+log = logging.getLogger(__name__)

 # Define the command group
 pkg = command_group(name="pkg", help="Packaging Related Commands", description=__doc__)
@@ -112,14 +112,14 @@ def set_salt_version(
     """
     Write the Salt version to 'salt/_version.txt'
     """
-    salt_version_file = REPO_ROOT / "salt" / "_version.txt"
+    salt_version_file = tools.utils.REPO_ROOT / "salt" / "_version.txt"
     if salt_version_file.exists():
         if not overwrite:
             ctx.error("The 'salt/_version.txt' file already exists")
             ctx.exit(1)
         salt_version_file.unlink()
     if salt_version is None:
-        if not REPO_ROOT.joinpath(".git").exists():
+        if not tools.utils.REPO_ROOT.joinpath(".git").exists():
             ctx.error(
                 "Apparently not running from a Salt repository checkout. "
                 "Unable to discover the Salt version."
@@ -133,7 +133,7 @@ def set_salt_version(
     ctx.info(f"Validating and normalizing the salt version {salt_version!r}...")
     with ctx.virtualenv(
         name="set-salt-version",
-        requirements_files=[REPO_ROOT / "requirements" / "base.txt"],
+        requirements_files=[tools.utils.REPO_ROOT / "requirements" / "base.txt"],
     ) as venv:
         code = f"""
         import sys
@@ -153,14 +153,14 @@ def set_salt_version(
             ctx.exit(ctx.returncode)
         salt_version = ret.stdout.strip().decode()

-    if not REPO_ROOT.joinpath("salt").is_dir():
+    if not tools.utils.REPO_ROOT.joinpath("salt").is_dir():
         ctx.error(
             "The path 'salt/' is not a directory. Unable to write 'salt/_version.txt'"
         )
         ctx.exit(1)

     try:
-        REPO_ROOT.joinpath("salt/_version.txt").write_text(salt_version)
+        tools.utils.REPO_ROOT.joinpath("salt/_version.txt").write_text(salt_version)
     except Exception as exc:
         ctx.error(f"Unable to write 'salt/_version.txt': {exc}")
         ctx.exit(1)
@@ -211,7 +211,9 @@ def pre_archive_cleanup(ctx: Context, cleanup_path: str, pkg: bool = False):

     When running on Windows and macOS, some additional cleanup is also done.
     """
-    with open(str(REPO_ROOT / "pkg" / "common" / "env-cleanup-rules.yml")) as rfh:
+    with open(
+        str(tools.utils.REPO_ROOT / "pkg" / "common" / "env-cleanup-rules.yml")
+    ) as rfh:
         patterns = yaml.safe_load(rfh.read())

     if pkg:
@@ -317,7 +319,7 @@ def generate_hashes(ctx: Context, files: list[pathlib.Path]):
     name="source-tarball",
     venv_config={
         "requirements_files": [
-            REPO_ROOT / "requirements" / "build.txt",
+            tools.utils.REPO_ROOT / "requirements" / "build.txt",
         ]
     },
 )
@@ -342,20 +344,20 @@ def source_tarball(ctx: Context):
         "-m",
         "build",
         "--sdist",
-        str(REPO_ROOT),
+        str(tools.utils.REPO_ROOT),
         env=env,
         check=True,
     )
     # Recreate sdist to be reproducible
     recompress = Recompress(timestamp)
-    for targz in REPO_ROOT.joinpath("dist").glob("*.tar.gz"):
-        ctx.info(f"Re-compressing {targz.relative_to(REPO_ROOT)} ...")
+    for targz in tools.utils.REPO_ROOT.joinpath("dist").glob("*.tar.gz"):
+        ctx.info(f"Re-compressing {targz.relative_to(tools.utils.REPO_ROOT)} ...")
         recompress.recompress(targz)
     sha256sum = shutil.which("sha256sum")
     if sha256sum:
         packages = [
-            str(pkg.relative_to(REPO_ROOT))
-            for pkg in REPO_ROOT.joinpath("dist").iterdir()
+            str(pkg.relative_to(tools.utils.REPO_ROOT))
+            for pkg in tools.utils.REPO_ROOT.joinpath("dist").iterdir()
         ]
         ctx.run("sha256sum", *packages)
     ctx.run("python3", "-m", "twine", "check", "dist/*", check=True)
tools/pkgrepo.py (107 lines changed)

@@ -19,19 +19,11 @@ import packaging.version
 from ptscripts import Context, command_group

 import tools.pkg
+import tools.utils

 try:
     import boto3
     from botocore.exceptions import ClientError
-    from rich.progress import (
-        BarColumn,
-        Column,
-        DownloadColumn,
-        Progress,
-        TextColumn,
-        TimeRemainingColumn,
-        TransferSpeedColumn,
-    )
 except ImportError:
     print(
         "\nPlease run 'python -m pip install -r "
@@ -43,12 +35,6 @@ except ImportError:

 log = logging.getLogger(__name__)

-REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
-GPG_KEY_FILENAME = "SALT-PROJECT-GPG-PUBKEY-2023"
-NIGHTLY_BUCKET_NAME = "salt-project-prod-salt-artifacts-nightly"
-STAGING_BUCKET_NAME = "salt-project-prod-salt-artifacts-staging"
-RELEASE_BUCKET_NAME = "salt-project-prod-salt-artifacts-release"
-
 # Define the command group
 repo = command_group(
     name="repo",
@@ -227,7 +213,7 @@ def debian(
     ftp_archive_config_file.write_text(textwrap.dedent(ftp_archive_config))

     # Export the GPG key in use
-    _export_gpg_key(ctx, key_id, repo_path, create_repo_path)
+    tools.utils.export_gpg_key(ctx, key_id, repo_path, create_repo_path)

     pool_path = create_repo_path / "pool"
     pool_path.mkdir(exist_ok=True)
@@ -415,7 +401,7 @@ def rpm(
     )

     # Export the GPG key in use
-    _export_gpg_key(ctx, key_id, repo_path, create_repo_path)
+    tools.utils.export_gpg_key(ctx, key_id, repo_path, create_repo_path)

     for fpath in incoming.iterdir():
         if ".src" in fpath.suffixes:
@@ -775,9 +761,9 @@ def _create_onedir_based_repo(
     repo_json_path = create_repo_path.parent / "repo.json"

     if nightly_build:
-        bucket_name = NIGHTLY_BUCKET_NAME
+        bucket_name = tools.utils.NIGHTLY_BUCKET_NAME
     else:
-        bucket_name = STAGING_BUCKET_NAME
+        bucket_name = tools.utils.STAGING_BUCKET_NAME

     release_json = {}

@@ -827,24 +813,10 @@ def _create_onedir_based_repo(
     for fpath in create_repo_path.iterdir():
         if fpath.suffix in pkg_suffixes:
             continue
-        ctx.info(f"GPG Signing '{fpath.relative_to(repo_path)}' ...")
-        signature_fpath = fpath.parent / f"{fpath.name}.asc"
-        if signature_fpath.exists():
-            signature_fpath.unlink()
-        ctx.run(
-            "gpg",
-            "--local-user",
-            key_id,
-            "--output",
-            str(signature_fpath),
-            "--armor",
-            "--detach-sign",
-            "--sign",
-            str(fpath),
-        )
+        tools.utils.gpg_sign(ctx, key_id, fpath)

     # Export the GPG key in use
-    _export_gpg_key(ctx, key_id, repo_path, create_repo_path)
+    tools.utils.export_gpg_key(ctx, key_id, repo_path, create_repo_path)

     repo_json = _get_repo_json_file_contents(
         ctx, bucket_name=bucket_name, repo_path=repo_path, repo_json_path=repo_json_path
@@ -902,13 +874,13 @@ def _get_repo_json_file_contents(
         ctx.info(f"Downloading existing '{repo_json_path.relative_to(repo_path)}' file")
         size = ret["ContentLength"]
         with repo_json_path.open("wb") as wfh:
-            with create_progress_bar(file_progress=True) as progress:
+            with tools.utils.create_progress_bar(file_progress=True) as progress:
                 task = progress.add_task(description="Downloading...", total=size)
                 s3.download_fileobj(
                     Bucket=bucket_name,
                     Key=str(repo_json_path.relative_to(repo_path)),
                     Fileobj=wfh,
-                    Callback=UpdateProgress(progress, task),
+                    Callback=tools.utils.UpdateProgress(progress, task),
                 )
         with repo_json_path.open() as rfh:
             repo_json = json.load(rfh)
@@ -950,11 +922,11 @@ def _publish_repo(
     Publish packaging repositories.
     """
     if nightly_build:
-        bucket_name = NIGHTLY_BUCKET_NAME
+        bucket_name = tools.utils.NIGHTLY_BUCKET_NAME
     elif stage:
-        bucket_name = STAGING_BUCKET_NAME
+        bucket_name = tools.utils.STAGING_BUCKET_NAME
     else:
-        bucket_name = RELEASE_BUCKET_NAME
+        bucket_name = tools.utils.RELEASE_BUCKET_NAME

     ctx.info("Preparing upload ...")
     s3 = boto3.client("s3")
@@ -991,7 +963,7 @@ def _publish_repo(
             path = pathlib.Path(dirpath, fpath)
             to_upload_paths.append(path)

    with tools.utils.create_progress_bar() as progress:
        task = progress.add_task(
            "Deleting directories to override.", total=len(to_delete_paths)
        )
@@ -1015,48 +987,18 @@ def _publish_repo(
             relpath = upload_path.relative_to(repo_path)
             size = upload_path.stat().st_size
             ctx.info(f" {relpath}")
-            with create_progress_bar(file_progress=True) as progress:
+            with tools.utils.create_progress_bar(file_progress=True) as progress:
                 task = progress.add_task(description="Uploading...", total=size)
                 s3.upload_file(
                     str(upload_path),
                     bucket_name,
                     str(relpath),
-                    Callback=UpdateProgress(progress, task),
+                    Callback=tools.utils.UpdateProgress(progress, task),
                 )
     except KeyboardInterrupt:
         pass


-class UpdateProgress:
-    def __init__(self, progress, task):
-        self.progress = progress
-        self.task = task
-
-    def __call__(self, chunk_size):
-        self.progress.update(self.task, advance=chunk_size)
-
-
-def create_progress_bar(file_progress: bool = False, **kwargs):
-    if file_progress:
-        return Progress(
-            TextColumn("[progress.description]{task.description}"),
-            BarColumn(),
-            DownloadColumn(),
-            TransferSpeedColumn(),
-            TextColumn("eta"),
-            TimeRemainingColumn(),
-            **kwargs,
-        )
-    return Progress(
-        TextColumn(
-            "[progress.description]{task.description}", table_column=Column(ratio=3)
-        ),
-        BarColumn(),
-        expand=True,
-        **kwargs,
-    )
-
-
 def _create_repo_path(
     repo_path: pathlib.Path,
     salt_version: str,
@@ -1084,25 +1026,6 @@ def _create_repo_path(
     return create_repo_path


-def _export_gpg_key(
-    ctx: Context, key_id: str, repo_path: pathlib.Path, create_repo_path: pathlib.Path
-):
-    keyfile_gpg = create_repo_path.joinpath(GPG_KEY_FILENAME).with_suffix(".gpg")
-    if keyfile_gpg.exists():
-        keyfile_gpg.unlink()
-    ctx.info(
-        f"Exporting GnuPG Key '{key_id}' to {keyfile_gpg.relative_to(repo_path)} ..."
-    )
-    ctx.run("gpg", "--output", str(keyfile_gpg), "--export", key_id)
-    keyfile_pub = create_repo_path.joinpath(GPG_KEY_FILENAME).with_suffix(".pub")
-    if keyfile_pub.exists():
-        keyfile_pub.unlink()
-    ctx.info(
-        f"Exporting GnuPG Key '{key_id}' to {keyfile_pub.relative_to(repo_path)} ..."
-    )
-    ctx.run("gpg", "--armor", "--output", str(keyfile_pub), "--export", key_id)
-
-
 def _get_latest_version(*versions: str) -> Version:
     _versions = []
     for version in set(versions):
tools/pre_commit.py

@@ -5,17 +5,17 @@ These commands are used by pre-commit.
 from __future__ import annotations

 import logging
 import pathlib
 import shutil
 from typing import TYPE_CHECKING, cast

 from jinja2 import Environment, FileSystemLoader
 from ptscripts import Context, command_group

+import tools.utils
+
 log = logging.getLogger(__name__)

-REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
-WORKFLOWS = REPO_ROOT / ".github" / "workflows"
+WORKFLOWS = tools.utils.REPO_ROOT / ".github" / "workflows"
 TEMPLATES = WORKFLOWS / "templates"

 # Define the command group
@@ -81,11 +81,11 @@ def generate_workflows(ctx: Context):
         workflow_path = WORKFLOWS / template
         template_path = TEMPLATES / f"{template}.jinja"
         ctx.info(
-            f"Generating '{workflow_path.relative_to(REPO_ROOT)}' from "
-            f"template '{template_path.relative_to(REPO_ROOT)}' ..."
+            f"Generating '{workflow_path.relative_to(tools.utils.REPO_ROOT)}' from "
+            f"template '{template_path.relative_to(tools.utils.REPO_ROOT)}' ..."
         )
         context = {
-            "template": template_path.relative_to(REPO_ROOT),
+            "template": template_path.relative_to(tools.utils.REPO_ROOT),
             "workflow_name": workflow_name,
             "includes": includes,
             "conclusion_needs": NeedsTracker(),
tools/utils.py (new file, 88 lines)

@@ -0,0 +1,88 @@
+# pylint: disable=resource-leakage,broad-except
+from __future__ import annotations
+
+import pathlib
+
+from ptscripts import Context
+from rich.progress import (
+    BarColumn,
+    Column,
+    DownloadColumn,
+    Progress,
+    TextColumn,
+    TimeRemainingColumn,
+    TransferSpeedColumn,
+)
+
+REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
+GPG_KEY_FILENAME = "SALT-PROJECT-GPG-PUBKEY-2023"
+NIGHTLY_BUCKET_NAME = "salt-project-prod-salt-artifacts-nightly"
+STAGING_BUCKET_NAME = "salt-project-prod-salt-artifacts-staging"
+RELEASE_BUCKET_NAME = "salt-project-prod-salt-artifacts-release"
+
+
+class UpdateProgress:
+    def __init__(self, progress, task):
+        self.progress = progress
+        self.task = task
+
+    def __call__(self, chunk_size):
+        self.progress.update(self.task, advance=chunk_size)
+
+
+def create_progress_bar(file_progress: bool = False, **kwargs):
+    if file_progress:
+        return Progress(
+            TextColumn("[progress.description]{task.description}"),
+            BarColumn(),
+            DownloadColumn(),
+            TransferSpeedColumn(),
+            TextColumn("eta"),
+            TimeRemainingColumn(),
+            **kwargs,
+        )
+    return Progress(
+        TextColumn(
+            "[progress.description]{task.description}", table_column=Column(ratio=3)
+        ),
+        BarColumn(),
+        expand=True,
+        **kwargs,
+    )
+
+
+def export_gpg_key(
+    ctx: Context, key_id: str, repo_path: pathlib.Path, create_repo_path: pathlib.Path
+):
+    keyfile_gpg = create_repo_path.joinpath(GPG_KEY_FILENAME).with_suffix(".gpg")
+    if keyfile_gpg.exists():
+        keyfile_gpg.unlink()
+    ctx.info(
+        f"Exporting GnuPG Key '{key_id}' to {keyfile_gpg.relative_to(repo_path)} ..."
+    )
+    ctx.run("gpg", "--output", str(keyfile_gpg), "--export", key_id)
+    keyfile_pub = create_repo_path.joinpath(GPG_KEY_FILENAME).with_suffix(".pub")
+    if keyfile_pub.exists():
+        keyfile_pub.unlink()
+    ctx.info(
+        f"Exporting GnuPG Key '{key_id}' to {keyfile_pub.relative_to(repo_path)} ..."
+    )
+    ctx.run("gpg", "--armor", "--output", str(keyfile_pub), "--export", key_id)
+
+
+def gpg_sign(ctx: Context, key_id: str, path: pathlib.Path):
+    ctx.info(f"GPG Signing '{path}' ...")
+    signature_fpath = path.parent / f"{path.name}.asc"
+    if signature_fpath.exists():
+        signature_fpath.unlink()
+    ctx.run(
+        "gpg",
+        "--local-user",
+        key_id,
+        "--output",
+        str(signature_fpath),
+        "--armor",
+        "--detach-sign",
+        "--sign",
+        str(path),
+    )
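The consolidated helpers are meant to be used together the way the tools/pkgrepo.py call sites above use them. A hedged sketch follows; the helper function names and the bucket/key/path arguments are hypothetical, and boto3 is assumed to be installed as in tools/pkgrepo.py:

    import pathlib

    import boto3
    from ptscripts import Context

    import tools.utils


    def upload_with_progress(ctx: Context, path: pathlib.Path, bucket: str, key: str):
        # Pair the rich progress bar with a boto3 transfer callback, mirroring _publish_repo().
        ctx.info(f"Uploading {path.name} ...")
        size = path.stat().st_size
        s3 = boto3.client("s3")
        with tools.utils.create_progress_bar(file_progress=True) as progress:
            task = progress.add_task(description="Uploading...", total=size)
            s3.upload_file(
                str(path), bucket, key, Callback=tools.utils.UpdateProgress(progress, task)
            )


    def sign_repo(ctx: Context, key_id: str, repo_path: pathlib.Path, create_repo_path: pathlib.Path):
        # Sign each generated file, then export the public key, mirroring _create_onedir_based_repo().
        for fpath in create_repo_path.iterdir():
            tools.utils.gpg_sign(ctx, key_id, fpath)
        tools.utils.export_gpg_key(ctx, key_id, repo_path, create_repo_path)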
tools/vm.py (17 lines changed)

@@ -22,6 +22,8 @@ from typing import TYPE_CHECKING, cast

 from ptscripts import Context, command_group

+import tools.utils
+
 try:
     import attr
     import boto3
@@ -52,12 +54,11 @@ if TYPE_CHECKING:

 log = logging.getLogger(__name__)

-REPO_ROOT = pathlib.Path(__file__).parent.parent
-STATE_DIR = REPO_ROOT / ".vms-state"
-with REPO_ROOT.joinpath("cicd", "golden-images.json").open() as rfh:
+STATE_DIR = tools.utils.REPO_ROOT / ".vms-state"
+with tools.utils.REPO_ROOT.joinpath("cicd", "golden-images.json").open() as rfh:
     AMIS = json.load(rfh)
 REPO_CHECKOUT_ID = hashlib.sha256(
-    "|".join(list(platform.uname()) + [str(REPO_ROOT)]).encode()
+    "|".join(list(platform.uname()) + [str(tools.utils.REPO_ROOT)]).encode()
 ).hexdigest()
 AWS_REGION = (
     os.environ.get("AWS_DEFAULT_REGION") or os.environ.get("AWS_REGION") or "us-west-2"
@@ -982,7 +983,7 @@ class VM:
             "--exclude",
             ".pytest_cache/",
             "--exclude",
-            f"{STATE_DIR.relative_to(REPO_ROOT)}{os.path.sep}",
+            f"{STATE_DIR.relative_to(tools.utils.REPO_ROOT)}{os.path.sep}",
             "--exclude",
             "*.py~",
             # We need to include artifacts/ to be able to include artifacts/salt
@@ -999,7 +1000,7 @@ class VM:
            # symlink with a copy of what's getting symlinked.
            rsync_flags.append("--copy-links")
        # Local repo path
-        source = f"{REPO_ROOT}{os.path.sep}"
+        source = f"{tools.utils.REPO_ROOT}{os.path.sep}"
        # Remote repo path
        remote_path = self.upload_path.as_posix()
        if self.is_windows:
@@ -1014,7 +1015,7 @@ class VM:
             return
         write_env = {k: str(v) for (k, v) in env.items()}
         write_env_filename = ".ci-env"
-        write_env_filepath = REPO_ROOT / ".ci-env"
+        write_env_filepath = tools.utils.REPO_ROOT / ".ci-env"
         write_env_filepath.write_text(json.dumps(write_env))

         # Local path
@@ -1241,7 +1242,7 @@ class VM:
         _ssh_command_args = [
             ssh,
             "-F",
-            str(self.ssh_config_file.relative_to(REPO_ROOT)),
+            str(self.ssh_config_file.relative_to(tools.utils.REPO_ROOT)),
         ]
         if ssh_options:
             _ssh_command_args.extend(ssh_options)