Add tools/utils.py and move some common functionality there.

Signed-off-by: Pedro Algarvio <palgarvio@vmware.com>
Pedro Algarvio authored on 2023-02-09 06:18:37 +00:00, committed by Pedro Algarvio
parent f57bf390a3
commit 4c8cc60629
8 changed files with 151 additions and 132 deletions
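Every module below follows the same pattern: the per-module REPO_ROOT constant (and, for the repo commands, the bucket names, GPG helpers and progress-bar helpers) moves into the shared tools.utils module, and callers reference tools.utils.REPO_ROOT instead. As a rough sketch of the resulting shape of a tool module (the module name and command below are hypothetical, for illustration only):

# Hypothetical tools/example.py, illustrating the post-refactor shape only.
from __future__ import annotations

import logging

from ptscripts import Context, command_group

import tools.utils  # shared REPO_ROOT, bucket names, GPG and progress helpers

log = logging.getLogger(__name__)

example = command_group(name="example", help="Example Commands", description=__doc__)


@example.command(name="show-root")
def show_root(ctx: Context):
    # Replaces the old per-module constant:
    #   REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
    ctx.info(f"Repository root: {tools.utils.REPO_ROOT}")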


@@ -7,16 +7,15 @@ from __future__ import annotations
 import datetime
 import logging
 import os
-import pathlib
 import subprocess
 import sys
 import textwrap
 
 from ptscripts import Context, command_group
 
-log = logging.getLogger(__name__)
+import tools.utils
 
-REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
+log = logging.getLogger(__name__)
 
 # Define the command group
 changelog = command_group(
@@ -25,7 +24,7 @@ changelog = command_group(
     description=__doc__,
     venv_config={
         "requirements_files": [
-            REPO_ROOT
+            tools.utils.REPO_ROOT
             / "requirements"
             / "static"
             / "ci"


@@ -13,9 +13,9 @@ from typing import TYPE_CHECKING
 
 from ptscripts import Context, command_group
 
-log = logging.getLogger(__name__)
+import tools.utils
 
-REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
+log = logging.getLogger(__name__)
 
 # Define the command group
 ci = command_group(name="ci", help="CI Related Commands", description=__doc__)
@@ -107,7 +107,11 @@ def process_changed_files(ctx: Context, event_name: str, changed_files: pathlib.
             if not entry:
                 loaded_data.remove(entry)
             try:
-                entry = REPO_ROOT.joinpath(entry).resolve().relative_to(REPO_ROOT)
+                entry = (
+                    tools.utils.REPO_ROOT.joinpath(entry)
+                    .resolve()
+                    .relative_to(tools.utils.REPO_ROOT)
+                )
             except ValueError:
                 ctx.error(
                     f"While processing the changed files key {key!r}, the "
@@ -417,10 +421,12 @@ def define_testrun(ctx: Context, event_name: str, changed_files: pathlib.Path):
             wfh.write("</pre>\n</details>\n")
         testrun = {"type": "full"}
     else:
-        testrun_changed_files_path = REPO_ROOT / "testrun-changed-files.txt"
+        testrun_changed_files_path = tools.utils.REPO_ROOT / "testrun-changed-files.txt"
        testrun = {
            "type": "changed",
-            "from-filenames": str(testrun_changed_files_path.relative_to(REPO_ROOT)),
+            "from-filenames": str(
+                testrun_changed_files_path.relative_to(tools.utils.REPO_ROOT)
+            ),
        }
        ctx.info(f"Writing {testrun_changed_files_path.name} ...")
        selected_changed_files = []


@@ -12,9 +12,9 @@ import sys
 
 from ptscripts import Context, command_group
 
-log = logging.getLogger(__name__)
+import tools.utils
 
-REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
+log = logging.getLogger(__name__)
 
 # Define the command group
 docs = command_group(
@@ -23,7 +23,7 @@ docs = command_group(
     description=__doc__,
     venv_config={
         "requirements_files": [
-            REPO_ROOT
+            tools.utils.REPO_ROOT
             / "requirements"
             / "static"
             / "ci"


@@ -19,9 +19,9 @@ import tempfile
 import yaml
 from ptscripts import Context, command_group
 
-log = logging.getLogger(__name__)
+import tools.utils
 
-REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
+log = logging.getLogger(__name__)
 
 # Define the command group
 pkg = command_group(name="pkg", help="Packaging Related Commands", description=__doc__)
@@ -112,14 +112,14 @@ def set_salt_version(
     """
     Write the Salt version to 'salt/_version.txt'
     """
-    salt_version_file = REPO_ROOT / "salt" / "_version.txt"
+    salt_version_file = tools.utils.REPO_ROOT / "salt" / "_version.txt"
     if salt_version_file.exists():
         if not overwrite:
             ctx.error("The 'salt/_version.txt' file already exists")
             ctx.exit(1)
         salt_version_file.unlink()
     if salt_version is None:
-        if not REPO_ROOT.joinpath(".git").exists():
+        if not tools.utils.REPO_ROOT.joinpath(".git").exists():
             ctx.error(
                 "Apparently not running from a Salt repository checkout. "
                 "Unable to discover the Salt version."
@@ -133,7 +133,7 @@ def set_salt_version(
     ctx.info(f"Validating and normalizing the salt version {salt_version!r}...")
     with ctx.virtualenv(
         name="set-salt-version",
-        requirements_files=[REPO_ROOT / "requirements" / "base.txt"],
+        requirements_files=[tools.utils.REPO_ROOT / "requirements" / "base.txt"],
     ) as venv:
         code = f"""
         import sys
@@ -153,14 +153,14 @@ def set_salt_version(
             ctx.exit(ctx.returncode)
         salt_version = ret.stdout.strip().decode()
 
-    if not REPO_ROOT.joinpath("salt").is_dir():
+    if not tools.utils.REPO_ROOT.joinpath("salt").is_dir():
         ctx.error(
             "The path 'salt/' is not a directory. Unable to write 'salt/_version.txt'"
         )
         ctx.exit(1)
 
     try:
-        REPO_ROOT.joinpath("salt/_version.txt").write_text(salt_version)
+        tools.utils.REPO_ROOT.joinpath("salt/_version.txt").write_text(salt_version)
     except Exception as exc:
         ctx.error(f"Unable to write 'salt/_version.txt': {exc}")
         ctx.exit(1)
@@ -211,7 +211,9 @@ def pre_archive_cleanup(ctx: Context, cleanup_path: str, pkg: bool = False):
 
     When running on Windows and macOS, some additional cleanup is also done.
     """
-    with open(str(REPO_ROOT / "pkg" / "common" / "env-cleanup-rules.yml")) as rfh:
+    with open(
+        str(tools.utils.REPO_ROOT / "pkg" / "common" / "env-cleanup-rules.yml")
+    ) as rfh:
         patterns = yaml.safe_load(rfh.read())
 
     if pkg:
@@ -317,7 +319,7 @@ def generate_hashes(ctx: Context, files: list[pathlib.Path]):
     name="source-tarball",
     venv_config={
         "requirements_files": [
-            REPO_ROOT / "requirements" / "build.txt",
+            tools.utils.REPO_ROOT / "requirements" / "build.txt",
         ]
     },
 )
@@ -342,20 +344,20 @@ def source_tarball(ctx: Context):
         "-m",
         "build",
         "--sdist",
-        str(REPO_ROOT),
+        str(tools.utils.REPO_ROOT),
         env=env,
         check=True,
     )
     # Recreate sdist to be reproducible
     recompress = Recompress(timestamp)
-    for targz in REPO_ROOT.joinpath("dist").glob("*.tar.gz"):
-        ctx.info(f"Re-compressing {targz.relative_to(REPO_ROOT)} ...")
+    for targz in tools.utils.REPO_ROOT.joinpath("dist").glob("*.tar.gz"):
+        ctx.info(f"Re-compressing {targz.relative_to(tools.utils.REPO_ROOT)} ...")
         recompress.recompress(targz)
     sha256sum = shutil.which("sha256sum")
     if sha256sum:
         packages = [
-            str(pkg.relative_to(REPO_ROOT))
-            for pkg in REPO_ROOT.joinpath("dist").iterdir()
+            str(pkg.relative_to(tools.utils.REPO_ROOT))
+            for pkg in tools.utils.REPO_ROOT.joinpath("dist").iterdir()
         ]
         ctx.run("sha256sum", *packages)
     ctx.run("python3", "-m", "twine", "check", "dist/*", check=True)


@@ -19,19 +19,11 @@ import packaging.version
 from ptscripts import Context, command_group
 
 import tools.pkg
+import tools.utils
 
 try:
     import boto3
     from botocore.exceptions import ClientError
-    from rich.progress import (
-        BarColumn,
-        Column,
-        DownloadColumn,
-        Progress,
-        TextColumn,
-        TimeRemainingColumn,
-        TransferSpeedColumn,
-    )
 except ImportError:
     print(
         "\nPlease run 'python -m pip install -r "
@@ -43,12 +35,6 @@ except ImportError:
 
 log = logging.getLogger(__name__)
 
-REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
-GPG_KEY_FILENAME = "SALT-PROJECT-GPG-PUBKEY-2023"
-NIGHTLY_BUCKET_NAME = "salt-project-prod-salt-artifacts-nightly"
-STAGING_BUCKET_NAME = "salt-project-prod-salt-artifacts-staging"
-RELEASE_BUCKET_NAME = "salt-project-prod-salt-artifacts-release"
-
 # Define the command group
 repo = command_group(
     name="repo",
@@ -227,7 +213,7 @@ def debian(
     ftp_archive_config_file.write_text(textwrap.dedent(ftp_archive_config))
 
     # Export the GPG key in use
-    _export_gpg_key(ctx, key_id, repo_path, create_repo_path)
+    tools.utils.export_gpg_key(ctx, key_id, repo_path, create_repo_path)
 
     pool_path = create_repo_path / "pool"
     pool_path.mkdir(exist_ok=True)
@@ -415,7 +401,7 @@ def rpm(
     )
 
     # Export the GPG key in use
-    _export_gpg_key(ctx, key_id, repo_path, create_repo_path)
+    tools.utils.export_gpg_key(ctx, key_id, repo_path, create_repo_path)
 
     for fpath in incoming.iterdir():
         if ".src" in fpath.suffixes:
@@ -775,9 +761,9 @@ def _create_onedir_based_repo(
     repo_json_path = create_repo_path.parent / "repo.json"
 
     if nightly_build:
-        bucket_name = NIGHTLY_BUCKET_NAME
+        bucket_name = tools.utils.NIGHTLY_BUCKET_NAME
     else:
-        bucket_name = STAGING_BUCKET_NAME
+        bucket_name = tools.utils.STAGING_BUCKET_NAME
 
     release_json = {}
 
@@ -827,24 +813,10 @@ def _create_onedir_based_repo(
     for fpath in create_repo_path.iterdir():
         if fpath.suffix in pkg_suffixes:
             continue
-        ctx.info(f"GPG Signing '{fpath.relative_to(repo_path)}' ...")
-        signature_fpath = fpath.parent / f"{fpath.name}.asc"
-        if signature_fpath.exists():
-            signature_fpath.unlink()
-        ctx.run(
-            "gpg",
-            "--local-user",
-            key_id,
-            "--output",
-            str(signature_fpath),
-            "--armor",
-            "--detach-sign",
-            "--sign",
-            str(fpath),
-        )
+        tools.utils.gpg_sign(ctx, key_id, fpath)
 
     # Export the GPG key in use
-    _export_gpg_key(ctx, key_id, repo_path, create_repo_path)
+    tools.utils.export_gpg_key(ctx, key_id, repo_path, create_repo_path)
 
     repo_json = _get_repo_json_file_contents(
         ctx, bucket_name=bucket_name, repo_path=repo_path, repo_json_path=repo_json_path
@@ -902,13 +874,13 @@ def _get_repo_json_file_contents(
         ctx.info(f"Downloading existing '{repo_json_path.relative_to(repo_path)}' file")
         size = ret["ContentLength"]
         with repo_json_path.open("wb") as wfh:
-            with create_progress_bar(file_progress=True) as progress:
+            with tools.utils.create_progress_bar(file_progress=True) as progress:
                 task = progress.add_task(description="Downloading...", total=size)
                 s3.download_fileobj(
                     Bucket=bucket_name,
                     Key=str(repo_json_path.relative_to(repo_path)),
                     Fileobj=wfh,
-                    Callback=UpdateProgress(progress, task),
+                    Callback=tools.utils.UpdateProgress(progress, task),
                 )
         with repo_json_path.open() as rfh:
             repo_json = json.load(rfh)
@@ -950,11 +922,11 @@ def _publish_repo(
     Publish packaging repositories.
     """
     if nightly_build:
-        bucket_name = NIGHTLY_BUCKET_NAME
+        bucket_name = tools.utils.NIGHTLY_BUCKET_NAME
     elif stage:
-        bucket_name = STAGING_BUCKET_NAME
+        bucket_name = tools.utils.STAGING_BUCKET_NAME
     else:
-        bucket_name = RELEASE_BUCKET_NAME
+        bucket_name = tools.utils.RELEASE_BUCKET_NAME
 
     ctx.info("Preparing upload ...")
     s3 = boto3.client("s3")
@@ -991,7 +963,7 @@ def _publish_repo(
             path = pathlib.Path(dirpath, fpath)
             to_upload_paths.append(path)
 
-    with create_progress_bar() as progress:
+    with tools.utils.create_progress_bar() as progress:
         task = progress.add_task(
             "Deleting directories to override.", total=len(to_delete_paths)
         )
@@ -1015,48 +987,18 @@ def _publish_repo(
             relpath = upload_path.relative_to(repo_path)
             size = upload_path.stat().st_size
             ctx.info(f" {relpath}")
-            with create_progress_bar(file_progress=True) as progress:
+            with tools.utils.create_progress_bar(file_progress=True) as progress:
                 task = progress.add_task(description="Uploading...", total=size)
                 s3.upload_file(
                     str(upload_path),
                     bucket_name,
                     str(relpath),
-                    Callback=UpdateProgress(progress, task),
+                    Callback=tools.utils.UpdateProgress(progress, task),
                 )
     except KeyboardInterrupt:
         pass
 
 
-class UpdateProgress:
-    def __init__(self, progress, task):
-        self.progress = progress
-        self.task = task
-
-    def __call__(self, chunk_size):
-        self.progress.update(self.task, advance=chunk_size)
-
-
-def create_progress_bar(file_progress: bool = False, **kwargs):
-    if file_progress:
-        return Progress(
-            TextColumn("[progress.description]{task.description}"),
-            BarColumn(),
-            DownloadColumn(),
-            TransferSpeedColumn(),
-            TextColumn("eta"),
-            TimeRemainingColumn(),
-            **kwargs,
-        )
-    return Progress(
-        TextColumn(
-            "[progress.description]{task.description}", table_column=Column(ratio=3)
-        ),
-        BarColumn(),
-        expand=True,
-        **kwargs,
-    )
-
-
 def _create_repo_path(
     repo_path: pathlib.Path,
     salt_version: str,
@@ -1084,25 +1026,6 @@ def _create_repo_path(
     return create_repo_path
 
 
-def _export_gpg_key(
-    ctx: Context, key_id: str, repo_path: pathlib.Path, create_repo_path: pathlib.Path
-):
-    keyfile_gpg = create_repo_path.joinpath(GPG_KEY_FILENAME).with_suffix(".gpg")
-    if keyfile_gpg.exists():
-        keyfile_gpg.unlink()
-    ctx.info(
-        f"Exporting GnuPG Key '{key_id}' to {keyfile_gpg.relative_to(repo_path)} ..."
-    )
-    ctx.run("gpg", "--output", str(keyfile_gpg), "--export", key_id)
-    keyfile_pub = create_repo_path.joinpath(GPG_KEY_FILENAME).with_suffix(".pub")
-    if keyfile_pub.exists():
-        keyfile_pub.unlink()
-    ctx.info(
-        f"Exporting GnuPG Key '{key_id}' to {keyfile_pub.relative_to(repo_path)} ..."
-    )
-    ctx.run("gpg", "--armor", "--output", str(keyfile_pub), "--export", key_id)
-
-
 def _get_latest_version(*versions: str) -> Version:
     _versions = []
     for version in set(versions):


@@ -5,17 +5,17 @@ These commands are used by pre-commit.
 from __future__ import annotations
 
 import logging
-import pathlib
 import shutil
 from typing import TYPE_CHECKING, cast
 
 from jinja2 import Environment, FileSystemLoader
 from ptscripts import Context, command_group
 
+import tools.utils
+
 log = logging.getLogger(__name__)
 
-REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
-WORKFLOWS = REPO_ROOT / ".github" / "workflows"
+WORKFLOWS = tools.utils.REPO_ROOT / ".github" / "workflows"
 TEMPLATES = WORKFLOWS / "templates"
 
 # Define the command group
@@ -81,11 +81,11 @@ def generate_workflows(ctx: Context):
         workflow_path = WORKFLOWS / template
         template_path = TEMPLATES / f"{template}.jinja"
         ctx.info(
-            f"Generating '{workflow_path.relative_to(REPO_ROOT)}' from "
-            f"template '{template_path.relative_to(REPO_ROOT)}' ..."
+            f"Generating '{workflow_path.relative_to(tools.utils.REPO_ROOT)}' from "
+            f"template '{template_path.relative_to(tools.utils.REPO_ROOT)}' ..."
         )
         context = {
-            "template": template_path.relative_to(REPO_ROOT),
+            "template": template_path.relative_to(tools.utils.REPO_ROOT),
             "workflow_name": workflow_name,
             "includes": includes,
             "conclusion_needs": NeedsTracker(),

tools/utils.py (new file, 88 lines)
@@ -0,0 +1,88 @@
+# pylint: disable=resource-leakage,broad-except
+from __future__ import annotations
+
+import pathlib
+
+from ptscripts import Context
+from rich.progress import (
+    BarColumn,
+    Column,
+    DownloadColumn,
+    Progress,
+    TextColumn,
+    TimeRemainingColumn,
+    TransferSpeedColumn,
+)
+
+REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent
+GPG_KEY_FILENAME = "SALT-PROJECT-GPG-PUBKEY-2023"
+NIGHTLY_BUCKET_NAME = "salt-project-prod-salt-artifacts-nightly"
+STAGING_BUCKET_NAME = "salt-project-prod-salt-artifacts-staging"
+RELEASE_BUCKET_NAME = "salt-project-prod-salt-artifacts-release"
+
+
+class UpdateProgress:
+    def __init__(self, progress, task):
+        self.progress = progress
+        self.task = task
+
+    def __call__(self, chunk_size):
+        self.progress.update(self.task, advance=chunk_size)
+
+
+def create_progress_bar(file_progress: bool = False, **kwargs):
+    if file_progress:
+        return Progress(
+            TextColumn("[progress.description]{task.description}"),
+            BarColumn(),
+            DownloadColumn(),
+            TransferSpeedColumn(),
+            TextColumn("eta"),
+            TimeRemainingColumn(),
+            **kwargs,
+        )
+    return Progress(
+        TextColumn(
+            "[progress.description]{task.description}", table_column=Column(ratio=3)
+        ),
+        BarColumn(),
+        expand=True,
+        **kwargs,
+    )
+
+
+def export_gpg_key(
+    ctx: Context, key_id: str, repo_path: pathlib.Path, create_repo_path: pathlib.Path
+):
+    keyfile_gpg = create_repo_path.joinpath(GPG_KEY_FILENAME).with_suffix(".gpg")
+    if keyfile_gpg.exists():
+        keyfile_gpg.unlink()
+    ctx.info(
+        f"Exporting GnuPG Key '{key_id}' to {keyfile_gpg.relative_to(repo_path)} ..."
+    )
+    ctx.run("gpg", "--output", str(keyfile_gpg), "--export", key_id)
+    keyfile_pub = create_repo_path.joinpath(GPG_KEY_FILENAME).with_suffix(".pub")
+    if keyfile_pub.exists():
+        keyfile_pub.unlink()
+    ctx.info(
+        f"Exporting GnuPG Key '{key_id}' to {keyfile_pub.relative_to(repo_path)} ..."
+    )
+    ctx.run("gpg", "--armor", "--output", str(keyfile_pub), "--export", key_id)
+
+
+def gpg_sign(ctx: Context, key_id: str, path: pathlib.Path):
+    ctx.info(f"GPG Signing '{path}' ...")
+    signature_fpath = path.parent / f"{path.name}.asc"
+    if signature_fpath.exists():
+        signature_fpath.unlink()
+    ctx.run(
+        "gpg",
+        "--local-user",
+        key_id,
+        "--output",
+        str(signature_fpath),
+        "--armor",
+        "--detach-sign",
+        "--sign",
+        str(path),
+    )
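The progress helpers are meant to be consumed the way the repo commands above use them: create_progress_bar() builds the rich progress display, and an UpdateProgress instance is handed to boto3 as the transfer callback so each chunk advances the bar. A trimmed sketch, assuming boto3 and rich are installed; the bucket and key names below are placeholders:

import pathlib

import boto3

import tools.utils


def download_with_progress(bucket: str, key: str, dest: pathlib.Path) -> None:
    # Mirrors _get_repo_json_file_contents above; bucket/key are placeholders.
    s3 = boto3.client("s3")
    size = s3.head_object(Bucket=bucket, Key=key)["ContentLength"]
    with dest.open("wb") as wfh:
        with tools.utils.create_progress_bar(file_progress=True) as progress:
            task = progress.add_task(description="Downloading...", total=size)
            s3.download_fileobj(
                Bucket=bucket,
                Key=key,
                Fileobj=wfh,
                # boto3 invokes the callback with the bytes transferred per
                # chunk; UpdateProgress advances the rich task by that amount.
                Callback=tools.utils.UpdateProgress(progress, task),
            )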


@@ -22,6 +22,8 @@ from typing import TYPE_CHECKING, cast
 
 from ptscripts import Context, command_group
 
+import tools.utils
+
 try:
     import attr
     import boto3
@@ -52,12 +54,11 @@ if TYPE_CHECKING:
 
 log = logging.getLogger(__name__)
 
-REPO_ROOT = pathlib.Path(__file__).parent.parent
-STATE_DIR = REPO_ROOT / ".vms-state"
-with REPO_ROOT.joinpath("cicd", "golden-images.json").open() as rfh:
+STATE_DIR = tools.utils.REPO_ROOT / ".vms-state"
+with tools.utils.REPO_ROOT.joinpath("cicd", "golden-images.json").open() as rfh:
     AMIS = json.load(rfh)
 REPO_CHECKOUT_ID = hashlib.sha256(
-    "|".join(list(platform.uname()) + [str(REPO_ROOT)]).encode()
+    "|".join(list(platform.uname()) + [str(tools.utils.REPO_ROOT)]).encode()
 ).hexdigest()
 AWS_REGION = (
     os.environ.get("AWS_DEFAULT_REGION") or os.environ.get("AWS_REGION") or "us-west-2"
@@ -982,7 +983,7 @@ class VM:
             "--exclude",
             ".pytest_cache/",
             "--exclude",
-            f"{STATE_DIR.relative_to(REPO_ROOT)}{os.path.sep}",
+            f"{STATE_DIR.relative_to(tools.utils.REPO_ROOT)}{os.path.sep}",
             "--exclude",
             "*.py~",
             # We need to include artifacts/ to be able to include artifacts/salt
@@ -999,7 +1000,7 @@ class VM:
             # symlink with a copy of what's getting symlinked.
             rsync_flags.append("--copy-links")
         # Local repo path
-        source = f"{REPO_ROOT}{os.path.sep}"
+        source = f"{tools.utils.REPO_ROOT}{os.path.sep}"
         # Remote repo path
         remote_path = self.upload_path.as_posix()
         if self.is_windows:
@@ -1014,7 +1015,7 @@ class VM:
             return
         write_env = {k: str(v) for (k, v) in env.items()}
        write_env_filename = ".ci-env"
-        write_env_filepath = REPO_ROOT / ".ci-env"
+        write_env_filepath = tools.utils.REPO_ROOT / ".ci-env"
        write_env_filepath.write_text(json.dumps(write_env))
 
        # Local path
@@ -1241,7 +1242,7 @@ class VM:
         _ssh_command_args = [
             ssh,
             "-F",
-            str(self.ssh_config_file.relative_to(REPO_ROOT)),
+            str(self.ssh_config_file.relative_to(tools.utils.REPO_ROOT)),
         ]
         if ssh_options:
             _ssh_command_args.extend(ssh_options)