Allow excluding paths when cleaning up archives

Signed-off-by: Pedro Algarvio <palgarvio@vmware.com>
Author: Pedro Algarvio, 2024-01-13 20:29:52 +00:00 (committed by Pedro Algarvio)
Parent: 83e66d30b7
Commit: 3fb1279f32
9 changed files with 47 additions and 7 deletions

View file

@@ -169,7 +169,7 @@ jobs:
- name: Get Hash For Nox Tarball Cache
id: nox-archive-hash
run: |
echo "nox-archive-hash=${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}" | tee -a "$GITHUB_OUTPUT"
echo "nox-archive-hash=${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py', 'pkg/common/env-cleanup-rules.yml') }}" | tee -a "$GITHUB_OUTPUT"
- name: Write Changed Files To A Local File
run:

View file

@@ -221,7 +221,7 @@ jobs:
- name: Get Hash For Nox Tarball Cache
id: nox-archive-hash
run: |
echo "nox-archive-hash=${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}" | tee -a "$GITHUB_OUTPUT"
echo "nox-archive-hash=${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py', 'pkg/common/env-cleanup-rules.yml') }}" | tee -a "$GITHUB_OUTPUT"
- name: Write Changed Files To A Local File
run:

View file

@@ -115,7 +115,7 @@ jobs:
- name: Get Hash For Nox Tarball Cache
id: nox-archive-hash
run: |
echo "nox-archive-hash=${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}" | tee -a "$GITHUB_OUTPUT"
echo "nox-archive-hash=${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py', 'pkg/common/env-cleanup-rules.yml') }}" | tee -a "$GITHUB_OUTPUT"
download-onedir-artifact:
name: Download Staging Onedir Artifact

View file

@@ -211,7 +211,7 @@ jobs:
- name: Get Hash For Nox Tarball Cache
id: nox-archive-hash
run: |
echo "nox-archive-hash=${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}" | tee -a "$GITHUB_OUTPUT"
echo "nox-archive-hash=${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py', 'pkg/common/env-cleanup-rules.yml') }}" | tee -a "$GITHUB_OUTPUT"
- name: Write Changed Files To A Local File
run:

View file

@@ -199,7 +199,7 @@ jobs:
- name: Get Hash For Nox Tarball Cache
id: nox-archive-hash
run: |
echo "nox-archive-hash=${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}" | tee -a "$GITHUB_OUTPUT"
echo "nox-archive-hash=${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py', 'pkg/common/env-cleanup-rules.yml') }}" | tee -a "$GITHUB_OUTPUT"
- name: Check Existing Releases
env:

View file

@@ -9,6 +9,7 @@
<%- set gpg_key_id = "64CBBC8173D76B3F" %>
<%- set prepare_actual_release = prepare_actual_release | default(False) %>
<%- set gh_actions_workflows_python_version = "3.10" %>
+<%- set nox_archive_hashfiles = "${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py', 'pkg/common/env-cleanup-rules.yml') }}" %>
---
<%- block name %>
name: <{ workflow_name }>
@@ -216,7 +217,7 @@ jobs:
- name: Get Hash For Nox Tarball Cache
id: nox-archive-hash
run: |
echo "nox-archive-hash=${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}" | tee -a "$GITHUB_OUTPUT"
echo "nox-archive-hash=<{ nox_archive_hashfiles }>" | tee -a "$GITHUB_OUTPUT"
<%- if prepare_actual_release %>

View file

@@ -143,7 +143,7 @@ permissions:
- name: Get Hash For Nox Tarball Cache
id: nox-archive-hash
run: |
echo "nox-archive-hash=${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }}" | tee -a "$GITHUB_OUTPUT"
echo "nox-archive-hash=<{ nox_archive_hashfiles }>" | tee -a "$GITHUB_OUTPUT"
<%- endblock prepare_workflow_job %>
<%- endif %>

View file

@@ -1,5 +1,8 @@
---
common:
+exclude_patterns: &common_exclude_patterns
+- "**/site-packages/ansible/plugins/test"
+- "**/site-packages/ansible/plugins/test/**"
dir_patterns: &common_dir_patterns
- "**/__pycache__"
- "**/lib/python3.*/test"
@@ -24,16 +27,22 @@ common:
ci:
darwin:
+exclude_patterns: &ci_darwin_exclude_patterns
+- *common_exclude_patterns
dir_patterns: &ci_darwin_dir_patterns
- *common_dir_patterns
file_patterns: &ci_darwin_file_patterns
- *common_file_patterns
linux:
+exclude_patterns: &ci_linux_exclude_patterns
+- *common_exclude_patterns
dir_patterns: &ci_linux_dir_patterns
- *common_dir_patterns
file_patterns: &ci_linux_file_patterns
- *common_file_patterns
windows:
+exclude_patterns: &ci_windows_exclude_patterns
+- *common_exclude_patterns
dir_patterns: &ci_windows_dir_patterns
- *common_dir_patterns
- "**/artifacts/salt/configs"
@@ -52,6 +61,8 @@ ci:
pkg:
darwin:
+exclude_patterns:
+- *ci_darwin_exclude_patterns
dir_patterns:
- *ci_darwin_dir_patterns
- "**/pkgconfig"
@@ -62,11 +73,15 @@ pkg:
file_patterns:
- *ci_darwin_file_patterns
linux:
+exclude_patterns:
+- *ci_linux_exclude_patterns
dir_patterns:
- *ci_linux_dir_patterns
file_patterns:
- *ci_linux_file_patterns
windows:
+exclude_patterns:
+- *ci_windows_exclude_patterns
dir_patterns:
- *ci_windows_dir_patterns
- "**/salt/share"

View file

@@ -257,6 +257,10 @@ def pre_archive_cleanup(ctx: Context, cleanup_path: str, pkg: bool = False):
else:
yield patterns
+exclude_patterns = set()
+for pattern in unnest_lists(patterns["exclude_patterns"]):
+exclude_patterns.add(pattern)
dir_patterns = set()
for pattern in unnest_lists(patterns["dir_patterns"]):
dir_patterns.add(pattern)
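The exclude_patterns set is collected with the same unnest_lists helper already used for dir_patterns and file_patterns; the else/yield pair at the top of this hunk looks like that generator's tail. For context, a plausible shape for such a flattening generator is sketched below; this is an assumption, not the verbatim code from tools/pkg/__init__.py:

```python
# Assumed sketch of a recursive flattening generator like unnest_lists;
# only an "else: yield patterns" branch is visible in the hunk above.
def unnest_lists(patterns):
    if isinstance(patterns, list):
        for pattern in patterns:
            # Recurse so lists nested by YAML aliases are flattened.
            yield from unnest_lists(pattern)
    else:
        yield patterns


# Nested input, as produced by the aliased YAML sequences:
print(list(unnest_lists([["**/__pycache__"], "**/lib/python3.*/test"])))
# ['**/__pycache__', '**/lib/python3.*/test']
```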
@@ -271,6 +275,16 @@ def pre_archive_cleanup(ctx: Context, cleanup_path: str, pkg: bool = False):
if not path.exists():
continue
match_path = path.as_posix()
+skip_match = False
+for pattern in exclude_patterns:
+if fnmatch.fnmatch(str(match_path), pattern):
+ctx.info(
+f"Excluded file: {match_path}; Matching pattern: {pattern!r}"
+)
+skip_match = True
+break
+if skip_match:
+continue
for pattern in dir_patterns:
if fnmatch.fnmatch(str(match_path), pattern):
ctx.info(
@@ -283,6 +297,16 @@ def pre_archive_cleanup(ctx: Context, cleanup_path: str, pkg: bool = False):
if not path.exists():
continue
match_path = path.as_posix()
+skip_match = False
+for pattern in exclude_patterns:
+if fnmatch.fnmatch(str(match_path), pattern):
+ctx.info(
+f"Excluded file: {match_path}; Matching pattern: {pattern!r}"
+)
+skip_match = True
+break
+if skip_match:
+continue
for pattern in file_patterns:
if fnmatch.fnmatch(str(match_path), pattern):
ctx.info(
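
Stripped of the surrounding directory and file loops, the added logic amounts to: if a candidate path matches any exclude pattern, log it and skip it, so the dir/file removal patterns never get a chance to delete it. A self-contained sketch of that check, assuming fnmatch-style globbing as used above (is_excluded is a hypothetical helper name; in the commit the loop runs inline inside pre_archive_cleanup):

```python
# Hypothetical standalone version of the exclusion check added in this commit.
import fnmatch
import pathlib


def is_excluded(path: pathlib.Path, exclude_patterns: set) -> bool:
    """Return True when the path matches any exclude pattern and must be kept."""
    match_path = path.as_posix()
    for pattern in exclude_patterns:
        if fnmatch.fnmatch(match_path, pattern):
            return True
    return False


exclude_patterns = {"**/site-packages/ansible/plugins/test/**"}
kept = pathlib.Path("opt/lib/site-packages/ansible/plugins/test/files.py")
removed = pathlib.Path("opt/lib/python3.10/test/test_os.py")
print(is_excluded(kept, exclude_patterns))     # True -> left in place
print(is_excluded(removed, exclude_patterns))  # False -> cleanup patterns apply
```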