From e005c26fffa3eddd43d9bb7771362e07458cc99e Mon Sep 17 00:00:00 2001 From: Skyler Hawthorne Date: Sat, 25 Nov 2023 07:51:52 -0500 Subject: [PATCH 001/102] delete s3fs: delete files from cache when deleted from bucket --- changelog/65611.fixed.md | 7 ++ salt/fileserver/s3fs.py | 93 ++++++++++++++++++---- tests/pytests/unit/fileserver/test_s3fs.py | 39 ++++++++- 3 files changed, 123 insertions(+), 16 deletions(-) create mode 100644 changelog/65611.fixed.md diff --git a/changelog/65611.fixed.md b/changelog/65611.fixed.md new file mode 100644 index 00000000000..6124658f5fc --- /dev/null +++ b/changelog/65611.fixed.md @@ -0,0 +1,7 @@ +When using s3fs, if files are deleted from the bucket, they were not deleted in +the master or minion local cache, which could lead to unexpected file copies or +even state applications. This change makes the local cache consistent with the +remote bucket by deleting files locally that are deleted from the bucket. + +**NOTE** this could lead to **breakage** on your affected systems if it was +inadvertently depending on previously deleted files. diff --git a/salt/fileserver/s3fs.py b/salt/fileserver/s3fs.py index 01f527b1984..f3760d68fd4 100644 --- a/salt/fileserver/s3fs.py +++ b/salt/fileserver/s3fs.py @@ -135,6 +135,7 @@ def update(): cached_file_path = _get_cached_file_name( bucket, saltenv, file_path ) + log.debug("%s - %s : %s", bucket, saltenv, file_path) # load the file from S3 if it's not in the cache or it's old @@ -356,6 +357,7 @@ def _init(): # check mtime of the buckets files cache metadata = None + try: if os.path.getmtime(cache_file) > exp: metadata = _read_buckets_cache_file(cache_file) @@ -366,6 +368,8 @@ def _init(): # bucket files cache expired or does not exist metadata = _refresh_buckets_cache_file(cache_file) + _prune_deleted_files(metadata) + return metadata @@ -374,7 +378,6 @@ def _get_cache_dir(): Return the path to the s3cache dir """ - # Or is that making too many assumptions? return os.path.join(__opts__["cachedir"], "s3cache") @@ -383,26 +386,15 @@ def _get_cached_file_name(bucket_name, saltenv, path): Return the cached file name for a bucket path file """ - file_path = os.path.join(_get_cache_dir(), saltenv, bucket_name, path) - - # make sure bucket and saltenv directories exist - if not os.path.exists(os.path.dirname(file_path)): - os.makedirs(os.path.dirname(file_path)) - - return file_path + return os.path.join(_get_cache_dir(), saltenv, bucket_name, path) def _get_buckets_cache_filename(): """ Return the filename of the cache for bucket contents. - Create the path if it does not exist. 
""" - cache_dir = _get_cache_dir() - if not os.path.exists(cache_dir): - os.makedirs(cache_dir) - - return os.path.join(cache_dir, "buckets_files.cache") + return os.path.join(_get_cache_dir(), "buckets_files.cache") def _refresh_buckets_cache_file(cache_file): @@ -423,6 +415,7 @@ def _refresh_buckets_cache_file(cache_file): path_style, https_enable, ) = _get_s3_key() + metadata = {} # helper s3 query function @@ -571,10 +564,71 @@ def _refresh_buckets_cache_file(cache_file): return metadata +def _prune_deleted_files(metadata): + cache_dir = _get_cache_dir() + cached_files = set() + roots = set() + + if _is_env_per_bucket(): + for env, env_data in metadata.items(): + for bucket_meta in env_data: + for bucket, bucket_data in bucket_meta.items(): + root = os.path.join(cache_dir, env, bucket) + + if os.path.exists(root): + roots.add(root) + + for meta in bucket_data: + path = meta["Key"] + cached_files.add(path) + + else: + for env, env_data in metadata.items(): + for bucket in _get_buckets(): + root = os.path.join(cache_dir, bucket) + + if os.path.exists(root): + roots.add(root) + + for meta in env_data: + cached_files.add(meta["Key"]) + + if log.isEnabledFor(logging.DEBUG): + import pprint + + log.debug(f"cached file list: {pprint.pformat(cached_files)}") + + for root in roots: + for base, dirs, files in os.walk(root): + for file_name in files: + path = os.path.join(base, file_name) + relpath = os.path.relpath(path, root) + + if relpath not in cached_files: + log.debug(f"file '{path}' not found in cached file list") + log.info( + f"file '{relpath}' was deleted from bucket, deleting local copy" + ) + + os.unlink(path) + dir = os.path.dirname(path) + + # delete empty dirs all the way up to the cache dir + while dir != cache_dir and len(os.listdir(dir)) == 0: + log.debug(f"directory '{dir}' is now empty, removing") + os.rmdir(dir) + dir = os.path.dirname(dir) + + def _write_buckets_cache_file(metadata, cache_file): """ Write the contents of the buckets cache file """ + cache_dir = _get_cache_dir() + + if not os.path.exists(cache_dir): + os.makedirs(cache_dir) + if os.path.isfile(cache_file): os.remove(cache_file) @@ -591,6 +645,10 @@ def _read_buckets_cache_file(cache_file): log.debug("Reading buckets cache file") + if not os.path.exists(cache_file): + log.debug("Cache file does not exist") + return None + with salt.utils.files.fopen(cache_file, "rb") as fp_: try: data = pickle.load(fp_) @@ -698,6 +756,13 @@ def _get_file_from_s3(metadata, saltenv, bucket_name, path, cached_file_path): Checks the local cache for the file, if it's old or missing go grab the file from S3 and update the cache """ + + # make sure bucket and saltenv directories exist + target_dir = os.path.dirname(cached_file_path) + + if not os.path.exists(target_dir): + os.makedirs(target_dir) + ( key, keyid, diff --git a/tests/pytests/unit/fileserver/test_s3fs.py b/tests/pytests/unit/fileserver/test_s3fs.py index 2c14738a68c..b9e6bb89743 100644 --- a/tests/pytests/unit/fileserver/test_s3fs.py +++ b/tests/pytests/unit/fileserver/test_s3fs.py @@ -78,6 +78,7 @@ def test_update(bucket, s3): "top.sls": {"content": yaml.dump({"base": {"*": ["foo"]}})}, "foo.sls": {"content": yaml.dump({"nginx": {"pkg.installed": []}})}, "files/nginx.conf": {"content": "server {}"}, + "files/conf.d/foo.conf": {"content": "server {}"}, } make_keys(bucket, s3, keys) @@ -90,6 +91,41 @@ def test_update(bucket, s3): s3fs.update() verify_cache(bucket, keys) + # verify that when files get deleted from s3, they also get deleted in + # the local cache + 
delete_file = "files/nginx.conf" + del keys[delete_file] + s3.delete_object(Bucket=bucket, Key=delete_file) + + s3fs.update() + verify_cache(bucket, keys) + + cache_file = s3fs._get_cached_file_name(bucket, "base", delete_file) + assert not os.path.exists(cache_file) + + # we want empty directories to get deleted from the local cache + + # after this one, `files` should still exist + files_dir = os.path.dirname(cache_file) + assert os.path.exists(files_dir) + + # but after the last file is deleted, the directory and any parents + # should be deleted too + delete_file = "files/conf.d/foo.conf" + del keys[delete_file] + s3.delete_object(Bucket=bucket, Key=delete_file) + + s3fs.update() + verify_cache(bucket, keys) + + cache_file = s3fs._get_cached_file_name(bucket, "base", delete_file) + assert not os.path.exists(cache_file) + + # after this, `files/conf.d` and `files` should be deleted + conf_d_dir = os.path.dirname(cache_file) + assert not os.path.exists(conf_d_dir) + assert not os.path.exists(files_dir) + @pytest.mark.skip_on_fips_enabled_platform def test_s3_hash(bucket, s3): @@ -124,8 +160,7 @@ def test_s3_hash(bucket, s3): @pytest.mark.skip_on_fips_enabled_platform def test_cache_round_trip(bucket): metadata = {"foo": "bar"} - cache_file = s3fs._get_cached_file_name(bucket, "base", "somefile") - + cache_file = s3fs._get_buckets_cache_filename() s3fs._write_buckets_cache_file(metadata, cache_file) assert s3fs._read_buckets_cache_file(cache_file) == metadata From a66262933ee066ebec0487959405ec86b995fbc4 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Fri, 1 Mar 2024 10:10:14 +0000 Subject: [PATCH 002/102] Fix lint and formatting issues --- salt/fileserver/s3fs.py | 17 +++++++++-------- tests/pytests/unit/test_pillar.py | 1 - 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/salt/fileserver/s3fs.py b/salt/fileserver/s3fs.py index f3760d68fd4..d3c3d9cd78f 100644 --- a/salt/fileserver/s3fs.py +++ b/salt/fileserver/s3fs.py @@ -596,7 +596,7 @@ def _prune_deleted_files(metadata): if log.isEnabledFor(logging.DEBUG): import pprint - log.debug(f"cached file list: {pprint.pformat(cached_files)}") + log.debug("cached file list:\n%s", pprint.pformat(cached_files)) for root in roots: for base, dirs, files in os.walk(root): @@ -605,19 +605,20 @@ def _prune_deleted_files(metadata): relpath = os.path.relpath(path, root) if relpath not in cached_files: - log.debug(f"file '{path}' not found in cached file list") + log.debug("File '%s' not found in cached file list", path) log.info( - f"file '{relpath}' was deleted from bucket, deleting local copy" + "File '%s' was deleted from bucket, deleting local copy", + relpath, ) os.unlink(path) - dir = os.path.dirname(path) + dirname = os.path.dirname(path) # delete empty dirs all the way up to the cache dir - while dir != cache_dir and len(os.listdir(dir)) == 0: - log.debug(f"directory '{dir}' is now empty, removing") - os.rmdir(dir) - dir = os.path.dirname(dir) + while dirname != cache_dir and len(os.listdir(dirname)) == 0: + log.debug("Directory '%s' is now empty, removing", dirname) + os.rmdir(dirname) + dirname = os.path.dirname(dirname) def _write_buckets_cache_file(metadata, cache_file): diff --git a/tests/pytests/unit/test_pillar.py b/tests/pytests/unit/test_pillar.py index d4a1a4e1c73..d44a337981f 100644 --- a/tests/pytests/unit/test_pillar.py +++ b/tests/pytests/unit/test_pillar.py @@ -6,7 +6,6 @@ ~~~~~~~~~~~~~~~~~~~~~~ """ - import logging import os import shutil From 0d6a8fb30381167ca228367146457bc072933658 Mon Sep 17 00:00:00 2001 From: 
Pedro Algarvio Date: Fri, 1 Mar 2024 10:12:04 +0000 Subject: [PATCH 003/102] Add Fedora 39 support to create repo --- tools/pkg/repo/create.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/pkg/repo/create.py b/tools/pkg/repo/create.py index f13a8fc1917..700eba3ae6a 100644 --- a/tools/pkg/repo/create.py +++ b/tools/pkg/repo/create.py @@ -316,7 +316,7 @@ def debian( _rpm_distro_info = { "amazon": ["2", "2023"], "redhat": ["7", "8", "9"], - "fedora": ["36", "37", "38"], + "fedora": ["36", "37", "38", "39"], "photon": ["3", "4", "5"], } From 8c86b5ec7007b70020c1259776b9c371ef10f846 Mon Sep 17 00:00:00 2001 From: Shane Lee Date: Mon, 4 Mar 2024 12:10:29 -0700 Subject: [PATCH 004/102] Make the win_function.get_sam_name function handle Everyone --- changelog/63667.fixed.md | 1 + salt/utils/win_functions.py | 12 +++++ .../pytests/unit/utils/test_win_functions.py | 45 +++++++++++++------ 3 files changed, 44 insertions(+), 14 deletions(-) create mode 100644 changelog/63667.fixed.md diff --git a/changelog/63667.fixed.md b/changelog/63667.fixed.md new file mode 100644 index 00000000000..3015e6f4028 --- /dev/null +++ b/changelog/63667.fixed.md @@ -0,0 +1 @@ +Fix user and group management on Windows to handle the Everyone group diff --git a/salt/utils/win_functions.py b/salt/utils/win_functions.py index c3906b1e210..66327a88007 100644 --- a/salt/utils/win_functions.py +++ b/salt/utils/win_functions.py @@ -184,11 +184,23 @@ def get_sam_name(username): .. note:: Long computer names are truncated to 15 characters """ + # Some special identity groups require special handling. They do not have + # the domain prepended to the name. They should be added here as they are + # discovered. Use the SID to be locale agnostic. + # Everyone: S-1-1-0 + special_id_groups = ["S-1-1-0"] + try: sid_obj = win32security.LookupAccountName(None, username)[0] except pywintypes.error: return "\\".join([platform.node()[:15].upper(), username]) + + sid = win32security.ConvertSidToStringSid(sid_obj) username, domain, _ = win32security.LookupAccountSid(None, sid_obj) + + if sid in special_id_groups: + return username + return "\\".join([domain, username]) diff --git a/tests/pytests/unit/utils/test_win_functions.py b/tests/pytests/unit/utils/test_win_functions.py index ffe68d1b723..4f1e8b39d56 100644 --- a/tests/pytests/unit/utils/test_win_functions.py +++ b/tests/pytests/unit/utils/test_win_functions.py @@ -1,3 +1,5 @@ +import platform + import pytest import salt.utils.win_functions as win_functions @@ -6,6 +8,11 @@ from tests.support.mock import MagicMock, patch HAS_WIN32 = False HAS_PYWIN = False +pytestmark = [ + pytest.mark.windows_whitelisted, + pytest.mark.skip_unless_on_windows, +] + try: import win32net @@ -32,7 +39,6 @@ except ImportError: # Test cases for salt.utils.win_functions. 
-@pytest.mark.skip_unless_on_windows(reason="Test is only applicable to Windows.") def test_escape_argument_simple(): """ Test to make sure we encode simple arguments correctly @@ -41,7 +47,6 @@ def test_escape_argument_simple(): assert encoded == "simple" -@pytest.mark.skip_unless_on_windows(reason="Test is only applicable to Windows.") def test_escape_argument_with_space(): """ Test to make sure we encode arguments containing spaces correctly @@ -50,7 +55,6 @@ def test_escape_argument_with_space(): assert encoded == '^"with space^"' -@pytest.mark.skip_unless_on_windows(reason="Test is only applicable to Windows.") def test_escape_argument_simple_path(): """ Test to make sure we encode simple path arguments correctly @@ -59,7 +63,6 @@ def test_escape_argument_simple_path(): assert encoded == "C:\\some\\path" -@pytest.mark.skip_unless_on_windows(reason="Test is only applicable to Windows.") def test_escape_argument_path_with_space(): """ Test to make sure we encode path arguments containing spaces correctly @@ -68,7 +71,6 @@ def test_escape_argument_path_with_space(): assert encoded == '^"C:\\Some Path\\With Spaces^"' -@pytest.mark.skip_unless_on_windows(reason="Test is only applicable to Windows.") def test_broadcast_setting_change(): """ Test to rehash the Environment variables @@ -76,14 +78,12 @@ def test_broadcast_setting_change(): assert win_functions.broadcast_setting_change() -@pytest.mark.skip_unless_on_windows(reason="Test is only applicable to Windows.") def test_get_user_groups(): groups = ["Administrators", "Users"] with patch("win32net.NetUserGetLocalGroups", return_value=groups): assert win_functions.get_user_groups("Administrator") == groups -@pytest.mark.skip_unless_on_windows(reason="Test is only applicable to Windows.") def test_get_user_groups_sid(): groups = ["Administrators", "Users"] expected = ["S-1-5-32-544", "S-1-5-32-545"] @@ -91,14 +91,12 @@ def test_get_user_groups_sid(): assert win_functions.get_user_groups("Administrator", sid=True) == expected -@pytest.mark.skip_unless_on_windows(reason="Test is only applicable to Windows.") def test_get_user_groups_system(): groups = ["SYSTEM"] with patch("win32net.NetUserGetLocalGroups", return_value=groups): assert win_functions.get_user_groups("SYSTEM") == groups -@pytest.mark.skip_unless_on_windows(reason="Test is only applicable to Windows.") @pytest.mark.skipif(not HAS_WIN32, reason="Requires Win32 libraries") def test_get_user_groups_unavailable_dc(): groups = ["Administrators", "Users"] @@ -109,7 +107,6 @@ def test_get_user_groups_unavailable_dc(): assert win_functions.get_user_groups("Administrator") == groups -@pytest.mark.skip_unless_on_windows(reason="Test is only applicable to Windows.") @pytest.mark.skipif(not HAS_WIN32, reason="Requires Win32 libraries") def test_get_user_groups_unknown_dc(): groups = ["Administrators", "Users"] @@ -120,7 +117,6 @@ def test_get_user_groups_unknown_dc(): assert win_functions.get_user_groups("Administrator") == groups -@pytest.mark.skip_unless_on_windows(reason="Test is only applicable to Windows.") @pytest.mark.skipif(not HAS_WIN32, reason="Requires Win32 libraries") def test_get_user_groups_missing_permission(): groups = ["Administrators", "Users"] @@ -131,7 +127,6 @@ def test_get_user_groups_missing_permission(): assert win_functions.get_user_groups("Administrator") == groups -@pytest.mark.skip_unless_on_windows(reason="Test is only applicable to Windows.") @pytest.mark.skipif(not HAS_WIN32, reason="Requires Win32 libraries") def test_get_user_groups_error(): win_error = 
WinError() @@ -142,7 +137,6 @@ def test_get_user_groups_error(): win_functions.get_user_groups("Administrator") -@pytest.mark.skip_unless_on_windows(reason="Test is only applicable to Windows.") @pytest.mark.skipif(not HAS_PYWIN, reason="Requires pywintypes libraries") def test_get_user_groups_local_pywin_error(): win_error = PyWinError() @@ -153,7 +147,6 @@ def test_get_user_groups_local_pywin_error(): win_functions.get_user_groups("Administrator") -@pytest.mark.skip_unless_on_windows(reason="Test is only applicable to Windows.") @pytest.mark.skipif(not HAS_PYWIN, reason="Requires pywintypes libraries") def test_get_user_groups_pywin_error(): win_error = PyWinError() @@ -163,3 +156,27 @@ def test_get_user_groups_pywin_error(): with patch("win32net.NetUserGetGroups", side_effect=mock_error): with pytest.raises(PyWinError): win_functions.get_user_groups("Administrator") + + +@pytest.mark.skipif(not HAS_PYWIN, reason="Requires pywintypes libraries") +def test_get_sam_name_lookup_fails(): + win_error = PyWinError() + mock_error = MagicMock(side_effect=win_error) + with patch("win32security.LookupAccountName", side_effect=mock_error): + expected = "\\".join([platform.node()[:15].upper(), "junk"]) + result = win_functions.get_sam_name("junk") + assert result == expected + + +@pytest.mark.skipif(not HAS_PYWIN, reason="Requires pywintypes libraries") +def test_get_sam_name_everyone(): + expected = "Everyone" + result = win_functions.get_sam_name("Everyone") + assert result == expected + + +@pytest.mark.skipif(not HAS_PYWIN, reason="Requires pywintypes libraries") +def test_get_sam_name(): + expected = "\\".join([platform.node()[:15], "Administrator"]) + result = win_functions.get_sam_name("Administrator") + assert result == expected From 7ad348dd487837cfd503a3bf995c3fda1f44039a Mon Sep 17 00:00:00 2001 From: "Gareth J. Greenaway" Date: Tue, 24 Oct 2023 12:07:38 -0700 Subject: [PATCH 005/102] increase test coverage for textfsm module. --- salt/modules/textfsm_mod.py | 15 +++++++++------ tests/support/mock.py | 6 +++++- 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/salt/modules/textfsm_mod.py b/salt/modules/textfsm_mod.py index e11dd5afbb7..20323109d9c 100644 --- a/salt/modules/textfsm_mod.py +++ b/salt/modules/textfsm_mod.py @@ -19,7 +19,7 @@ inside the renderer (Jinja, Mako, Genshi, etc.). import logging import os -from salt.utils.files import fopen +import salt.utils.files try: import textfsm @@ -188,11 +188,14 @@ def extract(template_path, raw_text=None, raw_text_file=None, saltenv="base"): # Disabling pylint W8470 to nto complain about fopen. # Unfortunately textFSM needs the file handle rather than the content... 
# pylint: disable=W8470 - tpl_file_handle = fopen(tpl_cached_path, "r") - # pylint: disable=W8470 - log.debug(tpl_file_handle.read()) - tpl_file_handle.seek(0) # move the object position back at the top of the file - fsm_handler = textfsm.TextFSM(tpl_file_handle) + with salt.utils.files.fopen(tpl_cached_path, "r") as tpl_file_handle: + # pylint: disable=W8470 + tpl_file_data = tpl_file_handle.read() + log.debug(tpl_file_data) + tpl_file_handle.seek( + 0 + ) # move the object position back at the top of the file + fsm_handler = textfsm.TextFSM(tpl_file_handle) except textfsm.TextFSMTemplateError as tfte: log.error("Unable to parse the TextFSM template", exc_info=True) ret["comment"] = ( diff --git a/tests/support/mock.py b/tests/support/mock.py index 63e794f9764..ad3fb6c3359 100644 --- a/tests/support/mock.py +++ b/tests/support/mock.py @@ -71,7 +71,7 @@ class MockFH: self.write = Mock(side_effect=self._write) self.writelines = Mock(side_effect=self._writelines) self.close = Mock() - self.seek = Mock() + self.seek = Mock(side_effect=self._seek) self.__loc = 0 self.__read_data_ok = False @@ -219,6 +219,10 @@ class MockFH: def __exit__(self, exc_type, exc_val, exc_tb): # pylint: disable=unused-argument pass + def _seek(self, pos=0): + self.__loc = pos + self.read_data_iter = self._iterate_read_data(self.read_data) + class MockCall: def __init__(self, *args, **kwargs): From 5832c076323dadf6e9e7e64ff0e6b81c23ecc731 Mon Sep 17 00:00:00 2001 From: "Gareth J. Greenaway" Date: Tue, 24 Oct 2023 12:25:41 -0700 Subject: [PATCH 006/102] fixing lint issues --- .../pytests/unit/modules/test_textfsm_mod.py | 713 ++++++++++++++++++ 1 file changed, 713 insertions(+) create mode 100644 tests/pytests/unit/modules/test_textfsm_mod.py diff --git a/tests/pytests/unit/modules/test_textfsm_mod.py b/tests/pytests/unit/modules/test_textfsm_mod.py new file mode 100644 index 00000000000..42634ea6c99 --- /dev/null +++ b/tests/pytests/unit/modules/test_textfsm_mod.py @@ -0,0 +1,713 @@ +""" + :codeauthor: Gareth J. Greenaway +""" + + +import pytest + +import salt.modules.textfsm_mod as textfsm_mod +from tests.support.mock import MagicMock, mock_open, patch + +HAS_LIBS = True +# pylint: disable=import-error,no-name-in-module,unused-import +try: + from textfsm import clitable +except ImportError: + HAS_LIBS = False +# pylint: enable=import-error,no-name-in-module,unused-import + + +pytestmark = [ + pytest.mark.skipif( + not HAS_LIBS, reason="Install textfsm to be able to run this test." 
+ ) +] + + +@pytest.fixture() +def configure_loader_modules(): + return {textfsm_mod: {"__opts__": {}}} + + +def test_virttual_virtual(): + """ + Test __virtual__ + """ + with patch.object(textfsm_mod, "HAS_TEXTFSM", False): + ret = textfsm_mod.__virtual__() + assert ret == ( + False, + "The textfsm execution module failed to load: requires the textfsm library.", + ) + + +def test_extract_cache_file_false(): + """ + Test extract + """ + with patch.dict( + textfsm_mod.__salt__, {"cp.cache_file": MagicMock(return_value=False)} + ): + ret = textfsm_mod.extract( + "salt://textfsm/juniper_version_template", + raw_text_file="s3://junos_ver.txt", + ) + assert not ret["result"] + assert ret["out"] is None + assert ( + ret["comment"] + == "Unable to read the TextFSM template from salt://textfsm/juniper_version_template" + ) + + +def test_extract_cache_file_valid(): + """ + Test extract + """ + + with patch.dict( + textfsm_mod.__salt__, + { + "cp.cache_file": MagicMock( + return_value="/path/to/cache/juniper_version_template" + ) + }, + ): + + textfsm_template = r"""Value Chassis (\S+) +Value Required Model (\S+) +Value Boot (.*) +Value Base (.*) +Value Kernel (.*) +Value Crypto (.*) +Value Documentation (.*) +Value Routing (.*) + +Start +# Support multiple chassis systems. + ^\S+:$$ -> Continue.Record + ^${Chassis}:$$ + ^Model: ${Model} + ^JUNOS Base OS boot \[${Boot}\] + ^JUNOS Software Release \[${Base}\] + ^JUNOS Base OS Software Suite \[${Base}\] + ^JUNOS Kernel Software Suite \[${Kernel}\] + ^JUNOS Crypto Software Suite \[${Crypto}\] + ^JUNOS Online Documentation \[${Documentation}\] + ^JUNOS Routing Software Suite \[${Routing}\]""" + + raw_text = """Hostname: router.abc +Model: mx960 +JUNOS Base OS boot [9.1S3.5] +JUNOS Base OS Software Suite [9.1S3.5] +JUNOS Kernel Software Suite [9.1S3.5] +JUNOS Crypto Software Suite [9.1S3.5] +JUNOS Packet Forwarding Engine Support (M/T Common) [9.1S3.5] +JUNOS Packet Forwarding Engine Support (MX Common) [9.1S3.5] +JUNOS Online Documentation [9.1S3.5] +JUNOS Routing Software Suite [9.1S3.5]""" + + with patch("salt.utils.files.fopen", mock_open(read_data=textfsm_template)): + with patch.dict( + textfsm_mod.__salt__, + {"cp.get_file_str": MagicMock(return_value=raw_text)}, + ): + ret = textfsm_mod.extract( + "salt://textfsm/juniper_version_template", + raw_text_file="s3://junos_ver.txt", + ) + assert ret == { + "result": True, + "comment": "", + "out": [ + { + "chassis": "", + "model": "mx960", + "boot": "9.1S3.5", + "base": "9.1S3.5", + "kernel": "9.1S3.5", + "crypto": "9.1S3.5", + "documentation": "9.1S3.5", + "routing": "9.1S3.5", + } + ], + } + + with patch("salt.utils.files.fopen", mock_open(read_data=textfsm_template)): + with patch.dict( + textfsm_mod.__salt__, + {"cp.get_file_str": MagicMock(return_value=raw_text)}, + ): + ret = textfsm_mod.extract( + "salt://textfsm/juniper_version_template", raw_text=raw_text + ) + assert ret == { + "result": True, + "comment": "", + "out": [ + { + "chassis": "", + "model": "mx960", + "boot": "9.1S3.5", + "base": "9.1S3.5", + "kernel": "9.1S3.5", + "crypto": "9.1S3.5", + "documentation": "9.1S3.5", + "routing": "9.1S3.5", + } + ], + } + + +def test_extract_cache_file_raw_text_get_file_str_false(): + """ + Test extract + """ + + with patch.dict( + textfsm_mod.__salt__, + { + "cp.cache_file": MagicMock( + return_value="/path/to/cache/juniper_version_template" + ) + }, + ): + + textfsm_template = r"""Value Chassis (\S+) +Value Required Model (\S+) +Value Boot (.*) +Value Base (.*) +Value Kernel (.*) +Value Crypto 
(.*) +Value Documentation (.*) +Value Routing (.*) + +Start +# Support multiple chassis systems. + ^\S+:$$ -> Continue.Record + ^${Chassis}:$$ + ^Model: ${Model} + ^JUNOS Base OS boot \[${Boot}\] + ^JUNOS Software Release \[${Base}\] + ^JUNOS Base OS Software Suite \[${Base}\] + ^JUNOS Kernel Software Suite \[${Kernel}\] + ^JUNOS Crypto Software Suite \[${Crypto}\] + ^JUNOS Online Documentation \[${Documentation}\] + ^JUNOS Routing Software Suite \[${Routing}\]""" + + raw_text = """Hostname: router.abc +Model: mx960 +JUNOS Base OS boot [9.1S3.5] +JUNOS Base OS Software Suite [9.1S3.5] +JUNOS Kernel Software Suite [9.1S3.5] +JUNOS Crypto Software Suite [9.1S3.5] +JUNOS Packet Forwarding Engine Support (M/T Common) [9.1S3.5] +JUNOS Packet Forwarding Engine Support (MX Common) [9.1S3.5] +JUNOS Online Documentation [9.1S3.5] +JUNOS Routing Software Suite [9.1S3.5]""" + + with patch("salt.utils.files.fopen", mock_open(read_data=textfsm_template)): + with patch.dict( + textfsm_mod.__salt__, {"cp.get_file_str": MagicMock(return_value=False)} + ): + ret = textfsm_mod.extract( + "salt://textfsm/juniper_version_template", + raw_text_file="s3://junos_ver.txt", + ) + assert ret == { + "result": False, + "comment": "Unable to read from s3://junos_ver.txt. Please specify a valid input file or text.", + "out": None, + } + + +def test_extract_cache_file_raw_text_exception(): + """ + Test extract + """ + + with patch.dict( + textfsm_mod.__salt__, + { + "cp.cache_file": MagicMock( + return_value="/path/to/cache/juniper_version_template" + ) + }, + ): + + textfsm_template = r"""Value Chassis (\S+) +Value Required Model (\S+) +Value Boot (.*) +Value Base (.*) +Value Kernel (.*) +Value Crypto (.*) +Value Documentation (.*) +Xalue Routing (.*) + +Start +# Support multiple chassis systems. + ^\S+:$$ -> Continue.Record + ^${Chassis}:$$ + ^Model: ${Model} + ^JUNOS Base OS boot \[${Boot}\] + ^JUNOS Software Release \[${Base}\] + ^JUNOS Base OS Software Suite \[${Base}\] + ^JUNOS Kernel Software Suite \[${Kernel}\] + ^JUNOS Crypto Software Suite \[${Crypto}\] + ^JUNOS Online Documentation \[${Documentation}\] + ^JUNOS Routing Software Suite \[${Routing}\]""" + + raw_text = """Hostname: router.abc +Model: mx960 +JUNOS Base OS boot [9.1S3.5] +JUNOS Base OS Software Suite [9.1S3.5] +JUNOS Kernel Software Suite [9.1S3.5] +JUNOS Crypto Software Suite [9.1S3.5] +JUNOS Packet Forwarding Engine Support (M/T Common) [9.1S3.5] +JUNOS Packet Forwarding Engine Support (MX Common) [9.1S3.5] +JUNOS Online Documentation [9.1S3.5] +JUNOS Routing Software Suite [9.1S3.5]""" + + with patch("salt.utils.files.fopen", mock_open(read_data=textfsm_template)): + with patch.dict( + textfsm_mod.__salt__, {"cp.get_file_str": MagicMock(return_value=False)} + ): + ret = textfsm_mod.extract( + "salt://textfsm/juniper_version_template", + raw_text_file="s3://junos_ver.txt", + ) + + assert not ret["result"] + assert "Unable to parse the TextFSM template from " in ret["comment"] + assert ret["out"] is None + + +def test_extract_cache_file_raw_text_false(): + """ + Test extract + """ + + with patch.dict( + textfsm_mod.__salt__, + { + "cp.cache_file": MagicMock( + return_value="/path/to/cache/juniper_version_template" + ) + }, + ): + + textfsm_template = r"""Value Chassis (\S+) +Value Required Model (\S+) +Value Boot (.*) +Value Base (.*) +Value Kernel (.*) +Value Crypto (.*) +Value Documentation (.*) +Value Routing (.*) + +Start +# Support multiple chassis systems. 
+ ^\S+:$$ -> Continue.Record + ^${Chassis}:$$ + ^Model: ${Model} + ^JUNOS Base OS boot \[${Boot}\] + ^JUNOS Software Release \[${Base}\] + ^JUNOS Base OS Software Suite \[${Base}\] + ^JUNOS Kernel Software Suite \[${Kernel}\] + ^JUNOS Crypto Software Suite \[${Crypto}\] + ^JUNOS Online Documentation \[${Documentation}\] + ^JUNOS Routing Software Suite \[${Routing}\]""" + + with patch("salt.utils.files.fopen", mock_open(read_data=textfsm_template)): + ret = textfsm_mod.extract( + "salt://textfsm/juniper_version_template", raw_text="" + ) + assert ret == { + "result": False, + "comment": "Please specify a valid input file or text.", + "out": None, + } + + +def test_index_not_clitable(): + """ + Test index + """ + with patch.object(textfsm_mod, "HAS_CLITABLE", False): + ret = textfsm_mod.index( + command="sh ver", + platform="Juniper", + output_file="salt://textfsm/juniper_version_example", + textfsm_path="salt://textfsm/", + ) + assert ret == { + "out": None, + "result": False, + "comment": "TextFSM does not seem that has clitable embedded.", + } + + +def test_index_no_textsm_path(): + """ + Test index + """ + with patch.object(textfsm_mod, "HAS_CLITABLE", True): + ret = textfsm_mod.index( + command="sh ver", + platform="Juniper", + output_file="salt://textfsm/juniper_version_example", + textfsm_path="", + ) + assert ret == { + "out": None, + "result": False, + "comment": "No TextFSM templates path specified. Please configure in opts/pillar/function args.", + } + + +def test_index_no_platform(): + """ + Test index + """ + with patch.object(textfsm_mod, "HAS_CLITABLE", True): + ret = textfsm_mod.index( + command="sh ver", + platform="", + output_file="salt://textfsm/juniper_version_example", + textfsm_path="", + ) + assert ret == { + "out": None, + "result": False, + "comment": "No platform specified, no platform grain identifier configured.", + } + + +def test_index_no_platform_name_grains(): + """ + Test index + """ + with patch.object(textfsm_mod, "HAS_CLITABLE", True): + with patch.dict( + textfsm_mod.__opts__, {"textfsm_platform_grain": "textfsm_platform_grain"} + ): + ret = textfsm_mod.index( + command="sh ver", + platform="", + output_file="salt://textfsm/juniper_version_example", + textfsm_path="", + ) + assert ret == { + "out": None, + "result": False, + "comment": "Unable to identify the platform name using the textfsm_platform_grain grain.", + } + + +def test_index_platform_name_grains_no_cachedir(): + """ + Test index + """ + with patch.object(textfsm_mod, "HAS_CLITABLE", True): + with patch.dict( + textfsm_mod.__opts__, {"textfsm_platform_grain": "textfsm_platform_grain"} + ): + with patch.dict( + textfsm_mod.__grains__, + {"textfsm_platform_grain": "textfsm_platform_grain"}, + ): + + with patch.dict( + textfsm_mod.__salt__, + {"cp.cache_dir": MagicMock(return_value=False)}, + ): + ret = textfsm_mod.index( + command="sh ver", + platform="", + output_file="salt://textfsm/juniper_version_example", + textfsm_path="salt://textfsm/", + ) + assert ret == { + "out": None, + "result": False, + "comment": "Unable to fetch from salt://textfsm/. 
Is the TextFSM path correctly specified?", + } + + +def test_index_platform_name_grains_output_false(): + """ + Test index + """ + mock_open_index = """ +Template, Hostname, Vendor, Command +juniper_version_template, .*, Juniper, sh[[ow]] ve[[rsion]]""" + + with patch.object(textfsm_mod, "HAS_CLITABLE", True): + with patch.dict( + textfsm_mod.__opts__, {"textfsm_platform_grain": "textfsm_platform_grain"} + ): + with patch.dict( + textfsm_mod.__grains__, + {"textfsm_platform_grain": "textfsm_platform_grain"}, + ): + with patch.dict( + textfsm_mod.__salt__, + {"cp.cache_dir": MagicMock(return_value="/path/to/cache/")}, + ): + with patch.object( + clitable, "open", mock_open(read_data=mock_open_index) + ): + with patch.dict( + textfsm_mod.__salt__, + {"cp.get_file_str": MagicMock(return_value=False)}, + ): + ret = textfsm_mod.index( + command="sh ver", + platform="", + output_file="salt://textfsm/juniper_version_example", + textfsm_path="salt://textfsm/", + ) + assert ret == { + "out": None, + "result": False, + "comment": "Unable to read from salt://textfsm/juniper_version_example. Please specify a valid file or text.", + } + + +def test_index_platform_name_grains_no_output_specified(): + """ + Test index + """ + mock_open_index = """ +Template, Hostname, Vendor, Command +juniper_version_template, .*, Juniper, sh[[ow]] ve[[rsion]]""" + + with patch.object(textfsm_mod, "HAS_CLITABLE", True): + with patch.dict( + textfsm_mod.__opts__, {"textfsm_platform_grain": "textfsm_platform_grain"} + ): + with patch.dict( + textfsm_mod.__grains__, + {"textfsm_platform_grain": "textfsm_platform_grain"}, + ): + with patch.dict( + textfsm_mod.__salt__, + {"cp.cache_dir": MagicMock(return_value="/path/to/cache/")}, + ): + with patch.object( + clitable, "open", mock_open(read_data=mock_open_index) + ): + with patch.dict( + textfsm_mod.__salt__, + {"cp.get_file_str": MagicMock(return_value=False)}, + ): + ret = textfsm_mod.index( + command="sh ver", + platform="", + textfsm_path="salt://textfsm/", + ) + assert ret == { + "out": None, + "result": False, + "comment": "Please specify a valid output text or file", + } + + +def test_index_platform_name_grains_output_specified(): + """ + Test index + """ + mock_open_index = """ +Template, Hostname, Vendor, Command +juniper_version_template, .*, Juniper, sh[[ow]] ve[[rsion]]""" + + juniper_version_template_one = r"""Value Chassis (\S+) +Value Required Model (\S+) +Value Boot (.*) +Value Base (.*) +Value Kernel (.*) +Value Crypto (.*) +Value Documentation (.*) +Value Routing (.*) + +Start +# Support multiple chassis systems. + ^\S+:$$ -> Continue.Record + ^${Chassis}:$$ + ^Model: ${Model} + ^JUNOS Base OS boot \[${Boot}\] + ^JUNOS Software Release \[${Base}\] + ^JUNOS Base OS Software Suite \[${Base}\] + ^JUNOS Kernel Software Suite \[${Kernel}\] + ^JUNOS Crypto Software Suite \[${Crypto}\] + ^JUNOS Online Documentation \[${Documentation}\] + ^JUNOS Routing Software Suite \[${Routing}\]""" + + juniper_version_template_two = r"""Start +# Support multiple chassis systems. 
+ ^\S+:$$ -> Continue.Record + ^${Chassis}:$$ + ^Model: ${Model} + ^JUNOS Base OS boot \[${Boot}\] + ^JUNOS Software Release \[${Base}\] + ^JUNOS Base OS Software Suite \[${Base}\] + ^JUNOS Kernel Software Suite \[${Kernel}\] + ^JUNOS Crypto Software Suite \[${Crypto}\] + ^JUNOS Online Documentation \[${Documentation}\] + ^JUNOS Routing Software Suite \[${Routing}\]""" + + output_text = """ +Hostname: router.abc +Model: mx960 +JUNOS Base OS boot [9.1S3.5] +JUNOS Base OS Software Suite [9.1S3.5] +JUNOS Kernel Software Suite [9.1S3.5] +JUNOS Crypto Software Suite [9.1S3.5] +JUNOS Packet Forwarding Engine Support (M/T Common) [9.1S3.5] +JUNOS Packet Forwarding Engine Support (MX Common) [9.1S3.5] +JUNOS Online Documentation [9.1S3.5] +JUNOS Routing Software Suite [9.1S3.5]""" + + with patch.object(textfsm_mod, "HAS_CLITABLE", True): + with patch.dict( + textfsm_mod.__opts__, {"textfsm_platform_grain": "textfsm_platform_grain"} + ): + with patch.dict( + textfsm_mod.__grains__, + {"textfsm_platform_grain": "textfsm_platform_grain"}, + ): + with patch.dict( + textfsm_mod.__salt__, + {"cp.cache_dir": MagicMock(return_value="/path/to/cache/")}, + ): + mock_read_data = { + "/index": [mock_open_index], + "/juniper_version_template": [ + juniper_version_template_one, + juniper_version_template_two, + ], + } + with patch.object( + clitable, "open", mock_open(read_data=mock_read_data) + ): + with patch.dict( + textfsm_mod.__salt__, + {"cp.get_file_str": MagicMock(return_value=output_text)}, + ): + ret = textfsm_mod.index( + command="sh ver", + platform="", + output_file="salt://textfsm/juniper_version_example", + textfsm_path="salt://textfsm/", + ) + assert ret == { + "out": [ + { + "chassis": "", + "model": "mx960", + "boot": "9.1S3.5", + "base": "9.1S3.5", + "kernel": "9.1S3.5", + "crypto": "9.1S3.5", + "documentation": "9.1S3.5", + "routing": "9.1S3.5", + } + ], + "result": True, + "comment": "", + } + + +def test_index_platform_name_grains_output_specified_no_attribute(): + """ + Test index + """ + mock_open_index = """ +Template, Hostname, Vendor, Command +juniper_version_template, .*, Juniper, sh[[ow]] ve[[rsion]]""" + + juniper_version_template_one = r"""Value Chassis (\S+) +Value Required Model (\S+) +Value Boot (.*) +Value Base (.*) +Value Kernel (.*) +Value Crypto (.*) +Value Documentation (.*) +Value Routing (.*) + +Start +# Support multiple chassis systems. + ^\S+:$$ -> Continue.Record + ^${Chassis}:$$ + ^Model: ${Model} + ^JUNOS Base OS boot \[${Boot}\] + ^JUNOS Software Release \[${Base}\] + ^JUNOS Base OS Software Suite \[${Base}\] + ^JUNOS Kernel Software Suite \[${Kernel}\] + ^JUNOS Crypto Software Suite \[${Crypto}\] + ^JUNOS Online Documentation \[${Documentation}\] + ^JUNOS Routing Software Suite \[${Routing}\]""" + + juniper_version_template_two = r"""Start +# Support multiple chassis systems. 
+ ^\S+:$$ -> Continue.Record + ^${Chassis}:$$ + ^Model: ${Model} + ^JUNOS Base OS boot \[${Boot}\] + ^JUNOS Software Release \[${Base}\] + ^JUNOS Base OS Software Suite \[${Base}\] + ^JUNOS Kernel Software Suite \[${Kernel}\] + ^JUNOS Crypto Software Suite \[${Crypto}\] + ^JUNOS Online Documentation \[${Documentation}\] + ^JUNOS Routing Software Suite \[${Routing}\]""" + + output_text = """ +Hostname: router.abc +Model: mx960 +JUNOS Base OS boot [9.1S3.5] +JUNOS Base OS Software Suite [9.1S3.5] +JUNOS Kernel Software Suite [9.1S3.5] +JUNOS Crypto Software Suite [9.1S3.5] +JUNOS Packet Forwarding Engine Support (M/T Common) [9.1S3.5] +JUNOS Packet Forwarding Engine Support (MX Common) [9.1S3.5] +JUNOS Online Documentation [9.1S3.5] +JUNOS Routing Software Suite [9.1S3.5]""" + + with patch.object(textfsm_mod, "HAS_CLITABLE", True): + with patch.dict( + textfsm_mod.__opts__, {"textfsm_platform_grain": "textfsm_platform_grain"} + ): + with patch.dict( + textfsm_mod.__grains__, + {"textfsm_platform_grain": "textfsm_platform_grain"}, + ): + with patch.dict( + textfsm_mod.__salt__, + {"cp.cache_dir": MagicMock(return_value="/path/to/cache/")}, + ): + mock_read_data = { + "/index": [mock_open_index], + "/juniper_version_template": [ + juniper_version_template_one, + juniper_version_template_two, + ], + } + with patch.object( + clitable, "open", mock_open(read_data=mock_read_data) + ): + with patch.dict( + textfsm_mod.__salt__, + {"cp.get_file_str": MagicMock(return_value=output_text)}, + ): + ret = textfsm_mod.index( + command="sr ver", + platform="", + output_file="salt://textfsm/juniper_version_example", + textfsm_path="salt://textfsm/", + ) + + assert ret == { + "out": None, + "result": False, + "comment": "Unable to process the output: No template found for attributes: \"{'Command': 'sr ver', 'Platform': 'textfsm_platform_grain'}\"", + } From 93579ead06ad9da7a80ed79cf500b265095d9916 Mon Sep 17 00:00:00 2001 From: "Gareth J. Greenaway" Date: Wed, 15 Nov 2023 14:24:53 -0800 Subject: [PATCH 007/102] Update tests/pytests/unit/modules/test_textfsm_mod.py Co-authored-by: Pedro Algarvio --- tests/pytests/unit/modules/test_textfsm_mod.py | 15 +-------------- 1 file changed, 1 insertion(+), 14 deletions(-) diff --git a/tests/pytests/unit/modules/test_textfsm_mod.py b/tests/pytests/unit/modules/test_textfsm_mod.py index 42634ea6c99..a6e562443ce 100644 --- a/tests/pytests/unit/modules/test_textfsm_mod.py +++ b/tests/pytests/unit/modules/test_textfsm_mod.py @@ -8,20 +8,7 @@ import pytest import salt.modules.textfsm_mod as textfsm_mod from tests.support.mock import MagicMock, mock_open, patch -HAS_LIBS = True -# pylint: disable=import-error,no-name-in-module,unused-import -try: - from textfsm import clitable -except ImportError: - HAS_LIBS = False -# pylint: enable=import-error,no-name-in-module,unused-import - - -pytestmark = [ - pytest.mark.skipif( - not HAS_LIBS, reason="Install textfsm to be able to run this test." - ) -] +textfsm = pytest.importorskip("textfsm", reason="Install textfsm to be able to run this test.") @pytest.fixture() From 6da2e379c66152c85acd7811e7ef861c0fcf831d Mon Sep 17 00:00:00 2001 From: "Gareth J. 
Greenaway" Date: Wed, 15 Nov 2023 14:25:04 -0800 Subject: [PATCH 008/102] Update tests/pytests/unit/modules/test_textfsm_mod.py Co-authored-by: Pedro Algarvio --- tests/pytests/unit/modules/test_textfsm_mod.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/pytests/unit/modules/test_textfsm_mod.py b/tests/pytests/unit/modules/test_textfsm_mod.py index a6e562443ce..8ade27836f2 100644 --- a/tests/pytests/unit/modules/test_textfsm_mod.py +++ b/tests/pytests/unit/modules/test_textfsm_mod.py @@ -680,7 +680,7 @@ JUNOS Routing Software Suite [9.1S3.5]""" ], } with patch.object( - clitable, "open", mock_open(read_data=mock_read_data) + textfsm.clitable, "open", mock_open(read_data=mock_read_data) ): with patch.dict( textfsm_mod.__salt__, From 05465fe8985c9bf23ac918d5e59d297896a4ad38 Mon Sep 17 00:00:00 2001 From: "Gareth J. Greenaway" Date: Wed, 15 Nov 2023 14:25:15 -0800 Subject: [PATCH 009/102] Update tests/pytests/unit/modules/test_textfsm_mod.py Co-authored-by: Pedro Algarvio --- tests/pytests/unit/modules/test_textfsm_mod.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/pytests/unit/modules/test_textfsm_mod.py b/tests/pytests/unit/modules/test_textfsm_mod.py index 8ade27836f2..f41f0a2a905 100644 --- a/tests/pytests/unit/modules/test_textfsm_mod.py +++ b/tests/pytests/unit/modules/test_textfsm_mod.py @@ -575,7 +575,7 @@ JUNOS Routing Software Suite [9.1S3.5]""" ], } with patch.object( - clitable, "open", mock_open(read_data=mock_read_data) + textfsm.clitable, "open", mock_open(read_data=mock_read_data) ): with patch.dict( textfsm_mod.__salt__, From 198395f2633f79c51b71dbfa5af317abb0f09530 Mon Sep 17 00:00:00 2001 From: "Gareth J. Greenaway" Date: Wed, 15 Nov 2023 14:25:38 -0800 Subject: [PATCH 010/102] Update tests/pytests/unit/modules/test_textfsm_mod.py Co-authored-by: Pedro Algarvio --- tests/pytests/unit/modules/test_textfsm_mod.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/pytests/unit/modules/test_textfsm_mod.py b/tests/pytests/unit/modules/test_textfsm_mod.py index f41f0a2a905..4816ad0ca24 100644 --- a/tests/pytests/unit/modules/test_textfsm_mod.py +++ b/tests/pytests/unit/modules/test_textfsm_mod.py @@ -482,7 +482,7 @@ juniper_version_template, .*, Juniper, sh[[ow]] ve[[rsion]]""" {"cp.cache_dir": MagicMock(return_value="/path/to/cache/")}, ): with patch.object( - clitable, "open", mock_open(read_data=mock_open_index) + textfsm.clitable, "open", mock_open(read_data=mock_open_index) ): with patch.dict( textfsm_mod.__salt__, From 24f1f3489a9376fb9a407f45b14951e56f64d9ba Mon Sep 17 00:00:00 2001 From: "Gareth J. Greenaway" Date: Wed, 15 Nov 2023 15:06:46 -0800 Subject: [PATCH 011/102] Running pre-commit manually --- tests/pytests/unit/modules/test_textfsm_mod.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/tests/pytests/unit/modules/test_textfsm_mod.py b/tests/pytests/unit/modules/test_textfsm_mod.py index 4816ad0ca24..66fd6cf0846 100644 --- a/tests/pytests/unit/modules/test_textfsm_mod.py +++ b/tests/pytests/unit/modules/test_textfsm_mod.py @@ -8,7 +8,9 @@ import pytest import salt.modules.textfsm_mod as textfsm_mod from tests.support.mock import MagicMock, mock_open, patch -textfsm = pytest.importorskip("textfsm", reason="Install textfsm to be able to run this test.") +textfsm = pytest.importorskip( + "textfsm", reason="Install textfsm to be able to run this test." 
+) @pytest.fixture() @@ -442,7 +444,9 @@ juniper_version_template, .*, Juniper, sh[[ow]] ve[[rsion]]""" {"cp.cache_dir": MagicMock(return_value="/path/to/cache/")}, ): with patch.object( - clitable, "open", mock_open(read_data=mock_open_index) + textfsm_mod.clitable, + "open", + mock_open(read_data=mock_open_index), ): with patch.dict( textfsm_mod.__salt__, From 5c5f4846272ea41a08d23fde9dcf87650bbb4862 Mon Sep 17 00:00:00 2001 From: "Gareth J. Greenaway" Date: Thu, 16 Nov 2023 13:52:52 -0800 Subject: [PATCH 012/102] renaming virtual test name, fixing nesting. --- .../pytests/unit/modules/test_textfsm_mod.py | 314 ++++++++---------- 1 file changed, 146 insertions(+), 168 deletions(-) diff --git a/tests/pytests/unit/modules/test_textfsm_mod.py b/tests/pytests/unit/modules/test_textfsm_mod.py index 66fd6cf0846..1f7c99c8d62 100644 --- a/tests/pytests/unit/modules/test_textfsm_mod.py +++ b/tests/pytests/unit/modules/test_textfsm_mod.py @@ -18,7 +18,7 @@ def configure_loader_modules(): return {textfsm_mod: {"__opts__": {}}} -def test_virttual_virtual(): +def test_dunder_virtual(): """ Test __virtual__ """ @@ -397,30 +397,26 @@ def test_index_platform_name_grains_no_cachedir(): """ Test index """ - with patch.object(textfsm_mod, "HAS_CLITABLE", True): - with patch.dict( - textfsm_mod.__opts__, {"textfsm_platform_grain": "textfsm_platform_grain"} - ): - with patch.dict( - textfsm_mod.__grains__, - {"textfsm_platform_grain": "textfsm_platform_grain"}, - ): - - with patch.dict( - textfsm_mod.__salt__, - {"cp.cache_dir": MagicMock(return_value=False)}, - ): - ret = textfsm_mod.index( - command="sh ver", - platform="", - output_file="salt://textfsm/juniper_version_example", - textfsm_path="salt://textfsm/", - ) - assert ret == { - "out": None, - "result": False, - "comment": "Unable to fetch from salt://textfsm/. Is the TextFSM path correctly specified?", - } + with patch.object(textfsm_mod, "HAS_CLITABLE", True), patch.dict( + textfsm_mod.__opts__, {"textfsm_platform_grain": "textfsm_platform_grain"} + ), patch.dict( + textfsm_mod.__grains__, + {"textfsm_platform_grain": "textfsm_platform_grain"}, + ), patch.dict( + textfsm_mod.__salt__, + {"cp.cache_dir": MagicMock(return_value=False)}, + ): + ret = textfsm_mod.index( + command="sh ver", + platform="", + output_file="salt://textfsm/juniper_version_example", + textfsm_path="salt://textfsm/", + ) + assert ret == { + "out": None, + "result": False, + "comment": "Unable to fetch from salt://textfsm/. 
Is the TextFSM path correctly specified?", + } def test_index_platform_name_grains_output_false(): @@ -431,38 +427,33 @@ def test_index_platform_name_grains_output_false(): Template, Hostname, Vendor, Command juniper_version_template, .*, Juniper, sh[[ow]] ve[[rsion]]""" - with patch.object(textfsm_mod, "HAS_CLITABLE", True): - with patch.dict( - textfsm_mod.__opts__, {"textfsm_platform_grain": "textfsm_platform_grain"} - ): - with patch.dict( - textfsm_mod.__grains__, - {"textfsm_platform_grain": "textfsm_platform_grain"}, - ): - with patch.dict( - textfsm_mod.__salt__, - {"cp.cache_dir": MagicMock(return_value="/path/to/cache/")}, - ): - with patch.object( - textfsm_mod.clitable, - "open", - mock_open(read_data=mock_open_index), - ): - with patch.dict( - textfsm_mod.__salt__, - {"cp.get_file_str": MagicMock(return_value=False)}, - ): - ret = textfsm_mod.index( - command="sh ver", - platform="", - output_file="salt://textfsm/juniper_version_example", - textfsm_path="salt://textfsm/", - ) - assert ret == { - "out": None, - "result": False, - "comment": "Unable to read from salt://textfsm/juniper_version_example. Please specify a valid file or text.", - } + with patch.object(textfsm_mod, "HAS_CLITABLE", True), patch.dict( + textfsm_mod.__opts__, {"textfsm_platform_grain": "textfsm_platform_grain"} + ), patch.dict( + textfsm_mod.__grains__, + {"textfsm_platform_grain": "textfsm_platform_grain"}, + ), patch.dict( + textfsm_mod.__salt__, + {"cp.cache_dir": MagicMock(return_value="/path/to/cache/")}, + ), patch.object( + textfsm_mod.clitable, + "open", + mock_open(read_data=mock_open_index), + ), patch.dict( + textfsm_mod.__salt__, + {"cp.get_file_str": MagicMock(return_value=False)}, + ): + ret = textfsm_mod.index( + command="sh ver", + platform="", + output_file="salt://textfsm/juniper_version_example", + textfsm_path="salt://textfsm/", + ) + assert ret == { + "out": None, + "result": False, + "comment": "Unable to read from salt://textfsm/juniper_version_example. 
Please specify a valid file or text.", + } def test_index_platform_name_grains_no_output_specified(): @@ -473,35 +464,30 @@ def test_index_platform_name_grains_no_output_specified(): Template, Hostname, Vendor, Command juniper_version_template, .*, Juniper, sh[[ow]] ve[[rsion]]""" - with patch.object(textfsm_mod, "HAS_CLITABLE", True): - with patch.dict( - textfsm_mod.__opts__, {"textfsm_platform_grain": "textfsm_platform_grain"} - ): - with patch.dict( - textfsm_mod.__grains__, - {"textfsm_platform_grain": "textfsm_platform_grain"}, - ): - with patch.dict( - textfsm_mod.__salt__, - {"cp.cache_dir": MagicMock(return_value="/path/to/cache/")}, - ): - with patch.object( - textfsm.clitable, "open", mock_open(read_data=mock_open_index) - ): - with patch.dict( - textfsm_mod.__salt__, - {"cp.get_file_str": MagicMock(return_value=False)}, - ): - ret = textfsm_mod.index( - command="sh ver", - platform="", - textfsm_path="salt://textfsm/", - ) - assert ret == { - "out": None, - "result": False, - "comment": "Please specify a valid output text or file", - } + with patch.object(textfsm_mod, "HAS_CLITABLE", True), patch.dict( + textfsm_mod.__opts__, {"textfsm_platform_grain": "textfsm_platform_grain"} + ), patch.dict( + textfsm_mod.__grains__, + {"textfsm_platform_grain": "textfsm_platform_grain"}, + ), patch.dict( + textfsm_mod.__salt__, + {"cp.cache_dir": MagicMock(return_value="/path/to/cache/")}, + ), patch.object( + textfsm.clitable, "open", mock_open(read_data=mock_open_index) + ), patch.dict( + textfsm_mod.__salt__, + {"cp.get_file_str": MagicMock(return_value=False)}, + ): + ret = textfsm_mod.index( + command="sh ver", + platform="", + textfsm_path="salt://textfsm/", + ) + assert ret == { + "out": None, + "result": False, + "comment": "Please specify a valid output text or file", + } def test_index_platform_name_grains_output_specified(): @@ -559,54 +545,50 @@ JUNOS Packet Forwarding Engine Support (MX Common) [9.1S3.5] JUNOS Online Documentation [9.1S3.5] JUNOS Routing Software Suite [9.1S3.5]""" - with patch.object(textfsm_mod, "HAS_CLITABLE", True): - with patch.dict( - textfsm_mod.__opts__, {"textfsm_platform_grain": "textfsm_platform_grain"} + with patch.object(textfsm_mod, "HAS_CLITABLE", True), patch.dict( + textfsm_mod.__opts__, {"textfsm_platform_grain": "textfsm_platform_grain"} + ), patch.dict( + textfsm_mod.__grains__, + {"textfsm_platform_grain": "textfsm_platform_grain"}, + ), patch.dict( + textfsm_mod.__salt__, + {"cp.cache_dir": MagicMock(return_value="/path/to/cache/")}, + ): + mock_read_data = { + "/index": [mock_open_index], + "/juniper_version_template": [ + juniper_version_template_one, + juniper_version_template_two, + ], + } + with patch.object( + textfsm.clitable, "open", mock_open(read_data=mock_read_data) + ), patch.dict( + textfsm_mod.__salt__, + {"cp.get_file_str": MagicMock(return_value=output_text)}, ): - with patch.dict( - textfsm_mod.__grains__, - {"textfsm_platform_grain": "textfsm_platform_grain"}, - ): - with patch.dict( - textfsm_mod.__salt__, - {"cp.cache_dir": MagicMock(return_value="/path/to/cache/")}, - ): - mock_read_data = { - "/index": [mock_open_index], - "/juniper_version_template": [ - juniper_version_template_one, - juniper_version_template_two, - ], + ret = textfsm_mod.index( + command="sh ver", + platform="", + output_file="salt://textfsm/juniper_version_example", + textfsm_path="salt://textfsm/", + ) + assert ret == { + "out": [ + { + "chassis": "", + "model": "mx960", + "boot": "9.1S3.5", + "base": "9.1S3.5", + "kernel": "9.1S3.5", + "crypto": 
"9.1S3.5", + "documentation": "9.1S3.5", + "routing": "9.1S3.5", } - with patch.object( - textfsm.clitable, "open", mock_open(read_data=mock_read_data) - ): - with patch.dict( - textfsm_mod.__salt__, - {"cp.get_file_str": MagicMock(return_value=output_text)}, - ): - ret = textfsm_mod.index( - command="sh ver", - platform="", - output_file="salt://textfsm/juniper_version_example", - textfsm_path="salt://textfsm/", - ) - assert ret == { - "out": [ - { - "chassis": "", - "model": "mx960", - "boot": "9.1S3.5", - "base": "9.1S3.5", - "kernel": "9.1S3.5", - "crypto": "9.1S3.5", - "documentation": "9.1S3.5", - "routing": "9.1S3.5", - } - ], - "result": True, - "comment": "", - } + ], + "result": True, + "comment": "", + } def test_index_platform_name_grains_output_specified_no_attribute(): @@ -664,41 +646,37 @@ JUNOS Packet Forwarding Engine Support (MX Common) [9.1S3.5] JUNOS Online Documentation [9.1S3.5] JUNOS Routing Software Suite [9.1S3.5]""" - with patch.object(textfsm_mod, "HAS_CLITABLE", True): - with patch.dict( - textfsm_mod.__opts__, {"textfsm_platform_grain": "textfsm_platform_grain"} + with patch.object(textfsm_mod, "HAS_CLITABLE", True), patch.dict( + textfsm_mod.__opts__, {"textfsm_platform_grain": "textfsm_platform_grain"} + ), patch.dict( + textfsm_mod.__grains__, + {"textfsm_platform_grain": "textfsm_platform_grain"}, + ), patch.dict( + textfsm_mod.__salt__, + {"cp.cache_dir": MagicMock(return_value="/path/to/cache/")}, + ): + mock_read_data = { + "/index": [mock_open_index], + "/juniper_version_template": [ + juniper_version_template_one, + juniper_version_template_two, + ], + } + with patch.object( + textfsm.clitable, "open", mock_open(read_data=mock_read_data) + ), patch.dict( + textfsm_mod.__salt__, + {"cp.get_file_str": MagicMock(return_value=output_text)}, ): - with patch.dict( - textfsm_mod.__grains__, - {"textfsm_platform_grain": "textfsm_platform_grain"}, - ): - with patch.dict( - textfsm_mod.__salt__, - {"cp.cache_dir": MagicMock(return_value="/path/to/cache/")}, - ): - mock_read_data = { - "/index": [mock_open_index], - "/juniper_version_template": [ - juniper_version_template_one, - juniper_version_template_two, - ], - } - with patch.object( - textfsm.clitable, "open", mock_open(read_data=mock_read_data) - ): - with patch.dict( - textfsm_mod.__salt__, - {"cp.get_file_str": MagicMock(return_value=output_text)}, - ): - ret = textfsm_mod.index( - command="sr ver", - platform="", - output_file="salt://textfsm/juniper_version_example", - textfsm_path="salt://textfsm/", - ) + ret = textfsm_mod.index( + command="sr ver", + platform="", + output_file="salt://textfsm/juniper_version_example", + textfsm_path="salt://textfsm/", + ) - assert ret == { - "out": None, - "result": False, - "comment": "Unable to process the output: No template found for attributes: \"{'Command': 'sr ver', 'Platform': 'textfsm_platform_grain'}\"", - } + assert ret == { + "out": None, + "result": False, + "comment": "Unable to process the output: No template found for attributes: \"{'Command': 'sr ver', 'Platform': 'textfsm_platform_grain'}\"", + } From 5ebf18260c6cfcd26889917a8bac844d6d0ed764 Mon Sep 17 00:00:00 2001 From: "Gareth J. Greenaway" Date: Thu, 16 Nov 2023 13:55:28 -0800 Subject: [PATCH 013/102] removing parens from pytest.fixture. 
--- tests/pytests/unit/modules/test_textfsm_mod.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/pytests/unit/modules/test_textfsm_mod.py b/tests/pytests/unit/modules/test_textfsm_mod.py index 1f7c99c8d62..ebf790055c6 100644 --- a/tests/pytests/unit/modules/test_textfsm_mod.py +++ b/tests/pytests/unit/modules/test_textfsm_mod.py @@ -13,7 +13,7 @@ textfsm = pytest.importorskip( ) -@pytest.fixture() +@pytest.fixture def configure_loader_modules(): return {textfsm_mod: {"__opts__": {}}} From fdfb0e1310d300311ac2061b6f0bdb405f0b31e7 Mon Sep 17 00:00:00 2001 From: Shane Lee Date: Wed, 14 Feb 2024 14:05:59 -0700 Subject: [PATCH 014/102] Fix test_log_beacon.py::test_log_match test on Windows --- tests/pytests/unit/modules/test_textfsm_mod.py | 1 - tests/support/mock.py | 6 +++++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/pytests/unit/modules/test_textfsm_mod.py b/tests/pytests/unit/modules/test_textfsm_mod.py index ebf790055c6..e158c55b719 100644 --- a/tests/pytests/unit/modules/test_textfsm_mod.py +++ b/tests/pytests/unit/modules/test_textfsm_mod.py @@ -2,7 +2,6 @@ :codeauthor: Gareth J. Greenaway """ - import pytest import salt.modules.textfsm_mod as textfsm_mod diff --git a/tests/support/mock.py b/tests/support/mock.py index ad3fb6c3359..9a6778684b4 100644 --- a/tests/support/mock.py +++ b/tests/support/mock.py @@ -219,7 +219,11 @@ class MockFH: def __exit__(self, exc_type, exc_val, exc_tb): # pylint: disable=unused-argument pass - def _seek(self, pos=0): + # For some reason this gets called with additional args on Windows when + # running the following test: + # tests/pytests/unit/beacons/test_log_beacon.py::test_log_match + # Let's just absorb them with *args + def _seek(self, pos=0, *args): self.__loc = pos self.read_data_iter = self._iterate_read_data(self.read_data) From 7e1ac8dd5a3e596e02ff4d7721ea81e7c5723b04 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Tue, 5 Mar 2024 17:46:50 +0000 Subject: [PATCH 015/102] Add `textfsm` to our requirements so that tests depending on it actually run --- requirements/static/ci/common.in | 1 + requirements/static/ci/py3.10/cloud.txt | 9 +++++++++ requirements/static/ci/py3.10/darwin.txt | 5 +++++ requirements/static/ci/py3.10/freebsd.txt | 5 +++++ requirements/static/ci/py3.10/lint.txt | 9 +++++++++ requirements/static/ci/py3.10/linux.txt | 5 +++++ requirements/static/ci/py3.10/windows.txt | 5 +++++ requirements/static/ci/py3.11/cloud.txt | 9 +++++++++ requirements/static/ci/py3.11/darwin.txt | 5 +++++ requirements/static/ci/py3.11/freebsd.txt | 5 +++++ requirements/static/ci/py3.11/lint.txt | 9 +++++++++ requirements/static/ci/py3.11/linux.txt | 5 +++++ requirements/static/ci/py3.11/windows.txt | 5 +++++ requirements/static/ci/py3.12/cloud.txt | 9 +++++++++ requirements/static/ci/py3.12/darwin.txt | 5 +++++ requirements/static/ci/py3.12/freebsd.txt | 5 +++++ requirements/static/ci/py3.12/lint.txt | 9 +++++++++ requirements/static/ci/py3.12/linux.txt | 5 +++++ requirements/static/ci/py3.12/windows.txt | 5 +++++ requirements/static/ci/py3.7/cloud.txt | 1 + requirements/static/ci/py3.7/freebsd.txt | 1 + requirements/static/ci/py3.7/linux.txt | 1 + requirements/static/ci/py3.7/windows.txt | 5 +++++ requirements/static/ci/py3.8/cloud.txt | 1 + requirements/static/ci/py3.8/freebsd.txt | 1 + requirements/static/ci/py3.8/lint.txt | 1 + requirements/static/ci/py3.8/linux.txt | 1 + requirements/static/ci/py3.8/windows.txt | 5 +++++ requirements/static/ci/py3.9/cloud.txt | 1 + 
requirements/static/ci/py3.9/darwin.txt | 1 + requirements/static/ci/py3.9/freebsd.txt | 1 + requirements/static/ci/py3.9/lint.txt | 1 + requirements/static/ci/py3.9/linux.txt | 1 + requirements/static/ci/py3.9/windows.txt | 5 +++++ 34 files changed, 142 insertions(+) diff --git a/requirements/static/ci/common.in b/requirements/static/ci/common.in index 6e1af3c6a4e..da673b5bbef 100644 --- a/requirements/static/ci/common.in +++ b/requirements/static/ci/common.in @@ -44,6 +44,7 @@ toml vcert~=0.7.0; sys_platform != 'win32' virtualenv>=20.3.0 watchdog>=0.9.0 +textfsm # Available template libraries that can be used genshi>=0.7.3 cheetah3>=3.2.2 diff --git a/requirements/static/ci/py3.10/cloud.txt b/requirements/static/ci/py3.10/cloud.txt index 8a45d45c88b..ed6dec2c13f 100644 --- a/requirements/static/ci/py3.10/cloud.txt +++ b/requirements/static/ci/py3.10/cloud.txt @@ -176,6 +176,10 @@ frozenlist==1.3.0 # -c requirements/static/ci/py3.10/linux.txt # aiohttp # aiosignal +future==1.0.0 + # via + # -c requirements/static/ci/py3.10/linux.txt + # textfsm genshi==0.7.5 # via # -c requirements/static/ci/py3.10/linux.txt @@ -629,6 +633,7 @@ six==1.16.0 # pyvmomi # pywinrm # responses + # textfsm # transitions # vcert # virtualenv @@ -654,6 +659,10 @@ tempora==4.1.1 # -c requirements/static/ci/../pkg/py3.10/linux.txt # -c requirements/static/ci/py3.10/linux.txt # portend +textfsm==1.1.3 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in timelib==0.2.5 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt diff --git a/requirements/static/ci/py3.10/darwin.txt b/requirements/static/ci/py3.10/darwin.txt index 252c180a965..cbc1bf07a5b 100644 --- a/requirements/static/ci/py3.10/darwin.txt +++ b/requirements/static/ci/py3.10/darwin.txt @@ -123,6 +123,8 @@ frozenlist==1.3.0 # via # aiohttp # aiosignal +future==1.0.0 + # via textfsm genshi==0.7.5 # via -r requirements/static/ci/common.in geomet==0.1.2 @@ -438,6 +440,7 @@ six==1.16.0 # python-dateutil # pyvmomi # responses + # textfsm # transitions # vcert # virtualenv @@ -454,6 +457,8 @@ tempora==4.1.1 # via # -c requirements/static/ci/../pkg/py3.10/darwin.txt # portend +textfsm==1.1.3 + # via -r requirements/static/ci/common.in timelib==0.2.5 # via # -c requirements/static/ci/../pkg/py3.10/darwin.txt diff --git a/requirements/static/ci/py3.10/freebsd.txt b/requirements/static/ci/py3.10/freebsd.txt index 5d0a1e82abd..6eb3f06b1b7 100644 --- a/requirements/static/ci/py3.10/freebsd.txt +++ b/requirements/static/ci/py3.10/freebsd.txt @@ -121,6 +121,8 @@ frozenlist==1.3.0 # via # aiohttp # aiosignal +future==1.0.0 + # via textfsm genshi==0.7.5 # via -r requirements/static/ci/common.in geomet==0.2.1.post1 @@ -432,6 +434,7 @@ six==1.16.0 # python-dateutil # pyvmomi # responses + # textfsm # transitions # vcert # virtualenv @@ -446,6 +449,8 @@ tempora==4.1.1 # via # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # portend +textfsm==1.1.3 + # via -r requirements/static/ci/common.in timelib==0.2.5 # via # -c requirements/static/ci/../pkg/py3.10/freebsd.txt diff --git a/requirements/static/ci/py3.10/lint.txt b/requirements/static/ci/py3.10/lint.txt index 021f3fcdbaf..896d5067a64 100644 --- a/requirements/static/ci/py3.10/lint.txt +++ b/requirements/static/ci/py3.10/lint.txt @@ -178,6 +178,10 @@ frozenlist==1.3.0 # -c requirements/static/ci/py3.10/linux.txt # aiohttp # aiosignal +future==1.0.0 + # via + # -c requirements/static/ci/py3.10/linux.txt + # textfsm genshi==0.7.5 # via # -c 
requirements/static/ci/py3.10/linux.txt @@ -603,6 +607,7 @@ six==1.16.0 # python-dateutil # pyvmomi # responses + # textfsm # transitions # vcert # virtualenv @@ -632,6 +637,10 @@ tempora==4.1.1 # -c requirements/static/ci/../pkg/py3.10/linux.txt # -c requirements/static/ci/py3.10/linux.txt # portend +textfsm==1.1.3 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in timelib==0.2.5 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt diff --git a/requirements/static/ci/py3.10/linux.txt b/requirements/static/ci/py3.10/linux.txt index 2d5b587979a..fc740334dac 100644 --- a/requirements/static/ci/py3.10/linux.txt +++ b/requirements/static/ci/py3.10/linux.txt @@ -130,6 +130,8 @@ frozenlist==1.3.0 # via # aiohttp # aiosignal +future==1.0.0 + # via textfsm genshi==0.7.5 # via -r requirements/static/ci/common.in geomet==0.1.2 @@ -471,6 +473,7 @@ six==1.16.0 # python-dateutil # pyvmomi # responses + # textfsm # transitions # vcert # virtualenv @@ -489,6 +492,8 @@ tempora==4.1.1 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt # portend +textfsm==1.1.3 + # via -r requirements/static/ci/common.in timelib==0.2.5 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt diff --git a/requirements/static/ci/py3.10/windows.txt b/requirements/static/ci/py3.10/windows.txt index dbb2ffd5f78..0cf8f2fa124 100644 --- a/requirements/static/ci/py3.10/windows.txt +++ b/requirements/static/ci/py3.10/windows.txt @@ -116,6 +116,8 @@ frozenlist==1.3.3 # via # aiohttp # aiosignal +future==1.0.0 + # via textfsm genshi==0.7.5 # via -r requirements/static/ci/common.in geomet==0.1.2 @@ -426,6 +428,7 @@ six==1.15.0 # pyvmomi # pywinrm # responses + # textfsm # websocket-client smmap==4.0.0 # via @@ -439,6 +442,8 @@ tempora==4.1.1 # via # -c requirements/static/ci/../pkg/py3.10/windows.txt # portend +textfsm==1.1.3 + # via -r requirements/static/ci/common.in timelib==0.2.5 # via # -c requirements/static/ci/../pkg/py3.10/windows.txt diff --git a/requirements/static/ci/py3.11/cloud.txt b/requirements/static/ci/py3.11/cloud.txt index 8b1195954a5..526e2afb27f 100644 --- a/requirements/static/ci/py3.11/cloud.txt +++ b/requirements/static/ci/py3.11/cloud.txt @@ -168,6 +168,10 @@ frozenlist==1.3.0 # -c requirements/static/ci/py3.11/linux.txt # aiohttp # aiosignal +future==1.0.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # textfsm genshi==0.7.5 # via # -c requirements/static/ci/py3.11/linux.txt @@ -585,6 +589,7 @@ six==1.16.0 # pyvmomi # pywinrm # responses + # textfsm # vcert # virtualenv # websocket-client @@ -609,6 +614,10 @@ tempora==4.1.1 # -c requirements/static/ci/../pkg/py3.11/linux.txt # -c requirements/static/ci/py3.11/linux.txt # portend +textfsm==1.1.3 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in timelib==0.2.5 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt diff --git a/requirements/static/ci/py3.11/darwin.txt b/requirements/static/ci/py3.11/darwin.txt index 086de67205e..aecdb2ab645 100644 --- a/requirements/static/ci/py3.11/darwin.txt +++ b/requirements/static/ci/py3.11/darwin.txt @@ -116,6 +116,8 @@ frozenlist==1.3.0 # via # aiohttp # aiosignal +future==1.0.0 + # via textfsm genshi==0.7.5 # via -r requirements/static/ci/common.in geomet==0.1.2 @@ -404,6 +406,7 @@ six==1.16.0 # python-dateutil # pyvmomi # responses + # textfsm # vcert # virtualenv # websocket-client @@ -419,6 +422,8 @@ tempora==4.1.1 # via # -c requirements/static/ci/../pkg/py3.11/darwin.txt # portend +textfsm==1.1.3 + # 
via -r requirements/static/ci/common.in timelib==0.2.5 # via # -c requirements/static/ci/../pkg/py3.11/darwin.txt diff --git a/requirements/static/ci/py3.11/freebsd.txt b/requirements/static/ci/py3.11/freebsd.txt index f45793eae7d..e2917fbc099 100644 --- a/requirements/static/ci/py3.11/freebsd.txt +++ b/requirements/static/ci/py3.11/freebsd.txt @@ -117,6 +117,8 @@ frozenlist==1.3.0 # via # aiohttp # aiosignal +future==1.0.0 + # via textfsm genshi==0.7.5 # via -r requirements/static/ci/common.in geomet==0.2.1.post1 @@ -403,6 +405,7 @@ six==1.16.0 # python-dateutil # pyvmomi # responses + # textfsm # vcert # virtualenv # websocket-client @@ -416,6 +419,8 @@ tempora==4.1.1 # via # -c requirements/static/ci/../pkg/py3.11/freebsd.txt # portend +textfsm==1.1.3 + # via -r requirements/static/ci/common.in timelib==0.2.5 # via # -c requirements/static/ci/../pkg/py3.11/freebsd.txt diff --git a/requirements/static/ci/py3.11/lint.txt b/requirements/static/ci/py3.11/lint.txt index 9a63bcf4755..7253b90d85b 100644 --- a/requirements/static/ci/py3.11/lint.txt +++ b/requirements/static/ci/py3.11/lint.txt @@ -174,6 +174,10 @@ frozenlist==1.3.0 # -c requirements/static/ci/py3.11/linux.txt # aiohttp # aiosignal +future==1.0.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # textfsm genshi==0.7.5 # via # -c requirements/static/ci/py3.11/linux.txt @@ -562,6 +566,7 @@ six==1.16.0 # python-dateutil # pyvmomi # responses + # textfsm # vcert # virtualenv # websocket-client @@ -590,6 +595,10 @@ tempora==4.1.1 # -c requirements/static/ci/../pkg/py3.11/linux.txt # -c requirements/static/ci/py3.11/linux.txt # portend +textfsm==1.1.3 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in timelib==0.2.5 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt diff --git a/requirements/static/ci/py3.11/linux.txt b/requirements/static/ci/py3.11/linux.txt index 2acd1475ecf..eb479bfa93b 100644 --- a/requirements/static/ci/py3.11/linux.txt +++ b/requirements/static/ci/py3.11/linux.txt @@ -126,6 +126,8 @@ frozenlist==1.3.0 # via # aiohttp # aiosignal +future==1.0.0 + # via textfsm genshi==0.7.5 # via -r requirements/static/ci/common.in geomet==0.1.2 @@ -442,6 +444,7 @@ six==1.16.0 # python-dateutil # pyvmomi # responses + # textfsm # vcert # virtualenv # websocket-client @@ -459,6 +462,8 @@ tempora==4.1.1 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt # portend +textfsm==1.1.3 + # via -r requirements/static/ci/common.in timelib==0.2.5 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt diff --git a/requirements/static/ci/py3.11/windows.txt b/requirements/static/ci/py3.11/windows.txt index 76d878768c1..a91f9965484 100644 --- a/requirements/static/ci/py3.11/windows.txt +++ b/requirements/static/ci/py3.11/windows.txt @@ -112,6 +112,8 @@ frozenlist==1.3.3 # via # aiohttp # aiosignal +future==1.0.0 + # via textfsm genshi==0.7.5 # via -r requirements/static/ci/common.in geomet==0.1.2 @@ -422,6 +424,7 @@ six==1.15.0 # pyvmomi # pywinrm # responses + # textfsm # websocket-client smmap==4.0.0 # via @@ -435,6 +438,8 @@ tempora==4.1.1 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt # portend +textfsm==1.1.3 + # via -r requirements/static/ci/common.in timelib==0.2.5 # via # -c requirements/static/ci/../pkg/py3.11/windows.txt diff --git a/requirements/static/ci/py3.12/cloud.txt b/requirements/static/ci/py3.12/cloud.txt index e0198f8a501..d7cacc921d9 100644 --- a/requirements/static/ci/py3.12/cloud.txt +++ b/requirements/static/ci/py3.12/cloud.txt @@ -168,6 
+168,10 @@ frozenlist==1.3.0 # -c requirements/static/ci/py3.12/linux.txt # aiohttp # aiosignal +future==1.0.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # textfsm genshi==0.7.5 # via # -c requirements/static/ci/py3.12/linux.txt @@ -585,6 +589,7 @@ six==1.16.0 # pyvmomi # pywinrm # responses + # textfsm # vcert # virtualenv # websocket-client @@ -609,6 +614,10 @@ tempora==4.1.1 # -c requirements/static/ci/../pkg/py3.12/linux.txt # -c requirements/static/ci/py3.12/linux.txt # portend +textfsm==1.1.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in timelib==0.2.5 # via # -c requirements/static/ci/../pkg/py3.12/linux.txt diff --git a/requirements/static/ci/py3.12/darwin.txt b/requirements/static/ci/py3.12/darwin.txt index 957d4868c02..848c1ba09a5 100644 --- a/requirements/static/ci/py3.12/darwin.txt +++ b/requirements/static/ci/py3.12/darwin.txt @@ -116,6 +116,8 @@ frozenlist==1.3.0 # via # aiohttp # aiosignal +future==1.0.0 + # via textfsm genshi==0.7.5 # via -r requirements/static/ci/common.in geomet==0.1.2 @@ -404,6 +406,7 @@ six==1.16.0 # python-dateutil # pyvmomi # responses + # textfsm # vcert # virtualenv # websocket-client @@ -419,6 +422,8 @@ tempora==4.1.1 # via # -c requirements/static/ci/../pkg/py3.12/darwin.txt # portend +textfsm==1.1.3 + # via -r requirements/static/ci/common.in timelib==0.2.5 # via # -c requirements/static/ci/../pkg/py3.12/darwin.txt diff --git a/requirements/static/ci/py3.12/freebsd.txt b/requirements/static/ci/py3.12/freebsd.txt index b866ab97341..665eca47f4b 100644 --- a/requirements/static/ci/py3.12/freebsd.txt +++ b/requirements/static/ci/py3.12/freebsd.txt @@ -117,6 +117,8 @@ frozenlist==1.3.0 # via # aiohttp # aiosignal +future==1.0.0 + # via textfsm genshi==0.7.5 # via -r requirements/static/ci/common.in geomet==0.2.1.post1 @@ -403,6 +405,7 @@ six==1.16.0 # python-dateutil # pyvmomi # responses + # textfsm # vcert # virtualenv # websocket-client @@ -416,6 +419,8 @@ tempora==4.1.1 # via # -c requirements/static/ci/../pkg/py3.12/freebsd.txt # portend +textfsm==1.1.3 + # via -r requirements/static/ci/common.in timelib==0.2.5 # via # -c requirements/static/ci/../pkg/py3.12/freebsd.txt diff --git a/requirements/static/ci/py3.12/lint.txt b/requirements/static/ci/py3.12/lint.txt index 9f18f2fa089..35fa64e4e01 100644 --- a/requirements/static/ci/py3.12/lint.txt +++ b/requirements/static/ci/py3.12/lint.txt @@ -174,6 +174,10 @@ frozenlist==1.3.0 # -c requirements/static/ci/py3.12/linux.txt # aiohttp # aiosignal +future==1.0.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # textfsm genshi==0.7.5 # via # -c requirements/static/ci/py3.12/linux.txt @@ -562,6 +566,7 @@ six==1.16.0 # python-dateutil # pyvmomi # responses + # textfsm # vcert # virtualenv # websocket-client @@ -590,6 +595,10 @@ tempora==4.1.1 # -c requirements/static/ci/../pkg/py3.12/linux.txt # -c requirements/static/ci/py3.12/linux.txt # portend +textfsm==1.1.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in timelib==0.2.5 # via # -c requirements/static/ci/../pkg/py3.12/linux.txt diff --git a/requirements/static/ci/py3.12/linux.txt b/requirements/static/ci/py3.12/linux.txt index b83e47107f2..5eadb7bdb10 100644 --- a/requirements/static/ci/py3.12/linux.txt +++ b/requirements/static/ci/py3.12/linux.txt @@ -126,6 +126,8 @@ frozenlist==1.3.0 # via # aiohttp # aiosignal +future==1.0.0 + # via textfsm genshi==0.7.5 # via -r requirements/static/ci/common.in geomet==0.1.2 @@ -442,6 +444,7 @@ 
six==1.16.0 # python-dateutil # pyvmomi # responses + # textfsm # vcert # virtualenv # websocket-client @@ -459,6 +462,8 @@ tempora==4.1.1 # via # -c requirements/static/ci/../pkg/py3.12/linux.txt # portend +textfsm==1.1.3 + # via -r requirements/static/ci/common.in timelib==0.2.5 # via # -c requirements/static/ci/../pkg/py3.12/linux.txt diff --git a/requirements/static/ci/py3.12/windows.txt b/requirements/static/ci/py3.12/windows.txt index 58f45091768..b7e2bcd28af 100644 --- a/requirements/static/ci/py3.12/windows.txt +++ b/requirements/static/ci/py3.12/windows.txt @@ -112,6 +112,8 @@ frozenlist==1.3.3 # via # aiohttp # aiosignal +future==1.0.0 + # via textfsm genshi==0.7.5 # via -r requirements/static/ci/common.in geomet==0.1.2 @@ -422,6 +424,7 @@ six==1.15.0 # pyvmomi # pywinrm # responses + # textfsm # websocket-client smmap==4.0.0 # via @@ -435,6 +438,8 @@ tempora==4.1.1 # via # -c requirements/static/ci/../pkg/py3.12/windows.txt # portend +textfsm==1.1.3 + # via -r requirements/static/ci/common.in timelib==0.2.5 # via # -c requirements/static/ci/../pkg/py3.12/windows.txt diff --git a/requirements/static/ci/py3.7/cloud.txt b/requirements/static/ci/py3.7/cloud.txt index ba8dfa78e00..52c608a3d08 100644 --- a/requirements/static/ci/py3.7/cloud.txt +++ b/requirements/static/ci/py3.7/cloud.txt @@ -717,6 +717,7 @@ terminal==0.4.0 textfsm==1.1.0 # via # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in # napalm # netmiko # ntc-templates diff --git a/requirements/static/ci/py3.7/freebsd.txt b/requirements/static/ci/py3.7/freebsd.txt index c3f8c81e54e..cfe9782bff3 100644 --- a/requirements/static/ci/py3.7/freebsd.txt +++ b/requirements/static/ci/py3.7/freebsd.txt @@ -498,6 +498,7 @@ terminal==0.4.0 # via ntc-templates textfsm==1.1.0 # via + # -r requirements/static/ci/common.in # napalm # netmiko # ntc-templates diff --git a/requirements/static/ci/py3.7/linux.txt b/requirements/static/ci/py3.7/linux.txt index 024b9b2e28f..96d1ee2e617 100644 --- a/requirements/static/ci/py3.7/linux.txt +++ b/requirements/static/ci/py3.7/linux.txt @@ -533,6 +533,7 @@ terminal==0.4.0 # via ntc-templates textfsm==1.1.0 # via + # -r requirements/static/ci/common.in # napalm # netmiko # ntc-templates diff --git a/requirements/static/ci/py3.7/windows.txt b/requirements/static/ci/py3.7/windows.txt index fb531541c0c..5b88e0af436 100644 --- a/requirements/static/ci/py3.7/windows.txt +++ b/requirements/static/ci/py3.7/windows.txt @@ -123,6 +123,8 @@ frozenlist==1.3.0 # via # aiohttp # aiosignal +future==1.0.0 + # via textfsm genshi==0.7.5 # via -r requirements/static/ci/common.in geomet==0.1.2 @@ -441,6 +443,7 @@ six==1.15.0 # pyvmomi # pywinrm # responses + # textfsm # virtualenv # websocket-client smmap==4.0.0 @@ -455,6 +458,8 @@ tempora==4.1.1 # via # -c requirements/static/ci/../pkg/py3.7/windows.txt # portend +textfsm==1.1.3 + # via -r requirements/static/ci/common.in timelib==0.2.5 # via # -c requirements/static/ci/../pkg/py3.7/windows.txt diff --git a/requirements/static/ci/py3.8/cloud.txt b/requirements/static/ci/py3.8/cloud.txt index dce27db9ecf..8f8e877ef59 100644 --- a/requirements/static/ci/py3.8/cloud.txt +++ b/requirements/static/ci/py3.8/cloud.txt @@ -704,6 +704,7 @@ terminal==0.4.0 textfsm==1.1.0 # via # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in # napalm # netmiko # ntc-templates diff --git a/requirements/static/ci/py3.8/freebsd.txt b/requirements/static/ci/py3.8/freebsd.txt index b976324e81e..f21ae83c808 100644 --- 
a/requirements/static/ci/py3.8/freebsd.txt +++ b/requirements/static/ci/py3.8/freebsd.txt @@ -485,6 +485,7 @@ terminal==0.4.0 # via ntc-templates textfsm==1.1.0 # via + # -r requirements/static/ci/common.in # napalm # netmiko # ntc-templates diff --git a/requirements/static/ci/py3.8/lint.txt b/requirements/static/ci/py3.8/lint.txt index 88f51ccdc02..6326ba0bcf1 100644 --- a/requirements/static/ci/py3.8/lint.txt +++ b/requirements/static/ci/py3.8/lint.txt @@ -670,6 +670,7 @@ terminal==0.4.0 textfsm==1.1.0 # via # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in # napalm # netmiko # ntc-templates diff --git a/requirements/static/ci/py3.8/linux.txt b/requirements/static/ci/py3.8/linux.txt index a12e42c148d..6a4845f41b8 100644 --- a/requirements/static/ci/py3.8/linux.txt +++ b/requirements/static/ci/py3.8/linux.txt @@ -520,6 +520,7 @@ terminal==0.4.0 # via ntc-templates textfsm==1.1.0 # via + # -r requirements/static/ci/common.in # napalm # netmiko # ntc-templates diff --git a/requirements/static/ci/py3.8/windows.txt b/requirements/static/ci/py3.8/windows.txt index c496cb1d763..dde66fdf584 100644 --- a/requirements/static/ci/py3.8/windows.txt +++ b/requirements/static/ci/py3.8/windows.txt @@ -118,6 +118,8 @@ frozenlist==1.3.0 # via # aiohttp # aiosignal +future==1.0.0 + # via textfsm genshi==0.7.5 # via -r requirements/static/ci/common.in geomet==0.1.2 @@ -428,6 +430,7 @@ six==1.15.0 # pyvmomi # pywinrm # responses + # textfsm # virtualenv # websocket-client smmap==4.0.0 @@ -442,6 +445,8 @@ tempora==4.1.1 # via # -c requirements/static/ci/../pkg/py3.8/windows.txt # portend +textfsm==1.1.3 + # via -r requirements/static/ci/common.in timelib==0.2.5 # via # -c requirements/static/ci/../pkg/py3.8/windows.txt diff --git a/requirements/static/ci/py3.9/cloud.txt b/requirements/static/ci/py3.9/cloud.txt index c7e4877c89a..0af223eea14 100644 --- a/requirements/static/ci/py3.9/cloud.txt +++ b/requirements/static/ci/py3.9/cloud.txt @@ -706,6 +706,7 @@ terminal==0.4.0 textfsm==1.1.0 # via # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in # napalm # netmiko # ntc-templates diff --git a/requirements/static/ci/py3.9/darwin.txt b/requirements/static/ci/py3.9/darwin.txt index e79e989dd3c..f531c09cbfd 100644 --- a/requirements/static/ci/py3.9/darwin.txt +++ b/requirements/static/ci/py3.9/darwin.txt @@ -495,6 +495,7 @@ terminal==0.4.0 # via ntc-templates textfsm==1.1.0 # via + # -r requirements/static/ci/common.in # napalm # netmiko # ntc-templates diff --git a/requirements/static/ci/py3.9/freebsd.txt b/requirements/static/ci/py3.9/freebsd.txt index e16d252f995..6e0d5b75759 100644 --- a/requirements/static/ci/py3.9/freebsd.txt +++ b/requirements/static/ci/py3.9/freebsd.txt @@ -487,6 +487,7 @@ terminal==0.4.0 # via ntc-templates textfsm==1.1.0 # via + # -r requirements/static/ci/common.in # napalm # netmiko # ntc-templates diff --git a/requirements/static/ci/py3.9/lint.txt b/requirements/static/ci/py3.9/lint.txt index 3d81005bfbb..a01129a7f41 100644 --- a/requirements/static/ci/py3.9/lint.txt +++ b/requirements/static/ci/py3.9/lint.txt @@ -668,6 +668,7 @@ terminal==0.4.0 textfsm==1.1.0 # via # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in # napalm # netmiko # ntc-templates diff --git a/requirements/static/ci/py3.9/linux.txt b/requirements/static/ci/py3.9/linux.txt index e39b2d77d7d..cba580cfb90 100644 --- a/requirements/static/ci/py3.9/linux.txt +++ b/requirements/static/ci/py3.9/linux.txt @@ -520,6 +520,7 @@ 
terminal==0.4.0 # via ntc-templates textfsm==1.1.0 # via + # -r requirements/static/ci/common.in # napalm # netmiko # ntc-templates diff --git a/requirements/static/ci/py3.9/windows.txt b/requirements/static/ci/py3.9/windows.txt index 72163cc5214..51937f10ec8 100644 --- a/requirements/static/ci/py3.9/windows.txt +++ b/requirements/static/ci/py3.9/windows.txt @@ -118,6 +118,8 @@ frozenlist==1.3.0 # via # aiohttp # aiosignal +future==1.0.0 + # via textfsm genshi==0.7.5 # via -r requirements/static/ci/common.in geomet==0.1.2 @@ -429,6 +431,7 @@ six==1.15.0 # pyvmomi # pywinrm # responses + # textfsm # virtualenv # websocket-client smmap==4.0.0 @@ -443,6 +446,8 @@ tempora==4.1.1 # via # -c requirements/static/ci/../pkg/py3.9/windows.txt # portend +textfsm==1.1.3 + # via -r requirements/static/ci/common.in timelib==0.2.5 # via # -c requirements/static/ci/../pkg/py3.9/windows.txt From 45cf38f4b40fe764aea6c1d82f1aed2287a8be0a Mon Sep 17 00:00:00 2001 From: Shane Lee Date: Tue, 27 Feb 2024 08:59:45 -0700 Subject: [PATCH 016/102] Fix an issue with the win_pki execution module Change the _cmd_run function so that it raises a CommandExecutionError when the command fails so that it doesn't try to parse the output of the command as JSON. Also raise a CommandExecutionError when it receives invalid JSON --- changelog/64933.fixed.md | 1 + salt/modules/win_pki.py | 12 ++++-- tests/pytests/unit/modules/test_win_pki.py | 46 +++++++++++++++++++++- 3 files changed, 54 insertions(+), 5 deletions(-) create mode 100644 changelog/64933.fixed.md diff --git a/changelog/64933.fixed.md b/changelog/64933.fixed.md new file mode 100644 index 00000000000..e6f233db29d --- /dev/null +++ b/changelog/64933.fixed.md @@ -0,0 +1 @@ +Display a proper error when pki commands fail in the win_pki module diff --git a/salt/modules/win_pki.py b/salt/modules/win_pki.py index e004f0bc9e8..6cc68309ad7 100644 --- a/salt/modules/win_pki.py +++ b/salt/modules/win_pki.py @@ -23,7 +23,7 @@ import salt.utils.json import salt.utils.platform import salt.utils.powershell import salt.utils.versions -from salt.exceptions import SaltInvocationError +from salt.exceptions import CommandExecutionError, SaltInvocationError _DEFAULT_CONTEXT = "LocalMachine" _DEFAULT_FORMAT = "cer" @@ -73,15 +73,19 @@ def _cmd_run(cmd, as_json=False): "".join(cmd_full), shell="powershell", python_shell=True ) - if cmd_ret["retcode"] != 0: - _LOG.error("Unable to execute command: %s\nError: %s", cmd, cmd_ret["stderr"]) + if cmd_ret["stderr"]: + raise CommandExecutionError( + "Unable to execute command: {}\nError: {}".format(cmd, cmd_ret["stderr"]) + ) if as_json: try: items = salt.utils.json.loads(cmd_ret["stdout"], strict=False) return items except ValueError: - _LOG.error("Unable to parse return data as Json.") + raise CommandExecutionError( + "Unable to parse return data as JSON:\n{}".format(cmd_ret["stdout"]) + ) return cmd_ret["stdout"] diff --git a/tests/pytests/unit/modules/test_win_pki.py b/tests/pytests/unit/modules/test_win_pki.py index 600282e8bd8..e90f4368e9f 100644 --- a/tests/pytests/unit/modules/test_win_pki.py +++ b/tests/pytests/unit/modules/test_win_pki.py @@ -1,10 +1,11 @@ """ - Test cases for salt.modules.win_pki +Test cases for salt.modules.win_pki """ import pytest import salt.modules.win_pki as win_pki +from salt.exceptions import CommandExecutionError from tests.support.mock import MagicMock, patch @@ -181,3 +182,46 @@ def test_remove_cert(thumbprint, certs): "salt.modules.win_pki.get_certs", MagicMock(return_value=certs) ): assert
win_pki.remove_cert(thumbprint=thumbprint[::-1]) + + +def test__cmd_run(): + """ + Test the _cmd_run function + """ + mock_run = MagicMock( + return_value={"retcode": 0, "stderr": "", "stdout": "some result"} + ) + with patch.dict(win_pki.__salt__, {"cmd.run_all": mock_run}): + result = win_pki._cmd_run(cmd="command") + assert result == "some result" + + +def test__cmd_run_as_json(): + mock_run = MagicMock( + return_value={"retcode": 0, "stderr": "", "stdout": '{"key": "value"}'} + ) + with patch.dict(win_pki.__salt__, {"cmd.run_all": mock_run}): + result = win_pki._cmd_run(cmd="command", as_json=True) + assert result == {"key": "value"} + + +def test__cmd_run_stderr(): + mock_run = MagicMock( + return_value={"retcode": 0, "stderr": "some error", "stdout": ""} + ) + with patch.dict(win_pki.__salt__, {"cmd.run_all": mock_run}): + with pytest.raises(CommandExecutionError) as exc_info: + win_pki._cmd_run(cmd="command") + expected = "Unable to execute command: command\nError: some error" + assert exc_info.value.args[0] == expected + + +def test__cmd_run_bad_json(): + mock_run = MagicMock( + return_value={"retcode": 0, "stderr": "", "stdout": "not : valid\njson"} + ) + with patch.dict(win_pki.__salt__, {"cmd.run_all": mock_run}): + with pytest.raises(CommandExecutionError) as exc_info: + win_pki._cmd_run(cmd="command", as_json=True) + expected = "Unable to parse return data as JSON:\nnot : valid\njson" + assert exc_info.value.args[0] == expected From 5e36621803b4b9fa50ed5ea02dfa6251ed459a5e Mon Sep 17 00:00:00 2001 From: Shane Lee Date: Fri, 8 Mar 2024 11:26:39 -0700 Subject: [PATCH 017/102] Upgrade WIX to 3.14 --- pkg/windows/install_wix.ps1 | 25 +++++++++++++++---------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/pkg/windows/install_wix.ps1 b/pkg/windows/install_wix.ps1 index 5426bf16ff5..ecbba1f353a 100644 --- a/pkg/windows/install_wix.ps1 +++ b/pkg/windows/install_wix.ps1 @@ -74,17 +74,15 @@ if ( (Get-WindowsOptionalFeature -Online -FeatureName "NetFx3").State -eq "Enabl #------------------------------------------------------------------------------- Write-Host "Looking for Wix Toolset: " -NoNewline -# 64bit: {03368010-193D-4AE2-B275-DD2EB32CD427} -# 32bit: {07188017-A460-4C0D-A386-6B3CEB8E20CD} -if ((ProductcodeExists "{03368010-193D-4AE2-B275-DD2EB32CD427}") ` - -or ` - (ProductcodeExists "{07188017-A460-4C0D-A386-6B3CEB8E20CD}")) { +$guid_64 = "{A2D09E18-32F8-4E34-946A-33AC8C8303E9}" +$guid_32 = "{00A0C4F8-9F6C-40FB-A02D-3EAE1D7FD352}" +if ( (ProductcodeExists $guid_64) -or (ProductcodeExists $guid_32) ) { Write-Result "Success" -ForegroundColor Green } else { Write-Result "Missing" -ForegroundColor Yellow Write-Host "Downloading Wix Toolset: " -NoNewline - $url = "https://github.com/wixtoolset/wix3/releases/download/wix3112rtm/wix311.exe" + $url = "https://github.com/wixtoolset/wix3/releases/download/wix314rtm/wix314.exe" $file = "$env:TEMP\wix_installer.exe" Invoke-WebRequest -Uri $url -OutFile "$file" if ( Test-Path -Path "$file" ) { @@ -95,10 +93,17 @@ if ((ProductcodeExists "{03368010-193D-4AE2-B275-DD2EB32CD427}") ` } Write-Host "Installing Wix Toolset: " -NoNewline - Start-Process $file -ArgumentList "/install","/quiet","/norestart" -Wait -NoNewWindow - if ((ProductcodeExists "{03368010-193D-4AE2-B275-DD2EB32CD427}") ` - -or ` - (ProductcodeExists "{07188017-A460-4C0D-A386-6B3CEB8E20CD}")) { + $process = Start-Process $file -ArgumentList "/install","/quiet","/norestart" -PassThru -Wait -NoNewWindow + + if ( $process.ExitCode -eq 0 ) { + Write-Result "Success" 
-ForegroundColor Green + } else { + Write-Result "Failed" -ForegroundColor Red + exit 1 + } + + Write-Host "Verifying Wix Toolset Installation: " -NoNewline + if ( (ProductcodeExists $guid_64) -or (ProductcodeExists $guid_32) ) { Write-Result "Success" -ForegroundColor Green } else { Write-Result "Failed" -ForegroundColor Red From 6952f883bd8e57cd1cd8cdf37a7d1ceb3216b8f6 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 7 Mar 2024 21:38:31 +0000 Subject: [PATCH 018/102] Bump to `pytest-timeout==2.3.1` --- requirements/pytest.txt | 2 +- requirements/static/ci/py3.10/cloud.txt | 2 +- requirements/static/ci/py3.10/darwin.txt | 2 +- requirements/static/ci/py3.10/freebsd.txt | 2 +- requirements/static/ci/py3.10/linux.txt | 2 +- requirements/static/ci/py3.10/windows.txt | 2 +- requirements/static/ci/py3.11/cloud.txt | 2 +- requirements/static/ci/py3.11/darwin.txt | 2 +- requirements/static/ci/py3.11/freebsd.txt | 2 +- requirements/static/ci/py3.11/linux.txt | 2 +- requirements/static/ci/py3.11/windows.txt | 2 +- requirements/static/ci/py3.12/cloud.txt | 2 +- requirements/static/ci/py3.12/darwin.txt | 2 +- requirements/static/ci/py3.12/freebsd.txt | 2 +- requirements/static/ci/py3.12/linux.txt | 2 +- requirements/static/ci/py3.12/windows.txt | 2 +- requirements/static/ci/py3.7/cloud.txt | 2 +- requirements/static/ci/py3.7/freebsd.txt | 2 +- requirements/static/ci/py3.7/linux.txt | 2 +- requirements/static/ci/py3.7/windows.txt | 2 +- requirements/static/ci/py3.8/cloud.txt | 2 +- requirements/static/ci/py3.8/freebsd.txt | 2 +- requirements/static/ci/py3.8/linux.txt | 2 +- requirements/static/ci/py3.8/windows.txt | 2 +- requirements/static/ci/py3.9/cloud.txt | 2 +- requirements/static/ci/py3.9/darwin.txt | 2 +- requirements/static/ci/py3.9/freebsd.txt | 2 +- requirements/static/ci/py3.9/linux.txt | 2 +- requirements/static/ci/py3.9/windows.txt | 2 +- 29 files changed, 29 insertions(+), 29 deletions(-) diff --git a/requirements/pytest.txt b/requirements/pytest.txt index 950867df605..5e6c895538d 100644 --- a/requirements/pytest.txt +++ b/requirements/pytest.txt @@ -5,7 +5,7 @@ pytest >= 7.2.0 pytest-salt-factories >= 1.0.0rc29 pytest-helpers-namespace >= 2019.1.8 pytest-subtests -pytest-timeout +pytest-timeout >= 2.3.1 pytest-httpserver pytest-custom-exit-code >= 0.3 flaky diff --git a/requirements/static/ci/py3.10/cloud.txt b/requirements/static/ci/py3.10/cloud.txt index ed6dec2c13f..818a5e51986 100644 --- a/requirements/static/ci/py3.10/cloud.txt +++ b/requirements/static/ci/py3.10/cloud.txt @@ -496,7 +496,7 @@ pytest-system-statistics==1.0.2 # via # -c requirements/static/ci/py3.10/linux.txt # pytest-salt-factories -pytest-timeout==1.4.2 +pytest-timeout==2.3.1 # via # -c requirements/static/ci/py3.10/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.10/darwin.txt b/requirements/static/ci/py3.10/darwin.txt index cbc1bf07a5b..0fe37c86c69 100644 --- a/requirements/static/ci/py3.10/darwin.txt +++ b/requirements/static/ci/py3.10/darwin.txt @@ -345,7 +345,7 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-timeout==1.4.2 +pytest-timeout==2.3.1 # via -r requirements/pytest.txt pytest==7.3.2 # via diff --git a/requirements/static/ci/py3.10/freebsd.txt b/requirements/static/ci/py3.10/freebsd.txt index 6eb3f06b1b7..6c17bca2934 100644 --- a/requirements/static/ci/py3.10/freebsd.txt +++ b/requirements/static/ci/py3.10/freebsd.txt @@ -338,7 +338,7 @@ pytest-subtests==0.4.0 # via -r 
requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-timeout==1.4.2 +pytest-timeout==2.3.1 # via -r requirements/pytest.txt pytest==7.3.2 # via diff --git a/requirements/static/ci/py3.10/linux.txt b/requirements/static/ci/py3.10/linux.txt index fc740334dac..a5d54eb879b 100644 --- a/requirements/static/ci/py3.10/linux.txt +++ b/requirements/static/ci/py3.10/linux.txt @@ -355,7 +355,7 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-timeout==1.4.2 +pytest-timeout==2.3.1 # via -r requirements/pytest.txt pytest==7.3.2 # via diff --git a/requirements/static/ci/py3.10/windows.txt b/requirements/static/ci/py3.10/windows.txt index 0cf8f2fa124..54bfbf52253 100644 --- a/requirements/static/ci/py3.10/windows.txt +++ b/requirements/static/ci/py3.10/windows.txt @@ -323,7 +323,7 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-timeout==2.1.0 +pytest-timeout==2.3.1 # via -r requirements/pytest.txt pytest==7.3.2 # via diff --git a/requirements/static/ci/py3.11/cloud.txt b/requirements/static/ci/py3.11/cloud.txt index 526e2afb27f..a5b718e4e8a 100644 --- a/requirements/static/ci/py3.11/cloud.txt +++ b/requirements/static/ci/py3.11/cloud.txt @@ -460,7 +460,7 @@ pytest-system-statistics==1.0.2 # via # -c requirements/static/ci/py3.11/linux.txt # pytest-salt-factories -pytest-timeout==1.4.2 +pytest-timeout==2.3.1 # via # -c requirements/static/ci/py3.11/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.11/darwin.txt b/requirements/static/ci/py3.11/darwin.txt index aecdb2ab645..53ae920953d 100644 --- a/requirements/static/ci/py3.11/darwin.txt +++ b/requirements/static/ci/py3.11/darwin.txt @@ -316,7 +316,7 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-timeout==1.4.2 +pytest-timeout==2.3.1 # via -r requirements/pytest.txt pytest==7.3.2 # via diff --git a/requirements/static/ci/py3.11/freebsd.txt b/requirements/static/ci/py3.11/freebsd.txt index e2917fbc099..8c96cc7156f 100644 --- a/requirements/static/ci/py3.11/freebsd.txt +++ b/requirements/static/ci/py3.11/freebsd.txt @@ -315,7 +315,7 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-timeout==1.4.2 +pytest-timeout==2.3.1 # via -r requirements/pytest.txt pytest==7.3.2 # via diff --git a/requirements/static/ci/py3.11/linux.txt b/requirements/static/ci/py3.11/linux.txt index eb479bfa93b..d90d36dd8f8 100644 --- a/requirements/static/ci/py3.11/linux.txt +++ b/requirements/static/ci/py3.11/linux.txt @@ -332,7 +332,7 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-timeout==1.4.2 +pytest-timeout==2.3.1 # via -r requirements/pytest.txt pytest==7.3.2 # via diff --git a/requirements/static/ci/py3.11/windows.txt b/requirements/static/ci/py3.11/windows.txt index a91f9965484..1568d217eb2 100644 --- a/requirements/static/ci/py3.11/windows.txt +++ b/requirements/static/ci/py3.11/windows.txt @@ -319,7 +319,7 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-timeout==2.1.0 +pytest-timeout==2.3.1 # via -r requirements/pytest.txt pytest==7.3.2 # via diff --git a/requirements/static/ci/py3.12/cloud.txt b/requirements/static/ci/py3.12/cloud.txt index 
d7cacc921d9..34ea9ce97ab 100644 --- a/requirements/static/ci/py3.12/cloud.txt +++ b/requirements/static/ci/py3.12/cloud.txt @@ -460,7 +460,7 @@ pytest-system-statistics==1.0.2 # via # -c requirements/static/ci/py3.12/linux.txt # pytest-salt-factories -pytest-timeout==1.4.2 +pytest-timeout==2.3.1 # via # -c requirements/static/ci/py3.12/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.12/darwin.txt b/requirements/static/ci/py3.12/darwin.txt index 848c1ba09a5..e0a1c165ae8 100644 --- a/requirements/static/ci/py3.12/darwin.txt +++ b/requirements/static/ci/py3.12/darwin.txt @@ -316,7 +316,7 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-timeout==1.4.2 +pytest-timeout==2.3.1 # via -r requirements/pytest.txt pytest==7.3.2 # via diff --git a/requirements/static/ci/py3.12/freebsd.txt b/requirements/static/ci/py3.12/freebsd.txt index 665eca47f4b..e22a444c272 100644 --- a/requirements/static/ci/py3.12/freebsd.txt +++ b/requirements/static/ci/py3.12/freebsd.txt @@ -315,7 +315,7 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-timeout==1.4.2 +pytest-timeout==2.3.1 # via -r requirements/pytest.txt pytest==7.3.2 # via diff --git a/requirements/static/ci/py3.12/linux.txt b/requirements/static/ci/py3.12/linux.txt index 5eadb7bdb10..5a851d445b7 100644 --- a/requirements/static/ci/py3.12/linux.txt +++ b/requirements/static/ci/py3.12/linux.txt @@ -332,7 +332,7 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-timeout==1.4.2 +pytest-timeout==2.3.1 # via -r requirements/pytest.txt pytest==7.3.2 # via diff --git a/requirements/static/ci/py3.12/windows.txt b/requirements/static/ci/py3.12/windows.txt index b7e2bcd28af..2a07172fb2a 100644 --- a/requirements/static/ci/py3.12/windows.txt +++ b/requirements/static/ci/py3.12/windows.txt @@ -319,7 +319,7 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-timeout==2.1.0 +pytest-timeout==2.3.1 # via -r requirements/pytest.txt pytest==7.3.2 # via diff --git a/requirements/static/ci/py3.7/cloud.txt b/requirements/static/ci/py3.7/cloud.txt index 52c608a3d08..4eebe8c3fce 100644 --- a/requirements/static/ci/py3.7/cloud.txt +++ b/requirements/static/ci/py3.7/cloud.txt @@ -543,7 +543,7 @@ pytest-system-statistics==1.0.2 # via # -c requirements/static/ci/py3.7/linux.txt # pytest-salt-factories -pytest-timeout==1.4.2 +pytest-timeout==2.3.1 # via # -c requirements/static/ci/py3.7/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.7/freebsd.txt b/requirements/static/ci/py3.7/freebsd.txt index cfe9782bff3..907bfe38c62 100644 --- a/requirements/static/ci/py3.7/freebsd.txt +++ b/requirements/static/ci/py3.7/freebsd.txt @@ -378,7 +378,7 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-timeout==1.4.2 +pytest-timeout==2.3.1 # via -r requirements/pytest.txt pytest==7.3.2 # via diff --git a/requirements/static/ci/py3.7/linux.txt b/requirements/static/ci/py3.7/linux.txt index 96d1ee2e617..77044b1661d 100644 --- a/requirements/static/ci/py3.7/linux.txt +++ b/requirements/static/ci/py3.7/linux.txt @@ -390,7 +390,7 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-timeout==1.4.2 
+pytest-timeout==2.3.1 # via -r requirements/pytest.txt pytest==7.3.2 # via diff --git a/requirements/static/ci/py3.7/windows.txt b/requirements/static/ci/py3.7/windows.txt index 5b88e0af436..f8dce51150f 100644 --- a/requirements/static/ci/py3.7/windows.txt +++ b/requirements/static/ci/py3.7/windows.txt @@ -337,7 +337,7 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-timeout==1.4.2 +pytest-timeout==2.3.1 # via -r requirements/pytest.txt pytest==7.3.2 # via diff --git a/requirements/static/ci/py3.8/cloud.txt b/requirements/static/ci/py3.8/cloud.txt index 8f8e877ef59..b7c860f0a21 100644 --- a/requirements/static/ci/py3.8/cloud.txt +++ b/requirements/static/ci/py3.8/cloud.txt @@ -530,7 +530,7 @@ pytest-system-statistics==1.0.2 # via # -c requirements/static/ci/py3.8/linux.txt # pytest-salt-factories -pytest-timeout==1.4.2 +pytest-timeout==2.3.1 # via # -c requirements/static/ci/py3.8/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.8/freebsd.txt b/requirements/static/ci/py3.8/freebsd.txt index f21ae83c808..2fe8376de2a 100644 --- a/requirements/static/ci/py3.8/freebsd.txt +++ b/requirements/static/ci/py3.8/freebsd.txt @@ -365,7 +365,7 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-timeout==1.4.2 +pytest-timeout==2.3.1 # via -r requirements/pytest.txt pytest==7.3.2 # via diff --git a/requirements/static/ci/py3.8/linux.txt b/requirements/static/ci/py3.8/linux.txt index 6a4845f41b8..ed32730015a 100644 --- a/requirements/static/ci/py3.8/linux.txt +++ b/requirements/static/ci/py3.8/linux.txt @@ -377,7 +377,7 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-timeout==1.4.2 +pytest-timeout==2.3.1 # via -r requirements/pytest.txt pytest==7.3.2 # via diff --git a/requirements/static/ci/py3.8/windows.txt b/requirements/static/ci/py3.8/windows.txt index dde66fdf584..658797085da 100644 --- a/requirements/static/ci/py3.8/windows.txt +++ b/requirements/static/ci/py3.8/windows.txt @@ -324,7 +324,7 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-timeout==1.4.2 +pytest-timeout==2.3.1 # via -r requirements/pytest.txt pytest==7.3.2 # via diff --git a/requirements/static/ci/py3.9/cloud.txt b/requirements/static/ci/py3.9/cloud.txt index 0af223eea14..411e589ac4d 100644 --- a/requirements/static/ci/py3.9/cloud.txt +++ b/requirements/static/ci/py3.9/cloud.txt @@ -532,7 +532,7 @@ pytest-system-statistics==1.0.2 # via # -c requirements/static/ci/py3.9/linux.txt # pytest-salt-factories -pytest-timeout==1.4.2 +pytest-timeout==2.3.1 # via # -c requirements/static/ci/py3.9/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.9/darwin.txt b/requirements/static/ci/py3.9/darwin.txt index f531c09cbfd..07849b7b168 100644 --- a/requirements/static/ci/py3.9/darwin.txt +++ b/requirements/static/ci/py3.9/darwin.txt @@ -374,7 +374,7 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-timeout==1.4.2 +pytest-timeout==2.3.1 # via -r requirements/pytest.txt pytest==7.3.2 # via diff --git a/requirements/static/ci/py3.9/freebsd.txt b/requirements/static/ci/py3.9/freebsd.txt index 6e0d5b75759..ceed8c1c838 100644 --- a/requirements/static/ci/py3.9/freebsd.txt +++ 
b/requirements/static/ci/py3.9/freebsd.txt @@ -367,7 +367,7 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-timeout==1.4.2 +pytest-timeout==2.3.1 # via -r requirements/pytest.txt pytest==7.3.2 # via diff --git a/requirements/static/ci/py3.9/linux.txt b/requirements/static/ci/py3.9/linux.txt index cba580cfb90..d9ce9adf4cf 100644 --- a/requirements/static/ci/py3.9/linux.txt +++ b/requirements/static/ci/py3.9/linux.txt @@ -377,7 +377,7 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-timeout==1.4.2 +pytest-timeout==2.3.1 # via -r requirements/pytest.txt pytest==7.3.2 # via diff --git a/requirements/static/ci/py3.9/windows.txt b/requirements/static/ci/py3.9/windows.txt index 51937f10ec8..58630bde24f 100644 --- a/requirements/static/ci/py3.9/windows.txt +++ b/requirements/static/ci/py3.9/windows.txt @@ -325,7 +325,7 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-timeout==1.4.2 +pytest-timeout==2.3.1 # via -r requirements/pytest.txt pytest==7.3.2 # via From c402aab17c89ae7760d78283c2970a22b4329340 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Fri, 8 Mar 2024 12:16:39 +0000 Subject: [PATCH 019/102] Bump the timeout for the pyobjects unit tests --- tests/unit/utils/test_pyobjects.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/unit/utils/test_pyobjects.py b/tests/unit/utils/test_pyobjects.py index 6a7bd406237..981a3351cde 100644 --- a/tests/unit/utils/test_pyobjects.py +++ b/tests/unit/utils/test_pyobjects.py @@ -24,6 +24,10 @@ from salt.utils.pyobjects import ( from tests.support.runtests import RUNTIME_VARS from tests.support.unit import TestCase +pytestmark = [ + pytest.mark.timeout_unless_on_windows(240), +] + log = logging.getLogger(__name__) From fb86bf3423b977ee2a0417645bc40f55cfe9363f Mon Sep 17 00:00:00 2001 From: Erik Johnson Date: Wed, 21 Feb 2024 17:57:36 -0600 Subject: [PATCH 020/102] Fix file.directory clobbering backupname path with test=True --- salt/states/file.py | 20 ++++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/salt/states/file.py b/salt/states/file.py index e81dafa4a59..6157a5c0810 100644 --- a/salt/states/file.py +++ b/salt/states/file.py @@ -3888,13 +3888,25 @@ def directory( if not force: return _error( ret, - "File exists where the backup target {} should go".format( - backupname - ), + f"File exists where the backup target {backupname} should go", ) + if __opts__["test"]: + ret["changes"][ + "forced" + ] = f"Existing file at backup path {backupname} would be removed" else: __salt__["file.remove"](backupname) - os.rename(name, backupname) + + if __opts__["test"]: + ret["changes"]["backup"] = f"{name} would be renamed to {backupname}" + ret["changes"][name] = {"directory": "new"} + ret[ + "comment" + ] = f"{name} would be backed up and replaced with a new directory" + ret["result"] = None + return ret + else: + os.rename(name, backupname) elif force: # Remove whatever is in the way if os.path.isfile(name): From 9344558cc9a335fad077b9a0d52d9e698011ae5b Mon Sep 17 00:00:00 2001 From: Erik Johnson Date: Wed, 21 Feb 2024 18:57:41 -0600 Subject: [PATCH 021/102] Add test case --- .../functional/states/file/test_directory.py | 52 +++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/tests/pytests/functional/states/file/test_directory.py 
b/tests/pytests/functional/states/file/test_directory.py index 82a3f7f154c..904a7d43b97 100644 --- a/tests/pytests/functional/states/file/test_directory.py +++ b/tests/pytests/functional/states/file/test_directory.py @@ -436,3 +436,55 @@ def test_issue_12209_follow_symlinks( assert one_group_check == state_file_account.group.name two_group_check = modules.file.get_group(str(twodir), follow_symlinks=False) assert two_group_check == state_file_account.group.name + + +@pytest.mark.parametrize("backupname_isfile", [False, True]) +def test_directory_backupname_force_test_mode_noclobber( + file, tmp_path, backupname_isfile +): + """ + Ensure that file.directory does not make changes when backupname is used + alongside force=True and test=True. + + See https://github.com/saltstack/salt/issues/66049 + """ + source_dir = tmp_path / "source_directory" + dest_dir = tmp_path / "dest_directory" + backupname = tmp_path / "backup_dir" + source_dir.mkdir() + dest_dir.symlink_to(source_dir.resolve()) + + if backupname_isfile: + backupname.touch() + assert backupname.is_file() + + ret = file.directory( + name=str(dest_dir), + allow_symlink=False, + force=True, + backupname=str(backupname), + test=True, + ) + + # Confirm None result + assert ret.result is None + try: + # Confirm dest_dir not modified + assert dest_dir.readlink() == source_dir + except OSError: + pytest.fail(f"{dest_dir} was modified") + + # Confirm that comment and changes match what we expect + assert ( + ret.comment + == f"{dest_dir} would be backed up and replaced with a new directory" + ) + assert ret.changes[str(dest_dir)] == {"directory": "new"} + assert ret.changes["backup"] == f"{dest_dir} would be renamed to {backupname}" + + if backupname_isfile: + assert ret.changes["forced"] == ( + f"Existing file at backup path {backupname} would be removed" + ) + else: + assert "forced" not in ret.changes From 6da38663e72a4d9976b4567dd1bbd3f8a24c182d Mon Sep 17 00:00:00 2001 From: Erik Johnson Date: Wed, 21 Feb 2024 19:04:49 -0600 Subject: [PATCH 022/102] Add changelog entry --- changelog/66049.fixed.md | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 changelog/66049.fixed.md diff --git a/changelog/66049.fixed.md b/changelog/66049.fixed.md new file mode 100644 index 00000000000..baff6e063d3 --- /dev/null +++ b/changelog/66049.fixed.md @@ -0,0 +1,2 @@ +Fixed an issue with file.directory state where paths would be modified in test +mode if backupname is used. 
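The three commits above change how file.directory behaves in test mode when force and backupname are used together. A minimal state that exercises this path might look like the following sketch; the state file name and the paths shown are illustrative only and do not come from the patches:

    /srv/app/config:
      file.directory:
        - force: True
        - allow_symlink: False
        - backupname: /srv/app/config.bak

With this fix, running such a state in test mode (for example, salt-call state.apply app.config test=True, where app.config is a hypothetical SLS name) should only report that /srv/app/config would be renamed to /srv/app/config.bak and replaced with a new directory; previously the rename happened even in test mode.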
From 8512c0893a0758dfd48d8495e0e6c4f3ddd6906f Mon Sep 17 00:00:00 2001 From: Erik Johnson Date: Mon, 11 Mar 2024 09:28:58 -0500 Subject: [PATCH 023/102] black --- salt/states/file.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/salt/states/file.py b/salt/states/file.py index 6157a5c0810..c2a2a08c997 100644 --- a/salt/states/file.py +++ b/salt/states/file.py @@ -3900,9 +3900,9 @@ def directory( if __opts__["test"]: ret["changes"]["backup"] = f"{name} would be renamed to {backupname}" ret["changes"][name] = {"directory": "new"} - ret[ - "comment" - ] = f"{name} would be backed up and replaced with a new directory" + ret["comment"] = ( + f"{name} would be backed up and replaced with a new directory" + ) ret["result"] = None return ret else: From 0105aecd961e1e2c89785df70d5075d0f16f6810 Mon Sep 17 00:00:00 2001 From: Shane Lee Date: Mon, 11 Mar 2024 08:52:05 -0600 Subject: [PATCH 024/102] Fix test on Windows --- tests/pytests/functional/states/file/test_directory.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/pytests/functional/states/file/test_directory.py b/tests/pytests/functional/states/file/test_directory.py index 904a7d43b97..19cb3f94876 100644 --- a/tests/pytests/functional/states/file/test_directory.py +++ b/tests/pytests/functional/states/file/test_directory.py @@ -449,9 +449,9 @@ def test_directory_backupname_force_test_mode_noclobber( See https://github.com/saltstack/salt/issues/66049 """ source_dir = tmp_path / "source_directory" + source_dir.mkdir() dest_dir = tmp_path / "dest_directory" backupname = tmp_path / "backup_dir" - source_dir.mkdir() dest_dir.symlink_to(source_dir.resolve()) if backupname_isfile: @@ -470,7 +470,7 @@ def test_directory_backupname_force_test_mode_noclobber( assert ret.result is None try: # Confirm dest_dir not modified - assert dest_dir.readlink() == source_dir + assert salt.utils.path.readlink(str(dest_dir)) == str(source_dir) except OSError: pytest.fail(f"{dest_dir} was modified") From d09b485c16c28a55ac09517d5da6425510cc899c Mon Sep 17 00:00:00 2001 From: Shane Lee Date: Thu, 7 Mar 2024 11:58:50 -0700 Subject: [PATCH 025/102] Honor saltenv in refresh_db on Windows --- changelog/61807.fixed.md | 1 + .../windows/windows-package-manager.rst | 149 ++++++++++++++++-- salt/modules/win_pkg.py | 18 ++- tests/pytests/unit/modules/test_win_pkg.py | 18 +++ 4 files changed, 170 insertions(+), 16 deletions(-) create mode 100644 changelog/61807.fixed.md diff --git a/changelog/61807.fixed.md b/changelog/61807.fixed.md new file mode 100644 index 00000000000..88d16c6352d --- /dev/null +++ b/changelog/61807.fixed.md @@ -0,0 +1 @@ +pkg.refresh_db on Windows now honors saltenv diff --git a/doc/topics/windows/windows-package-manager.rst b/doc/topics/windows/windows-package-manager.rst index 100c85504f3..38c5ba4dd3c 100644 --- a/doc/topics/windows/windows-package-manager.rst +++ b/doc/topics/windows/windows-package-manager.rst @@ -301,15 +301,15 @@ winrepo_source_dir :conf_minion:`winrepo_source_dir` (str) -The location of the .sls files on the Salt file server. This allows for using -different environments. Default is ``salt://win/repo-ng/``. +The location of the .sls files on the Salt file server. Default is +``salt://win/repo-ng/``. .. warning:: - If the default for ``winrepo_dir_ng`` is changed, this setting may need to - changed on each minion. The default setting for ``winrepo_dir_ng`` is - ``/srv/salt/win/repo-ng``. 
If that were changed to ``/srv/salt/new/repo-ng`` - then the ``winrepo_source_dir`` would need to be changed to - ``salt://new/repo-ng`` + If the default for ``winrepo_dir_ng`` is changed, then this setting will + also need to be changed on each minion. The default setting for + ``winrepo_dir_ng`` is ``/srv/salt/win/repo-ng``. If that were changed to + ``/srv/salt/new/repo-ng`` then the ``winrepo_source_dir`` would need to be + changed to ``salt://new/repo-ng`` Masterless Minion Configuration =============================== @@ -332,7 +332,7 @@ winrepo_dir This setting is maintained for backwards compatibility with legacy minions. It points to the location in the ``file_roots`` where the winrepo files are kept. -The default is: ``C:\salt\srv\salt\win\repo`` +The default is: ``C:\ProgramData\Salt Project\Salt\srv\salt\win\repo`` winrepo_dir_ng -------------- @@ -340,7 +340,7 @@ winrepo_dir_ng :conf_minion:`winrepo_dir_ng` (str) The location in the ``file_roots where the winrepo files are kept. The default -is ``C:\salt\srv\salt\win\repo-ng``. +is ``C:\ProgramData\Salt Project\Salt\srv\salt\win\repo-ng``. .. warning:: You can change the location of the winrepo directory. However, it must @@ -466,6 +466,137 @@ that succeeded or failed to compile: Use ``pkg.refresh_db`` when developing new Windows package definitions to check for errors in the definitions against one or more Windows minions. + +Sample Configurations +********************* + +Masterless +========== + +The configs in this section are for working with winrepo on a Windows minion +using ``salt-call --local``. + +Default Configuration +--------------------- + +This is the default configuration if nothing is configured in the minion config. +The config is shown here for clarity. These are the defaults: + +.. code-block:: yaml + + file_roots: + base: + - C:\ProgramData\Salt Project\Salt\srv\salt + winrepo_source_dir: 'salt://win/repo-ng' + winrepo_dir_ng: C:\ProgramData\Salt Project\Salt\srv\salt\win\repo-ng + +The :mod:`winrepo.update_git_repos ` +command will clone the repository to ``win\repo-ng`` on the file_roots. + +Multiple Salt Environments +-------------------------- + +This starts to get a little tricky. The winrepo repository doesn't +get cloned to each environment when you run +:mod:`winrepo.update_git_repos `, so to +make this work, all environments share the same winrepo. Applying states using +the ``saltenv`` option will find the state files in the appropriate environment, +but the package definition files will always be pulled from the same location. +Therefore, you have to put the same winrepo location in each saltenv. Here's how +this would look: + +.. code-block:: yaml + + file_roots: + base: + - C:\ProgramData\Salt Project\Salt\srv\salt\base + - C:\ProgramData\Salt Project\Salt\srv\salt\winrepo + test: + - C:\ProgramData\Salt Project\Salt\srv\salt\test + - C:\ProgramData\Salt Project\Salt\srv\salt\winrepo + winrepo_source_dir: 'salt://salt-winrepo-ng' + winrepo_dir_ng: C:\ProgramData\Salt Project\Salt\srv\salt\winrepo + winrepo_dir: C:\ProgramData\Salt Project\Salt\srv\salt\winrepo + +When you run +:mod:`winrepo.update_git_repos ` the +Git repository will be cloned to the location specified in the +``winrepo_dir_ng`` setting. I specified the ``winrepo_dir`` setting just so +everything gets cloned to the same place. The directory that gets cloned is +named ``salt-winrepo-ng`` so you specify that in the ``winrepo_source_dir`` +setting. + +The ``winrepo`` directory should only contain the package definition files. 
You +wouldn't want to place any states in the ``winrepo`` directory as they will be +available to both environments. + +Master +====== + +When working in a Master/Minion environment you have to split up some of the +config settings between the master and the minion. Here are some sample configs +for winrepo in a Master/Minion environment. + +Default Configuration +--------------------- + +This is the default configuration if nothing is configured. The config is shown +here for clarity. These are the defaults on the master: + +.. code-block:: yaml + + file_roots: + base: + - /srv/salt + winrepo_dir_ng: /srv/salt/win/repo-ng + +This is the default in the minion config: + +.. code-block:: yaml + + winrepo_source_dir: 'salt://win/repo-ng' + +The :mod:`winrepo.update_git_repos ` +command will clone the repository to ``win\repo-ng`` on the file_roots. + +Multiple Salt Environments +-------------------------- + +To set up multiple saltenvs using a Master/Minion configuration set the +following in the master config: + +.. code-block:: yaml + + file_roots: + base: + - /srv/salt/base + - /srv/salt/winrepo + test: + - /srv/salt/test + - /srv/salt/winrepo + winrepo_dir_ng: /srv/salt/winrepo + winrepo_dir: /srv/salt/winrepo + +Use the winrepo runner to set up the winrepo repository on the master. + +.. code-block:: bash + + salt-run winrepo.update_git_repos + +The winrepo will be cloned to ``/srv/salt/winrepo`` under a directory named +``salt-winrepo-ng``. + +Set the following on the minion config so the minion knows where to find the +package definition files in the file_roots: + +.. code-block:: yaml + + winrepo_source_dir: 'salt://salt-winrepo-ng' + +The same stipulations apply in a Master/Minion configuration as they do in a +Masterless configuration + + Usage ***** diff --git a/salt/modules/win_pkg.py b/salt/modules/win_pkg.py index aff40b3d54d..e96b8123e43 100644 --- a/salt/modules/win_pkg.py +++ b/salt/modules/win_pkg.py @@ -907,7 +907,7 @@ def refresh_db(**kwargs): The database is stored in a serialized format located by default at the following location: - ``C:\salt\var\cache\salt\minion\files\base\win\repo-ng\winrepo.p`` + ``C:\ProgramData\Salt Project\Salt\var\cache\salt\minion\files\base\win\repo-ng\winrepo.p`` This module performs the following steps to generate the software metadata database: @@ -976,7 +976,7 @@ def refresh_db(**kwargs): .. warning:: When calling this command from a state using `module.run` be sure to - pass `failhard: False`. Otherwise the state will report failure if it + pass `failhard: False`. Otherwise, the state will report failure if it encounters a bad software definition file. 
CLI Example: @@ -1170,10 +1170,11 @@ def genrepo(**kwargs): if name.endswith(".sls"): total_files_processed += 1 _repo_process_pkg_sls( - os.path.join(root, name), - os.path.join(short_path, name), - ret, - successful_verbose, + filename=os.path.join(root, name), + short_path_name=os.path.join(short_path, name), + ret=ret, + successful_verbose=successful_verbose, + saltenv=saltenv, ) with salt.utils.files.fopen(repo_details.winrepo_file, "wb") as repo_cache: @@ -1212,7 +1213,9 @@ def genrepo(**kwargs): return results -def _repo_process_pkg_sls(filename, short_path_name, ret, successful_verbose): +def _repo_process_pkg_sls( + filename, short_path_name, ret, successful_verbose, saltenv="base" +): renderers = salt.loader.render(__opts__, __salt__) def _failed_compile(prefix_msg, error_msg): @@ -1227,6 +1230,7 @@ def _repo_process_pkg_sls(filename, short_path_name, ret, successful_verbose): __opts__["renderer"], __opts__.get("renderer_blacklist", ""), __opts__.get("renderer_whitelist", ""), + saltenv=saltenv, ) except SaltRenderError as exc: return _failed_compile("Failed to compile", exc) diff --git a/tests/pytests/unit/modules/test_win_pkg.py b/tests/pytests/unit/modules/test_win_pkg.py index d892e79a21b..3ae8f24f8dd 100644 --- a/tests/pytests/unit/modules/test_win_pkg.py +++ b/tests/pytests/unit/modules/test_win_pkg.py @@ -755,3 +755,21 @@ def test__reverse_cmp_pkg_versions(v1, v2, expected): assert result == expected, "cmp({}, {}) should be {}, got {}".format( v1, v2, expected, result ) + + +def test__repo_process_pkg_sls(): + patch_render = patch("salt.loader.render") + patch_opts = patch.dict(win_pkg.__opts__, {"renderer": None}) + patch_compile = patch("salt.template.compile_template", return_value="junk") + with patch_opts, patch_render as render, patch_compile as test: + ret = win_pkg._repo_process_pkg_sls( + filename="junk", + short_path_name="junk", + ret={}, + successful_verbose=False, + saltenv="spongebob", + ) + assert ret is False + test.assert_called_once_with( + "junk", render(), None, "", "", saltenv="spongebob" + ) From 176ccd63e35f0e246746382d4ae02fc3fab09c6f Mon Sep 17 00:00:00 2001 From: Shane Lee Date: Fri, 8 Mar 2024 10:10:05 -0700 Subject: [PATCH 026/102] Fix issue with refresh_db on Windows When a new package definition file was added to the repo you had to run refresh_db twice to get it to show up in the package databse. This clears the cache before refreshing the database to force changes. 
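The gist of the fix, as a minimal sketch rather than the actual `refresh_db` code (it assumes a loaded minion `__opts__` dictionary is passed in as `opts`):

```python
# Sketch only: drop the fileserver's cached file list so newly added .sls
# package definitions are re-discovered on the next fetch, instead of
# requiring a second refresh_db run.
import salt.fileserver


def clear_winrepo_file_list_cache(opts, saltenv="base"):
    fileserver = salt.fileserver.Fileserver(opts)
    # As in the patch, fsbackend is left as None so the clear is not
    # restricted to a single fileserver backend.
    load = {"saltenv": saltenv, "fsbackend": None}
    fileserver.clear_file_list_cache(load=load)
```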
--- changelog/63848.fixed.md | 2 ++ salt/modules/win_pkg.py | 26 ++++++++++++-- tests/filename_map.yml | 1 + .../functional/modules/test_win_pkg.py | 35 +++++++++++++++++++ 4 files changed, 62 insertions(+), 2 deletions(-) create mode 100644 changelog/63848.fixed.md create mode 100644 tests/pytests/functional/modules/test_win_pkg.py diff --git a/changelog/63848.fixed.md b/changelog/63848.fixed.md new file mode 100644 index 00000000000..d297d7a3467 --- /dev/null +++ b/changelog/63848.fixed.md @@ -0,0 +1,2 @@ +Fixes an issue in pkg.refresh_db on Windows where new package definition +files were not being picked up on the first run diff --git a/salt/modules/win_pkg.py b/salt/modules/win_pkg.py index e96b8123e43..8b834bbd33e 100644 --- a/salt/modules/win_pkg.py +++ b/salt/modules/win_pkg.py @@ -47,6 +47,7 @@ import time import urllib.parse from functools import cmp_to_key +import salt.fileserver import salt.payload import salt.syspaths import salt.utils.args @@ -915,7 +916,7 @@ def refresh_db(**kwargs): - Fetch the package definition files (.sls) from `winrepo_source_dir` (default `salt://win/repo-ng`) and cache them in `\files\\` - (default: ``C:\salt\var\cache\salt\minion\files\base\win\repo-ng``) + (default: ``C:\ProgramData\Salt Project\Salt\var\cache\salt\minion\files\base\win\repo-ng``) - Call :py:func:`pkg.genrepo ` to parse the package definition files and generate the repository metadata database file (`winrepo.p`) @@ -1020,6 +1021,11 @@ def refresh_db(**kwargs): "Failed to clear one or more winrepo cache files", info={"failed": failed} ) + # Clear the cache so that newly copied package definitions will be picked up + fileserver = salt.fileserver.Fileserver(__opts__) + load = {"saltenv": saltenv, "fsbackend": None} + fileserver.clear_file_list_cache(load=load) + # Cache repo-ng locally log.info("Fetching *.sls files from %s", repo_details.winrepo_source_dir) try: @@ -2363,7 +2369,23 @@ def _get_name_map(saltenv="base"): def get_package_info(name, saltenv="base"): """ - Return package info. Returns empty map if package not available. + Get information about the package as found in the winrepo database + + Args: + + name (str): The name of the package + + saltenv (str): The salt environment to use. Default is "base" + + Returns: + dict: A dictionary of package info, empty if package not available + + CLI Example: + + .. 
code-block:: bash + + salt '*' pkg.get_package_info chrome + """ return _get_package_info(name=name, saltenv=saltenv) diff --git a/tests/filename_map.yml b/tests/filename_map.yml index ded66d32410..d4a2143d3be 100644 --- a/tests/filename_map.yml +++ b/tests/filename_map.yml @@ -17,6 +17,7 @@ salt/_logging/(impl|handlers).py: salt/modules/(apkpkg|aptpkg|ebuildpkg|dpkg_lowpkg|freebsdpkg|mac_brew_pkg|mac_ports_pkg|openbsdpkg|opkg|pacmanpkg|pkgin|pkgng|pkg_resource|rpm_lowpkg|solarisipspkg|solarispkg|win_pkg|xbpspkg|yumpkg|zypperpkg)\.py: - pytests.unit.states.test_pkg - pytests.functional.modules.test_pkg + - pytests.functional.modules.test_win_pkg - pytests.functional.states.test_pkg - pytests.functional.states.pkgrepo.test_centos - pytests.functional.states.pkgrepo.test_debian diff --git a/tests/pytests/functional/modules/test_win_pkg.py b/tests/pytests/functional/modules/test_win_pkg.py new file mode 100644 index 00000000000..b68895ef625 --- /dev/null +++ b/tests/pytests/functional/modules/test_win_pkg.py @@ -0,0 +1,35 @@ +import pytest + +pytestmark = [ + pytest.mark.windows_whitelisted, + pytest.mark.skip_unless_on_windows, + pytest.mark.slow_test, +] + + +@pytest.fixture(scope="module") +def pkg_def_contents(state_tree): + return r""" + my-software: + '1.0.1': + full_name: 'My Software' + installer: 'C:\files\mysoftware.msi' + install_flags: '/qn /norestart' + uninstaller: 'C:\files\mysoftware.msi' + uninstall_flags: '/qn /norestart' + msiexec: True + reboot: False + """ + + +@pytest.fixture(scope="module") +def pkg(modules): + yield modules.pkg + + +def test_refresh_db(pkg, pkg_def_contents, state_tree, minion_opts): + assert len(pkg.get_package_info("my-software")) == 0 + repo_dir = state_tree / "win" / "repo-ng" + with pytest.helpers.temp_file("my-software.sls", pkg_def_contents, repo_dir): + pkg.refresh_db() + assert len(pkg.get_package_info("my-software")) == 1 From 0fbe408bbbbf8b8eb034b56b9dd1e15c9510b1ec Mon Sep 17 00:00:00 2001 From: Shane Lee Date: Fri, 8 Mar 2024 10:25:06 -0700 Subject: [PATCH 027/102] Remove get_package_info from missing_examples --- tools/precommit/docstrings.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tools/precommit/docstrings.py b/tools/precommit/docstrings.py index b26f8b1a954..665b171fd64 100644 --- a/tools/precommit/docstrings.py +++ b/tools/precommit/docstrings.py @@ -751,7 +751,6 @@ MISSING_EXAMPLES = { "delete_advanced_configs", "get_vm", ], - "salt/modules/win_pkg.py": ["get_package_info"], "salt/modules/win_timezone.py": ["zone_compare"], "salt/modules/zk_concurrency.py": [ "lock", From 9fb22eee5f14a16e61651faa48837b55eee9ebf3 Mon Sep 17 00:00:00 2001 From: Shane Lee Date: Tue, 12 Mar 2024 07:59:20 -0600 Subject: [PATCH 028/102] Remove offending colon --- .github/PULL_REQUEST_TEMPLATE.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 7d63a7c6f2c..629117fe672 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,7 +1,7 @@ ### What does this PR do? ### What issues does this PR fix or reference? 
-Fixes: +Fixes ### Previous Behavior Remove this section if not relevant From cbae0427011dcb97581f63a077fd692b4c69a0ee Mon Sep 17 00:00:00 2001 From: Shane Lee Date: Tue, 12 Mar 2024 11:19:00 -0600 Subject: [PATCH 029/102] Don't use curl on Windows --- tools/utils/__init__.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tools/utils/__init__.py b/tools/utils/__init__.py index 78913a43754..721b7670e17 100644 --- a/tools/utils/__init__.py +++ b/tools/utils/__init__.py @@ -223,7 +223,11 @@ def download_file( headers: dict[str, str] | None = None, ) -> pathlib.Path: ctx.info(f"Downloading {dest.name!r} @ {url} ...") - curl = shutil.which("curl") + if sys.platform == "win32": + # We don't want to use curl on Windows, it doesn't work + curl = None + else: + curl = shutil.which("curl") if curl is not None: command = [curl, "-sS", "-L"] if headers: From cb7cd3e8d97cd20812b82d62e4a664469a7b08f1 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Tue, 12 Mar 2024 13:58:55 +0000 Subject: [PATCH 030/102] Daylight savings suck! --- tests/pytests/functional/modules/test_mac_timezone.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/tests/pytests/functional/modules/test_mac_timezone.py b/tests/pytests/functional/modules/test_mac_timezone.py index 2f153dd9b35..3716a76162a 100644 --- a/tests/pytests/functional/modules/test_mac_timezone.py +++ b/tests/pytests/functional/modules/test_mac_timezone.py @@ -156,6 +156,9 @@ def test_get_offset(timezone): """ Test timezone.get_offset """ + pytz = pytest.importorskip("pytz") + now = datetime.datetime.now(tz=pytz.UTC) + ret = timezone.set_zone("Pacific/Wake") assert ret ret = timezone.get_offset() @@ -166,7 +169,11 @@ def test_get_offset(timezone): assert ret ret = timezone.get_offset() assert isinstance(ret, str) - assert ret == "-0800" + + if now.astimezone(pytz.timezone("America/Los_Angeles")).dst(): + assert ret == "-0700" + else: + assert ret == "-0800" @pytest.mark.usefixtures("_reset_zone") From 3c3285e51e10010a1203edda42a0000abd2fda1b Mon Sep 17 00:00:00 2001 From: Thomas Phipps Date: Wed, 13 Mar 2024 04:03:04 +0000 Subject: [PATCH 031/102] move fix from #66164 for $65703 --- changelog/66705.fixed.md | 1 + salt/modules/aptpkg.py | 39 ++++++++++++----------- tests/pytests/unit/modules/test_aptpkg.py | 35 ++++++++++++++++++++ 3 files changed, 56 insertions(+), 19 deletions(-) create mode 100644 changelog/66705.fixed.md diff --git a/changelog/66705.fixed.md b/changelog/66705.fixed.md new file mode 100644 index 00000000000..2c4d59d058a --- /dev/null +++ b/changelog/66705.fixed.md @@ -0,0 +1 @@ +backport the fix from #66164 to fix #65703. use OrderedDict to fix bad indexing. 
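To make the changelog entry concrete, here is a simplified, hypothetical sketch of the idea (the real `_get_opts` additionally normalizes the `arch` and `signed-by` values): options parsed from the bracketed `[k=v ...]` section of an APT source line go into an `OrderedDict`, so the line can be rebuilt in its original order without per-option index bookkeeping.

```python
# Simplified illustration, not the Salt implementation: insertion order in
# the OrderedDict replaces the old per-option "index" bookkeeping.
import re
from collections import OrderedDict


def parse_opts(line):
    match = re.search(r"\[(.*=.*)\]", line)
    opts = OrderedDict()
    if not match:
        return opts
    for opt in match.group(0).strip("[]").split():
        key, value = opt.split("=", 1)
        opts[key] = {"full": opt, "value": value}
    return opts


line = (
    "deb [arch=amd64 signed-by=/etc/apt/keyrings/example.key] "
    "https://example.com/pub/repos/apt xenial main"
)
opts = parse_opts(line)
# Rebuilding the bracketed section is now just iteration in insertion order.
rebuilt = "[{}]".format(" ".join(o["full"] for o in opts.values()))
assert rebuilt == "[arch=amd64 signed-by=/etc/apt/keyrings/example.key]"
```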
diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py index 191587dcd85..15f14b93cee 100644 --- a/salt/modules/aptpkg.py +++ b/salt/modules/aptpkg.py @@ -20,6 +20,7 @@ import re import shutil import tempfile import time +from collections import OrderedDict from urllib.error import HTTPError from urllib.request import Request as _Request from urllib.request import urlopen as _urlopen @@ -204,24 +205,26 @@ if not HAS_APT: repo_line.append(self.type) opts = _get_opts(self.line) if self.architectures: - archs = ",".join(self.architectures) - opts["arch"]["full"] = f"arch={archs}" + if "arch" not in opts: + opts["arch"] = {} + opts["arch"]["full"] = f"arch={','.join(self.architectures)}" opts["arch"]["value"] = self.architectures if self.signedby: + if "signedby" not in opts: + opts["signedby"] = {} opts["signedby"]["full"] = f"signed-by={self.signedby}" opts["signedby"]["value"] = self.signedby - ordered_opts = [ - opt_type for opt_type, opt in opts.items() if opt["full"] != "" - ] + ordered_opts = [] for opt in opts.values(): if opt["full"] != "": - ordered_opts[opt["index"]] = opt["full"] + ordered_opts.append(opt["full"]) if ordered_opts: - repo_line.append("[{}]".format(" ".join(ordered_opts))) + repo_line.append(f"[{' '.join(ordered_opts)}]") + print("repo_line") repo_line += [self.uri, self.dist, " ".join(self.comps)] if self.comment: repo_line.append(f"#{self.comment}") @@ -237,10 +240,12 @@ if not HAS_APT: if repo_line[1].startswith("["): repo_line = [x for x in (line.strip("[]") for line in repo_line) if x] opts = _get_opts(self.line) - self.architectures.extend(opts["arch"]["value"]) - self.signedby = opts["signedby"]["value"] - for opt in opts: - opt = opts[opt]["full"] + if "arch" in opts: + self.architectures.extend(opts["arch"]["value"]) + if "signedby" in opts: + self.signedby = opts["signedby"]["value"] + for opt in opts.values(): + opt = opt["full"] if opt: try: repo_line.pop(repo_line.index(opt)) @@ -1732,31 +1737,27 @@ def _get_opts(line): Return all opts in [] for a repo line """ get_opts = re.search(r"\[(.*=.*)\]", line) - ret = { - "arch": {"full": "", "value": "", "index": 0}, - "signedby": {"full": "", "value": "", "index": 0}, - } + ret = OrderedDict() if not get_opts: return ret opts = get_opts.group(0).strip("[]") architectures = [] - for idx, opt in enumerate(opts.split()): + for opt in opts.split(): if opt.startswith("arch"): architectures.extend(opt.split("=", 1)[1].split(",")) + ret["arch"] = {} ret["arch"]["full"] = opt ret["arch"]["value"] = architectures - ret["arch"]["index"] = idx elif opt.startswith("signed-by"): + ret["signedby"] = {} ret["signedby"]["full"] = opt ret["signedby"]["value"] = opt.split("=", 1)[1] - ret["signedby"]["index"] = idx else: other_opt = opt.split("=", 1)[0] ret[other_opt] = {} ret[other_opt]["full"] = opt ret[other_opt]["value"] = opt.split("=", 1)[1] - ret[other_opt]["index"] = idx return ret diff --git a/tests/pytests/unit/modules/test_aptpkg.py b/tests/pytests/unit/modules/test_aptpkg.py index fdad2be4343..90fd6e1a828 100644 --- a/tests/pytests/unit/modules/test_aptpkg.py +++ b/tests/pytests/unit/modules/test_aptpkg.py @@ -2303,3 +2303,38 @@ def test_set_selections_test(): with patch_get_sel, patch_call_apt, patch_opts: ret = aptpkg.set_selections(selection=f'{{"hold": [{pkg}]}}') assert ret == {} + + +def test__get_opts(): + tests = [ + { + "oneline": "deb [signed-by=/etc/apt/keyrings/example.key arch=amd64] https://example.com/pub/repos/apt xenial main", + "result": { + "signedby": { + "full": 
"signed-by=/etc/apt/keyrings/example.key", + "value": "/etc/apt/keyrings/example.key", + }, + "arch": {"full": "arch=amd64", "value": ["amd64"]}, + }, + }, + { + "oneline": "deb [arch=amd64 signed-by=/etc/apt/keyrings/example.key] https://example.com/pub/repos/apt xenial main", + "result": { + "arch": {"full": "arch=amd64", "value": ["amd64"]}, + "signedby": { + "full": "signed-by=/etc/apt/keyrings/example.key", + "value": "/etc/apt/keyrings/example.key", + }, + }, + }, + { + "oneline": "deb [arch=amd64] https://example.com/pub/repos/apt xenial main", + "result": { + "arch": {"full": "arch=amd64", "value": ["amd64"]}, + }, + }, + ] + + for test in tests: + ret = aptpkg._get_opts(test["oneline"]) + assert ret == test["result"] From 3b361a9e5b13dad3138c5a2ec9bce56f3c2d7615 Mon Sep 17 00:00:00 2001 From: Thomas Phipps Date: Wed, 13 Mar 2024 16:25:36 +0000 Subject: [PATCH 032/102] attempt #1 to fix tests --- salt/modules/aptpkg.py | 25 +++++++++++++++++++++---- 1 file changed, 21 insertions(+), 4 deletions(-) diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py index 15f14b93cee..6ce8c133d19 100644 --- a/salt/modules/aptpkg.py +++ b/salt/modules/aptpkg.py @@ -1770,7 +1770,11 @@ def _split_repo_str(repo): if not HAS_APT: signedby = entry.signedby else: - signedby = _get_opts(line=repo)["signedby"].get("value", "") + opts = _get_opts(line=repo) + if "signedby" in opts: + signedby = opts["signedby"].get("value", "") + else: + signedby = "" if signedby: # python3-apt does not support signedby. So if signedby # is in the repo we have to check our code to see if the @@ -1938,7 +1942,12 @@ def list_repos(**kwargs): if not HAS_APT: signedby = source.signedby else: - signedby = _get_opts(line=source.line)["signedby"].get("value", "") + opts = _get_opts(line=source.line) + if "signedby" in opts: + signedby = opts["signedby"].get("value", "") + else: + signedby = "" + repo = {} repo["file"] = source.file repo["comps"] = getattr(source, "comps", []) @@ -2958,7 +2967,11 @@ def mod_repo(repo, saltenv="base", aptkey=True, **kwargs): if not HAS_APT: signedby = mod_source.signedby else: - signedby = _get_opts(repo)["signedby"].get("value", "") + opts = _get_opts(repo) + if "signedby" in opts: + signedby = opts["signedby"].get("value", "") + else: + signedby = "" return { repo: { @@ -3059,7 +3072,11 @@ def _expand_repo_def(os_name, os_codename=None, **kwargs): signedby = source_entry.signedby kwargs["signedby"] = signedby else: - signedby = _get_opts(repo)["signedby"].get("value", "") + opts = _get_opts(repo) + if "signedby" in opts: + signedby = opts["signedby"].get("value", "") + else: + signedby = "" _source_entry = source_list.add( type=source_entry.type, From 666b50a56b8c7cc4aa2cf7c33428393d162e19b9 Mon Sep 17 00:00:00 2001 From: Thomas Phipps Date: Wed, 13 Mar 2024 23:17:01 +0000 Subject: [PATCH 033/102] stupid mistake correction --- salt/modules/aptpkg.py | 1 - 1 file changed, 1 deletion(-) diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py index 6ce8c133d19..a08c7348b7e 100644 --- a/salt/modules/aptpkg.py +++ b/salt/modules/aptpkg.py @@ -224,7 +224,6 @@ if not HAS_APT: if ordered_opts: repo_line.append(f"[{' '.join(ordered_opts)}]") - print("repo_line") repo_line += [self.uri, self.dist, " ".join(self.comps)] if self.comment: repo_line.append(f"#{self.comment}") From 8fc4738ead7d69030be67392a7db5943e58300a5 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Tue, 12 Mar 2024 09:52:47 +0000 Subject: [PATCH 034/102] Upgrade to Pytest 8.1.x --- .pre-commit-config.yaml | 5 ----- 
requirements/static/ci/py3.10/cloud.txt | 4 ++-- requirements/static/ci/py3.10/darwin.txt | 4 ++-- requirements/static/ci/py3.10/freebsd.txt | 4 ++-- requirements/static/ci/py3.10/linux.txt | 4 ++-- requirements/static/ci/py3.10/windows.txt | 4 ++-- requirements/static/ci/py3.11/cloud.txt | 4 ++-- requirements/static/ci/py3.11/darwin.txt | 4 ++-- requirements/static/ci/py3.11/freebsd.txt | 4 ++-- requirements/static/ci/py3.11/linux.txt | 4 ++-- requirements/static/ci/py3.11/windows.txt | 4 ++-- requirements/static/ci/py3.12/cloud.txt | 4 ++-- requirements/static/ci/py3.12/darwin.txt | 4 ++-- requirements/static/ci/py3.12/freebsd.txt | 4 ++-- requirements/static/ci/py3.12/linux.txt | 4 ++-- requirements/static/ci/py3.12/windows.txt | 4 ++-- requirements/static/ci/py3.8/cloud.txt | 4 ++-- requirements/static/ci/py3.8/freebsd.txt | 4 ++-- requirements/static/ci/py3.8/linux.txt | 4 ++-- requirements/static/ci/py3.8/windows.txt | 4 ++-- requirements/static/ci/py3.9/cloud.txt | 4 ++-- requirements/static/ci/py3.9/darwin.txt | 4 ++-- requirements/static/ci/py3.9/freebsd.txt | 4 ++-- requirements/static/ci/py3.9/linux.txt | 4 ++-- requirements/static/ci/py3.9/windows.txt | 4 ++-- 25 files changed, 48 insertions(+), 53 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7389a5d5b8c..5d338177f40 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -621,7 +621,6 @@ repos: - id: pip-tools-compile alias: compile-ci-linux-crypto-3.10-requirements name: Linux CI Py3.10 Crypto Requirements - files: ^requirements/(crypto\.txt|static/ci/crypto\.in)$ files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.10/linux-crypto\.txt))$ pass_filenames: false args: @@ -636,7 +635,6 @@ repos: - id: pip-tools-compile alias: compile-ci-linux-crypto-3.11-requirements name: Linux CI Py3.11 Crypto Requirements - files: ^requirements/(crypto\.txt|static/ci/crypto\.in)$ files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.11/linux-crypto\.txt))$ pass_filenames: false args: @@ -651,7 +649,6 @@ repos: - id: pip-tools-compile alias: compile-ci-linux-crypto-3.12-requirements name: Linux CI Py3.12 Crypto Requirements - files: ^requirements/(crypto\.txt|static/ci/crypto\.in)$ files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.12/linux-crypto\.txt))$ pass_filenames: false args: @@ -832,7 +829,6 @@ repos: - id: pip-tools-compile alias: compile-ci-freebsd-crypto-3.11-requirements name: FreeBSD CI Py3.11 Crypto Requirements - files: ^requirements/(crypto\.txt|static/ci/crypto\.in)$ files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.11/freebsd-crypto\.txt))$ pass_filenames: false args: @@ -847,7 +843,6 @@ repos: - id: pip-tools-compile alias: compile-ci-freebsd-crypto-3.12-requirements name: FreeBSD CI Py3.12 Crypto Requirements - files: ^requirements/(crypto\.txt|static/ci/crypto\.in)$ files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.12/freebsd-crypto\.txt))$ pass_filenames: false args: diff --git a/requirements/static/ci/py3.10/cloud.txt b/requirements/static/ci/py3.10/cloud.txt index 818a5e51986..d1e3503387e 100644 --- a/requirements/static/ci/py3.10/cloud.txt +++ b/requirements/static/ci/py3.10/cloud.txt @@ -383,7 +383,7 @@ platformdirs==2.2.0 # via # -c requirements/static/ci/py3.10/linux.txt # virtualenv -pluggy==0.13.0 +pluggy==1.4.0 # via # -c requirements/static/ci/py3.10/linux.txt # pytest @@ -500,7 +500,7 @@ pytest-timeout==2.3.1 # via # -c requirements/static/ci/py3.10/linux.txt # -r requirements/pytest.txt -pytest==7.3.2 
+pytest==8.1.1 # via # -c requirements/static/ci/py3.10/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.10/darwin.txt b/requirements/static/ci/py3.10/darwin.txt index 0fe37c86c69..a1580d5ba66 100644 --- a/requirements/static/ci/py3.10/darwin.txt +++ b/requirements/static/ci/py3.10/darwin.txt @@ -273,7 +273,7 @@ pathtools==0.1.2 # via watchdog platformdirs==2.2.0 # via virtualenv -pluggy==0.13.1 +pluggy==1.4.0 # via pytest portend==2.6 # via @@ -347,7 +347,7 @@ pytest-system-statistics==1.0.2 # via pytest-salt-factories pytest-timeout==2.3.1 # via -r requirements/pytest.txt -pytest==7.3.2 +pytest==8.1.1 # via # -r requirements/pytest.txt # pytest-custom-exit-code diff --git a/requirements/static/ci/py3.10/freebsd.txt b/requirements/static/ci/py3.10/freebsd.txt index 6c17bca2934..c1d312792a5 100644 --- a/requirements/static/ci/py3.10/freebsd.txt +++ b/requirements/static/ci/py3.10/freebsd.txt @@ -266,7 +266,7 @@ pathtools==0.1.2 # via watchdog platformdirs==2.2.0 # via virtualenv -pluggy==0.13.0 +pluggy==1.4.0 # via pytest portend==2.4 # via @@ -340,7 +340,7 @@ pytest-system-statistics==1.0.2 # via pytest-salt-factories pytest-timeout==2.3.1 # via -r requirements/pytest.txt -pytest==7.3.2 +pytest==8.1.1 # via # -r requirements/pytest.txt # pytest-custom-exit-code diff --git a/requirements/static/ci/py3.10/linux.txt b/requirements/static/ci/py3.10/linux.txt index a5d54eb879b..97b6a9289ad 100644 --- a/requirements/static/ci/py3.10/linux.txt +++ b/requirements/static/ci/py3.10/linux.txt @@ -277,7 +277,7 @@ pathtools==0.1.2 # via watchdog platformdirs==2.2.0 # via virtualenv -pluggy==0.13.0 +pluggy==1.4.0 # via pytest portend==2.4 # via @@ -357,7 +357,7 @@ pytest-system-statistics==1.0.2 # via pytest-salt-factories pytest-timeout==2.3.1 # via -r requirements/pytest.txt -pytest==7.3.2 +pytest==8.1.1 # via # -r requirements/pytest.txt # pytest-custom-exit-code diff --git a/requirements/static/ci/py3.10/windows.txt b/requirements/static/ci/py3.10/windows.txt index 54bfbf52253..6705e5b8e0a 100644 --- a/requirements/static/ci/py3.10/windows.txt +++ b/requirements/static/ci/py3.10/windows.txt @@ -249,7 +249,7 @@ pathtools==0.1.2 # via watchdog platformdirs==2.5.4 # via virtualenv -pluggy==0.13.0 +pluggy==1.4.0 # via pytest portend==2.6 # via @@ -325,7 +325,7 @@ pytest-system-statistics==1.0.2 # via pytest-salt-factories pytest-timeout==2.3.1 # via -r requirements/pytest.txt -pytest==7.3.2 +pytest==8.1.1 # via # -r requirements/pytest.txt # pytest-custom-exit-code diff --git a/requirements/static/ci/py3.11/cloud.txt b/requirements/static/ci/py3.11/cloud.txt index a5b718e4e8a..3eb2804f99e 100644 --- a/requirements/static/ci/py3.11/cloud.txt +++ b/requirements/static/ci/py3.11/cloud.txt @@ -355,7 +355,7 @@ platformdirs==2.2.0 # via # -c requirements/static/ci/py3.11/linux.txt # virtualenv -pluggy==0.13.0 +pluggy==1.4.0 # via # -c requirements/static/ci/py3.11/linux.txt # pytest @@ -464,7 +464,7 @@ pytest-timeout==2.3.1 # via # -c requirements/static/ci/py3.11/linux.txt # -r requirements/pytest.txt -pytest==7.3.2 +pytest==8.1.1 # via # -c requirements/static/ci/py3.11/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.11/darwin.txt b/requirements/static/ci/py3.11/darwin.txt index 53ae920953d..dda251a88da 100644 --- a/requirements/static/ci/py3.11/darwin.txt +++ b/requirements/static/ci/py3.11/darwin.txt @@ -250,7 +250,7 @@ pathtools==0.1.2 # via watchdog platformdirs==2.2.0 # via virtualenv -pluggy==0.13.1 +pluggy==1.4.0 # via pytest portend==2.6 
# via @@ -318,7 +318,7 @@ pytest-system-statistics==1.0.2 # via pytest-salt-factories pytest-timeout==2.3.1 # via -r requirements/pytest.txt -pytest==7.3.2 +pytest==8.1.1 # via # -r requirements/pytest.txt # pytest-custom-exit-code diff --git a/requirements/static/ci/py3.11/freebsd.txt b/requirements/static/ci/py3.11/freebsd.txt index 8c96cc7156f..4935f82c093 100644 --- a/requirements/static/ci/py3.11/freebsd.txt +++ b/requirements/static/ci/py3.11/freebsd.txt @@ -247,7 +247,7 @@ pathtools==0.1.2 # via watchdog platformdirs==2.2.0 # via virtualenv -pluggy==0.13.0 +pluggy==1.4.0 # via pytest portend==2.4 # via @@ -317,7 +317,7 @@ pytest-system-statistics==1.0.2 # via pytest-salt-factories pytest-timeout==2.3.1 # via -r requirements/pytest.txt -pytest==7.3.2 +pytest==8.1.1 # via # -r requirements/pytest.txt # pytest-custom-exit-code diff --git a/requirements/static/ci/py3.11/linux.txt b/requirements/static/ci/py3.11/linux.txt index d90d36dd8f8..155c29faead 100644 --- a/requirements/static/ci/py3.11/linux.txt +++ b/requirements/static/ci/py3.11/linux.txt @@ -258,7 +258,7 @@ pathtools==0.1.2 # via watchdog platformdirs==2.2.0 # via virtualenv -pluggy==0.13.0 +pluggy==1.4.0 # via pytest portend==2.4 # via @@ -334,7 +334,7 @@ pytest-system-statistics==1.0.2 # via pytest-salt-factories pytest-timeout==2.3.1 # via -r requirements/pytest.txt -pytest==7.3.2 +pytest==8.1.1 # via # -r requirements/pytest.txt # pytest-custom-exit-code diff --git a/requirements/static/ci/py3.11/windows.txt b/requirements/static/ci/py3.11/windows.txt index 1568d217eb2..c43da61f3ac 100644 --- a/requirements/static/ci/py3.11/windows.txt +++ b/requirements/static/ci/py3.11/windows.txt @@ -245,7 +245,7 @@ pathtools==0.1.2 # via watchdog platformdirs==2.5.4 # via virtualenv -pluggy==0.13.0 +pluggy==1.4.0 # via pytest portend==2.6 # via @@ -321,7 +321,7 @@ pytest-system-statistics==1.0.2 # via pytest-salt-factories pytest-timeout==2.3.1 # via -r requirements/pytest.txt -pytest==7.3.2 +pytest==8.1.1 # via # -r requirements/pytest.txt # pytest-custom-exit-code diff --git a/requirements/static/ci/py3.12/cloud.txt b/requirements/static/ci/py3.12/cloud.txt index 34ea9ce97ab..ca788acb354 100644 --- a/requirements/static/ci/py3.12/cloud.txt +++ b/requirements/static/ci/py3.12/cloud.txt @@ -355,7 +355,7 @@ platformdirs==2.2.0 # via # -c requirements/static/ci/py3.12/linux.txt # virtualenv -pluggy==0.13.0 +pluggy==1.4.0 # via # -c requirements/static/ci/py3.12/linux.txt # pytest @@ -464,7 +464,7 @@ pytest-timeout==2.3.1 # via # -c requirements/static/ci/py3.12/linux.txt # -r requirements/pytest.txt -pytest==7.3.2 +pytest==8.1.1 # via # -c requirements/static/ci/py3.12/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.12/darwin.txt b/requirements/static/ci/py3.12/darwin.txt index e0a1c165ae8..b313223eff6 100644 --- a/requirements/static/ci/py3.12/darwin.txt +++ b/requirements/static/ci/py3.12/darwin.txt @@ -250,7 +250,7 @@ pathtools==0.1.2 # via watchdog platformdirs==2.2.0 # via virtualenv -pluggy==0.13.1 +pluggy==1.4.0 # via pytest portend==2.6 # via @@ -318,7 +318,7 @@ pytest-system-statistics==1.0.2 # via pytest-salt-factories pytest-timeout==2.3.1 # via -r requirements/pytest.txt -pytest==7.3.2 +pytest==8.1.1 # via # -r requirements/pytest.txt # pytest-custom-exit-code diff --git a/requirements/static/ci/py3.12/freebsd.txt b/requirements/static/ci/py3.12/freebsd.txt index e22a444c272..d0a66a4ab78 100644 --- a/requirements/static/ci/py3.12/freebsd.txt +++ b/requirements/static/ci/py3.12/freebsd.txt @@ 
-247,7 +247,7 @@ pathtools==0.1.2 # via watchdog platformdirs==2.2.0 # via virtualenv -pluggy==0.13.0 +pluggy==1.4.0 # via pytest portend==2.4 # via @@ -317,7 +317,7 @@ pytest-system-statistics==1.0.2 # via pytest-salt-factories pytest-timeout==2.3.1 # via -r requirements/pytest.txt -pytest==7.3.2 +pytest==8.1.1 # via # -r requirements/pytest.txt # pytest-custom-exit-code diff --git a/requirements/static/ci/py3.12/linux.txt b/requirements/static/ci/py3.12/linux.txt index 5a851d445b7..a05f5870703 100644 --- a/requirements/static/ci/py3.12/linux.txt +++ b/requirements/static/ci/py3.12/linux.txt @@ -258,7 +258,7 @@ pathtools==0.1.2 # via watchdog platformdirs==2.2.0 # via virtualenv -pluggy==0.13.0 +pluggy==1.4.0 # via pytest portend==2.4 # via @@ -334,7 +334,7 @@ pytest-system-statistics==1.0.2 # via pytest-salt-factories pytest-timeout==2.3.1 # via -r requirements/pytest.txt -pytest==7.3.2 +pytest==8.1.1 # via # -r requirements/pytest.txt # pytest-custom-exit-code diff --git a/requirements/static/ci/py3.12/windows.txt b/requirements/static/ci/py3.12/windows.txt index 2a07172fb2a..60a0570bd0f 100644 --- a/requirements/static/ci/py3.12/windows.txt +++ b/requirements/static/ci/py3.12/windows.txt @@ -245,7 +245,7 @@ pathtools==0.1.2 # via watchdog platformdirs==2.5.4 # via virtualenv -pluggy==0.13.0 +pluggy==1.4.0 # via pytest portend==2.6 # via @@ -321,7 +321,7 @@ pytest-system-statistics==1.0.2 # via pytest-salt-factories pytest-timeout==2.3.1 # via -r requirements/pytest.txt -pytest==7.3.2 +pytest==8.1.1 # via # -r requirements/pytest.txt # pytest-custom-exit-code diff --git a/requirements/static/ci/py3.8/cloud.txt b/requirements/static/ci/py3.8/cloud.txt index b7c860f0a21..216edb40c90 100644 --- a/requirements/static/ci/py3.8/cloud.txt +++ b/requirements/static/ci/py3.8/cloud.txt @@ -414,7 +414,7 @@ platformdirs==2.2.0 # via # -c requirements/static/ci/py3.8/linux.txt # virtualenv -pluggy==0.13.0 +pluggy==1.4.0 # via # -c requirements/static/ci/py3.8/linux.txt # pytest @@ -534,7 +534,7 @@ pytest-timeout==2.3.1 # via # -c requirements/static/ci/py3.8/linux.txt # -r requirements/pytest.txt -pytest==7.3.2 +pytest==8.1.1 # via # -c requirements/static/ci/py3.8/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.8/freebsd.txt b/requirements/static/ci/py3.8/freebsd.txt index 2fe8376de2a..33561713194 100644 --- a/requirements/static/ci/py3.8/freebsd.txt +++ b/requirements/static/ci/py3.8/freebsd.txt @@ -291,7 +291,7 @@ pathtools==0.1.2 # via watchdog platformdirs==2.2.0 # via virtualenv -pluggy==0.13.0 +pluggy==1.4.0 # via pytest portend==2.4 # via @@ -367,7 +367,7 @@ pytest-system-statistics==1.0.2 # via pytest-salt-factories pytest-timeout==2.3.1 # via -r requirements/pytest.txt -pytest==7.3.2 +pytest==8.1.1 # via # -r requirements/pytest.txt # pytest-custom-exit-code diff --git a/requirements/static/ci/py3.8/linux.txt b/requirements/static/ci/py3.8/linux.txt index ed32730015a..f23b5ca0173 100644 --- a/requirements/static/ci/py3.8/linux.txt +++ b/requirements/static/ci/py3.8/linux.txt @@ -297,7 +297,7 @@ pathtools==0.1.2 # via watchdog platformdirs==2.2.0 # via virtualenv -pluggy==0.13.0 +pluggy==1.4.0 # via pytest portend==2.4 # via @@ -379,7 +379,7 @@ pytest-system-statistics==1.0.2 # via pytest-salt-factories pytest-timeout==2.3.1 # via -r requirements/pytest.txt -pytest==7.3.2 +pytest==8.1.1 # via # -r requirements/pytest.txt # pytest-custom-exit-code diff --git a/requirements/static/ci/py3.8/windows.txt b/requirements/static/ci/py3.8/windows.txt index 
658797085da..c6506dc71eb 100644 --- a/requirements/static/ci/py3.8/windows.txt +++ b/requirements/static/ci/py3.8/windows.txt @@ -251,7 +251,7 @@ pathtools==0.1.2 # via watchdog platformdirs==2.2.0 # via virtualenv -pluggy==0.13.0 +pluggy==1.4.0 # via pytest portend==2.6 # via @@ -326,7 +326,7 @@ pytest-system-statistics==1.0.2 # via pytest-salt-factories pytest-timeout==2.3.1 # via -r requirements/pytest.txt -pytest==7.3.2 +pytest==8.1.1 # via # -r requirements/pytest.txt # pytest-custom-exit-code diff --git a/requirements/static/ci/py3.9/cloud.txt b/requirements/static/ci/py3.9/cloud.txt index 411e589ac4d..930a5ae305f 100644 --- a/requirements/static/ci/py3.9/cloud.txt +++ b/requirements/static/ci/py3.9/cloud.txt @@ -414,7 +414,7 @@ platformdirs==2.2.0 # via # -c requirements/static/ci/py3.9/linux.txt # virtualenv -pluggy==0.13.0 +pluggy==1.4.0 # via # -c requirements/static/ci/py3.9/linux.txt # pytest @@ -536,7 +536,7 @@ pytest-timeout==2.3.1 # via # -c requirements/static/ci/py3.9/linux.txt # -r requirements/pytest.txt -pytest==7.3.2 +pytest==8.1.1 # via # -c requirements/static/ci/py3.9/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.9/darwin.txt b/requirements/static/ci/py3.9/darwin.txt index 07849b7b168..2200ac08488 100644 --- a/requirements/static/ci/py3.9/darwin.txt +++ b/requirements/static/ci/py3.9/darwin.txt @@ -298,7 +298,7 @@ pathtools==0.1.2 # via watchdog platformdirs==2.2.0 # via virtualenv -pluggy==0.13.1 +pluggy==1.4.0 # via pytest portend==2.6 # via @@ -376,7 +376,7 @@ pytest-system-statistics==1.0.2 # via pytest-salt-factories pytest-timeout==2.3.1 # via -r requirements/pytest.txt -pytest==7.3.2 +pytest==8.1.1 # via # -r requirements/pytest.txt # pytest-custom-exit-code diff --git a/requirements/static/ci/py3.9/freebsd.txt b/requirements/static/ci/py3.9/freebsd.txt index ceed8c1c838..0145e2d9382 100644 --- a/requirements/static/ci/py3.9/freebsd.txt +++ b/requirements/static/ci/py3.9/freebsd.txt @@ -291,7 +291,7 @@ pathtools==0.1.2 # via watchdog platformdirs==2.2.0 # via virtualenv -pluggy==0.13.0 +pluggy==1.4.0 # via pytest portend==2.4 # via @@ -369,7 +369,7 @@ pytest-system-statistics==1.0.2 # via pytest-salt-factories pytest-timeout==2.3.1 # via -r requirements/pytest.txt -pytest==7.3.2 +pytest==8.1.1 # via # -r requirements/pytest.txt # pytest-custom-exit-code diff --git a/requirements/static/ci/py3.9/linux.txt b/requirements/static/ci/py3.9/linux.txt index d9ce9adf4cf..6c81689ec78 100644 --- a/requirements/static/ci/py3.9/linux.txt +++ b/requirements/static/ci/py3.9/linux.txt @@ -295,7 +295,7 @@ pathtools==0.1.2 # via watchdog platformdirs==2.2.0 # via virtualenv -pluggy==0.13.0 +pluggy==1.4.0 # via pytest portend==2.4 # via @@ -379,7 +379,7 @@ pytest-system-statistics==1.0.2 # via pytest-salt-factories pytest-timeout==2.3.1 # via -r requirements/pytest.txt -pytest==7.3.2 +pytest==8.1.1 # via # -r requirements/pytest.txt # pytest-custom-exit-code diff --git a/requirements/static/ci/py3.9/windows.txt b/requirements/static/ci/py3.9/windows.txt index 58630bde24f..003fd73d4fb 100644 --- a/requirements/static/ci/py3.9/windows.txt +++ b/requirements/static/ci/py3.9/windows.txt @@ -251,7 +251,7 @@ pathtools==0.1.2 # via watchdog platformdirs==2.2.0 # via virtualenv -pluggy==0.13.0 +pluggy==1.4.0 # via pytest portend==2.6 # via @@ -327,7 +327,7 @@ pytest-system-statistics==1.0.2 # via pytest-salt-factories pytest-timeout==2.3.1 # via -r requirements/pytest.txt -pytest==7.3.2 +pytest==8.1.1 # via # -r requirements/pytest.txt # 
pytest-custom-exit-code From d8cb80216e0a6f5f3830053b7b09a3efd6b70843 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 22 Jan 2024 17:43:49 +0000 Subject: [PATCH 035/102] Remove custom fixture scope ordering code Signed-off-by: Pedro Algarvio --- tests/conftest.py | 101 +--------------------------------------------- 1 file changed, 1 insertion(+), 100 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index f88829a5459..e446e2329a8 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -10,7 +10,7 @@ import re import shutil import stat import sys -from functools import lru_cache, partial, wraps +from functools import lru_cache from unittest import TestCase # pylint: disable=blacklisted-module import _pytest.logging @@ -448,8 +448,6 @@ def pytest_collection_modifyitems(config, items): groups_collection_modifyitems(config, items) from_filenames_collection_modifyitems(config, items) - log.warning("Mofifying collected tests to keep track of fixture usage") - timeout_marker_tests_paths = ( str(PYTESTS_DIR / "pkg"), str(PYTESTS_DIR / "scenarios"), @@ -478,103 +476,6 @@ def pytest_collection_modifyitems(config, items): # Default to counting only the test execution for the timeouts, ie, # withough including the fixtures setup time towards the timeout. item.add_marker(pytest.mark.timeout(90, func_only=True)) - for fixture in item.fixturenames: - if fixture not in item._fixtureinfo.name2fixturedefs: - continue - for fixturedef in item._fixtureinfo.name2fixturedefs[fixture]: - if fixturedef.scope != "package": - continue - try: - fixturedef.finish.__wrapped__ - except AttributeError: - original_func = fixturedef.finish - - def wrapper(func, fixturedef): - @wraps(func) - def wrapped(self, request, nextitem=False): - try: - return self._finished - except AttributeError: - if nextitem: - fpath = pathlib.Path(self.baseid).resolve() - tpath = pathlib.Path( - nextitem.fspath.strpath - ).resolve() - try: - tpath.relative_to(fpath) - # The test module is within the same package that the fixture is - if ( - not request.session.shouldfail - and not request.session.shouldstop - ): - log.debug( - "The next test item is still under the" - " fixture package path. Not" - " terminating %s", - self, - ) - return - except ValueError: - pass - log.debug("Finish called on %s", self) - try: - return func(request) - except ( - BaseException # pylint: disable=broad-except - ) as exc: - pytest.fail( - "Failed to run finish() on {}: {}".format( - fixturedef, exc - ), - pytrace=True, - ) - finally: - self._finished = True - - return partial(wrapped, fixturedef) - - fixturedef.finish = wrapper(fixturedef.finish, fixturedef) - try: - fixturedef.finish.__wrapped__ - except AttributeError: - fixturedef.finish.__wrapped__ = original_func - - -@pytest.hookimpl(trylast=True, hookwrapper=True) -def pytest_runtest_protocol(item, nextitem): - """ - implements the runtest_setup/call/teardown protocol for - the given test item, including capturing exceptions and calling - reporting hooks. - - :arg item: test item for which the runtest protocol is performed. - - :arg nextitem: the scheduled-to-be-next test item (or None if this - is the end my friend). This argument is passed on to - :py:func:`pytest_runtest_teardown`. - - :return boolean: True if no further hook implementations should be invoked. 
- - - Stops at first non-None result, see :ref:`firstresult` - """ - request = item._request - used_fixture_defs = [] - for fixture in item.fixturenames: - if fixture not in item._fixtureinfo.name2fixturedefs: - continue - for fixturedef in reversed(item._fixtureinfo.name2fixturedefs[fixture]): - if fixturedef.scope != "package": - continue - used_fixture_defs.append(fixturedef) - try: - # Run the test - yield - finally: - for fixturedef in used_fixture_defs: - fixturedef.finish(request, nextitem=nextitem) - del request - del used_fixture_defs def pytest_markeval_namespace(config): From 5bced402fbe8572d7148d132b9d712d6300f9335 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 13 Mar 2024 07:48:43 +0000 Subject: [PATCH 036/102] Upgrade to `flaky==3.8.1` --- requirements/static/ci/py3.10/cloud.txt | 2 +- requirements/static/ci/py3.10/darwin.txt | 2 +- requirements/static/ci/py3.10/freebsd.txt | 2 +- requirements/static/ci/py3.10/linux.txt | 2 +- requirements/static/ci/py3.10/windows.txt | 2 +- requirements/static/ci/py3.11/cloud.txt | 2 +- requirements/static/ci/py3.11/darwin.txt | 2 +- requirements/static/ci/py3.11/freebsd.txt | 2 +- requirements/static/ci/py3.11/linux.txt | 2 +- requirements/static/ci/py3.11/windows.txt | 2 +- requirements/static/ci/py3.12/cloud.txt | 2 +- requirements/static/ci/py3.12/darwin.txt | 2 +- requirements/static/ci/py3.12/freebsd.txt | 2 +- requirements/static/ci/py3.12/linux.txt | 2 +- requirements/static/ci/py3.12/windows.txt | 2 +- requirements/static/ci/py3.8/cloud.txt | 2 +- requirements/static/ci/py3.8/freebsd.txt | 2 +- requirements/static/ci/py3.8/linux.txt | 2 +- requirements/static/ci/py3.8/windows.txt | 2 +- requirements/static/ci/py3.9/cloud.txt | 2 +- requirements/static/ci/py3.9/darwin.txt | 2 +- requirements/static/ci/py3.9/freebsd.txt | 2 +- requirements/static/ci/py3.9/linux.txt | 2 +- requirements/static/ci/py3.9/windows.txt | 2 +- 24 files changed, 24 insertions(+), 24 deletions(-) diff --git a/requirements/static/ci/py3.10/cloud.txt b/requirements/static/ci/py3.10/cloud.txt index d1e3503387e..c948ec499d8 100644 --- a/requirements/static/ci/py3.10/cloud.txt +++ b/requirements/static/ci/py3.10/cloud.txt @@ -167,7 +167,7 @@ filelock==3.0.12 # via # -c requirements/static/ci/py3.10/linux.txt # virtualenv -flaky==3.7.0 +flaky==3.8.1 # via # -c requirements/static/ci/py3.10/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.10/darwin.txt b/requirements/static/ci/py3.10/darwin.txt index a1580d5ba66..1a08e11b99e 100644 --- a/requirements/static/ci/py3.10/darwin.txt +++ b/requirements/static/ci/py3.10/darwin.txt @@ -117,7 +117,7 @@ exceptiongroup==1.0.4 # via pytest filelock==3.0.12 # via virtualenv -flaky==3.7.0 +flaky==3.8.1 # via -r requirements/pytest.txt frozenlist==1.3.0 # via diff --git a/requirements/static/ci/py3.10/freebsd.txt b/requirements/static/ci/py3.10/freebsd.txt index c1d312792a5..d4ff409bf90 100644 --- a/requirements/static/ci/py3.10/freebsd.txt +++ b/requirements/static/ci/py3.10/freebsd.txt @@ -115,7 +115,7 @@ exceptiongroup==1.0.4 # via pytest filelock==3.0.12 # via virtualenv -flaky==3.7.0 +flaky==3.8.1 # via -r requirements/pytest.txt frozenlist==1.3.0 # via diff --git a/requirements/static/ci/py3.10/linux.txt b/requirements/static/ci/py3.10/linux.txt index 97b6a9289ad..1a427909d2a 100644 --- a/requirements/static/ci/py3.10/linux.txt +++ b/requirements/static/ci/py3.10/linux.txt @@ -124,7 +124,7 @@ exceptiongroup==1.0.4 # via pytest filelock==3.0.12 # via virtualenv -flaky==3.7.0 +flaky==3.8.1 
# via -r requirements/pytest.txt frozenlist==1.3.0 # via diff --git a/requirements/static/ci/py3.10/windows.txt b/requirements/static/ci/py3.10/windows.txt index 6705e5b8e0a..5085f969dcd 100644 --- a/requirements/static/ci/py3.10/windows.txt +++ b/requirements/static/ci/py3.10/windows.txt @@ -110,7 +110,7 @@ exceptiongroup==1.0.4 # via pytest filelock==3.8.0 # via virtualenv -flaky==3.7.0 +flaky==3.8.1 # via -r requirements/pytest.txt frozenlist==1.3.3 # via diff --git a/requirements/static/ci/py3.11/cloud.txt b/requirements/static/ci/py3.11/cloud.txt index 3eb2804f99e..be47bc56fcd 100644 --- a/requirements/static/ci/py3.11/cloud.txt +++ b/requirements/static/ci/py3.11/cloud.txt @@ -159,7 +159,7 @@ filelock==3.0.12 # via # -c requirements/static/ci/py3.11/linux.txt # virtualenv -flaky==3.7.0 +flaky==3.8.1 # via # -c requirements/static/ci/py3.11/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.11/darwin.txt b/requirements/static/ci/py3.11/darwin.txt index dda251a88da..3318f874774 100644 --- a/requirements/static/ci/py3.11/darwin.txt +++ b/requirements/static/ci/py3.11/darwin.txt @@ -110,7 +110,7 @@ etcd3-py==0.1.6 # via -r requirements/static/ci/common.in filelock==3.0.12 # via virtualenv -flaky==3.7.0 +flaky==3.8.1 # via -r requirements/pytest.txt frozenlist==1.3.0 # via diff --git a/requirements/static/ci/py3.11/freebsd.txt b/requirements/static/ci/py3.11/freebsd.txt index 4935f82c093..0c5db4047d5 100644 --- a/requirements/static/ci/py3.11/freebsd.txt +++ b/requirements/static/ci/py3.11/freebsd.txt @@ -111,7 +111,7 @@ etcd3-py==0.1.6 # via -r requirements/static/ci/common.in filelock==3.0.12 # via virtualenv -flaky==3.7.0 +flaky==3.8.1 # via -r requirements/pytest.txt frozenlist==1.3.0 # via diff --git a/requirements/static/ci/py3.11/linux.txt b/requirements/static/ci/py3.11/linux.txt index 155c29faead..f65e23eef63 100644 --- a/requirements/static/ci/py3.11/linux.txt +++ b/requirements/static/ci/py3.11/linux.txt @@ -120,7 +120,7 @@ etcd3-py==0.1.6 # via -r requirements/static/ci/common.in filelock==3.0.12 # via virtualenv -flaky==3.7.0 +flaky==3.8.1 # via -r requirements/pytest.txt frozenlist==1.3.0 # via diff --git a/requirements/static/ci/py3.11/windows.txt b/requirements/static/ci/py3.11/windows.txt index c43da61f3ac..dd6c569671a 100644 --- a/requirements/static/ci/py3.11/windows.txt +++ b/requirements/static/ci/py3.11/windows.txt @@ -106,7 +106,7 @@ etcd3-py==0.1.6 # via -r requirements/static/ci/common.in filelock==3.8.0 # via virtualenv -flaky==3.7.0 +flaky==3.8.1 # via -r requirements/pytest.txt frozenlist==1.3.3 # via diff --git a/requirements/static/ci/py3.12/cloud.txt b/requirements/static/ci/py3.12/cloud.txt index ca788acb354..8c59f4b425a 100644 --- a/requirements/static/ci/py3.12/cloud.txt +++ b/requirements/static/ci/py3.12/cloud.txt @@ -159,7 +159,7 @@ filelock==3.0.12 # via # -c requirements/static/ci/py3.12/linux.txt # virtualenv -flaky==3.7.0 +flaky==3.8.1 # via # -c requirements/static/ci/py3.12/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.12/darwin.txt b/requirements/static/ci/py3.12/darwin.txt index b313223eff6..7968842ee75 100644 --- a/requirements/static/ci/py3.12/darwin.txt +++ b/requirements/static/ci/py3.12/darwin.txt @@ -110,7 +110,7 @@ etcd3-py==0.1.6 # via -r requirements/static/ci/common.in filelock==3.0.12 # via virtualenv -flaky==3.7.0 +flaky==3.8.1 # via -r requirements/pytest.txt frozenlist==1.3.0 # via diff --git a/requirements/static/ci/py3.12/freebsd.txt 
b/requirements/static/ci/py3.12/freebsd.txt index d0a66a4ab78..73edffa6cdf 100644 --- a/requirements/static/ci/py3.12/freebsd.txt +++ b/requirements/static/ci/py3.12/freebsd.txt @@ -111,7 +111,7 @@ etcd3-py==0.1.6 # via -r requirements/static/ci/common.in filelock==3.0.12 # via virtualenv -flaky==3.7.0 +flaky==3.8.1 # via -r requirements/pytest.txt frozenlist==1.3.0 # via diff --git a/requirements/static/ci/py3.12/linux.txt b/requirements/static/ci/py3.12/linux.txt index a05f5870703..5f698e3728a 100644 --- a/requirements/static/ci/py3.12/linux.txt +++ b/requirements/static/ci/py3.12/linux.txt @@ -120,7 +120,7 @@ etcd3-py==0.1.6 # via -r requirements/static/ci/common.in filelock==3.0.12 # via virtualenv -flaky==3.7.0 +flaky==3.8.1 # via -r requirements/pytest.txt frozenlist==1.3.0 # via diff --git a/requirements/static/ci/py3.12/windows.txt b/requirements/static/ci/py3.12/windows.txt index 60a0570bd0f..b460b78018b 100644 --- a/requirements/static/ci/py3.12/windows.txt +++ b/requirements/static/ci/py3.12/windows.txt @@ -106,7 +106,7 @@ etcd3-py==0.1.6 # via -r requirements/static/ci/common.in filelock==3.8.0 # via virtualenv -flaky==3.7.0 +flaky==3.8.1 # via -r requirements/pytest.txt frozenlist==1.3.3 # via diff --git a/requirements/static/ci/py3.8/cloud.txt b/requirements/static/ci/py3.8/cloud.txt index 216edb40c90..f8e34628f78 100644 --- a/requirements/static/ci/py3.8/cloud.txt +++ b/requirements/static/ci/py3.8/cloud.txt @@ -177,7 +177,7 @@ filelock==3.0.12 # via # -c requirements/static/ci/py3.8/linux.txt # virtualenv -flaky==3.7.0 +flaky==3.8.1 # via # -c requirements/static/ci/py3.8/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.8/freebsd.txt b/requirements/static/ci/py3.8/freebsd.txt index 33561713194..e53274c32bd 100644 --- a/requirements/static/ci/py3.8/freebsd.txt +++ b/requirements/static/ci/py3.8/freebsd.txt @@ -121,7 +121,7 @@ exceptiongroup==1.0.4 # via pytest filelock==3.0.12 # via virtualenv -flaky==3.7.0 +flaky==3.8.1 # via -r requirements/pytest.txt frozenlist==1.3.0 # via diff --git a/requirements/static/ci/py3.8/linux.txt b/requirements/static/ci/py3.8/linux.txt index f23b5ca0173..e9bd555c0f7 100644 --- a/requirements/static/ci/py3.8/linux.txt +++ b/requirements/static/ci/py3.8/linux.txt @@ -127,7 +127,7 @@ exceptiongroup==1.0.4 # via pytest filelock==3.0.12 # via virtualenv -flaky==3.7.0 +flaky==3.8.1 # via -r requirements/pytest.txt frozenlist==1.3.0 # via diff --git a/requirements/static/ci/py3.8/windows.txt b/requirements/static/ci/py3.8/windows.txt index c6506dc71eb..da0991bdb2c 100644 --- a/requirements/static/ci/py3.8/windows.txt +++ b/requirements/static/ci/py3.8/windows.txt @@ -112,7 +112,7 @@ exceptiongroup==1.0.4 # via pytest filelock==3.0.12 # via virtualenv -flaky==3.7.0 +flaky==3.8.1 # via -r requirements/pytest.txt frozenlist==1.3.0 # via diff --git a/requirements/static/ci/py3.9/cloud.txt b/requirements/static/ci/py3.9/cloud.txt index 930a5ae305f..68491d77b5d 100644 --- a/requirements/static/ci/py3.9/cloud.txt +++ b/requirements/static/ci/py3.9/cloud.txt @@ -177,7 +177,7 @@ filelock==3.0.12 # via # -c requirements/static/ci/py3.9/linux.txt # virtualenv -flaky==3.7.0 +flaky==3.8.1 # via # -c requirements/static/ci/py3.9/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.9/darwin.txt b/requirements/static/ci/py3.9/darwin.txt index 2200ac08488..57ae604f2da 100644 --- a/requirements/static/ci/py3.9/darwin.txt +++ b/requirements/static/ci/py3.9/darwin.txt @@ -123,7 +123,7 @@ exceptiongroup==1.0.4 # 
via pytest filelock==3.0.12 # via virtualenv -flaky==3.7.0 +flaky==3.8.1 # via -r requirements/pytest.txt frozenlist==1.3.0 # via diff --git a/requirements/static/ci/py3.9/freebsd.txt b/requirements/static/ci/py3.9/freebsd.txt index 0145e2d9382..722061db973 100644 --- a/requirements/static/ci/py3.9/freebsd.txt +++ b/requirements/static/ci/py3.9/freebsd.txt @@ -121,7 +121,7 @@ exceptiongroup==1.0.4 # via pytest filelock==3.0.12 # via virtualenv -flaky==3.7.0 +flaky==3.8.1 # via -r requirements/pytest.txt frozenlist==1.3.0 # via diff --git a/requirements/static/ci/py3.9/linux.txt b/requirements/static/ci/py3.9/linux.txt index 6c81689ec78..593557f9482 100644 --- a/requirements/static/ci/py3.9/linux.txt +++ b/requirements/static/ci/py3.9/linux.txt @@ -125,7 +125,7 @@ exceptiongroup==1.0.4 # via pytest filelock==3.0.12 # via virtualenv -flaky==3.7.0 +flaky==3.8.1 # via -r requirements/pytest.txt frozenlist==1.3.0 # via diff --git a/requirements/static/ci/py3.9/windows.txt b/requirements/static/ci/py3.9/windows.txt index 003fd73d4fb..bfaefd86fc3 100644 --- a/requirements/static/ci/py3.9/windows.txt +++ b/requirements/static/ci/py3.9/windows.txt @@ -112,7 +112,7 @@ exceptiongroup==1.0.4 # via pytest filelock==3.0.12 # via virtualenv -flaky==3.7.0 +flaky==3.8.1 # via -r requirements/pytest.txt frozenlist==1.3.0 # via From aee2110921b0c22a06daa5fa2860c7ad9e5effac Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 13 Mar 2024 16:19:18 +0000 Subject: [PATCH 037/102] Increase test timeout --- tests/pytests/functional/states/test_docker_container.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/pytests/functional/states/test_docker_container.py b/tests/pytests/functional/states/test_docker_container.py index 1e61f8b9d11..d5932cf6ad5 100644 --- a/tests/pytests/functional/states/test_docker_container.py +++ b/tests/pytests/functional/states/test_docker_container.py @@ -29,6 +29,7 @@ pytestmark = [ pytest.mark.skip_if_binaries_missing( "docker", "dockerd", reason="Docker not installed" ), + pytest.mark.timeout_unless_on_windows(120), ] IPV6_ENABLED = bool(salt.utils.network.ip_addrs6(include_loopback=True)) From 773772c4267a22306d304dddc7c9b6572e93a619 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Thu, 29 Feb 2024 16:08:22 -0700 Subject: [PATCH 038/102] Fix content-type backwards compatability --- changelog/66127.fixed.md | 1 + salt/utils/http.py | 11 +++++++++++ tests/pytests/unit/utils/test_http.py | 26 ++++++++++++++++++++++++++ 3 files changed, 38 insertions(+) create mode 100644 changelog/66127.fixed.md diff --git a/changelog/66127.fixed.md b/changelog/66127.fixed.md new file mode 100644 index 00000000000..aac9709212d --- /dev/null +++ b/changelog/66127.fixed.md @@ -0,0 +1 @@ +Fix content type backwards compatablity with http proxy post requests in the http utils module. 
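A standalone sketch of the restored fallback (illustrative only, not the `salt.utils.http.query` code): when a POST goes through an HTTP proxy and the caller supplied no `Content-Type`, default to form encoding and set a matching `Content-Length`.

```python
# Illustration using only the standard library; the helper name here is made
# up for the example and is not a Salt API.
import urllib.parse


def default_post_headers(header_dict, data, method, proxy_host, proxy_port):
    headers = dict(header_dict or {})
    if proxy_host and proxy_port and method == "POST" and "Content-Type" not in headers:
        headers["Content-Type"] = "application/x-www-form-urlencoded"
        if "Content-Length" not in headers:
            headers["Content-Length"] = f"{len(data)}"
    return headers


data = urllib.parse.urlencode({"payload": "test"})  # "payload=test" -> 12 bytes
headers = default_post_headers({}, data, "POST", "127.0.0.1", 88)
assert headers["Content-Type"] == "application/x-www-form-urlencoded"
assert headers["Content-Length"] == "12"
```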
diff --git a/salt/utils/http.py b/salt/utils/http.py index 991b4735f5b..cc1bd92fa85 100644 --- a/salt/utils/http.py +++ b/salt/utils/http.py @@ -247,6 +247,17 @@ def query( else: http_proxy_url = f"http://{proxy_host}:{proxy_port}" + if header_dict is None: + header_dict = {} + + if method == "POST" and "Content-Type" not in header_dict: + log.debug( + "Content-Type not provided for POST request, assuming application/x-www-form-urlencoded" + ) + header_dict["Content-Type"] = "application/x-www-form-urlencoded" + if "Content-Length" not in header_dict: + header_dict["Content-Length"] = f"{len(data)}" + match = re.match( r"https?://((25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(25[0-5]|2[0-4]\d|[01]?\d\d?)($|/)", url, diff --git a/tests/pytests/unit/utils/test_http.py b/tests/pytests/unit/utils/test_http.py index 883c19a69b9..ba4e0b6a2e2 100644 --- a/tests/pytests/unit/utils/test_http.py +++ b/tests/pytests/unit/utils/test_http.py @@ -1,3 +1,5 @@ +import urllib + import pytest import requests from pytestshellutils.utils import ports @@ -309,3 +311,27 @@ def test_backends_decode_body_true(httpserver, backend): ) body = ret.get("body", "") assert isinstance(body, str) + + +def test_requests_post_content_type(httpserver): + url = httpserver.url_for("/post-content-type") + data = urllib.parse.urlencode({"payload": "test"}) + opts = { + "proxy_host": "127.0.0.1", + "proxy_port": 88, + } + with patch("requests.Session") as mock_session: + sess = MagicMock() + sess.headers = {} + mock_session.return_value = sess + ret = http.query( + url, + method="POST", + data=data, + backend="tornado", + opts=opts, + ) + assert "Content-Type" in sess.headers + assert sess.headers["Content-Type"] == "application/x-www-form-urlencoded" + assert "Content-Length" in sess.headers + assert sess.headers["Content-Length"] == "12" From 7c61ac738ed29c5d5c50f37fee9abba2a0fb9c01 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Wed, 13 Mar 2024 15:53:58 -0700 Subject: [PATCH 039/102] Check for content-type after headers dict has been populated --- salt/utils/http.py | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/salt/utils/http.py b/salt/utils/http.py index cc1bd92fa85..824a9b2310a 100644 --- a/salt/utils/http.py +++ b/salt/utils/http.py @@ -247,17 +247,6 @@ def query( else: http_proxy_url = f"http://{proxy_host}:{proxy_port}" - if header_dict is None: - header_dict = {} - - if method == "POST" and "Content-Type" not in header_dict: - log.debug( - "Content-Type not provided for POST request, assuming application/x-www-form-urlencoded" - ) - header_dict["Content-Type"] = "application/x-www-form-urlencoded" - if "Content-Length" not in header_dict: - header_dict["Content-Length"] = f"{len(data)}" - match = re.match( r"https?://((25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(25[0-5]|2[0-4]\d|[01]?\d\d?)($|/)", url, @@ -364,6 +353,19 @@ def query( agent = f"{agent} http.query()" header_dict["User-agent"] = agent + if ( + proxy_host + and proxy_port + and method == "POST" + and "Content-Type" not in header_dict + ): + log.debug( + "Content-Type not provided for POST request, assuming application/x-www-form-urlencoded" + ) + header_dict["Content-Type"] = "application/x-www-form-urlencoded" + if "Content-Length" not in header_dict: + header_dict["Content-Length"] = f"{len(data)}" + if backend == "requests": sess = requests.Session() sess.auth = auth From 009c059b26038157e73e3ff331f0c282f10cdb26 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 14 Mar 2024 12:33:56 +0000 Subject: [PATCH 040/102] Bump to `pygments==2.17.2` due to https://github.com/advisories/GHSA-mrwq-x4v8-fh7p --- requirements/static/ci/py3.10/docs.txt | 2 +- requirements/static/ci/py3.10/tools.txt | 2 +- requirements/static/ci/py3.11/docs.txt | 2 +- requirements/static/ci/py3.11/tools.txt | 2 +- requirements/static/ci/py3.12/docs.txt | 2 +- requirements/static/ci/py3.12/tools.txt | 2 +- requirements/static/ci/py3.7/docs.txt | 2 +- requirements/static/ci/py3.8/docs.txt | 2 +- requirements/static/ci/py3.9/docs.txt | 2 +- requirements/static/ci/py3.9/tools.txt | 2 +- 10 files changed, 10 insertions(+), 10 deletions(-) diff --git a/requirements/static/ci/py3.10/docs.txt b/requirements/static/ci/py3.10/docs.txt index 7574bc61153..0907c47fff0 100644 --- a/requirements/static/ci/py3.10/docs.txt +++ b/requirements/static/ci/py3.10/docs.txt @@ -123,7 +123,7 @@ pycryptodomex==3.19.1 # -r requirements/crypto.txt pyenchant==3.2.2 # via sphinxcontrib-spelling -pygments==2.14.0 +pygments==2.17.2 # via sphinx pytz==2022.1 # via diff --git a/requirements/static/ci/py3.10/tools.txt b/requirements/static/ci/py3.10/tools.txt index dcda605b103..d1fa49b0c1e 100644 --- a/requirements/static/ci/py3.10/tools.txt +++ b/requirements/static/ci/py3.10/tools.txt @@ -40,7 +40,7 @@ pydantic-core==2.16.2 # via pydantic pydantic==2.6.1 # via python-tools-scripts -pygments==2.13.0 +pygments==2.17.2 # via rich python-dateutil==2.8.1 # via botocore diff --git a/requirements/static/ci/py3.11/docs.txt b/requirements/static/ci/py3.11/docs.txt index bdaf9afb140..ecb28a67c76 100644 --- a/requirements/static/ci/py3.11/docs.txt +++ b/requirements/static/ci/py3.11/docs.txt @@ -123,7 +123,7 @@ pycryptodomex==3.19.1 # -r requirements/crypto.txt pyenchant==3.2.2 # via sphinxcontrib-spelling -pygments==2.14.0 +pygments==2.17.2 # via sphinx pytz==2022.1 # via diff --git a/requirements/static/ci/py3.11/tools.txt 
b/requirements/static/ci/py3.11/tools.txt index b980e3455dc..397f8301eb0 100644 --- a/requirements/static/ci/py3.11/tools.txt +++ b/requirements/static/ci/py3.11/tools.txt @@ -38,7 +38,7 @@ pydantic-core==2.16.2 # via pydantic pydantic==2.6.1 # via python-tools-scripts -pygments==2.13.0 +pygments==2.17.2 # via rich python-dateutil==2.8.1 # via botocore diff --git a/requirements/static/ci/py3.12/docs.txt b/requirements/static/ci/py3.12/docs.txt index 80b51652bd9..f0e444c2a12 100644 --- a/requirements/static/ci/py3.12/docs.txt +++ b/requirements/static/ci/py3.12/docs.txt @@ -123,7 +123,7 @@ pycryptodomex==3.19.1 # -r requirements/crypto.txt pyenchant==3.2.2 # via sphinxcontrib-spelling -pygments==2.14.0 +pygments==2.17.2 # via sphinx pytz==2022.1 # via diff --git a/requirements/static/ci/py3.12/tools.txt b/requirements/static/ci/py3.12/tools.txt index 0617439a8e7..86fa24e6762 100644 --- a/requirements/static/ci/py3.12/tools.txt +++ b/requirements/static/ci/py3.12/tools.txt @@ -38,7 +38,7 @@ pydantic-core==2.16.2 # via pydantic pydantic==2.6.1 # via python-tools-scripts -pygments==2.13.0 +pygments==2.17.2 # via rich python-dateutil==2.8.1 # via botocore diff --git a/requirements/static/ci/py3.7/docs.txt b/requirements/static/ci/py3.7/docs.txt index 528de0370f3..e7d3faf34fe 100644 --- a/requirements/static/ci/py3.7/docs.txt +++ b/requirements/static/ci/py3.7/docs.txt @@ -127,7 +127,7 @@ pycryptodomex==3.19.1 # -r requirements/crypto.txt pyenchant==3.2.2 # via sphinxcontrib-spelling -pygments==2.8.1 +pygments==2.17.2 # via sphinx pytz==2022.1 # via diff --git a/requirements/static/ci/py3.8/docs.txt b/requirements/static/ci/py3.8/docs.txt index 660f0e9cbe6..94c72d532f1 100644 --- a/requirements/static/ci/py3.8/docs.txt +++ b/requirements/static/ci/py3.8/docs.txt @@ -123,7 +123,7 @@ pycryptodomex==3.19.1 # -r requirements/crypto.txt pyenchant==3.2.2 # via sphinxcontrib-spelling -pygments==2.8.1 +pygments==2.17.2 # via sphinx pytz==2022.1 # via diff --git a/requirements/static/ci/py3.9/docs.txt b/requirements/static/ci/py3.9/docs.txt index 1add3de6b29..3dd84014f31 100644 --- a/requirements/static/ci/py3.9/docs.txt +++ b/requirements/static/ci/py3.9/docs.txt @@ -127,7 +127,7 @@ pycryptodomex==3.19.1 # -r requirements/crypto.txt pyenchant==3.2.2 # via sphinxcontrib-spelling -pygments==2.14.0 +pygments==2.17.2 # via sphinx pytz==2022.1 # via diff --git a/requirements/static/ci/py3.9/tools.txt b/requirements/static/ci/py3.9/tools.txt index ca4ff2baff0..ed7d3c67870 100644 --- a/requirements/static/ci/py3.9/tools.txt +++ b/requirements/static/ci/py3.9/tools.txt @@ -40,7 +40,7 @@ pydantic-core==2.16.2 # via pydantic pydantic==2.6.1 # via python-tools-scripts -pygments==2.13.0 +pygments==2.17.2 # via rich python-dateutil==2.8.1 # via botocore From a80140819b984c04c14efd48685fd3add777013e Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 14 Mar 2024 20:42:56 +0000 Subject: [PATCH 041/102] Bump timeout --- tests/pytests/functional/test_version.py | 2 +- tests/pytests/integration/netapi/test_ssh_client.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/tests/pytests/functional/test_version.py b/tests/pytests/functional/test_version.py index b0e40dc3666..1c198abce7d 100644 --- a/tests/pytests/functional/test_version.py +++ b/tests/pytests/functional/test_version.py @@ -9,7 +9,7 @@ from tests.support.pytest.helpers import FakeSaltExtension pytestmark = [ # These are slow because they create a virtualenv and install salt in it pytest.mark.slow_test, - pytest.mark.timeout(120), + 
pytest.mark.timeout_unless_on_windows(240), ] log = logging.getLogger(__name__) diff --git a/tests/pytests/integration/netapi/test_ssh_client.py b/tests/pytests/integration/netapi/test_ssh_client.py index 53c5910b476..1de9f078773 100644 --- a/tests/pytests/integration/netapi/test_ssh_client.py +++ b/tests/pytests/integration/netapi/test_ssh_client.py @@ -16,6 +16,7 @@ pytestmark = [ # has been deprecated since Python 3.7, so, the logic goes into trying to import # backports.ssl-match-hostname which is not installed on the system. ), + pytest.mark.timeout_unless_on_windows(120), ] From 62bea250ff21b0009f3ec652b2759691844a9455 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 14 Mar 2024 20:47:19 +0000 Subject: [PATCH 042/102] Minor timeout adjustments --- tests/integration/cli/test_custom_module.py | 2 +- tests/integration/modules/test_localemod.py | 2 +- .../pytests/functional/modules/state/requisites/test_unless.py | 3 +-- tests/pytests/functional/modules/test_mac_brew_pkg.py | 2 +- tests/pytests/functional/modules/test_mac_pkgutil.py | 2 +- tests/pytests/functional/modules/test_mac_softwareupdate.py | 2 +- tests/pytests/unit/grains/test_core.py | 2 +- tests/pytests/unit/utils/test_aws.py | 2 +- 8 files changed, 8 insertions(+), 9 deletions(-) diff --git a/tests/integration/cli/test_custom_module.py b/tests/integration/cli/test_custom_module.py index 0eba966e15c..5998078536c 100644 --- a/tests/integration/cli/test_custom_module.py +++ b/tests/integration/cli/test_custom_module.py @@ -61,7 +61,7 @@ class SSHCustomModuleTest(SSHCase): self.assertEqual(expected, cmd) @pytest.mark.slow_test - @pytest.mark.timeout(120) + @pytest.mark.timeout(120, func_only=True) def test_ssh_custom_module(self): """ Test custom module work using SSHCase environment diff --git a/tests/integration/modules/test_localemod.py b/tests/integration/modules/test_localemod.py index 349893e138f..7130895bafb 100644 --- a/tests/integration/modules/test_localemod.py +++ b/tests/integration/modules/test_localemod.py @@ -27,7 +27,7 @@ class LocaleModuleTest(ModuleCase): locale = self.run_function("locale.get_locale") self.assertNotIn("Unsupported platform!", locale) - @pytest.mark.timeout(120) + @pytest.mark.timeout_unless_on_windows(120) @pytest.mark.destructive_test @pytest.mark.slow_test def test_gen_locale(self): diff --git a/tests/pytests/functional/modules/state/requisites/test_unless.py b/tests/pytests/functional/modules/state/requisites/test_unless.py index 97aa37a5006..f03cbf17594 100644 --- a/tests/pytests/functional/modules/state/requisites/test_unless.py +++ b/tests/pytests/functional/modules/state/requisites/test_unless.py @@ -3,10 +3,10 @@ import pytest pytestmark = [ pytest.mark.windows_whitelisted, pytest.mark.core_test, + pytest.mark.timeout_unless_on_windows(120), ] -@pytest.mark.timeout(120) def test_unless_req(state): ret = state.single(fun="test.succeed_with_changes", name="unless test", unless=[{}]) assert ret.result is True @@ -36,7 +36,6 @@ def test_unless_req(state): assert ret.comment == "Success!" 
-@pytest.mark.timeout(120) def test_unless_req_retcode(state): ret = state.single( fun="test.succeed_with_changes", diff --git a/tests/pytests/functional/modules/test_mac_brew_pkg.py b/tests/pytests/functional/modules/test_mac_brew_pkg.py index e1e23606ee8..de138b586a5 100644 --- a/tests/pytests/functional/modules/test_mac_brew_pkg.py +++ b/tests/pytests/functional/modules/test_mac_brew_pkg.py @@ -7,7 +7,7 @@ import pytest pytestmark = [ pytest.mark.slow_test, - pytest.mark.timeout(120), + pytest.mark.timeout(120, func_only=True), pytest.mark.destructive_test, pytest.mark.skip_if_not_root, pytest.mark.skip_unless_on_darwin, diff --git a/tests/pytests/functional/modules/test_mac_pkgutil.py b/tests/pytests/functional/modules/test_mac_pkgutil.py index 6b9233c2ecd..df1862c496f 100644 --- a/tests/pytests/functional/modules/test_mac_pkgutil.py +++ b/tests/pytests/functional/modules/test_mac_pkgutil.py @@ -9,7 +9,7 @@ import pytest from salt.exceptions import SaltInvocationError pytestmark = [ - pytest.mark.timeout(120), + pytest.mark.timeout(120, func_only=True), pytest.mark.slow_test, pytest.mark.destructive_test, pytest.mark.skip_if_not_root, diff --git a/tests/pytests/functional/modules/test_mac_softwareupdate.py b/tests/pytests/functional/modules/test_mac_softwareupdate.py index eb5f01550ea..0e65b1fdf00 100644 --- a/tests/pytests/functional/modules/test_mac_softwareupdate.py +++ b/tests/pytests/functional/modules/test_mac_softwareupdate.py @@ -8,7 +8,7 @@ from salt.exceptions import SaltInvocationError pytestmark = [ pytest.mark.slow_test, - pytest.mark.timeout(240), + pytest.mark.timeout(240, func_only=True), pytest.mark.skip_if_binaries_missing("softwareupdate"), pytest.mark.skip_if_not_root, pytest.mark.skip_unless_on_darwin, diff --git a/tests/pytests/unit/grains/test_core.py b/tests/pytests/unit/grains/test_core.py index 68a2c2f347a..fc45f0c3d91 100644 --- a/tests/pytests/unit/grains/test_core.py +++ b/tests/pytests/unit/grains/test_core.py @@ -2326,7 +2326,7 @@ def test_fqdns_return(): @pytest.mark.skip_unless_on_linux -@pytest.mark.timeout(60) +@pytest.mark.timeout(60, func_only=True) def test_fqdns_socket_error(caplog): """ test the behavior on non-critical socket errors of the dns grain diff --git a/tests/pytests/unit/utils/test_aws.py b/tests/pytests/unit/utils/test_aws.py index 22597ffcd8b..a7ab2710a42 100644 --- a/tests/pytests/unit/utils/test_aws.py +++ b/tests/pytests/unit/utils/test_aws.py @@ -22,7 +22,7 @@ pytestmark = [ # Skip testing on windows since it does not support signal.SIGALRM # which is what the timeout marker is using by default. 
pytest.mark.skip_on_windows, - pytest.mark.timeout(60, method=DEFAULT_METHOD), + pytest.mark.timeout(60, method=DEFAULT_METHOD, func_only=True), ] From da57d43cc045ded6bdea3c34e9516db90fa22fd4 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Fri, 15 Mar 2024 14:43:45 +0000 Subject: [PATCH 043/102] Randomize names and prefer `salt_master.state_tree.base` --- tests/pytests/integration/ssh/test_cp.py | 78 +++++++++++------------- 1 file changed, 34 insertions(+), 44 deletions(-) diff --git a/tests/pytests/integration/ssh/test_cp.py b/tests/pytests/integration/ssh/test_cp.py index 030dc3b14e9..ee240f23252 100644 --- a/tests/pytests/integration/ssh/test_cp.py +++ b/tests/pytests/integration/ssh/test_cp.py @@ -4,6 +4,7 @@ import time from pathlib import Path import pytest +from saltfactories.utils import random_string from tests.support.runtests import RUNTIME_VARS @@ -14,7 +15,7 @@ pytestmark = [ @pytest.fixture(scope="module", autouse=True) -def pillar_tree(base_env_pillar_tree_root_dir): +def _pillar_tree(salt_master): top_file = """ base: 'localhost': @@ -27,14 +28,9 @@ def pillar_tree(base_env_pillar_tree_root_dir): alot: many script: grail """ - top_tempfile = pytest.helpers.temp_file( - "top.sls", top_file, base_env_pillar_tree_root_dir - ) - basic_tempfile = pytest.helpers.temp_file( - "basic.sls", basic_pillar_file, base_env_pillar_tree_root_dir - ) - - with top_tempfile, basic_tempfile: + with salt_master.pillar_tree.base.temp_file( + "top.sls", top_file + ), salt_master.pillar_tree.base.temp_file("basic.sls", basic_pillar_file): yield @@ -94,7 +90,7 @@ def test_get_file(salt_ssh_cli, tmp_path, template, dst_is_dir, cachedir): def test_get_file_gzipped(salt_ssh_cli, caplog, tmp_path): - tgt = tmp_path / "foo" + tgt = tmp_path / random_string("foo-") res = salt_ssh_cli.run("cp.get_file", "salt://grail/scene33", str(tgt), gzip=5) assert res.returncode == 0 assert res.data @@ -578,7 +574,8 @@ def test_cache_file(salt_ssh_cli, suffix, cachedir): @pytest.fixture -def _cache_twice(base_env_state_tree_root_dir, request, salt_ssh_cli, cachedir): +def _cache_twice(salt_master, request, salt_ssh_cli, cachedir): + # ensure the cache is clean tgt = cachedir / "extrn_files" / "base" / "repo.saltproject.io" / "index.html" tgt.unlink(missing_ok=True) @@ -609,7 +606,7 @@ def _cache_twice(base_env_state_tree_root_dir, request, salt_ssh_cli, cachedir): {{%- set res2 = salt["cp.cache_file"]("{src}") %}} {{{{ res2 }}}} """ - with pytest.helpers.temp_file(name, contents, base_env_state_tree_root_dir): + with salt_master.state_tree.base.temp_file(name, contents): yield f"salt://{name}" @@ -695,24 +692,22 @@ def test_cache_dir_nonexistent_source(salt_ssh_cli, caplog): assert not res.data -def test_list_states(salt_ssh_cli, tmp_path, base_env_state_tree_root_dir): +def test_list_states(salt_master, salt_ssh_cli, tmp_path): top_sls = """ base: '*': - core """ - tgt = tmp_path / "testfile" - core_state = f""" - {tgt}/testfile: + {tmp_path / "testfile"}/testfile: file.managed: - source: salt://testfile - makedirs: true """ - with pytest.helpers.temp_file( - "top.sls", top_sls, base_env_state_tree_root_dir - ), pytest.helpers.temp_file("core.sls", core_state, base_env_state_tree_root_dir): + with salt_master.state_tree.base.temp_file( + "top.sls", top_sls + ), salt_master.state_tree.base.temp_file("core.sls", core_state): res = salt_ssh_cli.run( "cp.list_states", ) @@ -757,22 +752,19 @@ def test_list_master_dirs(salt_ssh_cli): assert path not in res.data -def test_list_master_symlinks(salt_ssh_cli, 
base_env_state_tree_root_dir): +def test_list_master_symlinks(salt_ssh_cli, salt_master): if salt_ssh_cli.config.get("fileserver_ignoresymlinks", False): pytest.skip("Fileserver is configured to ignore symlinks") - with pytest.helpers.temp_file("foo", "", base_env_state_tree_root_dir) as tgt: + with salt_master.state_tree.base.temp_file(random_string("foo-"), "") as tgt: sym = tgt.parent / "test_list_master_symlinks" - try: - sym.symlink_to(tgt) - res = salt_ssh_cli.run("cp.list_master_symlinks") - assert res.returncode == 0 - assert res.data - assert isinstance(res.data, dict) - assert res.data - assert sym.name in res.data - assert res.data[sym.name] == str(tgt) - finally: - sym.unlink() + sym.symlink_to(tgt) + res = salt_ssh_cli.run("cp.list_master_symlinks") + assert res.returncode == 0 + assert res.data + assert isinstance(res.data, dict) + assert res.data + assert sym.name in res.data + assert res.data[sym.name] == str(tgt) @pytest.fixture(params=(False, "cached", "render_cached")) @@ -857,9 +849,7 @@ def test_hash_file_local(salt_ssh_cli, caplog): @pytest.fixture -def state_tree_jinjaimport(base_env_state_tree_root_dir, tmp_path): - tgt = tmp_path / "config.conf" - base_path = base_env_state_tree_root_dir / "my" +def state_tree_jinjaimport(tmp_path, salt_master): map_contents = """{%- set mapdata = {"foo": "bar"} %}""" managed_contents = """ {%- from "my/map.jinja" import mapdata with context %} @@ -870,18 +860,18 @@ def state_tree_jinjaimport(base_env_state_tree_root_dir, tmp_path): Serialize config: file.managed: - - name: {tgt} + - name: {tmp_path / "config.conf"} - source: salt://my/files/config.conf.j2 - template: jinja """ - with pytest.helpers.temp_file( - "file_managed_import.sls", state_contents, base_path - ) as state: - with pytest.helpers.temp_file("map.jinja", map_contents, base_path): - with pytest.helpers.temp_file( - "config.conf.j2", managed_contents, base_path / "files" - ): - yield f"my.{state.stem}" + with salt_master.state_tree.base.temp_file( + "my/file_managed_import.sls", state_contents + ) as state, salt_master.state_tree.base.temp_file( + "my/map.jinja", map_contents + ), salt_master.state_tree.base.temp_file( + "my/files/config.conf.j2", managed_contents + ): + yield f"my.{state.stem}" def test_cp_cache_file_as_workaround_for_missing_map_file( From c0823f6a68888e57a0fac7fdc58fc0b927ba61d2 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 18 Mar 2024 07:45:44 +0000 Subject: [PATCH 044/102] Increase timeouts --- .../pytests/functional/modules/state/requisites/test_unless.py | 1 + tests/pytests/functional/states/test_docker_container.py | 2 +- tests/pytests/integration/states/test_beacon.py | 1 + 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/pytests/functional/modules/state/requisites/test_unless.py b/tests/pytests/functional/modules/state/requisites/test_unless.py index 237163da40c..b4b19670ceb 100644 --- a/tests/pytests/functional/modules/state/requisites/test_unless.py +++ b/tests/pytests/functional/modules/state/requisites/test_unless.py @@ -3,6 +3,7 @@ import pytest pytestmark = [ pytest.mark.windows_whitelisted, pytest.mark.core_test, + pytest.mark.timeout_unless_on_windows(240), ] diff --git a/tests/pytests/functional/states/test_docker_container.py b/tests/pytests/functional/states/test_docker_container.py index d5932cf6ad5..2cfc5b7a343 100644 --- a/tests/pytests/functional/states/test_docker_container.py +++ b/tests/pytests/functional/states/test_docker_container.py @@ -29,7 +29,7 @@ pytestmark = [ 
pytest.mark.skip_if_binaries_missing( "docker", "dockerd", reason="Docker not installed" ), - pytest.mark.timeout_unless_on_windows(120), + pytest.mark.timeout_unless_on_windows(240), ] IPV6_ENABLED = bool(salt.utils.network.ip_addrs6(include_loopback=True)) diff --git a/tests/pytests/integration/states/test_beacon.py b/tests/pytests/integration/states/test_beacon.py index 5aefa6ecf2b..1b0db5d65fe 100644 --- a/tests/pytests/integration/states/test_beacon.py +++ b/tests/pytests/integration/states/test_beacon.py @@ -26,6 +26,7 @@ def test_present_absent(salt_master, salt_minion, salt_call_cli): ret = salt_call_cli.run( "state.apply", "manage_beacons", + _timeout=120, ) assert ret.returncode == 0 state_id = "beacon_|-beacon-diskusage_|-diskusage_|-present" From e50d3a194fbe5562c7e39ba5eb9dd5d044c8ed56 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 18 Mar 2024 08:46:37 +0000 Subject: [PATCH 045/102] Skip problematic MacOS test in specific conditions --- .../functional/modules/test_mac_assistive.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/tests/pytests/functional/modules/test_mac_assistive.py b/tests/pytests/functional/modules/test_mac_assistive.py index d2b6808b56d..d7ea4518023 100644 --- a/tests/pytests/functional/modules/test_mac_assistive.py +++ b/tests/pytests/functional/modules/test_mac_assistive.py @@ -22,7 +22,7 @@ def assistive(modules): def osa_script(assistive): osa_script_path = "/usr/bin/osascript" try: - ret = assistive.install(osa_script_path, True) + assistive.install(osa_script_path, True) yield osa_script_path except CommandExecutionError as exc: pytest.skip(f"Unable to install {osa_script}: {exc}") @@ -33,7 +33,7 @@ def osa_script(assistive): @pytest.fixture -def install_remove_pkg_name(assistive): +def install_remove_pkg_name(assistive, grains): smile_bundle = "com.smileonmymac.textexpander" try: yield smile_bundle @@ -44,12 +44,19 @@ def install_remove_pkg_name(assistive): @pytest.mark.slow_test -def test_install_and_remove(assistive, install_remove_pkg_name): +def test_install_and_remove(assistive, install_remove_pkg_name, grains): """ Tests installing and removing a bundled ID or command to use assistive access. 
""" - ret = assistive.install(install_remove_pkg_name) - assert ret + try: + ret = assistive.install(install_remove_pkg_name) + assert ret + except CommandExecutionError as exc: + if grains["osmajorrelease"] != 12: + raise exc from None + if "attempt to write a readonly database" not in str(exc): + raise exc from None + pytest.skip("Test fails on MacOS 12(attempt to write a readonly database)") ret = assistive.remove(install_remove_pkg_name) assert ret From 608a8ffd1c478b84da81612a84519051fbcc1095 Mon Sep 17 00:00:00 2001 From: Shane Lee Date: Mon, 11 Mar 2024 11:21:00 -0600 Subject: [PATCH 046/102] Whitelist package tests for Windows --- tests/conftest.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index e446e2329a8..8384f4a0352 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -722,11 +722,14 @@ def pytest_runtest_setup(item): entropy_generator.generate_entropy() if salt.utils.platform.is_windows(): - unit_tests_paths = ( + auto_whitelisted_paths = ( str(TESTS_DIR / "unit"), str(PYTESTS_DIR / "unit"), + str(PYTESTS_DIR / "pkg"), ) - if not str(pathlib.Path(item.fspath).resolve()).startswith(unit_tests_paths): + if not str(pathlib.Path(item.fspath).resolve()).startswith( + auto_whitelisted_paths + ): # Unit tests are whitelisted on windows by default, so, we're only # after all other tests windows_whitelisted_marker = item.get_closest_marker("windows_whitelisted") From 487424408d30375b5a10743f28f2295cc06181b5 Mon Sep 17 00:00:00 2001 From: Shane Lee Date: Tue, 12 Mar 2024 07:32:52 -0600 Subject: [PATCH 047/102] Fix package tests --- tests/support/pkg.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/support/pkg.py b/tests/support/pkg.py index 1e0beddb472..35a2dd24790 100644 --- a/tests/support/pkg.py +++ b/tests/support/pkg.py @@ -199,7 +199,7 @@ class SaltPkgInstall: Default location for salt configurations """ if platform.is_windows(): - config_path = pathlib.Path("C://salt", "etc", "salt") + config_path = pathlib.Path("C:\\ProgramData", "Salt Project", "Salt") else: config_path = pathlib.Path("/etc", "salt") return config_path @@ -406,7 +406,7 @@ class SaltPkgInstall: if system_service is False: return None if platform.is_windows(): - return pathlib.Path("C:/salt") + return pathlib.Path("C:\\Program Files", "Salt Project", "Salt") if platform.is_darwin(): return pathlib.Path("/opt/salt") return pathlib.Path("/") From 76d750dce9e0c4258d365494a220190d93fa9653 Mon Sep 17 00:00:00 2001 From: Shane Lee Date: Thu, 14 Mar 2024 14:31:30 -0600 Subject: [PATCH 048/102] Make the tests run on Windows --- tests/conftest.py | 1 + tests/support/pkg.py | 75 +++++++++++++++++++++++++++++++++++++++----- 2 files changed, 69 insertions(+), 7 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 8384f4a0352..aef67bcb246 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -448,6 +448,7 @@ def pytest_collection_modifyitems(config, items): groups_collection_modifyitems(config, items) from_filenames_collection_modifyitems(config, items) + log.warning("Modifying collected tests to keep track of fixture usage") timeout_marker_tests_paths = ( str(PYTESTS_DIR / "pkg"), str(PYTESTS_DIR / "scenarios"), diff --git a/tests/support/pkg.py b/tests/support/pkg.py index 35a2dd24790..404aa015ef7 100644 --- a/tests/support/pkg.py +++ b/tests/support/pkg.py @@ -199,7 +199,7 @@ class SaltPkgInstall: Default location for salt configurations """ if platform.is_windows(): - config_path = 
pathlib.Path("C:\\ProgramData", "Salt Project", "Salt") + config_path = pathlib.Path(os.getenv("ProgramData"), "Salt Project", "Salt") else: config_path = pathlib.Path("/etc", "salt") return config_path @@ -237,7 +237,7 @@ class SaltPkgInstall: break if not version: pytest.fail( - f"Failed to package artifacts in '{ARTIFACTS_DIR}'. " + f"Failed to find package artifacts in '{ARTIFACTS_DIR}'. " f"Directory Contents:\n{pprint.pformat(artifacts)}" ) return version @@ -295,6 +295,10 @@ class SaltPkgInstall: self.run_root = self.bin_dir / "run" else: log.error("Unexpected file extension: %s", self.file_ext) + log.debug("root: %s", self.root) + log.debug("bin_dir: %s", self.bin_dir) + log.debug("ssm_bin: %s", self.ssm_bin) + log.debug("run_root: %s", self.run_root) if not self.pkgs: pytest.fail("Could not find Salt Artifacts") @@ -400,6 +404,9 @@ class SaltPkgInstall: self.binary_paths["spm"] = [shutil.which("salt-spm")] else: self.binary_paths["pip"] = [shutil.which("salt-pip")] + log.debug("python_bin: %s", python_bin) + log.debug("binary_paths: %s", self.binary_paths) + log.debug("install_dir: %s", self.install_dir) @staticmethod def salt_factories_root_dir(system_service: bool = False) -> pathlib.Path: @@ -413,8 +420,8 @@ class SaltPkgInstall: def _check_retcode(self, ret): """ - helper function ot check subprocess.run - returncode equals 0, if not raise assertionerror + Helper function to check subprocess.run returncode equals 0 + If not raise AssertionError """ if ret.returncode != 0: log.error(ret) @@ -456,8 +463,14 @@ class SaltPkgInstall: # Remove the service installed by the installer log.debug("Removing installed salt-minion service") self.proc.run(str(self.ssm_bin), "remove", "salt-minion", "confirm") + + # Add installation to the path self.update_process_path() + # Install the service using our config + if self.pkg_system_service: + self._install_ssm_service() + elif platform.is_darwin(): daemons_dir = pathlib.Path("/Library", "LaunchDaemons") service_name = "com.saltstack.salt.minion" @@ -504,6 +517,54 @@ class SaltPkgInstall: log.info(ret) self._check_retcode(ret) + def _install_ssm_service(self, service="minion"): + """ + This function installs the service on Windows using SSM but does not + start it. + + Args: + + service (str): + The name of the service. Default is ``minion`` + """ + service_name = f"salt-{service}" + binary = self.install_dir / f"{service_name}.exe" + ret = self.proc.run( + str(self.ssm_bin), + "install", + service_name, + binary, + "-c", + f'"{str(self.conf_dir)}"', + ) + self._check_retcode(ret) + ret = self.proc.run( + str(self.ssm_bin), + "set", + service_name, + "Description", + "Salt Minion for testing", + ) + self._check_retcode(ret) + # This doesn't start the service. 
It will start automatically on reboot + # It is set here to make it the same as what the installer does + ret = self.proc.run( + str(self.ssm_bin), "set", service_name, "Start", "SERVICE_AUTO_START" + ) + self._check_retcode(ret) + ret = self.proc.run( + str(self.ssm_bin), "set", service_name, "AppStopMethodConsole", "24000" + ) + self._check_retcode(ret) + ret = self.proc.run( + str(self.ssm_bin), "set", service_name, "AppStopMethodWindow", "2000" + ) + self._check_retcode(ret) + ret = self.proc.run( + str(self.ssm_bin), "set", service_name, "AppRestartDelay", "60000" + ) + self._check_retcode(ret) + def package_python_version(self): return self.proc.run( str(self.binary_paths["python"][0]), @@ -750,7 +811,7 @@ class SaltPkgInstall: self._check_retcode(ret) if self.pkg_system_service: - self._install_system_service() + self._install_ssm_service() elif platform.is_darwin(): if self.classic: @@ -1217,8 +1278,8 @@ class PkgSsmSaltDaemonImpl(PkgSystemdSaltDaemonImpl): # Dereference the internal _process attribute self._process = None - # Lets log and kill any child processes left behind, including the main subprocess - # if it failed to properly stop + # Let's log and kill any child processes left behind, including the main + # subprocess if it failed to properly stop terminate_process( pid=pid, kill_children=True, From e23893fb51ccfb469c91abf2c5850c6c22503792 Mon Sep 17 00:00:00 2001 From: Shane Lee Date: Fri, 15 Mar 2024 08:46:26 -0600 Subject: [PATCH 049/102] Skip state test on Windows --- tests/pytests/pkg/download/test_pkg_download.py | 2 +- tests/pytests/pkg/integration/test_salt_state_file.py | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/pytests/pkg/download/test_pkg_download.py b/tests/pytests/pkg/download/test_pkg_download.py index b774d9fa047..9a0fbd76bad 100644 --- a/tests/pytests/pkg/download/test_pkg_download.py +++ b/tests/pytests/pkg/download/test_pkg_download.py @@ -464,7 +464,7 @@ def setup_windows( try: arch = os.environ.get("SALT_REPO_ARCH") or "amd64" if package_type != "onedir": - root_dir = pathlib.Path(r"C:\Program Files\Salt Project\Salt") + root_dir = pathlib.Path(os.getenv("ProgramFiles"), "Salt Project", "Salt") if packaging.version.parse(salt_release) > packaging.version.parse("3005"): if package_type.lower() == "nsis": diff --git a/tests/pytests/pkg/integration/test_salt_state_file.py b/tests/pytests/pkg/integration/test_salt_state_file.py index 7b71fcb2365..1aadf3dbddb 100644 --- a/tests/pytests/pkg/integration/test_salt_state_file.py +++ b/tests/pytests/pkg/integration/test_salt_state_file.py @@ -5,6 +5,10 @@ import pytest from pytestskipmarkers.utils import platform from saltfactories.utils.functional import MultiStateResult +pytestmark = [ + pytest.mark.skip_on_windows, +] + @pytest.fixture def files(tmp_path): From 92d6d41f0cc3170d6fd2fd99ee65f550ab9ec319 Mon Sep 17 00:00:00 2001 From: Shane Lee Date: Fri, 15 Mar 2024 09:02:41 -0600 Subject: [PATCH 050/102] Revert weird change --- tests/support/pkg.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/support/pkg.py b/tests/support/pkg.py index 404aa015ef7..6163415d00b 100644 --- a/tests/support/pkg.py +++ b/tests/support/pkg.py @@ -199,7 +199,7 @@ class SaltPkgInstall: Default location for salt configurations """ if platform.is_windows(): - config_path = pathlib.Path(os.getenv("ProgramData"), "Salt Project", "Salt") + config_path = pathlib.Path("C:\\salt", "etc", "salt") else: config_path = pathlib.Path("/etc", "salt") return config_path @@ -413,7 +413,7 
@@ class SaltPkgInstall: if system_service is False: return None if platform.is_windows(): - return pathlib.Path("C:\\Program Files", "Salt Project", "Salt") + return pathlib.Path("C:\\salt") if platform.is_darwin(): return pathlib.Path("/opt/salt") return pathlib.Path("/") From e3fcc657b988beebdc3ee2bac8ebf9b285b9bf7f Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 18 Mar 2024 17:58:44 +0000 Subject: [PATCH 051/102] Remove invalid properties from existing GH actions --- .github/actions/build-onedir-deps/action.yml | 15 +++------------ .github/actions/build-onedir-salt/action.yml | 16 +++------------- .github/actions/build-source-tarball/action.yml | 9 +-------- .github/actions/cached-virtualenv/action.yml | 11 ++++------- .github/actions/download-artifact/action.yml | 1 + .github/actions/get-python-version/action.yml | 7 ++++++- .github/actions/setup-actionlint/action.yml | 3 ++- .github/actions/setup-pre-commit/action.yml | 6 ------ .../setup-python-tools-scripts/action.yml | 8 +------- .github/actions/setup-relenv/action.yml | 12 ++---------- .github/actions/setup-salt-version/action.yml | 13 +++++-------- .github/actions/setup-shellcheck/action.yml | 3 ++- .github/actions/upload-artifact/action.yml | 1 + 13 files changed, 31 insertions(+), 74 deletions(-) diff --git a/.github/actions/build-onedir-deps/action.yml b/.github/actions/build-onedir-deps/action.yml index 511fe5a5275..cdfdf7d917a 100644 --- a/.github/actions/build-onedir-deps/action.yml +++ b/.github/actions/build-onedir-deps/action.yml @@ -1,37 +1,26 @@ --- name: build-onedir-deps description: Build Onedir Dependencies + inputs: platform: required: true - type: string description: The platform to build arch: required: true - type: string description: The platform arch to build python-version: required: true - type: string description: The python version to build package-name: required: false - type: string description: The onedir package name to create default: salt cache-prefix: required: true - type: string description: Seed used to invalidate caches -env: - COLUMNS: 190 - PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - PIP_EXTRA_INDEX_URL: https://pypi.org/simple - RELENV_BUILDENV: 1 - - runs: using: composite @@ -56,6 +45,8 @@ runs: - name: Install Salt Onedir Package Dependencies shell: bash if: steps.onedir-pkg-cache.outputs.cache-hit != 'true' + env: + RELENV_BUILDENV: "1" run: | tools pkg build onedir-dependencies --arch ${{ inputs.arch }} --python-version ${{ inputs.python-version }} --package-name artifacts/${{ inputs.package-name }} --platform ${{ inputs.platform }} diff --git a/.github/actions/build-onedir-salt/action.yml b/.github/actions/build-onedir-salt/action.yml index 6952ccf98e4..2534cec01ed 100644 --- a/.github/actions/build-onedir-salt/action.yml +++ b/.github/actions/build-onedir-salt/action.yml @@ -1,41 +1,29 @@ --- name: build-onedir-salt description: Build Onedir Package + inputs: platform: required: true - type: string description: The platform to build arch: required: true - type: string description: The platform arch to build package-name: required: false - type: string description: The onedir package name to create default: salt cache-prefix: required: true - type: string description: Seed used to invalidate caches python-version: required: true - type: string description: The python version to build salt-version: - type: string required: true description: The Salt version to set prior to building packages. 
-env: - COLUMNS: 190 - PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - PIP_EXTRA_INDEX_URL: https://pypi.org/simple - RELENV_BUILDENV: 1 - - runs: using: composite @@ -64,6 +52,8 @@ runs: - name: Install Salt Into Onedir shell: bash + env: + RELENV_BUILDENV: "1" run: | tools pkg build salt-onedir salt-${{ inputs.salt-version }}.tar.gz --platform ${{ inputs.platform }} --package-name artifacts/${{ inputs.package-name }} diff --git a/.github/actions/build-source-tarball/action.yml b/.github/actions/build-source-tarball/action.yml index 8c662d8ee8b..22a5a674fca 100644 --- a/.github/actions/build-source-tarball/action.yml +++ b/.github/actions/build-source-tarball/action.yml @@ -1,24 +1,17 @@ --- name: build-source-tarball description: Build Source Tarball + inputs: salt-version: - type: string required: true description: The Salt version to set prior to building the tarball. nox-version: required: false - type: string description: The version of Nox to install default: "2022.8.7" -env: - COLUMNS: 190 - PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - PIP_EXTRA_INDEX_URL: https://pypi.org/simple - - runs: using: composite diff --git a/.github/actions/cached-virtualenv/action.yml b/.github/actions/cached-virtualenv/action.yml index f135d9116e5..4d0ddf099fa 100644 --- a/.github/actions/cached-virtualenv/action.yml +++ b/.github/actions/cached-virtualenv/action.yml @@ -5,26 +5,23 @@ description: Setup a cached python virtual environment inputs: name: required: true - type: string description: The Virtualenv Name cache-seed: required: true - type: string description: Seed used to invalidate caches + outputs: cache-hit: + description: 'A boolean value to indicate an exact match was found for the primary key' value: ${{ steps.cache-virtualenv.outputs.cache-hit }} cache-key: + description: The value of the cache key value: ${{ steps.setup-cache-key.outputs.cache-key }} python-executable: + description: The path to the virtualenv's python executable value: ${{ steps.define-python-executable.outputs.python-executable }} -env: - PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - PIP_EXTRA_INDEX_URL: https://pypi.org/simple - - runs: using: composite diff --git a/.github/actions/download-artifact/action.yml b/.github/actions/download-artifact/action.yml index 12a097f517f..f1b8e547b14 100644 --- a/.github/actions/download-artifact/action.yml +++ b/.github/actions/download-artifact/action.yml @@ -20,6 +20,7 @@ inputs: without overriding the existing archives. 
required: false + runs: using: composite steps: diff --git a/.github/actions/get-python-version/action.yml b/.github/actions/get-python-version/action.yml index f2b045f7ca7..a5bdc7f32a2 100644 --- a/.github/actions/get-python-version/action.yml +++ b/.github/actions/get-python-version/action.yml @@ -1,19 +1,24 @@ --- name: get-python-version description: Setup Relenv + inputs: python-binary: required: true - type: string description: The python binary to get the version from + outputs: binary: + description: The python binary executable value: ${{ steps.get-python-version.outputs.binary }} version: + description: The python version value: ${{ steps.get-python-version.outputs.version }} full-version: + description: The full python version value: ${{ steps.get-python-version.outputs.full-version }} version-sha256sum: + description: The sha256sum of the version value: ${{ steps.get-python-version.outputs.version-sha256sum }} diff --git a/.github/actions/setup-actionlint/action.yml b/.github/actions/setup-actionlint/action.yml index f1a81aaf35f..e965a9c60cf 100644 --- a/.github/actions/setup-actionlint/action.yml +++ b/.github/actions/setup-actionlint/action.yml @@ -1,15 +1,16 @@ --- name: setup-actionlint description: Setup actionlint + inputs: version: description: The version of actionlint default: 1.6.26 cache-seed: required: true - type: string description: Seed used to invalidate caches + runs: using: composite steps: diff --git a/.github/actions/setup-pre-commit/action.yml b/.github/actions/setup-pre-commit/action.yml index 82b8eef583d..ae62fa51edb 100644 --- a/.github/actions/setup-pre-commit/action.yml +++ b/.github/actions/setup-pre-commit/action.yml @@ -4,19 +4,13 @@ description: Setup 'pre-commit' inputs: version: - type: string description: Pre-commit version to install required: true default: 3.0.3 cache-seed: required: true - type: string description: Seed used to invalidate caches -env: - PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - PIP_EXTRA_INDEX_URL: https://pypi.org/simple - runs: using: composite diff --git a/.github/actions/setup-python-tools-scripts/action.yml b/.github/actions/setup-python-tools-scripts/action.yml index 9d5ff710346..d060c9d4a1b 100644 --- a/.github/actions/setup-python-tools-scripts/action.yml +++ b/.github/actions/setup-python-tools-scripts/action.yml @@ -5,23 +5,17 @@ description: Setup 'python-tools-scripts' inputs: cache-prefix: required: true - type: string description: Seed used to invalidate caches cwd: - type: string description: The directory the salt checkout is located in default: "." 
outputs: version: + description: "Return the python-tools-scripts version" value: ${{ steps.get-version.outputs.version }} -env: - PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - PIP_EXTRA_INDEX_URL: https://pypi.org/simple - - runs: using: composite diff --git a/.github/actions/setup-relenv/action.yml b/.github/actions/setup-relenv/action.yml index c4cfd33f545..d8303a92447 100644 --- a/.github/actions/setup-relenv/action.yml +++ b/.github/actions/setup-relenv/action.yml @@ -1,39 +1,31 @@ --- name: setup-relenv description: Setup Relenv + inputs: platform: required: true - type: string description: The platform to build arch: required: true - type: string description: The platform arch to build python-version: required: true - type: string description: The version of python to build cache-seed: required: true - type: string description: Seed used to invalidate caches version: required: false - type: string description: The version of relenv to use default: 0.13.2 outputs: version: + description: The relenv version value: ${{ inputs.version }} -env: - PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ - PIP_EXTRA_INDEX_URL: https://pypi.org/simple - - runs: using: composite diff --git a/.github/actions/setup-salt-version/action.yml b/.github/actions/setup-salt-version/action.yml index 9f21d6f002b..7d54c1c95b5 100644 --- a/.github/actions/setup-salt-version/action.yml +++ b/.github/actions/setup-salt-version/action.yml @@ -1,32 +1,29 @@ --- name: setup-salt-version description: Setup Salt Version + inputs: cwd: - type: string default: "" + description: The current working directory to use salt-version: - type: string default: "" description: > The Salt version to set prior to running tests or building packages. If not set, it is discover at run time, like, for example, capturing the output of running `python3 salt/version.py` validate-version: - type: boolean - default: false + default: "false" description: Validate the passed version. release: - type: boolean - default: false + default: "false" description: This is a release of salt. + outputs: salt-version: value: ${{ steps.setup-salt-version.outputs.salt-version }} description: The Salt version written to `salt/_version.txt` -env: - COLUMNS: 190 runs: using: composite diff --git a/.github/actions/setup-shellcheck/action.yml b/.github/actions/setup-shellcheck/action.yml index 2c86c98a072..9e5ad30365f 100644 --- a/.github/actions/setup-shellcheck/action.yml +++ b/.github/actions/setup-shellcheck/action.yml @@ -1,15 +1,16 @@ --- name: setup-shellcheck description: Setup shellcheck + inputs: version: description: The version of shellcheck default: v0.9.0 cache-seed: required: true - type: string description: Seed used to invalidate caches + runs: using: composite steps: diff --git a/.github/actions/upload-artifact/action.yml b/.github/actions/upload-artifact/action.yml index 6c9e940a3b3..eb00196a339 100644 --- a/.github/actions/upload-artifact/action.yml +++ b/.github/actions/upload-artifact/action.yml @@ -37,6 +37,7 @@ inputs: without overriding the existing archives. 
required: false + runs: using: composite steps: From f0284f75e02afe42e8845af9bb01b6408889d36d Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 18 Mar 2024 18:06:26 +0000 Subject: [PATCH 052/102] Force cache invalidation by setting cache seed to 1 --- .github/workflows/ci.yml | 2 +- .github/workflows/nightly.yml | 2 +- .github/workflows/release.yml | 2 +- .github/workflows/scheduled.yml | 2 +- .github/workflows/staging.yml | 2 +- .github/workflows/templates/layout.yml.jinja | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a0c9bb9f1d1..98dd746b9d8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -16,7 +16,7 @@ on: env: COLUMNS: 190 - CACHE_SEED: SEED-7 # Bump the number to invalidate all caches + CACHE_SEED: SEED-1 # Bump the number to invalidate all caches RELENV_DATA: "${{ github.workspace }}/.relenv" PIP_DISABLE_PIP_VERSION_CHECK: "1" RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1" diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index d15600d567c..9546f94f9c2 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -22,7 +22,7 @@ on: env: COLUMNS: 190 - CACHE_SEED: SEED-7 # Bump the number to invalidate all caches + CACHE_SEED: SEED-1 # Bump the number to invalidate all caches RELENV_DATA: "${{ github.workspace }}/.relenv" PIP_DISABLE_PIP_VERSION_CHECK: "1" RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1" diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e99b6daa2ef..9bf81b4f897 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -21,7 +21,7 @@ on: env: COLUMNS: 190 - CACHE_SEED: SEED-7 # Bump the number to invalidate all caches + CACHE_SEED: SEED-1 # Bump the number to invalidate all caches RELENV_DATA: "${{ github.workspace }}/.relenv" PIP_DISABLE_PIP_VERSION_CHECK: "1" RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1" diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index 88759436e63..35d595505d6 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -12,7 +12,7 @@ on: env: COLUMNS: 190 - CACHE_SEED: SEED-7 # Bump the number to invalidate all caches + CACHE_SEED: SEED-1 # Bump the number to invalidate all caches RELENV_DATA: "${{ github.workspace }}/.relenv" PIP_DISABLE_PIP_VERSION_CHECK: "1" RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1" diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index a5874f8ddd0..c7953f6a5fd 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -37,7 +37,7 @@ on: env: COLUMNS: 190 - CACHE_SEED: SEED-7 # Bump the number to invalidate all caches + CACHE_SEED: SEED-1 # Bump the number to invalidate all caches RELENV_DATA: "${{ github.workspace }}/.relenv" PIP_DISABLE_PIP_VERSION_CHECK: "1" RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1" diff --git a/.github/workflows/templates/layout.yml.jinja b/.github/workflows/templates/layout.yml.jinja index 21edc433e49..fba66a9dc1a 100644 --- a/.github/workflows/templates/layout.yml.jinja +++ b/.github/workflows/templates/layout.yml.jinja @@ -34,7 +34,7 @@ on: env: COLUMNS: 190 - CACHE_SEED: SEED-7 # Bump the number to invalidate all caches + CACHE_SEED: SEED-1 # Bump the number to invalidate all caches RELENV_DATA: "${{ github.workspace }}/.relenv" PIP_DISABLE_PIP_VERSION_CHECK: "1" RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1" From 533af2a851ab11dc574713703a306db10ab05241 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 18 Mar 2024 
15:49:13 +0000 Subject: [PATCH 053/102] Add our own GH cache action This actions wraps actions/cache and forces an early exit if 'fail-on-cache-miss' is true Based on the work from https://github.com/saltstack/salt/pull/66240 --- .github/actions/build-onedir-deps/action.yml | 2 +- .github/actions/build-onedir-salt/action.yml | 2 +- .github/actions/cache/action.yml | 77 +++++++++++++++++++ .github/actions/cached-virtualenv/action.yml | 2 +- .github/actions/setup-actionlint/action.yml | 2 +- .github/actions/setup-pre-commit/action.yml | 2 +- .../setup-python-tools-scripts/action.yml | 2 +- .github/actions/setup-relenv/action.yml | 2 +- .github/actions/setup-shellcheck/action.yml | 2 +- .github/workflows/build-deps-ci-action.yml | 6 +- .github/workflows/build-docs.yml | 2 +- .../templates/build-deps-ci-action.yml.jinja | 6 +- 12 files changed, 92 insertions(+), 15 deletions(-) create mode 100644 .github/actions/cache/action.yml diff --git a/.github/actions/build-onedir-deps/action.yml b/.github/actions/build-onedir-deps/action.yml index cdfdf7d917a..c34133bc369 100644 --- a/.github/actions/build-onedir-deps/action.yml +++ b/.github/actions/build-onedir-deps/action.yml @@ -28,7 +28,7 @@ runs: - name: Cache Deps Onedir Package Directory id: onedir-pkg-cache - uses: actions/cache@v3.3.1 + uses: ./.github/actions/cache with: path: artifacts/${{ inputs.package-name }} key: > diff --git a/.github/actions/build-onedir-salt/action.yml b/.github/actions/build-onedir-salt/action.yml index 2534cec01ed..0e3888cada6 100644 --- a/.github/actions/build-onedir-salt/action.yml +++ b/.github/actions/build-onedir-salt/action.yml @@ -31,7 +31,7 @@ runs: - name: Download Cached Deps Onedir Package Directory id: onedir-bare-cache - uses: actions/cache@v3.3.1 + uses: ./.github/actions/cache with: path: artifacts/${{ inputs.package-name }} key: > diff --git a/.github/actions/cache/action.yml b/.github/actions/cache/action.yml new file mode 100644 index 00000000000..ae416246d79 --- /dev/null +++ b/.github/actions/cache/action.yml @@ -0,0 +1,77 @@ +--- +name: cache +description: GitHub Actions Cache +inputs: + path: + description: 'A list of files, directories, and wildcard patterns to cache and restore' + required: true + key: + description: 'An explicit key for restoring and saving the cache' + required: true + restore-keys: + description: 'An ordered list of keys to use for restoring stale cache if no cache hit occurred for key. Note `cache-hit` returns false in this case.' 
+ required: false + upload-chunk-size: + description: 'The chunk size used to split up large files during upload, in bytes' + required: false + enableCrossOsArchive: + description: 'An optional boolean when enabled, allows windows runners to save or restore caches that can be restored or saved respectively on other platforms' + default: 'false' + required: false + fail-on-cache-miss: + description: 'Fail the workflow if cache entry is not found' + default: 'false' + required: false + lookup-only: + description: 'Check if a cache entry exists for the given input(s) (key, restore-keys) without downloading the cache' + default: 'false' + required: false + save-always: + description: 'Run the post step to save the cache even if another step before fails' + default: 'false' + required: false + +outputs: + cache-hit: + description: 'A boolean value to indicate an exact match was found for the primary key' + value: ${{ steps.github-cache.outputs.cache-hit }} + +runs: + using: composite + + steps: + + - name: Map inputs to environment variables + shell: bash + run: | + echo "GHA_CACHE_PATH=${{ inputs.path }}" | tee -a "${GITHUB_ENV}" + echo "GHA_CACHE_KEY=${{ inputs.key }}" | tee -a "${GITHUB_ENV}" + echo "GHA_CACHE_ENABLE_CROSS_OS_ARCHIVE=${{ inputs.enableCrossOsArchive }}" | tee -a "${GITHUB_ENV}" + echo "GHA_CACHE_FAIL_ON_CACHE_MISS=${{ inputs.fail-on-cache-miss }}" | tee -a "${GITHUB_ENV}" + echo "GHA_CACHE_LOOKUP_ONLY=${{ inputs.lookup-only }}" | tee -a "${GITHUB_ENV}" + echo "GHA_CACHE_SAVE_ALWAYS=${{ inputs.save-always }}" | tee -a "${GITHUB_ENV}" + echo "GHA_CACHE_RESTORE_KEYS=${{ inputs.restore-keys }}" | tee -a "${GITHUB_ENV}" + echo "GHA_CACHE_UPLOAD_CHUNK_SIZE=${{ inputs.upload-chunk-size }}" | tee -a "${GITHUB_ENV}" + + - name: Cache Provided Path (GitHub Actions) + id: github-cache + uses: actions/cache@v4 + with: + path: ${{ env.GHA_CACHE_PATH }} + key: ${{ env.GHA_CACHE_KEY }} + enableCrossOsArchive: ${{ env.GHA_CACHE_ENABLE_CROSS_OS_ARCHIVE }} + fail-on-cache-miss: ${{ env.GHA_CACHE_FAIL_ON_CACHE_MISS }} + lookup-only: ${{ env.GHA_CACHE_LOOKUP_ONLY }} + save-always: ${{ env.GHA_CACHE_SAVE_ALWAYS }} + restore-keys: ${{ env.GHA_CACHE_RESTORE_KEYS }} + upload-chunk-size: ${{ env.GHA_CACHE_UPLOAD_CHUNK_SIZE }} + + - name: Verify 'fail-on-cache-miss' + if: ${{ inputs.fail-on-cache-miss == 'true' }} + shell: bash + run: | + CACHE_HIT="${{ steps.github-cache.outputs.cache-hit }}" + if [ "$CACHE_HIT" != "true" ]; then + echo "No cache hit and fail-on-cache-miss is set to true." 
+ exit 1 + fi diff --git a/.github/actions/cached-virtualenv/action.yml b/.github/actions/cached-virtualenv/action.yml index 4d0ddf099fa..a24b805599d 100644 --- a/.github/actions/cached-virtualenv/action.yml +++ b/.github/actions/cached-virtualenv/action.yml @@ -51,7 +51,7 @@ runs: - name: Cache VirtualEnv id: cache-virtualenv - uses: actions/cache@v3.3.1 + uses: ./.github/actions/cache with: key: ${{ steps.setup-cache-key.outputs.cache-key }} path: ${{ steps.virtualenv-path.outputs.venv-path }} diff --git a/.github/actions/setup-actionlint/action.yml b/.github/actions/setup-actionlint/action.yml index e965a9c60cf..44b68e86c9f 100644 --- a/.github/actions/setup-actionlint/action.yml +++ b/.github/actions/setup-actionlint/action.yml @@ -16,7 +16,7 @@ runs: steps: - name: Cache actionlint Binary - uses: actions/cache@v3.3.1 + uses: ./.github/actions/cache with: path: /usr/local/bin/actionlint key: ${{ inputs.cache-seed }}|${{ runner.os }}|${{ runner.arch }}|actionlint|${{ inputs.version }} diff --git a/.github/actions/setup-pre-commit/action.yml b/.github/actions/setup-pre-commit/action.yml index ae62fa51edb..2acd58895c5 100644 --- a/.github/actions/setup-pre-commit/action.yml +++ b/.github/actions/setup-pre-commit/action.yml @@ -30,7 +30,7 @@ runs: ${{ steps.pre-commit-virtualenv.outputs.python-executable }} -m pip install pre-commit==${{ inputs.version }} - name: Cache Pre-Commit Hooks - uses: actions/cache@v3.3.1 + uses: ./.github/actions/cache id: pre-commit-hooks-cache with: key: ${{ steps.pre-commit-virtualenv.outputs.cache-key }}|${{ inputs.version }}|${{ hashFiles('.pre-commit-config.yaml') }} diff --git a/.github/actions/setup-python-tools-scripts/action.yml b/.github/actions/setup-python-tools-scripts/action.yml index d060c9d4a1b..e640ffe86f7 100644 --- a/.github/actions/setup-python-tools-scripts/action.yml +++ b/.github/actions/setup-python-tools-scripts/action.yml @@ -44,7 +44,7 @@ runs: cache-seed: tools|${{ steps.venv-hash.outputs.venv-hash }} - name: Restore Python Tools Virtualenvs Cache - uses: actions/cache@v3.3.1 + uses: ./.github/actions/cache with: path: ${{ inputs.cwd }}/.tools-venvs key: ${{ inputs.cache-prefix }}|${{ steps.venv-hash.outputs.venv-hash }} diff --git a/.github/actions/setup-relenv/action.yml b/.github/actions/setup-relenv/action.yml index d8303a92447..825d0401d12 100644 --- a/.github/actions/setup-relenv/action.yml +++ b/.github/actions/setup-relenv/action.yml @@ -37,7 +37,7 @@ runs: python3 -m pip install relenv==${{ inputs.version }} - name: Cache Relenv Data Directory - uses: actions/cache@v3.3.1 + uses: ./.github/actions/cache with: path: ${{ github.workspace }}/.relenv key: ${{ inputs.cache-seed }}|relenv|${{ inputs.version }}|${{ inputs.python-version }}|${{ inputs.platform }}|${{ inputs.arch }} diff --git a/.github/actions/setup-shellcheck/action.yml b/.github/actions/setup-shellcheck/action.yml index 9e5ad30365f..e6d0697933d 100644 --- a/.github/actions/setup-shellcheck/action.yml +++ b/.github/actions/setup-shellcheck/action.yml @@ -16,7 +16,7 @@ runs: steps: - name: Cache shellcheck Binary - uses: actions/cache@v3.3.1 + uses: ./.github/actions/cache with: path: /usr/local/bin/shellcheck key: ${{ inputs.cache-seed }}|${{ runner.os }}|${{ runner.arch }}|shellcheck|${{ inputs.version }} diff --git a/.github/workflows/build-deps-ci-action.yml b/.github/workflows/build-deps-ci-action.yml index 82389dbb448..26ad6fa8b65 100644 --- a/.github/workflows/build-deps-ci-action.yml +++ b/.github/workflows/build-deps-ci-action.yml @@ -74,7 +74,7 @@ jobs: - 
name: Cache nox.linux.${{ matrix.arch }}.tar.* for session ${{ inputs.nox-session }} id: nox-dependencies-cache - uses: actions/cache@v3.3.1 + uses: ./.github/actions/cache with: path: nox.linux.${{ matrix.arch }}.tar.* key: ${{ inputs.cache-prefix }}|testrun-deps|${{ matrix.arch }}|linux|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ inputs.nox-archive-hash }} @@ -183,7 +183,7 @@ jobs: - name: Cache nox.macos.${{ matrix.arch }}.tar.* for session ${{ inputs.nox-session }} id: nox-dependencies-cache - uses: actions/cache@v3.3.1 + uses: ./.github/actions/cache with: path: nox.macos.${{ matrix.arch }}.tar.* key: ${{ inputs.cache-prefix }}|testrun-deps|${{ matrix.arch }}|macos|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ inputs.nox-archive-hash }} @@ -272,7 +272,7 @@ jobs: - name: Cache nox.windows.${{ matrix.arch }}.tar.* for session ${{ inputs.nox-session }} id: nox-dependencies-cache - uses: actions/cache@v3.3.1 + uses: ./.github/actions/cache with: path: nox.windows.${{ matrix.arch }}.tar.* key: ${{ inputs.cache-prefix }}|testrun-deps|${{ matrix.arch }}|windows|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ inputs.nox-archive-hash }} diff --git a/.github/workflows/build-docs.yml b/.github/workflows/build-docs.yml index 415cdfc7d19..64c19ccb8a5 100644 --- a/.github/workflows/build-docs.yml +++ b/.github/workflows/build-docs.yml @@ -69,7 +69,7 @@ jobs: - name: Cache Python Tools Docs Virtualenv id: tools-venvs-dependencies-cache - uses: actions/cache@v3.3.1 + uses: ./.github/actions/cache with: path: .tools-venvs/docs key: ${{ inputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|docs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/docs.txt') }} diff --git a/.github/workflows/templates/build-deps-ci-action.yml.jinja b/.github/workflows/templates/build-deps-ci-action.yml.jinja index a43f7b43d01..1038d2b71dd 100644 --- a/.github/workflows/templates/build-deps-ci-action.yml.jinja +++ b/.github/workflows/templates/build-deps-ci-action.yml.jinja @@ -74,7 +74,7 @@ jobs: - name: Cache nox.linux.${{ matrix.arch }}.tar.* for session ${{ inputs.nox-session }} id: nox-dependencies-cache - uses: actions/cache@v3.3.1 + uses: ./.github/actions/cache with: path: nox.linux.${{ matrix.arch }}.tar.* key: ${{ inputs.cache-prefix }}|testrun-deps|${{ matrix.arch }}|linux|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ inputs.nox-archive-hash }} @@ -183,7 +183,7 @@ jobs: - name: Cache nox.macos.${{ matrix.arch }}.tar.* for session ${{ inputs.nox-session }} id: nox-dependencies-cache - uses: actions/cache@v3.3.1 + uses: ./.github/actions/cache with: path: nox.macos.${{ matrix.arch }}.tar.* key: ${{ inputs.cache-prefix }}|testrun-deps|${{ matrix.arch }}|macos|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ inputs.nox-archive-hash }} @@ -274,7 +274,7 @@ jobs: - name: Cache nox.windows.${{ matrix.arch }}.tar.* for session ${{ inputs.nox-session }} id: nox-dependencies-cache - uses: actions/cache@v3.3.1 + uses: ./.github/actions/cache with: path: nox.windows.${{ matrix.arch }}.tar.* key: ${{ inputs.cache-prefix }}|testrun-deps|${{ matrix.arch }}|windows|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ inputs.nox-archive-hash }} From e60f6a4bdf2d2f5876213774af191c73c8eccc53 Mon Sep 17 00:00:00 2001 From: Felippe Burk Date: Mon, 18 Mar 2024 21:47:51 +0000 Subject: [PATCH 054/102] Initial commit of s3 caching --- 
.github/actions/build-onedir-deps/action.yml | 1 - .github/actions/cache/action.yml | 28 +++++++++++++++++-- .github/workflows/build-deps-ci-action.yml | 4 +++ .github/workflows/build-deps-onedir.yml | 7 ++++- .github/workflows/build-salt-onedir.yml | 2 ++ .github/workflows/nightly.yml | 12 ++++++++ .github/workflows/release.yml | 13 ++++++++- .github/workflows/staging.yml | 12 ++++++++ .../templates/build-deps-ci-action.yml.jinja | 4 +++ .../workflows/templates/build-repos.yml.jinja | 2 ++ .github/workflows/templates/release.yml.jinja | 15 +++++++++- .../test-package-downloads-action.yml.jinja | 6 ++++ .../test-package-downloads-action.yml | 6 ++++ .../workflows/test-packages-action-linux.yml | 1 + 14 files changed, 107 insertions(+), 6 deletions(-) diff --git a/.github/actions/build-onedir-deps/action.yml b/.github/actions/build-onedir-deps/action.yml index c34133bc369..1d36086ec0d 100644 --- a/.github/actions/build-onedir-deps/action.yml +++ b/.github/actions/build-onedir-deps/action.yml @@ -25,7 +25,6 @@ runs: using: composite steps: - - name: Cache Deps Onedir Package Directory id: onedir-pkg-cache uses: ./.github/actions/cache diff --git a/.github/actions/cache/action.yml b/.github/actions/cache/action.yml index ae416246d79..a11df9f02c7 100644 --- a/.github/actions/cache/action.yml +++ b/.github/actions/cache/action.yml @@ -34,7 +34,7 @@ inputs: outputs: cache-hit: description: 'A boolean value to indicate an exact match was found for the primary key' - value: ${{ steps.github-cache.outputs.cache-hit }} + value: ${{ steps.github-cache.outputs.cache-hit || steps.s3-cache.outputs.cache-hit }} runs: using: composite @@ -55,6 +55,7 @@ runs: - name: Cache Provided Path (GitHub Actions) id: github-cache + if: ${{ env.USE_S3_CACHE != 'true' }} uses: actions/cache@v4 with: path: ${{ env.GHA_CACHE_PATH }} @@ -66,11 +67,34 @@ runs: restore-keys: ${{ env.GHA_CACHE_RESTORE_KEYS }} upload-chunk-size: ${{ env.GHA_CACHE_UPLOAD_CHUNK_SIZE }} + - name: Configure AWS Credentials to access cache bucket + if: ${{ env.USE_S3_CACHE == 'true' }} + uses: aws-actions/configure-aws-credentials@v4 + with: + aws-region: us-west-2 + + - name: Cache Provided Path (S3) + if: ${{ env.USE_S3_CACHE == 'true' }} + id: s3-cache + env: + RUNS_ON_S3_BUCKET_CACHE: "salt-project-test-salt-github-actions-s3-cache" + AWS_REGION: 'us-west-2' + uses: runs-on/cache@v4 + with: + path: ${{ env.GHA_CACHE_PATH }} + key: ${{ env.GHA_CACHE_KEY }} + enableCrossOsArchive: ${{ env.GHA_CACHE_ENABLE_CROSS_OS_ARCHIVE }} + fail-on-cache-miss: ${{ env.GHA_CACHE_FAIL_ON_CACHE_MISS }} + lookup-only: ${{ env.GHA_CACHE_LOOKUP_ONLY }} + save-always: ${{ env.GHA_CACHE_SAVE_ALWAYS }} + restore-keys: ${{ env.GHA_CACHE_RESTORE_KEYS }} + upload-chunk-size: ${{ env.GHA_CACHE_UPLOAD_CHUNK_SIZE }} + - name: Verify 'fail-on-cache-miss' if: ${{ inputs.fail-on-cache-miss == 'true' }} shell: bash run: | - CACHE_HIT="${{ steps.github-cache.outputs.cache-hit }}" + CACHE_HIT="${{ steps.github-cache.outputs.cache-hit || steps.s3-cache.outputs.cache-hit }}" if [ "$CACHE_HIT" != "true" ]; then echo "No cache hit and fail-on-cache-miss is set to true." 
exit 1 diff --git a/.github/workflows/build-deps-ci-action.yml b/.github/workflows/build-deps-ci-action.yml index 26ad6fa8b65..b16ec5e6ee8 100644 --- a/.github/workflows/build-deps-ci-action.yml +++ b/.github/workflows/build-deps-ci-action.yml @@ -53,6 +53,8 @@ jobs: - self-hosted - linux - bastion + env: + USE_S3_CACHE: 'true' timeout-minutes: 90 strategy: fail-fast: false @@ -253,6 +255,8 @@ jobs: - self-hosted - linux - bastion + env: + USE_S3_CACHE: 'true' timeout-minutes: 90 strategy: fail-fast: false diff --git a/.github/workflows/build-deps-onedir.yml b/.github/workflows/build-deps-onedir.yml index 41d6f0260fe..8a13c7e3777 100644 --- a/.github/workflows/build-deps-onedir.yml +++ b/.github/workflows/build-deps-onedir.yml @@ -51,6 +51,8 @@ jobs: - self-hosted - linux - ${{ matrix.arch }} + env: + USE_S3_CACHE: 'true' steps: - name: "Throttle Builds" @@ -95,7 +97,8 @@ jobs: - arm64 runs-on: - ${{ matrix.arch == 'arm64' && 'macos-13-xlarge' || 'macos-12' }} - + env: + USE_S3_CACHE: 'false' steps: - name: "Throttle Builds" @@ -144,6 +147,8 @@ jobs: - x86 - amd64 runs-on: windows-latest + env: + USE_S3_CACHE: 'false' steps: - name: "Throttle Builds" diff --git a/.github/workflows/build-salt-onedir.yml b/.github/workflows/build-salt-onedir.yml index 7e3a6dc2807..7913860cf7d 100644 --- a/.github/workflows/build-salt-onedir.yml +++ b/.github/workflows/build-salt-onedir.yml @@ -41,6 +41,8 @@ jobs: build-salt-linux: name: Linux if: ${{ inputs.self-hosted-runners }} + env: + USE_S3_CACHE: 'true' strategy: fail-fast: false matrix: diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 9546f94f9c2..8f611d84b08 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -2007,6 +2007,8 @@ jobs: - self-hosted - linux - repo-nightly + env: + USE_S3_CACHE: 'true' needs: - prepare-workflow - build-source-tarball @@ -2107,6 +2109,8 @@ jobs: - self-hosted - linux - repo-nightly + env: + USE_S3_CACHE: 'true' needs: - prepare-workflow - build-pkgs-onedir @@ -2244,6 +2248,8 @@ jobs: - self-hosted - linux - repo-nightly + env: + USE_S3_CACHE: 'true' needs: - prepare-workflow - build-pkgs-onedir @@ -2432,6 +2438,8 @@ jobs: - self-hosted - linux - repo-nightly + env: + USE_S3_CACHE: 'true' needs: - prepare-workflow - build-pkgs-onedir @@ -2534,6 +2542,8 @@ jobs: - self-hosted - linux - repo-nightly + env: + USE_S3_CACHE: 'true' needs: - prepare-workflow - build-pkgs-onedir @@ -2624,6 +2634,8 @@ jobs: - self-hosted - linux - repo-nightly + env: + USE_S3_CACHE: 'true' needs: - prepare-workflow - build-salt-onedir diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 9bf81b4f897..29d3bb6b49f 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -52,6 +52,8 @@ jobs: - self-hosted - linux - repo-release + env: + USE_S3_CACHE: 'true' environment: release needs: - check-requirements @@ -125,6 +127,8 @@ jobs: - self-hosted - linux - repo-release + env: + USE_S3_CACHE: 'true' environment: release needs: - prepare-workflow @@ -191,6 +195,8 @@ jobs: - repo-release needs: - prepare-workflow + env: + USE_S3_CACHE: 'true' environment: release outputs: backup-complete: ${{ steps.backup.outputs.backup-complete }} @@ -220,12 +226,13 @@ jobs: - self-hosted - linux - repo-release + env: + USE_S3_CACHE: 'true' needs: - prepare-workflow - backup - download-onedir-artifact environment: release - steps: - name: Clone The Salt Repository uses: actions/checkout@v4 @@ -275,6 +282,8 @@ jobs: - self-hosted - linux - repo-release + env: 
+ USE_S3_CACHE: 'true' needs: - prepare-workflow - backup @@ -389,6 +398,8 @@ jobs: - self-hosted - linux - repo-release + env: + USE_S3_CACHE: 'true' steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index c7953f6a5fd..de37f95809f 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -1852,6 +1852,8 @@ jobs: - self-hosted - linux - repo-staging + env: + USE_S3_CACHE: 'true' needs: - prepare-workflow - build-source-tarball @@ -1952,6 +1954,8 @@ jobs: - self-hosted - linux - repo-staging + env: + USE_S3_CACHE: 'true' needs: - prepare-workflow - build-pkgs-onedir @@ -2089,6 +2093,8 @@ jobs: - self-hosted - linux - repo-staging + env: + USE_S3_CACHE: 'true' needs: - prepare-workflow - build-pkgs-onedir @@ -2279,6 +2285,8 @@ jobs: - self-hosted - linux - repo-staging + env: + USE_S3_CACHE: 'true' needs: - prepare-workflow - build-pkgs-onedir @@ -2381,6 +2389,8 @@ jobs: - self-hosted - linux - repo-staging + env: + USE_S3_CACHE: 'true' needs: - prepare-workflow - build-pkgs-onedir @@ -2471,6 +2481,8 @@ jobs: - self-hosted - linux - repo-staging + env: + USE_S3_CACHE: 'true' needs: - prepare-workflow - build-salt-onedir diff --git a/.github/workflows/templates/build-deps-ci-action.yml.jinja b/.github/workflows/templates/build-deps-ci-action.yml.jinja index 1038d2b71dd..de6dc03ba3c 100644 --- a/.github/workflows/templates/build-deps-ci-action.yml.jinja +++ b/.github/workflows/templates/build-deps-ci-action.yml.jinja @@ -53,6 +53,8 @@ jobs: - self-hosted - linux - bastion + env: + USE_S3_CACHE: 'true' timeout-minutes: 90 strategy: fail-fast: false @@ -253,6 +255,8 @@ jobs: - self-hosted - linux - bastion + env: + USE_S3_CACHE: 'true' timeout-minutes: 90 strategy: fail-fast: false diff --git a/.github/workflows/templates/build-repos.yml.jinja b/.github/workflows/templates/build-repos.yml.jinja index 6b8177498df..6584158d063 100644 --- a/.github/workflows/templates/build-repos.yml.jinja +++ b/.github/workflows/templates/build-repos.yml.jinja @@ -17,6 +17,8 @@ - self-hosted - linux - repo-<{ gh_environment }> + env: + USE_S3_CACHE: 'true' needs: - prepare-workflow <%- if type not in ("src", "onedir") %> diff --git a/.github/workflows/templates/release.yml.jinja b/.github/workflows/templates/release.yml.jinja index 3fedd45cad8..efeef41f8a4 100644 --- a/.github/workflows/templates/release.yml.jinja +++ b/.github/workflows/templates/release.yml.jinja @@ -74,6 +74,8 @@ permissions: - self-hosted - linux - repo-<{ gh_environment }> + env: + USE_S3_CACHE: 'true' environment: <{ gh_environment }> <%- if prepare_workflow_needs %> needs: @@ -157,6 +159,8 @@ permissions: - self-hosted - linux - repo-<{ gh_environment }> + env: + USE_S3_CACHE: 'true' environment: <{ gh_environment }> needs: - prepare-workflow @@ -212,6 +216,8 @@ permissions: - repo-<{ gh_environment }> needs: - prepare-workflow + env: + USE_S3_CACHE: 'true' environment: <{ gh_environment }> outputs: backup-complete: ${{ steps.backup.outputs.backup-complete }} @@ -242,12 +248,13 @@ permissions: - self-hosted - linux - repo-<{ gh_environment }> + env: + USE_S3_CACHE: 'true' needs: - prepare-workflow - backup - download-onedir-artifact environment: <{ gh_environment }> - steps: - name: Clone The Salt Repository uses: actions/checkout@v4 @@ -282,6 +289,8 @@ permissions: - self-hosted - linux - repo-<{ gh_environment }> + env: + USE_S3_CACHE: 'true' needs: - prepare-workflow - backup @@ -396,6 +405,8 @@ permissions: - self-hosted - linux - repo-<{ gh_environment 
}> + env: + USE_S3_CACHE: 'true' needs: - backup - release @@ -437,6 +448,8 @@ permissions: - self-hosted - linux - repo-<{ gh_environment }> + env: + USE_S3_CACHE: 'true' steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/templates/test-package-downloads-action.yml.jinja b/.github/workflows/templates/test-package-downloads-action.yml.jinja index 979dba27512..ee5f5fbfc1b 100644 --- a/.github/workflows/templates/test-package-downloads-action.yml.jinja +++ b/.github/workflows/templates/test-package-downloads-action.yml.jinja @@ -61,6 +61,8 @@ jobs: - self-hosted - linux - bastion + env: + USE_S3_CACHE: 'true' environment: ${{ inputs.environment }} timeout-minutes: 120 # 2 Hours - More than this and something is wrong strategy: @@ -270,6 +272,8 @@ jobs: macos: name: MacOS runs-on: ${{ matrix.distro-slug }} + env: + USE_S3_CACHE: 'false' environment: ${{ inputs.environment }} timeout-minutes: 120 # 2 Hours - More than this and something is wrong strategy: @@ -469,6 +473,8 @@ jobs: windows: name: Windows + env: + USE_S3_CACHE: 'true' runs-on: - self-hosted - linux diff --git a/.github/workflows/test-package-downloads-action.yml b/.github/workflows/test-package-downloads-action.yml index 93d43fc4b76..dc3fb12a661 100644 --- a/.github/workflows/test-package-downloads-action.yml +++ b/.github/workflows/test-package-downloads-action.yml @@ -61,6 +61,8 @@ jobs: - self-hosted - linux - bastion + env: + USE_S3_CACHE: 'true' environment: ${{ inputs.environment }} timeout-minutes: 120 # 2 Hours - More than this and something is wrong strategy: @@ -379,6 +381,8 @@ jobs: macos: name: MacOS runs-on: ${{ matrix.distro-slug }} + env: + USE_S3_CACHE: 'false' environment: ${{ inputs.environment }} timeout-minutes: 120 # 2 Hours - More than this and something is wrong strategy: @@ -585,6 +589,8 @@ jobs: windows: name: Windows + env: + USE_S3_CACHE: 'true' runs-on: - self-hosted - linux diff --git a/.github/workflows/test-packages-action-linux.yml b/.github/workflows/test-packages-action-linux.yml index ae85b3989ac..a2dc20f0080 100644 --- a/.github/workflows/test-packages-action-linux.yml +++ b/.github/workflows/test-packages-action-linux.yml @@ -69,6 +69,7 @@ env: PIP_EXTRA_INDEX_URL: https://pypi.org/simple PIP_DISABLE_PIP_VERSION_CHECK: "1" RAISE_DEPRECATIONS_RUNTIME_ERRORS: "1" + USE_S3_CACHE: 'true' jobs: From e0c876b9857c04e86aa828452e05da4572a13f6c Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 18 Mar 2024 18:44:18 +0000 Subject: [PATCH 055/102] Get AWS regions and SPB_ENVIRONMENT from metadata endpoint --- .github/actions/cache/action.yml | 17 ++++++++++++++--- 1 file changed, 14 insertions(+), 3 deletions(-) diff --git a/.github/actions/cache/action.yml b/.github/actions/cache/action.yml index a11df9f02c7..020b9d1e6b8 100644 --- a/.github/actions/cache/action.yml +++ b/.github/actions/cache/action.yml @@ -67,18 +67,29 @@ runs: restore-keys: ${{ env.GHA_CACHE_RESTORE_KEYS }} upload-chunk-size: ${{ env.GHA_CACHE_UPLOAD_CHUNK_SIZE }} + - name: Get Salt Project GitHub Actions Bot Environment + if: ${{ env.USE_S3_CACHE == 'true' }} + shell: bash + run: | + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" | tee -a "$GITHUB_ENV" + REGION=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" 
http://169.254.169.254/latest/meta-data/placement/region) + echo "GHA_CACHE_AWS_REGION=$REGION" | tee -a "$GITHUB_ENV" + - name: Configure AWS Credentials to access cache bucket + id: creds if: ${{ env.USE_S3_CACHE == 'true' }} uses: aws-actions/configure-aws-credentials@v4 with: - aws-region: us-west-2 + aws-region: ${{ env.GHA_CACHE_AWS_REGION }} - name: Cache Provided Path (S3) if: ${{ env.USE_S3_CACHE == 'true' }} id: s3-cache env: - RUNS_ON_S3_BUCKET_CACHE: "salt-project-test-salt-github-actions-s3-cache" - AWS_REGION: 'us-west-2' + AWS_REGION: ${{ env.GHA_CACHE_AWS_REGION }} + RUNS_ON_S3_BUCKET_CACHE: salt-project-${{ env.SPB_ENVIRONMENT}}-salt-github-actions-s3-cache uses: runs-on/cache@v4 with: path: ${{ env.GHA_CACHE_PATH }} From 16bead7bcc90b24dabe5f0925667235fcf355323 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 20 Mar 2024 07:44:29 +0000 Subject: [PATCH 056/102] Remove log call which no longer makes sense --- tests/conftest.py | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/conftest.py b/tests/conftest.py index aef67bcb246..8384f4a0352 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -448,7 +448,6 @@ def pytest_collection_modifyitems(config, items): groups_collection_modifyitems(config, items) from_filenames_collection_modifyitems(config, items) - log.warning("Modifying collected tests to keep track of fixture usage") timeout_marker_tests_paths = ( str(PYTESTS_DIR / "pkg"), str(PYTESTS_DIR / "scenarios"), From ebb02d6c3a5908c3cf65409828d59044457a9705 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 20 Mar 2024 10:10:30 +0000 Subject: [PATCH 057/102] Adjust some tests to get passing nightly builds again --- .../modules/state/requisites/test_listen.py | 62 +++++---- tests/pytests/functional/states/test_user.py | 119 ++++++++++-------- 2 files changed, 93 insertions(+), 88 deletions(-) diff --git a/tests/pytests/functional/modules/state/requisites/test_listen.py b/tests/pytests/functional/modules/state/requisites/test_listen.py index 35b8c131e22..d0126e76d24 100644 --- a/tests/pytests/functional/modules/state/requisites/test_listen.py +++ b/tests/pytests/functional/modules/state/requisites/test_listen.py @@ -318,41 +318,35 @@ def test_listen_requisite_resolution_names(state, state_tree): assert "test_|-listener_service_|-crond_|-mod_watch" in ret -def test_onlyif_req(state, subtests): - onlyif = [{}] - with subtests.test(onlyif=onlyif): - ret = state.single( - name="onlyif test", fun="test.succeed_with_changes", onlyif=onlyif - ) - assert ret.result is True - assert ret.comment == "Success!" - - onlyif = [{"fun": "test.true"}] - with subtests.test(onlyif=onlyif): - ret = state.single( - name="onlyif test", fun="test.succeed_without_changes", onlyif=onlyif - ) - assert ret.result is True - assert not ret.changes - assert ret.comment == "Success!" 
- - onlyif = [{"fun": "test.false"}] - with subtests.test(onlyif=onlyif): - ret = state.single( - name="onlyif test", fun="test.fail_with_changes", onlyif=onlyif - ) - assert ret.result is True - assert not ret.changes - assert ret.comment == "onlyif condition is false" - - onlyif = [{"fun": "test.true"}] - with subtests.test(onlyif=onlyif): - ret = state.single( - name="onlyif test", fun="test.fail_with_changes", onlyif=onlyif - ) - assert ret.result is False +@pytest.mark.parametrize( + "fun,onlyif,result,comment,assert_changes", + ( + ("test.succeed_with_changes", [{}], True, "Success!", None), + ( + "test.succeed_without_changes", + [{"fun": "test.true"}], + True, + "Success!", + False, + ), + ( + "test.fail_with_changes", + [{"fun": "test.false"}], + True, + "onlyif condition is false", + False, + ), + ("test.fail_with_changes", [{"fun": "test.true"}], False, "Failure!", True), + ), +) +def test_onlyif_req(state, fun, onlyif, result, comment, assert_changes): + ret = state.single(name="onlyif test", fun=fun, onlyif=onlyif) + assert ret.result is result + assert ret.comment == comment + if assert_changes is True: assert ret.changes - assert ret.comment == "Failure!" + elif assert_changes is False: + assert not ret.changes def test_listen_requisite_not_exist(state, state_tree): diff --git a/tests/pytests/functional/states/test_user.py b/tests/pytests/functional/states/test_user.py index 43ae8513012..28be3cd8618 100644 --- a/tests/pytests/functional/states/test_user.py +++ b/tests/pytests/functional/states/test_user.py @@ -6,6 +6,7 @@ user present with custom homedir """ import pathlib +import random import shutil import sys @@ -43,6 +44,11 @@ def username(sminion): pass +@pytest.fixture +def guid(): + return random.randint(60000, 61000) + + @pytest.fixture def user_home(username, tmp_path): if salt.utils.platform.is_windows(): @@ -429,73 +435,78 @@ def test_user_present_change_optional_groups( assert user_info["groups"] == [group_1.name] +@pytest.fixture +def user_present_groups(states): + groups = ["testgroup1", "testgroup2"] + try: + yield groups + finally: + for group in groups: + ret = states.group.absent(name=group) + assert ret.result is True + + @pytest.mark.skip_unless_on_linux(reason="underlying functionality only runs on Linux") -def test_user_present_no_groups(modules, states, username): +def test_user_present_no_groups(modules, states, username, user_present_groups, guid): """ test user.present when groups arg is not included by the group is created in another state. Re-run the states to ensure there are not changes and it is idempotent. 
""" - groups = ["testgroup1", "testgroup2"] - try: - ret = states.group.present(name=username, gid=61121) - assert ret.result is True + ret = states.group.present(name=username, gid=guid) + assert ret.result is True - ret = states.user.present( - name=username, - uid=61121, - gid=61121, - ) - assert ret.result is True - assert ret.changes["groups"] == [username] - assert ret.changes["name"] == username + ret = states.user.present( + name=username, + uid=guid, + gid=guid, + ) + assert ret.result is True + assert ret.changes["groups"] == [username] + assert ret.changes["name"] == username - ret = states.group.present( - name=groups[0], - members=[username], - ) - assert ret.changes["members"] == [username] + ret = states.group.present( + name=user_present_groups[0], + members=[username], + ) + assert ret.changes["members"] == [username] - ret = states.group.present( - name=groups[1], - members=[username], - ) - assert ret.changes["members"] == [username] + ret = states.group.present( + name=user_present_groups[1], + members=[username], + ) + assert ret.changes["members"] == [username] - user_info = modules.user.info(username) - assert user_info - assert user_info["groups"] == [username, groups[0], groups[1]] + user_info = modules.user.info(username) + assert user_info + assert user_info["groups"] == [username, *user_present_groups] - # run again, expecting no changes - ret = states.group.present(name=username) - assert ret.result is True - assert ret.changes == {} + # run again, expecting no changes + ret = states.group.present(name=username) + assert ret.result is True + assert ret.changes == {} - ret = states.user.present( - name=username, - ) - assert ret.result is True - assert ret.changes == {} + ret = states.user.present( + name=username, + ) + assert ret.result is True + assert ret.changes == {} - ret = states.group.present( - name=groups[0], - members=[username], - ) - assert ret.result is True - assert ret.changes == {} + ret = states.group.present( + name=user_present_groups[0], + members=[username], + ) + assert ret.result is True + assert ret.changes == {} - ret = states.group.present( - name=groups[1], - members=[username], - ) - assert ret.result is True - assert ret.changes == {} + ret = states.group.present( + name=user_present_groups[1], + members=[username], + ) + assert ret.result is True + assert ret.changes == {} - user_info = modules.user.info(username) - assert user_info - assert user_info["groups"] == [username, groups[0], groups[1]] - finally: - for group in groups: - ret = states.group.absent(name=group) - assert ret.result is True + user_info = modules.user.info(username) + assert user_info + assert user_info["groups"] == [username, *user_present_groups] From e525bbada3c6fccb7289f7a999c59460e5e30142 Mon Sep 17 00:00:00 2001 From: Shane Lee Date: Wed, 20 Mar 2024 15:22:11 -0600 Subject: [PATCH 058/102] Remove vcredist and ucrt from Windows install --- pkg/windows/install_vs_buildtools.ps1 | 1 - pkg/windows/msi/Product.wxs | 13 -- pkg/windows/msi/README-how-to-build.md | 4 - pkg/windows/msi/build_pkg.ps1 | 33 ---- .../nsis/installer/Salt-Minion-Setup.nsi | 166 ------------------ pkg/windows/nsis/tests/setup.ps1 | 19 +- pkg/windows/prep_salt.ps1 | 35 ---- 7 files changed, 1 insertion(+), 270 deletions(-) diff --git a/pkg/windows/install_vs_buildtools.ps1 b/pkg/windows/install_vs_buildtools.ps1 index 238b0175e42..1d51058d2f1 100644 --- a/pkg/windows/install_vs_buildtools.ps1 +++ b/pkg/windows/install_vs_buildtools.ps1 @@ -103,7 +103,6 @@ if ( $install_build_tools ) { 
"--add Microsoft.VisualStudio.Component.Windows81SDK", ` "--add Microsoft.VisualStudio.Component.Windows10SDK.17763", ` "--add Microsoft.VisualStudio.Component.VC.140", ` - "--add Microsoft.Component.VC.Runtime.UCRTSDK", ` "--lang en-US", ` "--includeRecommended", ` "--quiet", ` diff --git a/pkg/windows/msi/Product.wxs b/pkg/windows/msi/Product.wxs index fb46c9d9d97..9893a32ab0f 100644 --- a/pkg/windows/msi/Product.wxs +++ b/pkg/windows/msi/Product.wxs @@ -246,17 +246,6 @@ IMCAC - Immediate Custom Action - It's immediate - - - - - - - - - - - @@ -269,8 +258,6 @@ IMCAC - Immediate Custom Action - It's immediate - - diff --git a/pkg/windows/msi/README-how-to-build.md b/pkg/windows/msi/README-how-to-build.md index 73ce6e6a107..34327ba3ab6 100644 --- a/pkg/windows/msi/README-how-to-build.md +++ b/pkg/windows/msi/README-how-to-build.md @@ -10,10 +10,6 @@ You need - .Net 3.5 SDK (for WiX)* - [Wix 3](http://wixtoolset.org/releases/)** - [Build tools 2015](https://www.microsoft.com/en-US/download/confirmation.aspx?id=48159)** -- Microsoft_VC140_CRT_x64.msm from Visual Studio 2015** -- Microsoft_VC140_CRT_x86.msm from Visual Studio 2015** -- Microsoft_VC120_CRT_x64.msm from Visual Studio 2013** -- Microsoft_VC120_CRT_x86.msm from Visual Studio 2013** Notes: - * `build.cmd` will open `optionalfeatures` if necessary. diff --git a/pkg/windows/msi/build_pkg.ps1 b/pkg/windows/msi/build_pkg.ps1 index 6a6176a2d4e..67069c049fd 100644 --- a/pkg/windows/msi/build_pkg.ps1 +++ b/pkg/windows/msi/build_pkg.ps1 @@ -73,8 +73,6 @@ function VerifyOrDownload ($local_file, $URL, $SHA256) { # Script Variables #------------------------------------------------------------------------------- -$WEBCACHE_DIR = "$env:TEMP\msi_build_cache_dir" -$DEPS_URL = "https://repo.saltproject.io/windows/dependencies" $PROJECT_DIR = $(git rev-parse --show-toplevel) $BUILD_DIR = "$PROJECT_DIR\pkg\windows\build" $BUILDENV_DIR = "$PROJECT_DIR\pkg\windows\buildenv" @@ -121,21 +119,6 @@ Write-Host "- Architecture: $BUILD_ARCH" Write-Host "- Salt Version: $Version" Write-Host $("-" * 80) -#------------------------------------------------------------------------------- -# Ensure cache dir exists -#------------------------------------------------------------------------------- - -if ( ! (Test-Path -Path $WEBCACHE_DIR) ) { - Write-Host "Creating cache directory: " -NoNewline - New-Item -ItemType directory -Path $WEBCACHE_DIR | Out-Null - if ( Test-Path -Path $WEBCACHE_DIR ) { - Write-Result "Success" -ForegroundColor Green - } else { - Write-Result "Failed" -ForegroundColor Red - exit 1 - } -} - #------------------------------------------------------------------------------- # Ensure WIX environment variable is set, if not refresh and check again #------------------------------------------------------------------------------- @@ -156,21 +139,6 @@ if ( ! 
"$env:WIX" ) { } } -#------------------------------------------------------------------------------- -# Caching VC++ Runtimes -#------------------------------------------------------------------------------- - -$RUNTIMES = @( - ("Microsoft_VC120_CRT_x64.msm", "64", "15FD10A495287505184B8913DF8D6A9CA461F44F78BC74115A0C14A5EDD1C9A7"), - ("Microsoft_VC120_CRT_x86.msm", "32", "26340B393F52888B908AC3E67B935A80D390E1728A31FF38EBCEC01117EB2579"), - ("Microsoft_VC140_CRT_x64.msm", "64", "E1344D5943FB2BBB7A56470ED0B7E2B9B212CD9210D3CC6FA82BC3DA8F11EDA8"), - ("Microsoft_VC140_CRT_x86.msm", "32", "0D36CFE6E9ABD7F530DBAA4A83841CDBEF9B2ADCB625614AF18208FDCD6B92A4") -) -$RUNTIMES | ForEach-Object { - $name, $arch, $hash = $_ - VerifyOrDownload "$WEBCACHE_DIR\$name" "$DEPS_URL/$arch/$name" "$hash" -} - #------------------------------------------------------------------------------- # Converting to MSI Version #------------------------------------------------------------------------------- @@ -531,7 +499,6 @@ Push-Location $SCRIPT_DIR -dDisplayVersion="$Version" ` -dInternalVersion="$INTERNAL_VERSION" ` -dDISCOVER_INSTALLDIR="$($DISCOVER_INSTALLDIR[$i])" ` - -dWEBCACHE_DIR="$WEBCACHE_DIR" ` -dDISCOVER_CONFDIR="$DISCOVER_CONFDIR" ` -ext "$($ENV:WIX)bin\WixUtilExtension.dll" ` -ext "$($ENV:WIX)bin\WixUIExtension.dll" ` diff --git a/pkg/windows/nsis/installer/Salt-Minion-Setup.nsi b/pkg/windows/nsis/installer/Salt-Minion-Setup.nsi index 2a957056f51..59ca96c76f6 100644 --- a/pkg/windows/nsis/installer/Salt-Minion-Setup.nsi +++ b/pkg/windows/nsis/installer/Salt-Minion-Setup.nsi @@ -524,171 +524,6 @@ InstallDirRegKey HKLM "${PRODUCT_DIR_REGKEY}" "" ShowInstDetails show ShowUnInstDetails show - -Section -copy_prereqs - # Copy prereqs to the Plugins Directory - # These files are downloaded by build_pkg.bat - # This directory gets removed upon completion - SetOutPath "$PLUGINSDIR\" - File /r "..\..\prereqs\" -SectionEnd - -# Check if the Windows 10 Universal C Runtime (KB2999226) is installed. Python -# 3 needs the updated ucrt on Windows 8.1/2012R2 and lower. They are installed -# via KB2999226, but we're not going to patch the system here. Instead, we're -# going to copy the .dll files to the \salt\bin directory -Section -install_ucrt - - Var /GLOBAL UcrtFileName - - # Get the Major.Minor version Number - # Windows 10 introduced CurrentMajorVersionNumber - ReadRegStr $R0 HKLM "SOFTWARE\Microsoft\Windows NT\CurrentVersion" \ - CurrentMajorVersionNumber - - # Windows 10/2016 will return a value here, skip to the end if returned - StrCmp $R0 '' lbl_needs_ucrt 0 - - # Found Windows 10 - detailPrint "KB2999226 does not apply to this machine" - goto lbl_done - - lbl_needs_ucrt: - # UCRT only needed on Windows Server 2012R2/Windows 8.1 and below. The - # first ReadRegStr command above should have skipped to lbl_done if on - # Windows 10 box - - # Is the update already installed - ClearErrors - - # Use WMI to check if it's installed - detailPrint "Checking for existing UCRT (KB2999226) installation" - nsExec::ExecToStack 'cmd /q /c wmic qfe get hotfixid | findstr "^KB2999226"' - # Clean up the stack - Pop $R0 # Gets the ErrorCode - Pop $R1 # Gets the stdout, which should be KB2999226 if it's installed - - # If it returned KB2999226 it's already installed - StrCmp $R1 'KB2999226' lbl_done - - detailPrint "UCRT (KB2999226) not found" - - # Use RunningX64 here to get the Architecture for the system running the - # installer. 
- ${If} ${RunningX64} - StrCpy $UcrtFileName "ucrt_x64.zip" - ${Else} - StrCpy $UcrtFileName "ucrt_x86.zip" - ${EndIf} - - ClearErrors - - detailPrint "Unzipping UCRT dll files to $INSTDIR\Scripts" - CreateDirectory $INSTDIR\Scripts - nsisunz::UnzipToLog "$PLUGINSDIR\$UcrtFileName" "$INSTDIR\Scripts" - - # Clean up the stack - Pop $R0 # Get Error - - ${IfNot} $R0 == "success" - detailPrint "error: $R0" - Sleep 3000 - ${Else} - detailPrint "UCRT dll files copied successfully" - ${EndIf} - - lbl_done: - -SectionEnd - - -# Check and install Visual C++ redist 2013 packages -# Hidden section (-) to install VCRedist -Section -install_vcredist_2013 - - Var /GLOBAL VcRedistName - Var /GLOBAL VcRedistReg - - # Only install 64bit VCRedist on 64bit machines - # Use RunningX64 here to get the Architecture for the system running the - # installer. - ${If} ${RunningX64} - StrCpy $VcRedistName "vcredist_x64_2013" - StrCpy $VcRedistReg "SOFTWARE\WOW6432Node\Microsoft\VisualStudio\12.0\VC\Runtimes\x64" - ${Else} - StrCpy $VcRedistName "vcredist_x86_2013" - StrCpy $VcRedistReg "SOFTWARE\Microsoft\VisualStudio\12.0\VC\Runtimes\x86" - ${EndIf} - - # Detecting VCRedist Installation - detailPrint "Checking for $VcRedistName..." - ReadRegDword $0 HKLM $VcRedistReg "Installed" - StrCmp $0 "1" +2 0 - Call InstallVCRedist - -SectionEnd - - -Function InstallVCRedist - detailPrint "System requires $VcRedistName" - MessageBox MB_ICONQUESTION|MB_YESNO|MB_DEFBUTTON2 \ - "$VcRedistName is currently not installed. Would you like to \ - install?" \ - /SD IDYES IDYES InstallVcRedist - - detailPrint "$VcRedistName not installed" - detailPrint ">>>Installation aborted by user<<<" - MessageBox MB_ICONEXCLAMATION \ - "$VcRedistName not installed. Aborted by user.$\n$\n\ - Installer will now close." \ - /SD IDOK - Quit - - InstallVcRedist: - - # If an output variable is specified ($0 in the case below), ExecWait - # sets the variable with the exit code (and only sets the error flag if - # an error occurs; if an error occurs, the contents of the user - # variable are undefined). - # http://nsis.sourceforge.net/Reference/ExecWait - ClearErrors - detailPrint "Installing $VcRedistName..." - ExecWait '"$PLUGINSDIR\$VcRedistName.exe" /install /quiet /norestart' $0 - - IfErrors 0 CheckVcRedistErrorCode - - detailPrint "An error occurred during installation of $VcRedistName" - MessageBox MB_OK|MB_ICONEXCLAMATION \ - "$VcRedistName failed to install. Try installing the package \ - manually.$\n$\n\ - The installer will now close." \ - /SD IDOK - Quit - - CheckVcRedistErrorCode: - # Check for Reboot Error Code (3010) - ${If} $0 == 3010 - detailPrint "$VcRedistName installed but requires a restart to complete." - detailPrint "Reboot and run Salt install again" - MessageBox MB_OK|MB_ICONINFORMATION \ - "$VcRedistName installed but requires a restart to complete." \ - /SD IDOK - - # Check for any other errors - ${ElseIfNot} $0 == 0 - detailPrint "An error occurred during installation of $VcRedistName" - detailPrint "Error: $0" - MessageBox MB_OK|MB_ICONEXCLAMATION \ - "$VcRedistName failed to install. Try installing the package \ - mnually.$\n\ - ErrorCode: $0$\n\ - The installer will now close." 
\ - /SD IDOK - ${EndIf} - -FunctionEnd - - Section "MainSection" SEC01 ${If} $MoveExistingConfig == 1 @@ -763,7 +598,6 @@ Function .onInit ${EndIf} ${EndIf} - InitPluginsDir Call parseInstallerCommandLineSwitches # Uninstall msi-installed salt diff --git a/pkg/windows/nsis/tests/setup.ps1 b/pkg/windows/nsis/tests/setup.ps1 index ddebf709be0..c5d8b7459a6 100644 --- a/pkg/windows/nsis/tests/setup.ps1 +++ b/pkg/windows/nsis/tests/setup.ps1 @@ -35,7 +35,6 @@ $SCRIPT_DIR = (Get-ChildItem "$($myInvocation.MyCommand.Definition)").Directo $WINDOWS_DIR = "$PROJECT_DIR\pkg\windows" $NSIS_DIR = "$WINDOWS_DIR\nsis" $BUILDENV_DIR = "$WINDOWS_DIR\buildenv" -$PREREQS_DIR = "$WINDOWS_DIR\prereqs" $NSIS_BIN = "$( ${env:ProgramFiles(x86)} )\NSIS\makensis.exe" #------------------------------------------------------------------------------- @@ -50,8 +49,7 @@ Write-Host $("-" * 80) # Setup Directories #------------------------------------------------------------------------------- -$directories = "$PREREQS_DIR", - "$BUILDENV_DIR", +$directories = "$BUILDENV_DIR", "$BUILDENV_DIR\configs" $directories | ForEach-Object { if ( ! (Test-Path -Path "$_") ) { @@ -70,21 +68,6 @@ $directories | ForEach-Object { # Create binaries #------------------------------------------------------------------------------- -$prereq_files = "vcredist_x86_2013.exe", - "vcredist_x64_2013.exe", - "ucrt_x86.zip", - "ucrt_x64.zip" -$prereq_files | ForEach-Object { - Write-Host "Creating $_`: " -NoNewline - Set-Content -Path "$PREREQS_DIR\$_" -Value "binary" - if ( Test-Path -Path "$PREREQS_DIR\$_" ) { - Write-Result "Success" - } else { - Write-Result "Failed" -ForegroundColor Red - exit 1 - } -} - $binary_files = "ssm.exe", "python.exe" $binary_files | ForEach-Object { diff --git a/pkg/windows/prep_salt.ps1 b/pkg/windows/prep_salt.ps1 index 74497648482..7bcde5b60c7 100644 --- a/pkg/windows/prep_salt.ps1 +++ b/pkg/windows/prep_salt.ps1 @@ -62,7 +62,6 @@ if ( $BuildDir ) { } else { $BUILD_DIR = "$SCRIPT_DIR\buildenv" } -$PREREQ_DIR = "$SCRIPT_DIR\prereqs" $SCRIPTS_DIR = "$BUILD_DIR\Scripts" $BUILD_CONF_DIR = "$BUILD_DIR\configs" $SITE_PKGS_DIR = "$BUILD_DIR\Lib\site-packages" @@ -126,17 +125,6 @@ if ( Test-Path -Path $BUILD_CONF_DIR) { } } -if ( Test-Path -Path $PREREQ_DIR ) { - Write-Host "Removing PreReq Directory: " -NoNewline - Remove-Item -Path $PREREQ_DIR -Recurse -Force - if ( ! 
(Test-Path -Path $PREREQ_DIR) ) { - Write-Result "Success" -ForegroundColor Green - } else { - Write-Result "Failed" -ForegroundColor Red - exit 1 - } -} - #------------------------------------------------------------------------------- # Staging the Build Environment #------------------------------------------------------------------------------- @@ -183,29 +171,6 @@ $scripts | ForEach-Object { } } -# Copy VCRedist 2013 to the prereqs directory -New-Item -Path $PREREQ_DIR -ItemType Directory | Out-Null -Write-Host "Copying VCRedist 2013 $ARCH_X to prereqs: " -NoNewline -$file = "vcredist_$ARCH_X`_2013.exe" -Invoke-WebRequest -Uri "$SALT_DEP_URL/$file" -OutFile "$PREREQ_DIR\$file" -if ( Test-Path -Path "$PREREQ_DIR\$file" ) { - Write-Result "Success" -ForegroundColor Green -} else { - Write-Result "Failed" -ForegroundColor Red - exit 1 -} - -# Copy Universal C Runtimes to the prereqs directory -Write-Host "Copying Universal C Runtimes $ARCH_X to prereqs: " -NoNewline -$file = "ucrt_$ARCH_X.zip" -Invoke-WebRequest -Uri "$SALT_DEP_URL/$file" -OutFile "$PREREQ_DIR\$file" -if ( Test-Path -Path "$PREREQ_DIR\$file" ) { - Write-Result "Success" -ForegroundColor Green -} else { - Write-Result "Failed" -ForegroundColor Red - exit 1 -} - #------------------------------------------------------------------------------- # Remove binaries not needed by Salt #------------------------------------------------------------------------------- From 212e7211c6853576f0a75b77a94d084bb0c706db Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 21 Mar 2024 16:14:52 +0000 Subject: [PATCH 059/102] Upgrade to ``pytest-salt-factories==1.0.0`` --- requirements/pytest.txt | 2 +- requirements/static/ci/py3.10/cloud.txt | 2 +- requirements/static/ci/py3.10/darwin.txt | 2 +- requirements/static/ci/py3.10/freebsd.txt | 2 +- requirements/static/ci/py3.10/linux.txt | 2 +- requirements/static/ci/py3.10/windows.txt | 2 +- requirements/static/ci/py3.11/cloud.txt | 2 +- requirements/static/ci/py3.11/darwin.txt | 2 +- requirements/static/ci/py3.11/freebsd.txt | 2 +- requirements/static/ci/py3.11/linux.txt | 2 +- requirements/static/ci/py3.11/windows.txt | 2 +- requirements/static/ci/py3.12/cloud.txt | 2 +- requirements/static/ci/py3.12/darwin.txt | 2 +- requirements/static/ci/py3.12/freebsd.txt | 2 +- requirements/static/ci/py3.12/linux.txt | 2 +- requirements/static/ci/py3.12/windows.txt | 2 +- requirements/static/ci/py3.8/cloud.txt | 2 +- requirements/static/ci/py3.8/freebsd.txt | 2 +- requirements/static/ci/py3.8/linux.txt | 2 +- requirements/static/ci/py3.8/windows.txt | 2 +- requirements/static/ci/py3.9/cloud.txt | 2 +- requirements/static/ci/py3.9/darwin.txt | 2 +- requirements/static/ci/py3.9/freebsd.txt | 2 +- requirements/static/ci/py3.9/linux.txt | 2 +- requirements/static/ci/py3.9/windows.txt | 2 +- 25 files changed, 25 insertions(+), 25 deletions(-) diff --git a/requirements/pytest.txt b/requirements/pytest.txt index 5e6c895538d..d53137d6601 100644 --- a/requirements/pytest.txt +++ b/requirements/pytest.txt @@ -2,7 +2,7 @@ mock >= 3.0.0 # PyTest docker pytest >= 7.2.0 -pytest-salt-factories >= 1.0.0rc29 +pytest-salt-factories >= 1.0.0 pytest-helpers-namespace >= 2019.1.8 pytest-subtests pytest-timeout >= 2.3.1 diff --git a/requirements/static/ci/py3.10/cloud.txt b/requirements/static/ci/py3.10/cloud.txt index d1c5c0003e5..8b2b724ea02 100644 --- a/requirements/static/ci/py3.10/cloud.txt +++ b/requirements/static/ci/py3.10/cloud.txt @@ -47,7 +47,7 @@ pyspnego==0.9.0 # -r requirements/static/ci/cloud.in # 
requests-ntlm # smbprotocol -pywinrm==0.4.3 +pywinrm==0.3.0 # via -r requirements/static/ci/cloud.in requests-ntlm==1.2.0 # via pywinrm diff --git a/requirements/static/ci/py3.10/darwin.txt b/requirements/static/ci/py3.10/darwin.txt index 99134aa1e28..a12c9dd1136 100644 --- a/requirements/static/ci/py3.10/darwin.txt +++ b/requirements/static/ci/py3.10/darwin.txt @@ -362,7 +362,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.10/freebsd.txt b/requirements/static/ci/py3.10/freebsd.txt index 645c76dffae..9b4a26e9c4b 100644 --- a/requirements/static/ci/py3.10/freebsd.txt +++ b/requirements/static/ci/py3.10/freebsd.txt @@ -366,7 +366,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.10/linux.txt b/requirements/static/ci/py3.10/linux.txt index 7fcc83a90d9..cb91db86959 100644 --- a/requirements/static/ci/py3.10/linux.txt +++ b/requirements/static/ci/py3.10/linux.txt @@ -400,7 +400,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.10/windows.txt b/requirements/static/ci/py3.10/windows.txt index d92459bfebf..43270c3a9eb 100644 --- a/requirements/static/ci/py3.10/windows.txt +++ b/requirements/static/ci/py3.10/windows.txt @@ -321,7 +321,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.11/cloud.txt b/requirements/static/ci/py3.11/cloud.txt index ede2f925742..6049488e0c8 100644 --- a/requirements/static/ci/py3.11/cloud.txt +++ b/requirements/static/ci/py3.11/cloud.txt @@ -47,7 +47,7 @@ pyspnego==0.9.0 # -r requirements/static/ci/cloud.in # requests-ntlm # smbprotocol -pywinrm==0.4.3 +pywinrm==0.3.0 # via -r requirements/static/ci/cloud.in requests-ntlm==1.2.0 # via pywinrm diff --git a/requirements/static/ci/py3.11/darwin.txt b/requirements/static/ci/py3.11/darwin.txt index 08b3f931cf9..5fe774ff19a 100644 --- a/requirements/static/ci/py3.11/darwin.txt +++ b/requirements/static/ci/py3.11/darwin.txt @@ -363,7 +363,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.11/freebsd.txt b/requirements/static/ci/py3.11/freebsd.txt index e1506e881d7..95679fe7c16 100644 --- a/requirements/static/ci/py3.11/freebsd.txt +++ b/requirements/static/ci/py3.11/freebsd.txt @@ -367,7 +367,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r 
requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.11/linux.txt b/requirements/static/ci/py3.11/linux.txt index b7d998a0042..1f843813884 100644 --- a/requirements/static/ci/py3.11/linux.txt +++ b/requirements/static/ci/py3.11/linux.txt @@ -399,7 +399,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.11/windows.txt b/requirements/static/ci/py3.11/windows.txt index 2e41dc98310..28ba26de656 100644 --- a/requirements/static/ci/py3.11/windows.txt +++ b/requirements/static/ci/py3.11/windows.txt @@ -322,7 +322,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.12/cloud.txt b/requirements/static/ci/py3.12/cloud.txt index c701214f7c9..3a53b608fef 100644 --- a/requirements/static/ci/py3.12/cloud.txt +++ b/requirements/static/ci/py3.12/cloud.txt @@ -521,7 +521,7 @@ pytest-httpserver==1.0.8 # via # -c requirements/static/ci/py3.12/linux.txt # -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via # -c requirements/static/ci/py3.12/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.12/darwin.txt b/requirements/static/ci/py3.12/darwin.txt index fa8fdedaa59..57e7821892f 100644 --- a/requirements/static/ci/py3.12/darwin.txt +++ b/requirements/static/ci/py3.12/darwin.txt @@ -363,7 +363,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.12/freebsd.txt b/requirements/static/ci/py3.12/freebsd.txt index 7dacca3b902..f64a4f56789 100644 --- a/requirements/static/ci/py3.12/freebsd.txt +++ b/requirements/static/ci/py3.12/freebsd.txt @@ -367,7 +367,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.12/linux.txt b/requirements/static/ci/py3.12/linux.txt index 45278b4665c..b5a06a73fb5 100644 --- a/requirements/static/ci/py3.12/linux.txt +++ b/requirements/static/ci/py3.12/linux.txt @@ -399,7 +399,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.12/windows.txt b/requirements/static/ci/py3.12/windows.txt index b880be50cdb..6aacccef4c2 100644 --- a/requirements/static/ci/py3.12/windows.txt +++ b/requirements/static/ci/py3.12/windows.txt @@ -322,7 +322,7 @@ 
pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.8/cloud.txt b/requirements/static/ci/py3.8/cloud.txt index e6caac1247a..e8662fb128d 100644 --- a/requirements/static/ci/py3.8/cloud.txt +++ b/requirements/static/ci/py3.8/cloud.txt @@ -47,7 +47,7 @@ pyspnego==0.9.0 # -r requirements/static/ci/cloud.in # requests-ntlm # smbprotocol -pywinrm==0.4.3 +pywinrm==0.3.0 # via -r requirements/static/ci/cloud.in requests-ntlm==1.2.0 # via pywinrm diff --git a/requirements/static/ci/py3.8/freebsd.txt b/requirements/static/ci/py3.8/freebsd.txt index 1bdc85ad2c7..94b016c1072 100644 --- a/requirements/static/ci/py3.8/freebsd.txt +++ b/requirements/static/ci/py3.8/freebsd.txt @@ -370,7 +370,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.8/linux.txt b/requirements/static/ci/py3.8/linux.txt index e3f526a38bd..9604959fda4 100644 --- a/requirements/static/ci/py3.8/linux.txt +++ b/requirements/static/ci/py3.8/linux.txt @@ -397,7 +397,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.8/windows.txt b/requirements/static/ci/py3.8/windows.txt index c839a1f5a20..cadaf44a4cf 100644 --- a/requirements/static/ci/py3.8/windows.txt +++ b/requirements/static/ci/py3.8/windows.txt @@ -325,7 +325,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.9/cloud.txt b/requirements/static/ci/py3.9/cloud.txt index fbed2f95c05..f2bbb436946 100644 --- a/requirements/static/ci/py3.9/cloud.txt +++ b/requirements/static/ci/py3.9/cloud.txt @@ -47,7 +47,7 @@ pyspnego==0.9.0 # -r requirements/static/ci/cloud.in # requests-ntlm # smbprotocol -pywinrm==0.4.3 +pywinrm==0.3.0 # via -r requirements/static/ci/cloud.in requests-ntlm==1.2.0 # via pywinrm diff --git a/requirements/static/ci/py3.9/darwin.txt b/requirements/static/ci/py3.9/darwin.txt index 511f5b0a0a4..f1a1c52e602 100644 --- a/requirements/static/ci/py3.9/darwin.txt +++ b/requirements/static/ci/py3.9/darwin.txt @@ -362,7 +362,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.9/freebsd.txt b/requirements/static/ci/py3.9/freebsd.txt index d27ad858afc..69ff9ebcb6d 100644 --- a/requirements/static/ci/py3.9/freebsd.txt +++ b/requirements/static/ci/py3.9/freebsd.txt @@ -366,7 +366,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 
# via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.9/linux.txt b/requirements/static/ci/py3.9/linux.txt index ee6ee056c67..10042421e25 100644 --- a/requirements/static/ci/py3.9/linux.txt +++ b/requirements/static/ci/py3.9/linux.txt @@ -393,7 +393,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.9/windows.txt b/requirements/static/ci/py3.9/windows.txt index f3eb93920b5..23d59912b5c 100644 --- a/requirements/static/ci/py3.9/windows.txt +++ b/requirements/static/ci/py3.9/windows.txt @@ -321,7 +321,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories From fccecc695ca9d4b10cb237d2dc4d9f1df9c71f1e Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 21 Mar 2024 16:56:53 +0000 Subject: [PATCH 060/102] Stop ignoring the host keys, query them and write them down as accepted. --- tests/conftest.py | 17 ++++++++++++++++- .../integration/netapi/test_ssh_client.py | 12 ++++++------ 2 files changed, 22 insertions(+), 7 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index bc2dd0eda25..8c45640de5e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1399,7 +1399,21 @@ def sshd_server(salt_factories, sshd_config_dir, salt_master, grains): @pytest.fixture(scope="module") -def salt_ssh_roster_file(sshd_server, salt_master): +def known_hosts_file(sshd_server, salt_master, salt_factories): + with pytest.helpers.temp_file( + "ssh-known-hosts", + "\n".join(sshd_server.get_host_keys()), + salt_factories.tmp_root_dir, + ) as known_hosts_file, pytest.helpers.temp_file( + "master.d/ssh-known-hosts.conf", + f"known_hosts_file: {known_hosts_file}", + salt_master.config_dir, + ): + yield known_hosts_file + + +@pytest.fixture(scope="module") +def salt_ssh_roster_file(sshd_server, salt_master, known_hosts_file): roster_contents = """ localhost: host: 127.0.0.1 @@ -1412,6 +1426,7 @@ def salt_ssh_roster_file(sshd_server, salt_master): ) if salt.utils.platform.is_darwin(): roster_contents += " set_path: $PATH:/usr/local/bin/\n" + with pytest.helpers.temp_file( "roster", roster_contents, salt_master.config_dir ) as roster_file: diff --git a/tests/pytests/integration/netapi/test_ssh_client.py b/tests/pytests/integration/netapi/test_ssh_client.py index 1de9f078773..11f25d9bd47 100644 --- a/tests/pytests/integration/netapi/test_ssh_client.py +++ b/tests/pytests/integration/netapi/test_ssh_client.py @@ -1,3 +1,5 @@ +import logging + import pytest import salt.netapi @@ -19,9 +21,12 @@ pytestmark = [ pytest.mark.timeout_unless_on_windows(120), ] +log = logging.getLogger(__name__) + @pytest.fixture -def client_config(client_config): +def client_config(client_config, known_hosts_file): + client_config["known_hosts_file"] = str(known_hosts_file) client_config["netapi_enable_clients"] = ["ssh"] return client_config @@ -74,7 +79,6 @@ def test_ssh(client, auth_creds, salt_ssh_roster_file, rosters_dir, ssh_priv_key "client": "ssh", "tgt": 
"localhost", "fun": "test.ping", - "ignore_host_keys": True, "roster_file": str(salt_ssh_roster_file), "rosters": [rosters_dir], "ssh_priv": ssh_priv_key, @@ -194,7 +198,6 @@ def test_shell_inject_tgt(client, salt_ssh_roster_file, tmp_path, salt_auto_acco "eauth": "auto", "username": salt_auto_account.username, "password": salt_auto_account.password, - "ignore_host_keys": True, } ret = client.run(low) assert path.exists() is False @@ -249,7 +252,6 @@ def test_shell_inject_ssh_port( "roster_file": str(salt_ssh_roster_file), "rosters": "/", "ssh_port": f"hhhhh|id>{path} #", - "ignore_host_keys": True, } ret = client.run(low) assert path.exists() is False @@ -277,7 +279,6 @@ def test_shell_inject_remote_port_forwards( "eauth": "auto", "username": salt_auto_account.username, "password": salt_auto_account.password, - "ignore_host_keys": True, } ret = client.run(low) assert path.exists() is False @@ -324,7 +325,6 @@ def test_ssh_auth_bypass(client, salt_ssh_roster_file): "roster_file": str(salt_ssh_roster_file), "rosters": "/", "eauth": "xx", - "ignore_host_keys": True, } with pytest.raises(EauthAuthenticationError): client.run(low) From ccfd8ea364366e451547afb721078f845c33707b Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 21 Mar 2024 16:14:52 +0000 Subject: [PATCH 061/102] Upgrade to ``pytest-salt-factories==1.0.0`` --- requirements/pytest.txt | 2 +- requirements/static/ci/py3.10/cloud.txt | 2 +- requirements/static/ci/py3.10/darwin.txt | 2 +- requirements/static/ci/py3.10/freebsd.txt | 2 +- requirements/static/ci/py3.10/linux.txt | 2 +- requirements/static/ci/py3.10/windows.txt | 2 +- requirements/static/ci/py3.11/cloud.txt | 2 +- requirements/static/ci/py3.11/darwin.txt | 2 +- requirements/static/ci/py3.11/freebsd.txt | 2 +- requirements/static/ci/py3.11/linux.txt | 2 +- requirements/static/ci/py3.11/windows.txt | 2 +- requirements/static/ci/py3.12/cloud.txt | 2 +- requirements/static/ci/py3.12/darwin.txt | 2 +- requirements/static/ci/py3.12/freebsd.txt | 2 +- requirements/static/ci/py3.12/linux.txt | 2 +- requirements/static/ci/py3.12/windows.txt | 2 +- requirements/static/ci/py3.7/cloud.txt | 2 +- requirements/static/ci/py3.7/freebsd.txt | 2 +- requirements/static/ci/py3.7/linux.txt | 2 +- requirements/static/ci/py3.7/windows.txt | 2 +- requirements/static/ci/py3.8/cloud.txt | 2 +- requirements/static/ci/py3.8/freebsd.txt | 2 +- requirements/static/ci/py3.8/linux.txt | 2 +- requirements/static/ci/py3.8/windows.txt | 2 +- requirements/static/ci/py3.9/cloud.txt | 2 +- requirements/static/ci/py3.9/darwin.txt | 2 +- requirements/static/ci/py3.9/freebsd.txt | 2 +- requirements/static/ci/py3.9/linux.txt | 2 +- requirements/static/ci/py3.9/windows.txt | 2 +- 29 files changed, 29 insertions(+), 29 deletions(-) diff --git a/requirements/pytest.txt b/requirements/pytest.txt index 5e6c895538d..d53137d6601 100644 --- a/requirements/pytest.txt +++ b/requirements/pytest.txt @@ -2,7 +2,7 @@ mock >= 3.0.0 # PyTest docker pytest >= 7.2.0 -pytest-salt-factories >= 1.0.0rc29 +pytest-salt-factories >= 1.0.0 pytest-helpers-namespace >= 2019.1.8 pytest-subtests pytest-timeout >= 2.3.1 diff --git a/requirements/static/ci/py3.10/cloud.txt b/requirements/static/ci/py3.10/cloud.txt index c948ec499d8..2e33e0b4545 100644 --- a/requirements/static/ci/py3.10/cloud.txt +++ b/requirements/static/ci/py3.10/cloud.txt @@ -474,7 +474,7 @@ pytest-httpserver==1.0.8 # via # -c requirements/static/ci/py3.10/linux.txt # -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via # -c 
requirements/static/ci/py3.10/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.10/darwin.txt b/requirements/static/ci/py3.10/darwin.txt index 1a08e11b99e..4e0e5401243 100644 --- a/requirements/static/ci/py3.10/darwin.txt +++ b/requirements/static/ci/py3.10/darwin.txt @@ -332,7 +332,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.10/freebsd.txt b/requirements/static/ci/py3.10/freebsd.txt index d4ff409bf90..990aa698296 100644 --- a/requirements/static/ci/py3.10/freebsd.txt +++ b/requirements/static/ci/py3.10/freebsd.txt @@ -325,7 +325,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.10/linux.txt b/requirements/static/ci/py3.10/linux.txt index 1a427909d2a..c00e7ab5eaf 100644 --- a/requirements/static/ci/py3.10/linux.txt +++ b/requirements/static/ci/py3.10/linux.txt @@ -342,7 +342,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.10/windows.txt b/requirements/static/ci/py3.10/windows.txt index 5085f969dcd..731ba77a15e 100644 --- a/requirements/static/ci/py3.10/windows.txt +++ b/requirements/static/ci/py3.10/windows.txt @@ -310,7 +310,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.11/cloud.txt b/requirements/static/ci/py3.11/cloud.txt index be47bc56fcd..59838032f9e 100644 --- a/requirements/static/ci/py3.11/cloud.txt +++ b/requirements/static/ci/py3.11/cloud.txt @@ -438,7 +438,7 @@ pytest-httpserver==1.0.8 # via # -c requirements/static/ci/py3.11/linux.txt # -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via # -c requirements/static/ci/py3.11/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.11/darwin.txt b/requirements/static/ci/py3.11/darwin.txt index 3318f874774..83b07114fcd 100644 --- a/requirements/static/ci/py3.11/darwin.txt +++ b/requirements/static/ci/py3.11/darwin.txt @@ -303,7 +303,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.11/freebsd.txt b/requirements/static/ci/py3.11/freebsd.txt index 0c5db4047d5..2c60650e8e6 100644 --- a/requirements/static/ci/py3.11/freebsd.txt +++ b/requirements/static/ci/py3.11/freebsd.txt @@ -302,7 +302,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt 
-pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.11/linux.txt b/requirements/static/ci/py3.11/linux.txt index f65e23eef63..29d77fbb9c5 100644 --- a/requirements/static/ci/py3.11/linux.txt +++ b/requirements/static/ci/py3.11/linux.txt @@ -319,7 +319,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.11/windows.txt b/requirements/static/ci/py3.11/windows.txt index dd6c569671a..a035acff7ea 100644 --- a/requirements/static/ci/py3.11/windows.txt +++ b/requirements/static/ci/py3.11/windows.txt @@ -306,7 +306,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.12/cloud.txt b/requirements/static/ci/py3.12/cloud.txt index 8c59f4b425a..40d463d9b2a 100644 --- a/requirements/static/ci/py3.12/cloud.txt +++ b/requirements/static/ci/py3.12/cloud.txt @@ -438,7 +438,7 @@ pytest-httpserver==1.0.8 # via # -c requirements/static/ci/py3.12/linux.txt # -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via # -c requirements/static/ci/py3.12/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.12/darwin.txt b/requirements/static/ci/py3.12/darwin.txt index 7968842ee75..c5dff6ccbfc 100644 --- a/requirements/static/ci/py3.12/darwin.txt +++ b/requirements/static/ci/py3.12/darwin.txt @@ -303,7 +303,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.12/freebsd.txt b/requirements/static/ci/py3.12/freebsd.txt index 73edffa6cdf..dccb1763f5f 100644 --- a/requirements/static/ci/py3.12/freebsd.txt +++ b/requirements/static/ci/py3.12/freebsd.txt @@ -302,7 +302,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.12/linux.txt b/requirements/static/ci/py3.12/linux.txt index 5f698e3728a..580e5f60ddb 100644 --- a/requirements/static/ci/py3.12/linux.txt +++ b/requirements/static/ci/py3.12/linux.txt @@ -319,7 +319,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.12/windows.txt b/requirements/static/ci/py3.12/windows.txt index b460b78018b..e5949cfd1ed 100644 --- a/requirements/static/ci/py3.12/windows.txt +++ b/requirements/static/ci/py3.12/windows.txt @@ -306,7 +306,7 @@ pytest-helpers-namespace==2021.12.29 
# pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.7/cloud.txt b/requirements/static/ci/py3.7/cloud.txt index 4eebe8c3fce..49d1316122f 100644 --- a/requirements/static/ci/py3.7/cloud.txt +++ b/requirements/static/ci/py3.7/cloud.txt @@ -521,7 +521,7 @@ pytest-httpserver==1.0.6 # via # -c requirements/static/ci/py3.7/linux.txt # -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via # -c requirements/static/ci/py3.7/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.7/freebsd.txt b/requirements/static/ci/py3.7/freebsd.txt index 907bfe38c62..d87fada59cf 100644 --- a/requirements/static/ci/py3.7/freebsd.txt +++ b/requirements/static/ci/py3.7/freebsd.txt @@ -365,7 +365,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.6 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.7/linux.txt b/requirements/static/ci/py3.7/linux.txt index 77044b1661d..20ca93af2bd 100644 --- a/requirements/static/ci/py3.7/linux.txt +++ b/requirements/static/ci/py3.7/linux.txt @@ -377,7 +377,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.6 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.7/windows.txt b/requirements/static/ci/py3.7/windows.txt index f8dce51150f..c708594a9d7 100644 --- a/requirements/static/ci/py3.7/windows.txt +++ b/requirements/static/ci/py3.7/windows.txt @@ -324,7 +324,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.6 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.8/cloud.txt b/requirements/static/ci/py3.8/cloud.txt index f8e34628f78..c52d9b94590 100644 --- a/requirements/static/ci/py3.8/cloud.txt +++ b/requirements/static/ci/py3.8/cloud.txt @@ -508,7 +508,7 @@ pytest-httpserver==1.0.8 # via # -c requirements/static/ci/py3.8/linux.txt # -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via # -c requirements/static/ci/py3.8/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.8/freebsd.txt b/requirements/static/ci/py3.8/freebsd.txt index e53274c32bd..b4b0596ec19 100644 --- a/requirements/static/ci/py3.8/freebsd.txt +++ b/requirements/static/ci/py3.8/freebsd.txt @@ -352,7 +352,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.8/linux.txt b/requirements/static/ci/py3.8/linux.txt index e9bd555c0f7..b80af4171c7 100644 --- a/requirements/static/ci/py3.8/linux.txt +++ b/requirements/static/ci/py3.8/linux.txt @@ -364,7 +364,7 @@ 
pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.8/windows.txt b/requirements/static/ci/py3.8/windows.txt index da0991bdb2c..8d57b564593 100644 --- a/requirements/static/ci/py3.8/windows.txt +++ b/requirements/static/ci/py3.8/windows.txt @@ -311,7 +311,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.9/cloud.txt b/requirements/static/ci/py3.9/cloud.txt index 68491d77b5d..452e2242050 100644 --- a/requirements/static/ci/py3.9/cloud.txt +++ b/requirements/static/ci/py3.9/cloud.txt @@ -510,7 +510,7 @@ pytest-httpserver==1.0.8 # via # -c requirements/static/ci/py3.9/linux.txt # -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via # -c requirements/static/ci/py3.9/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.9/darwin.txt b/requirements/static/ci/py3.9/darwin.txt index 57ae604f2da..e7b69e46952 100644 --- a/requirements/static/ci/py3.9/darwin.txt +++ b/requirements/static/ci/py3.9/darwin.txt @@ -361,7 +361,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.9/freebsd.txt b/requirements/static/ci/py3.9/freebsd.txt index 722061db973..50569ed6c7d 100644 --- a/requirements/static/ci/py3.9/freebsd.txt +++ b/requirements/static/ci/py3.9/freebsd.txt @@ -354,7 +354,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.9/linux.txt b/requirements/static/ci/py3.9/linux.txt index 593557f9482..f2e57a53ea5 100644 --- a/requirements/static/ci/py3.9/linux.txt +++ b/requirements/static/ci/py3.9/linux.txt @@ -364,7 +364,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.9/windows.txt b/requirements/static/ci/py3.9/windows.txt index bfaefd86fc3..810ac69b451 100644 --- a/requirements/static/ci/py3.9/windows.txt +++ b/requirements/static/ci/py3.9/windows.txt @@ -312,7 +312,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc29 +pytest-salt-factories==1.0.0 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories From b2fe6c45dbe3ba0c8983079c4ed4202affc112ad Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 21 Mar 2024 16:18:34 +0000 Subject: [PATCH 062/102] Bump functional test splits to 4 
--- tools/ci.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/ci.py b/tools/ci.py index f2126b7fa9a..192cfee48f1 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -654,7 +654,7 @@ def matrix( """ _matrix = [] _splits = { - "functional": 3, + "functional": 4, "integration": 5, "scenarios": 1, "unit": 4, From 65e5c4f2273322079f08564aaeb0fd1ad2951399 Mon Sep 17 00:00:00 2001 From: Tyler Levy Conde Date: Thu, 21 Mar 2024 12:05:02 -0600 Subject: [PATCH 063/102] Fix: Prevent full system upgrade in pkg.installed for Arch Linux This change modifies the logic in the pacman module to only append the '-u' flag to the pacman command if sysupgrade is explicitly set to True. This prevents the pkg.installed state from triggering a full system upgrade by default on Arch Linux systems. --- changelog/65200.fixed.md | 1 + salt/modules/pacmanpkg.py | 2 +- tests/pytests/unit/modules/test_pacmanpkg.py | 118 ++++--------------- 3 files changed, 24 insertions(+), 97 deletions(-) create mode 100644 changelog/65200.fixed.md diff --git a/changelog/65200.fixed.md b/changelog/65200.fixed.md new file mode 100644 index 00000000000..9da348e5e4e --- /dev/null +++ b/changelog/65200.fixed.md @@ -0,0 +1 @@ +Prevent full system upgrade on single package install for Arch Linux diff --git a/salt/modules/pacmanpkg.py b/salt/modules/pacmanpkg.py index 57df5f72cb8..40e56f93733 100644 --- a/salt/modules/pacmanpkg.py +++ b/salt/modules/pacmanpkg.py @@ -552,7 +552,7 @@ def install( cmd.append("-S") if refresh is True: cmd.append("-y") - if sysupgrade is True or (sysupgrade is None and refresh is True): + if sysupgrade is True: cmd.append("-u") cmd.extend(["--noprogressbar", "--noconfirm", "--needed"]) wildcards = [] diff --git a/tests/pytests/unit/modules/test_pacmanpkg.py b/tests/pytests/unit/modules/test_pacmanpkg.py index 796624dbeb7..0b4d457f4d8 100644 --- a/tests/pytests/unit/modules/test_pacmanpkg.py +++ b/tests/pytests/unit/modules/test_pacmanpkg.py @@ -6,11 +6,14 @@ import pytest import salt.modules.pacmanpkg as pacman from tests.support.mock import MagicMock, patch +import salt.utils.systemd @pytest.fixture def configure_loader_modules(): - return {pacman: {}} + return { + pacman: {}, + } def test_list_pkgs(): @@ -63,106 +66,29 @@ def test_list_pkgs_as_list(): assert stringifymock.call_count == 0 -def test_list_pkgs_no_context(): +def test_pacman_install_sysupgrade_flag(): """ - Test if it list the packages currently installed in a dict + Test if the pacman.install function appends the '-u' flag only when sysupgrade is True """ - cmdmock = MagicMock(return_value="A 1.0\nB 2.0") - sortmock = MagicMock() - stringifymock = MagicMock() - mock_ret = {"A": ["1.0"], "B": ["2.0"]} + mock_parse_targets = MagicMock(return_value=({"somepkg": None}, "repository")) + mock_has_scope = MagicMock(return_value=False) + mock_list_pkgs = MagicMock(return_value={"somepkg": "1.0"}) + mock_run_all = MagicMock(return_value={"retcode": 0, "stderr": ""}) + with patch.dict( pacman.__salt__, { - "cmd.run": cmdmock, - "pkg_resource.add_pkg": lambda pkgs, name, version: pkgs.setdefault( - name, [] - ).append(version), - "pkg_resource.sort_pkglist": sortmock, - "pkg_resource.stringify": stringifymock, + "cmd.run_all": mock_run_all, + "pkg_resource.parse_targets": mock_parse_targets, + "config.get": MagicMock(return_value=True), }, - ), patch.object(pacman, "_list_pkgs_from_context") as list_pkgs_context_mock: - assert pacman.list_pkgs() == mock_ret - - pkgs = pacman.list_pkgs(versions_as_list=True, use_context=False) - 
list_pkgs_context_mock.assert_not_called() - list_pkgs_context_mock.reset_mock() - - pkgs = pacman.list_pkgs(versions_as_list=True, use_context=False) - list_pkgs_context_mock.assert_not_called() - list_pkgs_context_mock.reset_mock() - - -def test_group_list(): - """ - Test if it lists the available groups - """ - - def cmdlist(cmd, **kwargs): - """ - Handle several different commands being run - """ - if cmd == ["pacman", "-Sgg"]: - return ( - "group-a pkg1\ngroup-a pkg2\ngroup-f pkg9\ngroup-c pkg3\ngroup-b pkg4" - ) - elif cmd == ["pacman", "-Qg"]: - return "group-a pkg1\ngroup-b pkg4" - else: - return f"Untested command ({cmd}, {kwargs})!" - - cmdmock = MagicMock(side_effect=cmdlist) - - sortmock = MagicMock() - with patch.dict( - pacman.__salt__, {"cmd.run": cmdmock, "pkg_resource.sort_pkglist": sortmock} + ), patch.object(salt.utils.systemd, "has_scope", mock_has_scope), patch.object( + pacman, "list_pkgs", mock_list_pkgs ): - assert pacman.group_list() == { - "available": ["group-c", "group-f"], - "installed": ["group-b"], - "partially_installed": ["group-a"], - } + pacman.install(name="somepkg", sysupgrade=True) + args, _ = pacman.__salt__["cmd.run_all"].call_args + assert "-u" in args[0] - -def test_group_info(): - """ - Test if it shows the packages in a group - """ - - def cmdlist(cmd, **kwargs): - """ - Handle several different commands being run - """ - if cmd == ["pacman", "-Sgg", "testgroup"]: - return "testgroup pkg1\ntestgroup pkg2" - else: - return f"Untested command ({cmd}, {kwargs})!" - - cmdmock = MagicMock(side_effect=cmdlist) - - sortmock = MagicMock() - with patch.dict( - pacman.__salt__, {"cmd.run": cmdmock, "pkg_resource.sort_pkglist": sortmock} - ): - assert pacman.group_info("testgroup")["default"] == ["pkg1", "pkg2"] - - -def test_group_diff(): - """ - Test if it shows the difference between installed and target group contents - """ - - listmock = MagicMock(return_value={"A": ["1.0"], "B": ["2.0"]}) - groupmock = MagicMock( - return_value={ - "mandatory": [], - "optional": [], - "default": ["A", "C"], - "conditional": [], - } - ) - with patch.dict( - pacman.__salt__, {"pkg.list_pkgs": listmock, "pkg.group_info": groupmock} - ): - results = pacman.group_diff("testgroup") - assert results["default"] == {"installed": ["A"], "not installed": ["C"]} + pacman.install(name="somepkg", sysupgrade=None, refresh=True) + args, _ = pacman.__salt__["cmd.run_all"].call_args + assert "-u" not in args[0] From 1f08a3c1911310fdbaa601421bd68efc621fabb9 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 21 Mar 2024 16:18:34 +0000 Subject: [PATCH 064/102] Bump functional test splits to 4 --- tests/pytests/unit/modules/test_pacmanpkg.py | 111 ++++++++++++++++++- 1 file changed, 107 insertions(+), 4 deletions(-) diff --git a/tests/pytests/unit/modules/test_pacmanpkg.py b/tests/pytests/unit/modules/test_pacmanpkg.py index 0b4d457f4d8..cf76d83c95d 100644 --- a/tests/pytests/unit/modules/test_pacmanpkg.py +++ b/tests/pytests/unit/modules/test_pacmanpkg.py @@ -5,15 +5,13 @@ import pytest import salt.modules.pacmanpkg as pacman -from tests.support.mock import MagicMock, patch import salt.utils.systemd +from tests.support.mock import MagicMock, patch @pytest.fixture def configure_loader_modules(): - return { - pacman: {}, - } + return {pacman: {}} def test_list_pkgs(): @@ -66,6 +64,111 @@ def test_list_pkgs_as_list(): assert stringifymock.call_count == 0 +def test_list_pkgs_no_context(): + """ + Test if it list the packages currently installed in a dict + """ + cmdmock = 
MagicMock(return_value="A 1.0\nB 2.0") + sortmock = MagicMock() + stringifymock = MagicMock() + mock_ret = {"A": ["1.0"], "B": ["2.0"]} + with patch.dict( + pacman.__salt__, + { + "cmd.run": cmdmock, + "pkg_resource.add_pkg": lambda pkgs, name, version: pkgs.setdefault( + name, [] + ).append(version), + "pkg_resource.sort_pkglist": sortmock, + "pkg_resource.stringify": stringifymock, + }, + ), patch.object(pacman, "_list_pkgs_from_context") as list_pkgs_context_mock: + assert pacman.list_pkgs() == mock_ret + + pkgs = pacman.list_pkgs(versions_as_list=True, use_context=False) + list_pkgs_context_mock.assert_not_called() + list_pkgs_context_mock.reset_mock() + + pkgs = pacman.list_pkgs(versions_as_list=True, use_context=False) + list_pkgs_context_mock.assert_not_called() + list_pkgs_context_mock.reset_mock() + + +def test_group_list(): + """ + Test if it lists the available groups + """ + + def cmdlist(cmd, **kwargs): + """ + Handle several different commands being run + """ + if cmd == ["pacman", "-Sgg"]: + return ( + "group-a pkg1\ngroup-a pkg2\ngroup-f pkg9\ngroup-c pkg3\ngroup-b pkg4" + ) + elif cmd == ["pacman", "-Qg"]: + return "group-a pkg1\ngroup-b pkg4" + else: + return f"Untested command ({cmd}, {kwargs})!" + + cmdmock = MagicMock(side_effect=cmdlist) + + sortmock = MagicMock() + with patch.dict( + pacman.__salt__, {"cmd.run": cmdmock, "pkg_resource.sort_pkglist": sortmock} + ): + assert pacman.group_list() == { + "available": ["group-c", "group-f"], + "installed": ["group-b"], + "partially_installed": ["group-a"], + } + + +def test_group_info(): + """ + Test if it shows the packages in a group + """ + + def cmdlist(cmd, **kwargs): + """ + Handle several different commands being run + """ + if cmd == ["pacman", "-Sgg", "testgroup"]: + return "testgroup pkg1\ntestgroup pkg2" + else: + return f"Untested command ({cmd}, {kwargs})!" + + cmdmock = MagicMock(side_effect=cmdlist) + + sortmock = MagicMock() + with patch.dict( + pacman.__salt__, {"cmd.run": cmdmock, "pkg_resource.sort_pkglist": sortmock} + ): + assert pacman.group_info("testgroup")["default"] == ["pkg1", "pkg2"] + + +def test_group_diff(): + """ + Test if it shows the difference between installed and target group contents + """ + + listmock = MagicMock(return_value={"A": ["1.0"], "B": ["2.0"]}) + groupmock = MagicMock( + return_value={ + "mandatory": [], + "optional": [], + "default": ["A", "C"], + "conditional": [], + } + ) + with patch.dict( + pacman.__salt__, {"pkg.list_pkgs": listmock, "pkg.group_info": groupmock} + ): + results = pacman.group_diff("testgroup") + assert results["default"] == {"installed": ["A"], "not installed": ["C"]} + + def test_pacman_install_sysupgrade_flag(): """ Test if the pacman.install function appends the '-u' flag only when sysupgrade is True From 52ace3bc346b7740c3a735b779187dd81332436f Mon Sep 17 00:00:00 2001 From: Shane Lee Date: Tue, 19 Mar 2024 12:56:07 -0600 Subject: [PATCH 065/102] Fix issue with win_user.add win_useradd.add now allows you to add new users that have only integers in the username. It also adds tests for the win_useradd module. 
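A minimal usage sketch (the all-numeric name 1234 is an arbitrary example, not
taken from the patch itself): because the username is now cast to a string
before being handed to the Win32 APIs, calls such as the following no longer
fail for numeric-only names:

    salt '*' user.add 1234
    salt '*' user.info 1234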
--- changelog/53363.fixed.md | 2 + salt/modules/win_useradd.py | 125 ++++--- .../pytests/unit/modules/test_win_useradd.py | 344 ++++++++++++++++++ 3 files changed, 419 insertions(+), 52 deletions(-) create mode 100644 changelog/53363.fixed.md create mode 100644 tests/pytests/unit/modules/test_win_useradd.py diff --git a/changelog/53363.fixed.md b/changelog/53363.fixed.md new file mode 100644 index 00000000000..9ab50a6424c --- /dev/null +++ b/changelog/53363.fixed.md @@ -0,0 +1,2 @@ +``user.add`` on Windows now allows you to add user names that contain all +numeric characters diff --git a/salt/modules/win_useradd.py b/salt/modules/win_useradd.py index a9e9b2629b6..7fe48727d08 100644 --- a/salt/modules/win_useradd.py +++ b/salt/modules/win_useradd.py @@ -22,6 +22,7 @@ Module for managing Windows Users. This currently only works with local user accounts, not domain accounts """ +import ctypes import logging import shlex import time @@ -30,6 +31,7 @@ from datetime import datetime import salt.utils.args import salt.utils.dateutils import salt.utils.platform +import salt.utils.win_reg import salt.utils.winapi from salt.exceptions import CommandExecutionError @@ -82,7 +84,7 @@ def add( Add a user to the minion. Args: - name (str): User name + name (str): The username for the new account password (str, optional): User's password in plain text. @@ -106,7 +108,7 @@ def add( logs on. Returns: - bool: True if successful. False is unsuccessful. + bool: ``True`` if successful, otherwise ``False``. CLI Example: @@ -116,10 +118,10 @@ def add( """ user_info = {} if name: - user_info["name"] = name + user_info["name"] = str(name) else: return False - user_info["password"] = password + user_info["password"] = str(password) user_info["priv"] = win32netcon.USER_PRIV_USER user_info["home_dir"] = home user_info["comment"] = description @@ -160,13 +162,13 @@ def update( ): # pylint: disable=anomalous-backslash-in-string """ - Updates settings for the windows user. Name is the only required parameter. + Updates settings for the Windows user. Name is the only required parameter. Settings will only be changed if the parameter is passed a value. .. versionadded:: 2015.8.0 Args: - name (str): The user name to update. + name (str): The username to update. password (str, optional): New user password in plain text. @@ -206,7 +208,7 @@ def update( changing the password. False allows the user to change the password. Returns: - bool: True if successful. False is unsuccessful. + bool: ``True`` if successful, otherwise ``False``. 
CLI Example: @@ -219,7 +221,7 @@ def update( # Make sure the user exists # Return an object containing current settings for the user try: - user_info = win32net.NetUserGetInfo(None, name, 4) + user_info = win32net.NetUserGetInfo(None, str(name), 4) except win32net.error as exc: log.error("Failed to update user %s", name) log.error("nbr: %s", exc.winerror) @@ -230,7 +232,7 @@ def update( # Check parameters to update # Update the user object with new settings if password: - user_info["password"] = password + user_info["password"] = str(password) if home: user_info["home_dir"] = home if homedrive: @@ -251,7 +253,7 @@ def update( dt_obj = salt.utils.dateutils.date_cast(expiration_date) except (ValueError, RuntimeError): return f"Invalid Date/Time Format: {expiration_date}" - user_info["acct_expires"] = time.mktime(dt_obj.timetuple()) + user_info["acct_expires"] = int(dt_obj.timestamp()) if expired is not None: if expired: user_info["password_expired"] = 1 @@ -263,6 +265,7 @@ def update( else: user_info["flags"] &= ~win32netcon.UF_ACCOUNTDISABLE if unlock_account is not None: + # We can only unlock with this flag... we can't unlock if unlock_account: user_info["flags"] &= ~win32netcon.UF_LOCKOUT if password_never_expires is not None: @@ -278,7 +281,7 @@ def update( # Apply new settings try: - win32net.NetUserSetInfo(None, name, 4, user_info) + win32net.NetUserSetInfo(None, str(name), 4, user_info) except win32net.error as exc: log.error("Failed to update user %s", name) log.error("nbr: %s", exc.winerror) @@ -305,7 +308,7 @@ def delete(name, purge=False, force=False): user out and delete user. Returns: - bool: True if successful, otherwise False + bool: ``True`` if successful, otherwise ``False``. CLI Example: @@ -315,7 +318,7 @@ def delete(name, purge=False, force=False): """ # Check if the user exists try: - user_info = win32net.NetUserGetInfo(None, name, 4) + user_info = win32net.NetUserGetInfo(None, str(name), 4) except win32net.error as exc: log.error("User not found: %s", name) log.error("nbr: %s", exc.winerror) @@ -382,7 +385,7 @@ def delete(name, purge=False, force=False): # And finally remove the user account try: - win32net.NetUserDel(None, name) + win32net.NetUserDel(None, str(name)) except win32net.error as exc: log.error("Failed to delete user %s", name) log.error("nbr: %s", exc.winerror) @@ -398,7 +401,7 @@ def getUserSid(username): Get the Security ID for the user Args: - username (str): The user name for which to look up the SID + username (str): The username for which to look up the SID Returns: str: The user SID @@ -424,12 +427,12 @@ def setpassword(name, password): Set the user's password Args: - name (str): The user name for which to set the password + name (str): The username for which to set the password password (str): The new password Returns: - bool: True if successful, otherwise False + bool: ``True`` if successful, otherwise ``False``. CLI Example: @@ -445,12 +448,12 @@ def addgroup(name, group): Add user to a group Args: - name (str): The user name to add to the group + name (str): The username to add to the group group (str): The name of the group to which to add the user Returns: - bool: True if successful, otherwise False + bool: ``True`` if successful, otherwise ``False``. 
CLI Example: @@ -458,7 +461,7 @@ def addgroup(name, group): salt '*' user.addgroup jsnuffy 'Power Users' """ - name = shlex.quote(name) + name = shlex.quote(str(name)) group = shlex.quote(group).lstrip("'").rstrip("'") user = info(name) @@ -478,12 +481,12 @@ def removegroup(name, group): Remove user from a group Args: - name (str): The user name to remove from the group + name (str): The username to remove from the group group (str): The name of the group from which to remove the user Returns: - bool: True if successful, otherwise False + bool: ``True`` if successful, otherwise ``False``. CLI Example: @@ -491,7 +494,7 @@ def removegroup(name, group): salt '*' user.removegroup jsnuffy 'Power Users' """ - name = shlex.quote(name) + name = shlex.quote(str(name)) group = shlex.quote(group).lstrip("'").rstrip("'") user = info(name) @@ -519,7 +522,7 @@ def chhome(name, home, **kwargs): home (str): The new location of the home directory Returns: - bool: True if successful, otherwise False + bool: ``True`` if successful, otherwise ``False``. CLI Example: @@ -562,7 +565,7 @@ def chprofile(name, profile): profile (str): The new location of the profile Returns: - bool: True if successful, otherwise False + bool: ``True`` if successful, otherwise ``False``. CLI Example: @@ -578,12 +581,12 @@ def chfullname(name, fullname): Change the full name of the user Args: - name (str): The user name for which to change the full name + name (str): The username for which to change the full name fullname (str): The new value for the full name Returns: - bool: True if successful, otherwise False + bool: ``True`` if successful, otherwise ``False``. CLI Example: @@ -600,7 +603,7 @@ def chgroups(name, groups, append=True): member of only the specified groups Args: - name (str): The user name for which to change groups + name (str): The username for which to change groups groups (str, list): A single group or a list of groups to assign to the user. For multiple groups this can be a comma delimited string or a @@ -611,7 +614,7 @@ def chgroups(name, groups, append=True): only. Default is True. Returns: - bool: True if successful, otherwise False + bool: ``True`` if successful, otherwise ``False``. CLI Example: @@ -623,21 +626,31 @@ def chgroups(name, groups, append=True): groups = groups.split(",") groups = [x.strip(" *") for x in groups] - ugrps = set(list_groups(name)) - if ugrps == set(groups): - return True + current_groups = set(list_groups(name)) + expected_groups = set() - name = shlex.quote(name) + name = shlex.quote(str(name)) if not append: - for group in ugrps: + # We don't want to append to the list, remove groups not in the new set + # of groups + for group in current_groups: group = shlex.quote(group).lstrip("'").rstrip("'") if group not in groups: cmd = f'net localgroup "{group}" {name} /delete' __salt__["cmd.run_all"](cmd, python_shell=True) + else: + expected_groups.add(group) + else: + # We're appending to the current list of groups. 
If they already match + # then bail + if current_groups == set(groups): + return True + else: + expected_groups = current_groups.union(set(groups)) for group in groups: - if group in ugrps: + if group in current_groups: continue group = shlex.quote(group).lstrip("'").rstrip("'") cmd = f'net localgroup "{group}" {name} /add' @@ -646,8 +659,9 @@ def chgroups(name, groups, append=True): log.error(out["stdout"]) return False - agrps = set(list_groups(name)) - return len(ugrps - agrps) == 0 + new_groups = set(list_groups(name)) + + return len(expected_groups - new_groups) == 0 def info(name): @@ -677,6 +691,7 @@ def info(name): - last_logon - account_disabled - account_locked + - expiration_date - password_never_expires - disallow_change_password - gid @@ -690,14 +705,14 @@ def info(name): ret = {} items = {} try: - items = win32net.NetUserGetInfo(None, name, 4) + items = win32net.NetUserGetInfo(None, str(name), 4) except win32net.error: pass if items: groups = [] try: - groups = win32net.NetUserGetLocalGroups(None, name) + groups = win32net.NetUserGetLocalGroups(None, str(name)) except win32net.error: pass @@ -722,9 +737,15 @@ def info(name): ret["last_logon"] = datetime.fromtimestamp(items["last_logon"]).strftime( "%Y-%m-%d %H:%M:%S" ) - ret["expiration_date"] = datetime.fromtimestamp(items["acct_expires"]).strftime( - "%Y-%m-%d %H:%M:%S" - ) + + # If the value is -1 or 0xFFFFFFFF, it is set to never expire + if items["acct_expires"] == ctypes.c_ulong(win32netcon.TIMEQ_FOREVER).value: + ret["expiration_date"] = "Never" + else: + ret["expiration_date"] = datetime.fromtimestamp( + items["acct_expires"] + ).strftime("%Y-%m-%d %H:%M:%S") + ret["expired"] = items["password_expired"] == 1 if not ret["profile"]: ret["profile"] = _get_userprofile_from_registry(name, ret["uid"]) @@ -765,17 +786,17 @@ def _get_userprofile_from_registry(user, sid): registry Args: - user (str): The user name, used in debug message + user (str): The username, used in debug message sid (str): The sid to lookup in the registry Returns: str: Profile directory """ - profile_dir = __utils__["reg.read_value"]( - "HKEY_LOCAL_MACHINE", - f"SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\ProfileList\\{sid}", - "ProfileImagePath", + profile_dir = salt.utils.win_reg.read_value( + hive="HKEY_LOCAL_MACHINE", + key=f"SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\ProfileList\\{sid}", + vname="ProfileImagePath", )["vdata"] log.debug('user %s with sid=%s profile is located at "%s"', user, sid, profile_dir) return profile_dir @@ -786,7 +807,7 @@ def list_groups(name): Return a list of groups the named user belongs to Args: - name (str): The user name for which to list groups + name (str): The username for which to list groups Returns: list: A list of groups to which the user belongs @@ -829,9 +850,9 @@ def getent(refresh=False): return __context__["user.getent"] ret = [] - for user in __salt__["user.list_users"](): + for user in list_users(): stuff = {} - user_info = __salt__["user.info"](user) + user_info = info(user) stuff["gid"] = "" stuff["groups"] = user_info["groups"] @@ -885,12 +906,12 @@ def rename(name, new_name): Change the username for a named user Args: - name (str): The user name to change + name (str): The username to change new_name (str): The new name for the current user Returns: - bool: True if successful, otherwise False + bool: ``True`` if successful, otherwise ``False``. 
CLI Example: diff --git a/tests/pytests/unit/modules/test_win_useradd.py b/tests/pytests/unit/modules/test_win_useradd.py new file mode 100644 index 00000000000..6084c5bc566 --- /dev/null +++ b/tests/pytests/unit/modules/test_win_useradd.py @@ -0,0 +1,344 @@ +import pytest +from saltfactories.utils import random_string + +import salt.modules.cmdmod +import salt.modules.win_useradd as user +import salt.utils.data +from salt.exceptions import CommandExecutionError + +pytestmark = [ + pytest.mark.destructive_test, + pytest.mark.skip_unless_on_windows, + pytest.mark.windows_whitelisted, +] + + +@pytest.fixture +def configure_loader_modules(): + return {user: {"__salt__": {"cmd.run_all": salt.modules.cmdmod.run_all}}} + + +@pytest.fixture +def username_str(): + _username = random_string("test-account-", uppercase=False) + try: + yield _username + finally: + try: + user.delete(_username, purge=True, force=True) + except Exception: # pylint: disable=broad-except + # The point here is just system cleanup. It can fail if no account was created + pass + + +@pytest.fixture +def username_int(): + _username = random_string("", uppercase=False, lowercase=False, digits=True) + try: + yield _username + finally: + try: + user.delete(_username, purge=True, force=True) + except Exception: # pylint: disable=broad-except + # The point here is just system cleanup. It can fail if no account was created + pass + + +@pytest.fixture +def account_str(username_str): + with pytest.helpers.create_account(username=username_str) as account: + user.addgroup(account.username, "Users") + yield account + + +@pytest.fixture +def account_int(username_int): + with pytest.helpers.create_account(username=username_int) as account: + user.addgroup(account.username, "Users") + yield account + + +def test_add_str(username_str): + ret = user.add(name=username_str) + assert ret is True + assert username_str in user.list_users() + + +def test_add_int(username_int): + ret = user.add(name=username_int) + assert ret is True + assert username_int in user.list_users() + + +def test_addgroup_str(account_str): + ret = user.addgroup(account_str.username, "Backup Operators") + assert ret is True + ret = user.info(account_str.username) + assert "Backup Operators" in ret["groups"] + + +def test_addgroup_int(account_int): + ret = user.addgroup(account_int.username, "Backup Operators") + assert ret is True + ret = user.info(account_int.username) + assert "Backup Operators" in ret["groups"] + + +def test_chfullname_str(account_str): + ret = user.chfullname(account_str.username, "New Full Name") + assert ret is True + ret = user.info(account_str.username) + assert ret["fullname"] == "New Full Name" + + +def test_chfullname_int(account_int): + ret = user.chfullname(account_int.username, "New Full Name") + assert ret is True + ret = user.info(account_int.username) + assert ret["fullname"] == "New Full Name" + + +def test_chgroups_single_str(account_str): + groups = ["Backup Operators"] + ret = user.chgroups(account_str.username, groups=groups) + assert ret is True + ret = user.info(account_str.username) + groups.append("Users") + assert salt.utils.data.compare_lists(ret["groups"], groups) == {} + + +def test_chgroups_single_int(account_int): + groups = ["Backup Operators"] + ret = user.chgroups(account_int.username, groups=groups) + assert ret is True + ret = user.info(account_int.username) + groups.append("Users") + assert salt.utils.data.compare_lists(ret["groups"], groups) == {} + + +def test_chgroups_list_str(account_str): + groups = ["Backup 
Operators", "Guests"] + ret = user.chgroups(account_str.username, groups=groups) + assert ret is True + ret = user.info(account_str.username) + groups.append("Users") + assert salt.utils.data.compare_lists(ret["groups"], groups) == {} + + +def test_chgroups_list_int(account_int): + groups = ["Backup Operators", "Guests"] + ret = user.chgroups(account_int.username, groups=groups) + assert ret is True + ret = user.info(account_int.username) + groups.append("Users") + assert salt.utils.data.compare_lists(ret["groups"], groups) == {} + + +def test_chgroups_list_append_false_str(account_str): + groups = ["Backup Operators", "Guests"] + ret = user.chgroups(account_str.username, groups=groups, append=False) + assert ret is True + ret = user.info(account_str.username) + assert salt.utils.data.compare_lists(ret["groups"], groups) == {} + + +def test_chgroups_list_append_false_int(account_int): + groups = ["Backup Operators", "Guests"] + ret = user.chgroups(account_int.username, groups=groups, append=False) + assert ret is True + ret = user.info(account_int.username) + assert salt.utils.data.compare_lists(ret["groups"], groups) == {} + + +def test_chhome_str(account_str): + home = r"C:\spongebob\squarepants" + ret = user.chhome(name=account_str.username, home=home) + assert ret is True + ret = user.info(name=account_str.username) + assert ret["home"] == home + + +def test_chhome_int(account_int): + home = r"C:\spongebob\squarepants" + ret = user.chhome(name=account_int.username, home=home) + assert ret is True + ret = user.info(name=account_int.username) + assert ret["home"] == home + + +def test_chprofile_str(account_str): + profile = r"C:\spongebob\squarepants" + ret = user.chprofile(name=account_str.username, profile=profile) + assert ret is True + ret = user.info(name=account_str.username) + assert ret["profile"] == profile + + +def test_chprofile_int(account_int): + profile = r"C:\spongebob\squarepants" + ret = user.chprofile(name=account_int.username, profile=profile) + assert ret is True + ret = user.info(name=account_int.username) + assert ret["profile"] == profile + + +def test_delete_str(account_str): + ret = user.delete(name=account_str.username) + assert ret is True + assert user.info(name=account_str.username) == {} + + +def test_delete_int(account_int): + ret = user.delete(name=account_int.username) + assert ret is True + assert user.info(name=account_int.username) == {} + + +def test_getUserSig_str(account_str): + ret = user.getUserSid(account_str.username) + assert ret.startswith("S-1-5") + + +def test_getUserSig_int(account_int): + ret = user.getUserSid(account_int.username) + assert ret.startswith("S-1-5") + + +def test_info_str(account_str): + ret = user.info(account_str.username) + assert ret["name"] == account_str.username + assert ret["uid"].startswith("S-1-5") + + +def test_info_int(account_int): + ret = user.info(account_int.username) + assert ret["name"] == account_int.username + assert ret["uid"].startswith("S-1-5") + + +def test_list_groups_str(account_str): + ret = user.list_groups(account_str.username) + assert ret == ["Users"] + + +def test_list_groups_int(account_int): + ret = user.list_groups(account_int.username) + assert ret == ["Users"] + + +def test_list_users(): + ret = user.list_users() + assert "Administrator" in ret + + +def test_removegroup_str(account_str): + ret = user.removegroup(account_str.username, "Users") + assert ret is True + ret = user.info(account_str.username) + assert ret["groups"] == [] + + +def test_removegroup_int(account_int): + ret = 
user.removegroup(account_int.username, "Users") + assert ret is True + ret = user.info(account_int.username) + assert ret["groups"] == [] + + +def test_rename_str(account_str): + new_name = random_string("test-account-", uppercase=False) + ret = user.rename(name=account_str.username, new_name=new_name) + assert ret is True + assert new_name in user.list_users() + # Let's set it back so that it gets cleaned up... + ret = user.rename(name=new_name, new_name=account_str.username) + assert ret is True + + +def test_rename_str_missing(account_str): + missing = random_string("test-account-", uppercase=False) + with pytest.raises(CommandExecutionError): + user.rename(name=missing, new_name="spongebob") + + +def test_rename_str_existing(account_str): + new_existing = random_string("test-account-", uppercase=False) + ret = user.add(name=new_existing) + assert ret is True + with pytest.raises(CommandExecutionError): + user.rename(name=account_str.username, new_name=new_existing) + # We need to clean this up because it wasn't created in a fixture + ret = user.delete(name=new_existing, purge=True, force=True) + assert ret is True + assert new_existing not in user.list_users() + + +def test_rename_int(account_int): + new_name = random_string("", uppercase=False, lowercase=False, digits=True) + ret = user.rename(name=account_int.username, new_name=new_name) + assert ret is True + assert new_name in user.list_users() + # Let's set it back so that it gets cleaned up... + ret = user.rename(name=new_name, new_name=account_int.username) + assert ret is True + + +def test_rename_int_missing(account_int): + missing = random_string("", uppercase=False, lowercase=False, digits=True) + with pytest.raises(CommandExecutionError): + user.rename(name=missing, new_name="spongebob") + + +def test_rename_int_existing(account_int): + new_existing = random_string("", uppercase=False, lowercase=False, digits=True) + ret = user.add(name=new_existing) + assert ret is True + with pytest.raises(CommandExecutionError): + user.rename(name=account_int.username, new_name=new_existing) + # We need to clean this up because it wasn't created in a fixture + ret = user.delete(name=new_existing, purge=True, force=True) + assert ret is True + assert new_existing not in user.list_users() + + +def test_setpassword_str(account_str): + ret = user.setpassword(account_str.username, password="Sup3rS3cret") + # We have no way of verifying the password was changed on Windows, so the + # best we can do is check that the command completed successfully + assert ret is True + + +def test_setpassword_int(account_int): + ret = user.setpassword(account_int.username, password="Sup3rS3cret") + # We have no way of verifying the password was changed on Windows, so the + # best we can do is check that the command completed successfully + assert ret is True + + +@pytest.mark.parametrize( + "value_name, new_value, info_field, expected", + [ + ("description", "New description", "", None), + ("homedrive", "H:", "", None), + ("logonscript", "\\\\server\\script.cmd", "", None), + ("expiration_date", "3/19/2024", "", "2024-03-19 00:00:00"), + ("expiration_date", "Never", "", None), + ("expired", True, "", None), + ("expired", False, "", None), + ("account_disabled", True, "", None), + ("account_disabled", False, "", None), + ("unlock_account", True, "account_locked", False), + ("password_never_expires", True, "", None), + ("password_never_expires", False, "", None), + ("disallow_change_password", True, "", None), + ("disallow_change_password", False, "", None), + 
], +) +def test_update_str(value_name, new_value, info_field, expected, account_str): + setting = {value_name: new_value} + ret = user.update(account_str.username, **setting) + assert ret is True + ret = user.info(account_str.username) + info_field = info_field if info_field else value_name + expected = expected if expected is not None else new_value + assert ret[info_field] == expected From b7dd23ef4ebd8eea98eddb570e57c845d44b0ed4 Mon Sep 17 00:00:00 2001 From: Shane Lee Date: Wed, 20 Mar 2024 08:12:34 -0600 Subject: [PATCH 066/102] Move test_win_useradd.py to functional --- tests/pytests/{unit => functional}/modules/test_win_useradd.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename tests/pytests/{unit => functional}/modules/test_win_useradd.py (100%) diff --git a/tests/pytests/unit/modules/test_win_useradd.py b/tests/pytests/functional/modules/test_win_useradd.py similarity index 100% rename from tests/pytests/unit/modules/test_win_useradd.py rename to tests/pytests/functional/modules/test_win_useradd.py From 979457d93519de9c1bbe89c07ad4cc2c59f2b180 Mon Sep 17 00:00:00 2001 From: Shane Lee Date: Wed, 20 Mar 2024 08:39:55 -0600 Subject: [PATCH 067/102] Fix some test issues, use sort --- salt/modules/win_dacl.py | 12 +++++----- salt/modules/win_useradd.py | 22 +++++++++++++++++-- .../functional/modules/test_win_useradd.py | 21 +++++++++--------- 3 files changed, 36 insertions(+), 19 deletions(-) diff --git a/salt/modules/win_dacl.py b/salt/modules/win_dacl.py index b8b5aca3dc3..18dbcdd84f6 100644 --- a/salt/modules/win_dacl.py +++ b/salt/modules/win_dacl.py @@ -329,7 +329,7 @@ class daclConstants: return path -def _getUserSid(user): +def _get_user_sid(user): """ return a state error dictionary, with 'sid' as a field if it could be returned if user is None, sid will also be None @@ -413,7 +413,7 @@ def get(path, objectType, user=None): """ ret = {"Path": path, "ACLs": []} - sidRet = _getUserSid(user) + sidRet = _get_user_sid(user) if path and objectType: dc = daclConstants() @@ -458,7 +458,7 @@ def add_ace(path, objectType, user, permission, acetype, propagation): acetype = acetype.strip().upper() propagation = propagation.strip().upper() - sidRet = _getUserSid(user) + sidRet = _get_user_sid(user) if not sidRet["result"]: return sidRet permissionbit = dc.getPermissionBit(objectTypeBit, permission) @@ -555,7 +555,7 @@ def rm_ace(path, objectType, user, permission=None, acetype=None, propagation=No if check_ace(path, objectType, user, permission, acetype, propagation, True)[ "Exists" ]: - sidRet = _getUserSid(user) + sidRet = _get_user_sid(user) if not sidRet["result"]: return sidRet permissionbit = ( @@ -804,7 +804,7 @@ def check_inheritance(path, objectType, user=None): ret = {"result": False, "Inheritance": False, "comment": ""} - sidRet = _getUserSid(user) + sidRet = _get_user_sid(user) dc = daclConstants() objectType = dc.getObjectTypeBit(objectType) @@ -880,7 +880,7 @@ def check_ace( dc.getPropagationBit(objectTypeBit, propagation) if propagation else None ) - sidRet = _getUserSid(user) + sidRet = _get_user_sid(user) if not sidRet["result"]: return sidRet diff --git a/salt/modules/win_useradd.py b/salt/modules/win_useradd.py index 7fe48727d08..1a667909afd 100644 --- a/salt/modules/win_useradd.py +++ b/salt/modules/win_useradd.py @@ -31,6 +31,7 @@ from datetime import datetime import salt.utils.args import salt.utils.dateutils import salt.utils.platform +import salt.utils.versions import salt.utils.win_reg import salt.utils.winapi from salt.exceptions import 
CommandExecutionError @@ -370,7 +371,7 @@ def delete(name, purge=False, force=False): # Remove the User Profile directory if purge: try: - sid = getUserSid(name) + sid = get_user_sid(name) win32profile.DeleteProfile(sid) except pywintypes.error as exc: (number, context, message) = exc.args @@ -397,6 +398,23 @@ def delete(name, purge=False, force=False): def getUserSid(username): + """ + Deprecated function. Please use get_user_sid instead + + CLI Example: + + .. code-block:: bash + + salt '*' user.get_user_sid jsnuffy + """ + salt.utils.versions.warn_until( + version=3009, + message="'getUserSid' is being deprecated. Please use get_user_sid instead", + ) + return get_user_sid(username) + + +def get_user_sid(username): """ Get the Security ID for the user @@ -410,7 +428,7 @@ def getUserSid(username): .. code-block:: bash - salt '*' user.getUserSid jsnuffy + salt '*' user.get_user_sid jsnuffy """ domain = win32api.GetComputerName() if username.find("\\") != -1: diff --git a/tests/pytests/functional/modules/test_win_useradd.py b/tests/pytests/functional/modules/test_win_useradd.py index 6084c5bc566..37572676d54 100644 --- a/tests/pytests/functional/modules/test_win_useradd.py +++ b/tests/pytests/functional/modules/test_win_useradd.py @@ -3,7 +3,6 @@ from saltfactories.utils import random_string import salt.modules.cmdmod import salt.modules.win_useradd as user -import salt.utils.data from salt.exceptions import CommandExecutionError pytestmark = [ @@ -104,7 +103,7 @@ def test_chgroups_single_str(account_str): assert ret is True ret = user.info(account_str.username) groups.append("Users") - assert salt.utils.data.compare_lists(ret["groups"], groups) == {} + assert ret["groups"].sort() == groups.sort() def test_chgroups_single_int(account_int): @@ -113,7 +112,7 @@ def test_chgroups_single_int(account_int): assert ret is True ret = user.info(account_int.username) groups.append("Users") - assert salt.utils.data.compare_lists(ret["groups"], groups) == {} + assert ret["groups"].sort() == groups.sort() def test_chgroups_list_str(account_str): @@ -122,7 +121,7 @@ def test_chgroups_list_str(account_str): assert ret is True ret = user.info(account_str.username) groups.append("Users") - assert salt.utils.data.compare_lists(ret["groups"], groups) == {} + assert ret["groups"].sort() == groups.sort() def test_chgroups_list_int(account_int): @@ -131,7 +130,7 @@ def test_chgroups_list_int(account_int): assert ret is True ret = user.info(account_int.username) groups.append("Users") - assert salt.utils.data.compare_lists(ret["groups"], groups) == {} + assert ret["groups"].sort() == groups.sort() def test_chgroups_list_append_false_str(account_str): @@ -139,7 +138,7 @@ def test_chgroups_list_append_false_str(account_str): ret = user.chgroups(account_str.username, groups=groups, append=False) assert ret is True ret = user.info(account_str.username) - assert salt.utils.data.compare_lists(ret["groups"], groups) == {} + assert ret["groups"].sort() == groups.sort() def test_chgroups_list_append_false_int(account_int): @@ -147,7 +146,7 @@ def test_chgroups_list_append_false_int(account_int): ret = user.chgroups(account_int.username, groups=groups, append=False) assert ret is True ret = user.info(account_int.username) - assert salt.utils.data.compare_lists(ret["groups"], groups) == {} + assert ret["groups"].sort() == groups.sort() def test_chhome_str(account_str): @@ -194,13 +193,13 @@ def test_delete_int(account_int): assert user.info(name=account_int.username) == {} -def test_getUserSig_str(account_str): - ret = 
user.getUserSid(account_str.username) +def test_get_user_sid_str(account_str): + ret = user.get_user_sid(account_str.username) assert ret.startswith("S-1-5") -def test_getUserSig_int(account_int): - ret = user.getUserSid(account_int.username) +def test_get_user_sid_int(account_int): + ret = user.get_user_sid(account_int.username) assert ret.startswith("S-1-5") From 4dcad68d9bc6fd14aa1588a1fb6f17d88d49d044 Mon Sep 17 00:00:00 2001 From: Shane Lee Date: Wed, 20 Mar 2024 10:36:25 -0600 Subject: [PATCH 068/102] Don't use configure_loader_modules --- .../functional/modules/test_win_useradd.py | 88 +++++++++---------- 1 file changed, 43 insertions(+), 45 deletions(-) diff --git a/tests/pytests/functional/modules/test_win_useradd.py b/tests/pytests/functional/modules/test_win_useradd.py index 37572676d54..5396c39bb70 100644 --- a/tests/pytests/functional/modules/test_win_useradd.py +++ b/tests/pytests/functional/modules/test_win_useradd.py @@ -1,8 +1,6 @@ import pytest from saltfactories.utils import random_string -import salt.modules.cmdmod -import salt.modules.win_useradd as user from salt.exceptions import CommandExecutionError pytestmark = [ @@ -12,13 +10,13 @@ pytestmark = [ ] -@pytest.fixture -def configure_loader_modules(): - return {user: {"__salt__": {"cmd.run_all": salt.modules.cmdmod.run_all}}} +@pytest.fixture(scope="module") +def user(modules): + return modules.user @pytest.fixture -def username_str(): +def username_str(user): _username = random_string("test-account-", uppercase=False) try: yield _username @@ -31,7 +29,7 @@ def username_str(): @pytest.fixture -def username_int(): +def username_int(user): _username = random_string("", uppercase=False, lowercase=False, digits=True) try: yield _username @@ -44,60 +42,60 @@ def username_int(): @pytest.fixture -def account_str(username_str): +def account_str(user, username_str): with pytest.helpers.create_account(username=username_str) as account: user.addgroup(account.username, "Users") yield account @pytest.fixture -def account_int(username_int): +def account_int(user, username_int): with pytest.helpers.create_account(username=username_int) as account: user.addgroup(account.username, "Users") yield account -def test_add_str(username_str): +def test_add_str(user, username_str): ret = user.add(name=username_str) assert ret is True assert username_str in user.list_users() -def test_add_int(username_int): +def test_add_int(user, username_int): ret = user.add(name=username_int) assert ret is True assert username_int in user.list_users() -def test_addgroup_str(account_str): +def test_addgroup_str(user, account_str): ret = user.addgroup(account_str.username, "Backup Operators") assert ret is True ret = user.info(account_str.username) assert "Backup Operators" in ret["groups"] -def test_addgroup_int(account_int): +def test_addgroup_int(user, account_int): ret = user.addgroup(account_int.username, "Backup Operators") assert ret is True ret = user.info(account_int.username) assert "Backup Operators" in ret["groups"] -def test_chfullname_str(account_str): +def test_chfullname_str(user, account_str): ret = user.chfullname(account_str.username, "New Full Name") assert ret is True ret = user.info(account_str.username) assert ret["fullname"] == "New Full Name" -def test_chfullname_int(account_int): +def test_chfullname_int(user, account_int): ret = user.chfullname(account_int.username, "New Full Name") assert ret is True ret = user.info(account_int.username) assert ret["fullname"] == "New Full Name" -def test_chgroups_single_str(account_str): +def 
test_chgroups_single_str(user, account_str): groups = ["Backup Operators"] ret = user.chgroups(account_str.username, groups=groups) assert ret is True @@ -106,7 +104,7 @@ def test_chgroups_single_str(account_str): assert ret["groups"].sort() == groups.sort() -def test_chgroups_single_int(account_int): +def test_chgroups_single_int(user, account_int): groups = ["Backup Operators"] ret = user.chgroups(account_int.username, groups=groups) assert ret is True @@ -115,7 +113,7 @@ def test_chgroups_single_int(account_int): assert ret["groups"].sort() == groups.sort() -def test_chgroups_list_str(account_str): +def test_chgroups_list_str(user, account_str): groups = ["Backup Operators", "Guests"] ret = user.chgroups(account_str.username, groups=groups) assert ret is True @@ -124,7 +122,7 @@ def test_chgroups_list_str(account_str): assert ret["groups"].sort() == groups.sort() -def test_chgroups_list_int(account_int): +def test_chgroups_list_int(user, account_int): groups = ["Backup Operators", "Guests"] ret = user.chgroups(account_int.username, groups=groups) assert ret is True @@ -133,7 +131,7 @@ def test_chgroups_list_int(account_int): assert ret["groups"].sort() == groups.sort() -def test_chgroups_list_append_false_str(account_str): +def test_chgroups_list_append_false_str(user, account_str): groups = ["Backup Operators", "Guests"] ret = user.chgroups(account_str.username, groups=groups, append=False) assert ret is True @@ -141,7 +139,7 @@ def test_chgroups_list_append_false_str(account_str): assert ret["groups"].sort() == groups.sort() -def test_chgroups_list_append_false_int(account_int): +def test_chgroups_list_append_false_int(user, account_int): groups = ["Backup Operators", "Guests"] ret = user.chgroups(account_int.username, groups=groups, append=False) assert ret is True @@ -149,7 +147,7 @@ def test_chgroups_list_append_false_int(account_int): assert ret["groups"].sort() == groups.sort() -def test_chhome_str(account_str): +def test_chhome_str(user, account_str): home = r"C:\spongebob\squarepants" ret = user.chhome(name=account_str.username, home=home) assert ret is True @@ -157,7 +155,7 @@ def test_chhome_str(account_str): assert ret["home"] == home -def test_chhome_int(account_int): +def test_chhome_int(user, account_int): home = r"C:\spongebob\squarepants" ret = user.chhome(name=account_int.username, home=home) assert ret is True @@ -165,7 +163,7 @@ def test_chhome_int(account_int): assert ret["home"] == home -def test_chprofile_str(account_str): +def test_chprofile_str(user, account_str): profile = r"C:\spongebob\squarepants" ret = user.chprofile(name=account_str.username, profile=profile) assert ret is True @@ -173,7 +171,7 @@ def test_chprofile_str(account_str): assert ret["profile"] == profile -def test_chprofile_int(account_int): +def test_chprofile_int(user, account_int): profile = r"C:\spongebob\squarepants" ret = user.chprofile(name=account_int.username, profile=profile) assert ret is True @@ -181,70 +179,70 @@ def test_chprofile_int(account_int): assert ret["profile"] == profile -def test_delete_str(account_str): +def test_delete_str(user, account_str): ret = user.delete(name=account_str.username) assert ret is True assert user.info(name=account_str.username) == {} -def test_delete_int(account_int): +def test_delete_int(user, account_int): ret = user.delete(name=account_int.username) assert ret is True assert user.info(name=account_int.username) == {} -def test_get_user_sid_str(account_str): +def test_get_user_sid_str(user, account_str): ret = 
user.get_user_sid(account_str.username) assert ret.startswith("S-1-5") -def test_get_user_sid_int(account_int): +def test_get_user_sid_int(user, account_int): ret = user.get_user_sid(account_int.username) assert ret.startswith("S-1-5") -def test_info_str(account_str): +def test_info_str(user, account_str): ret = user.info(account_str.username) assert ret["name"] == account_str.username assert ret["uid"].startswith("S-1-5") -def test_info_int(account_int): +def test_info_int(user, account_int): ret = user.info(account_int.username) assert ret["name"] == account_int.username assert ret["uid"].startswith("S-1-5") -def test_list_groups_str(account_str): +def test_list_groups_str(user, account_str): ret = user.list_groups(account_str.username) assert ret == ["Users"] -def test_list_groups_int(account_int): +def test_list_groups_int(user, account_int): ret = user.list_groups(account_int.username) assert ret == ["Users"] -def test_list_users(): +def test_list_users(user): ret = user.list_users() assert "Administrator" in ret -def test_removegroup_str(account_str): +def test_removegroup_str(user, account_str): ret = user.removegroup(account_str.username, "Users") assert ret is True ret = user.info(account_str.username) assert ret["groups"] == [] -def test_removegroup_int(account_int): +def test_removegroup_int(user, account_int): ret = user.removegroup(account_int.username, "Users") assert ret is True ret = user.info(account_int.username) assert ret["groups"] == [] -def test_rename_str(account_str): +def test_rename_str(user, account_str): new_name = random_string("test-account-", uppercase=False) ret = user.rename(name=account_str.username, new_name=new_name) assert ret is True @@ -254,13 +252,13 @@ def test_rename_str(account_str): assert ret is True -def test_rename_str_missing(account_str): +def test_rename_str_missing(user, account_str): missing = random_string("test-account-", uppercase=False) with pytest.raises(CommandExecutionError): user.rename(name=missing, new_name="spongebob") -def test_rename_str_existing(account_str): +def test_rename_str_existing(user, account_str): new_existing = random_string("test-account-", uppercase=False) ret = user.add(name=new_existing) assert ret is True @@ -272,7 +270,7 @@ def test_rename_str_existing(account_str): assert new_existing not in user.list_users() -def test_rename_int(account_int): +def test_rename_int(user, account_int): new_name = random_string("", uppercase=False, lowercase=False, digits=True) ret = user.rename(name=account_int.username, new_name=new_name) assert ret is True @@ -282,13 +280,13 @@ def test_rename_int(account_int): assert ret is True -def test_rename_int_missing(account_int): +def test_rename_int_missing(user, account_int): missing = random_string("", uppercase=False, lowercase=False, digits=True) with pytest.raises(CommandExecutionError): user.rename(name=missing, new_name="spongebob") -def test_rename_int_existing(account_int): +def test_rename_int_existing(user, account_int): new_existing = random_string("", uppercase=False, lowercase=False, digits=True) ret = user.add(name=new_existing) assert ret is True @@ -300,14 +298,14 @@ def test_rename_int_existing(account_int): assert new_existing not in user.list_users() -def test_setpassword_str(account_str): +def test_setpassword_str(user, account_str): ret = user.setpassword(account_str.username, password="Sup3rS3cret") # We have no way of verifying the password was changed on Windows, so the # best we can do is check that the command completed successfully assert ret is True 
-def test_setpassword_int(account_int): +def test_setpassword_int(user, account_int): ret = user.setpassword(account_int.username, password="Sup3rS3cret") # We have no way of verifying the password was changed on Windows, so the # best we can do is check that the command completed successfully @@ -333,7 +331,7 @@ def test_setpassword_int(account_int): ("disallow_change_password", False, "", None), ], ) -def test_update_str(value_name, new_value, info_field, expected, account_str): +def test_update_str(user, value_name, new_value, info_field, expected, account_str): setting = {value_name: new_value} ret = user.update(account_str.username, **setting) assert ret is True From 017f714e96dd057c00526a7bbf7029f83cdeb7bb Mon Sep 17 00:00:00 2001 From: Shane Lee Date: Wed, 20 Mar 2024 11:52:12 -0600 Subject: [PATCH 069/102] Use sorted instead of sort --- tests/pytests/functional/modules/test_win_useradd.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/tests/pytests/functional/modules/test_win_useradd.py b/tests/pytests/functional/modules/test_win_useradd.py index 5396c39bb70..01f4a71e349 100644 --- a/tests/pytests/functional/modules/test_win_useradd.py +++ b/tests/pytests/functional/modules/test_win_useradd.py @@ -101,7 +101,7 @@ def test_chgroups_single_str(user, account_str): assert ret is True ret = user.info(account_str.username) groups.append("Users") - assert ret["groups"].sort() == groups.sort() + assert sorted(ret["groups"]) == sorted(groups) def test_chgroups_single_int(user, account_int): @@ -110,7 +110,7 @@ def test_chgroups_single_int(user, account_int): assert ret is True ret = user.info(account_int.username) groups.append("Users") - assert ret["groups"].sort() == groups.sort() + assert sorted(ret["groups"]) == sorted(groups) def test_chgroups_list_str(user, account_str): @@ -119,7 +119,7 @@ def test_chgroups_list_str(user, account_str): assert ret is True ret = user.info(account_str.username) groups.append("Users") - assert ret["groups"].sort() == groups.sort() + assert sorted(ret["groups"]) == sorted(groups) def test_chgroups_list_int(user, account_int): @@ -128,7 +128,7 @@ def test_chgroups_list_int(user, account_int): assert ret is True ret = user.info(account_int.username) groups.append("Users") - assert ret["groups"].sort() == groups.sort() + assert sorted(ret["groups"]) == sorted(groups) def test_chgroups_list_append_false_str(user, account_str): @@ -136,7 +136,7 @@ def test_chgroups_list_append_false_str(user, account_str): ret = user.chgroups(account_str.username, groups=groups, append=False) assert ret is True ret = user.info(account_str.username) - assert ret["groups"].sort() == groups.sort() + assert sorted(ret["groups"]) == sorted(groups) def test_chgroups_list_append_false_int(user, account_int): @@ -144,7 +144,7 @@ def test_chgroups_list_append_false_int(user, account_int): ret = user.chgroups(account_int.username, groups=groups, append=False) assert ret is True ret = user.info(account_int.username) - assert ret["groups"].sort() == groups.sort() + assert sorted(ret["groups"]) == sorted(groups) def test_chhome_str(user, account_str): From 87d3344a7a374c74d6c906423bb32223d9dfcd8c Mon Sep 17 00:00:00 2001 From: Shane Lee Date: Thu, 21 Mar 2024 09:55:48 -0600 Subject: [PATCH 070/102] Fix failing package tests --- salt/modules/win_useradd.py | 7 ++++++- tests/support/pytest/helpers.py | 16 ++++++++-------- 2 files changed, 14 insertions(+), 9 deletions(-) diff --git a/salt/modules/win_useradd.py b/salt/modules/win_useradd.py index 
1a667909afd..b21f5c70812 100644 --- a/salt/modules/win_useradd.py +++ b/salt/modules/win_useradd.py @@ -122,7 +122,10 @@ def add( user_info["name"] = str(name) else: return False - user_info["password"] = str(password) + if password: + user_info["password"] = str(password) + else: + user_info["password"] = None user_info["priv"] = win32netcon.USER_PRIV_USER user_info["home_dir"] = home user_info["comment"] = description @@ -234,6 +237,8 @@ def update( # Update the user object with new settings if password: user_info["password"] = str(password) + else: + user_info["password"] = None if home: user_info["home_dir"] = home if homedrive: diff --git a/tests/support/pytest/helpers.py b/tests/support/pytest/helpers.py index 51c59a54ede..00beffa66a0 100644 --- a/tests/support/pytest/helpers.py +++ b/tests/support/pytest/helpers.py @@ -63,7 +63,7 @@ def temp_state_file(name, contents, saltenv="base", strip_first_newline=True): saltenv(str): The salt env to use. Either ``base`` or ``prod`` strip_first_newline(bool): - Wether to strip the initial first new line char or not. + Whether to strip the initial first new line char or not. """ if saltenv == "base": @@ -109,7 +109,7 @@ def temp_pillar_file(name, contents, saltenv="base", strip_first_newline=True): saltenv(str): The salt env to use. Either ``base`` or ``prod`` strip_first_newline(bool): - Wether to strip the initial first new line char or not. + Whether to strip the initial first new line char or not. """ if saltenv == "base": @@ -236,7 +236,7 @@ class TestGroup: self._delete_group = True log.debug("Created system group: %s", self) else: - log.debug("Reusing exising system group: %s", self) + log.debug("Reusing existing system group: %s", self) if self.members: ret = self.sminion.functions.group.members( self.name, members_list=self.members @@ -325,14 +325,14 @@ class TestAccount: if not self.sminion.functions.user.info(self.username): log.debug("Creating system account: %s", self) ret = self.sminion.functions.user.add(self.username) - assert ret + assert ret is True self._delete_account = True if salt.utils.platform.is_darwin() or salt.utils.platform.is_windows(): password = self.password else: password = self.hashed_password ret = self.sminion.functions.shadow.set_password(self.username, password) - assert ret + assert ret is True assert self.username in self.sminion.functions.user.list_users() if self._group: self.group.__enter__() @@ -344,7 +344,7 @@ class TestAccount: if self._delete_account: log.debug("Created system account: %s", self) else: - log.debug("Reusing exisintg system account: %s", self) + log.debug("Reusing existing system account: %s", self) # Run tests return self @@ -700,7 +700,7 @@ class EntropyGenerator: kernel_entropy_file = pathlib.Path("/proc/sys/kernel/random/entropy_avail") kernel_poolsize_file = pathlib.Path("/proc/sys/kernel/random/poolsize") if not kernel_entropy_file.exists(): - log.info("The '%s' file is not avilable", kernel_entropy_file) + log.info("The '%s' file is not available", kernel_entropy_file) return self.current_entropy = int( @@ -709,7 +709,7 @@ class EntropyGenerator: log.info("Available Entropy: %s", self.current_entropy) if not kernel_poolsize_file.exists(): - log.info("The '%s' file is not avilable", kernel_poolsize_file) + log.info("The '%s' file is not available", kernel_poolsize_file) else: self.current_poolsize = int( kernel_poolsize_file.read_text(encoding="utf-8").strip() From 6fb74a0d1a6d5299ee35a8dbc332f925f0c99c33 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Fri, 22 Mar 2024 
11:49:22 +0000 Subject: [PATCH 071/102] Reap child processes after each of the ssh tests in case of timeouts --- tests/conftest.py | 45 +----------------- tests/pytests/integration/ssh/conftest.py | 11 +++++ tests/support/pytest/helpers.py | 57 +++++++++++++++++++++++ 3 files changed, 70 insertions(+), 43 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index 8c45640de5e..4694f38ea6c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,7 +5,6 @@ import logging import os import pathlib -import pprint import re import shutil import stat @@ -16,7 +15,6 @@ from unittest import TestCase # pylint: disable=blacklisted-module import _pytest.logging import _pytest.skipping import more_itertools -import psutil import pytest import salt @@ -1295,7 +1293,6 @@ def salt_call_cli(salt_minion_factory): @pytest.fixture(scope="session", autouse=True) def bridge_pytest_and_runtests( - reap_stray_processes, salt_factories, salt_syndic_master_factory, salt_syndic_factory, @@ -1332,6 +1329,8 @@ def bridge_pytest_and_runtests( salt_syndic_factory.config["conf_file"] ) RUNTIME_VARS.TMP_SSH_CONF_DIR = str(sshd_config_dir) + with reap_stray_processes(): + yield @pytest.fixture(scope="session") @@ -1646,46 +1645,6 @@ def from_filenames_collection_modifyitems(config, items): # ----- Custom Fixtures ---------------------------------------------------------------------------------------------> -@pytest.fixture(scope="session") -def reap_stray_processes(): - # Run tests - yield - - children = psutil.Process(os.getpid()).children(recursive=True) - if not children: - log.info("No astray processes found") - return - - def on_terminate(proc): - log.debug("Process %s terminated with exit code %s", proc, proc.returncode) - - if children: - # Reverse the order, sublings first, parents after - children.reverse() - log.warning( - "Test suite left %d astray processes running. 
Killing those processes:\n%s", - len(children), - pprint.pformat(children), - ) - - _, alive = psutil.wait_procs(children, timeout=3, callback=on_terminate) - for child in alive: - try: - child.kill() - except psutil.NoSuchProcess: - continue - - _, alive = psutil.wait_procs(alive, timeout=3, callback=on_terminate) - if alive: - # Give up - for child in alive: - log.warning( - "Process %s survived SIGKILL, giving up:\n%s", - child, - pprint.pformat(child.as_dict()), - ) - - @pytest.fixture(scope="session") def sminion(): return create_sminion() diff --git a/tests/pytests/integration/ssh/conftest.py b/tests/pytests/integration/ssh/conftest.py index 9b83df256b0..75809181be5 100644 --- a/tests/pytests/integration/ssh/conftest.py +++ b/tests/pytests/integration/ssh/conftest.py @@ -1,5 +1,7 @@ import pytest +from tests.support.pytest.helpers import reap_stray_processes + @pytest.fixture(scope="package", autouse=True) def _auto_skip_on_fedora_39(grains): @@ -13,6 +15,15 @@ def _auto_skip_on_fedora_39(grains): ) +@pytest.fixture(autouse=True) +def _reap_stray_processes(grains): + # when tests timeout, we migth leave child processes behind + # nuke them + with reap_stray_processes(): + # Run test + yield + + @pytest.fixture(scope="module") def state_tree(base_env_state_tree_root_dir): top_file = """ diff --git a/tests/support/pytest/helpers.py b/tests/support/pytest/helpers.py index 871ddc872fe..16cc2f3e83b 100644 --- a/tests/support/pytest/helpers.py +++ b/tests/support/pytest/helpers.py @@ -8,6 +8,7 @@ import logging import os import pathlib +import pprint import shutil import subprocess import tempfile @@ -18,6 +19,7 @@ import warnings from contextlib import contextmanager import attr +import psutil import pytest import requests from saltfactories.utils import random_string @@ -833,6 +835,61 @@ def download_file(url, dest, auth=None): return dest +@contextmanager +def reap_stray_processes(pid: int = os.getpid()): + + try: + pre_children = psutil.Process(pid).children(recursive=True) + # Do stuff + yield + finally: + post_children = psutil.Process(pid).children(recursive=True) + + children = [] + for process in post_children: + if process in pre_children: + # Process existed before entering the context + continue + if not psutil.pid_exists(process.pid): + # Process just died + continue + # This process is alive and was not running before entering the context + children.append(process) + + if not children: + log.info("No astray processes found") + return + + def on_terminate(proc): + log.debug("Process %s terminated with exit code %s", proc, proc.returncode) + + if children: + # Reverse the order, sublings first, parents after + children.reverse() + log.warning( + "Test suite left %d astray processes running. 
Killing those processes:\n%s", + len(children), + pprint.pformat(children), + ) + + _, alive = psutil.wait_procs(children, timeout=3, callback=on_terminate) + for child in alive: + try: + child.kill() + except psutil.NoSuchProcess: + continue + + _, alive = psutil.wait_procs(alive, timeout=3, callback=on_terminate) + if alive: + # Give up + for child in alive: + log.warning( + "Process %s survived SIGKILL, giving up:\n%s", + child, + pprint.pformat(child.as_dict()), + ) + + # Only allow star importing the functions defined in this module __all__ = [ name From 53fce6957d5582645782bf140ef774c9bd30797d Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Mon, 25 Mar 2024 15:49:38 -0600 Subject: [PATCH 072/102] Fix RedHat family systemd restart on upgrade, and updated tests --- changelog/66143.fixed.md | 1 + pkg/rpm/salt.spec | 8 ++--- .../pkg/downgrade/test_salt_downgrade.py | 33 ++++++++++++------- .../pytests/pkg/upgrade/test_salt_upgrade.py | 33 ++++++++++++------- 4 files changed, 47 insertions(+), 28 deletions(-) create mode 100644 changelog/66143.fixed.md diff --git a/changelog/66143.fixed.md b/changelog/66143.fixed.md new file mode 100644 index 00000000000..58ecbd163c5 --- /dev/null +++ b/changelog/66143.fixed.md @@ -0,0 +1 @@ +Fix systemctl with "try-restart" instead of "retry-restart" within the RPM spec, properly restarting upgraded services diff --git a/pkg/rpm/salt.spec b/pkg/rpm/salt.spec index 2821cfd2f05..540f2fb42fe 100644 --- a/pkg/rpm/salt.spec +++ b/pkg/rpm/salt.spec @@ -475,7 +475,7 @@ ln -s -f /opt/saltstack/salt/salt-cloud %{_bindir}/salt-cloud # %%systemd_post salt-master.service if [ $1 -gt 1 ] ; then # Upgrade - systemctl retry-restart salt-master.service >/dev/null 2>&1 || : + systemctl try-restart salt-master.service >/dev/null 2>&1 || : else # Initial installation systemctl preset salt-master.service >/dev/null 2>&1 || : @@ -503,7 +503,7 @@ fi # %%systemd_post salt-syndic.service if [ $1 -gt 1 ] ; then # Upgrade - systemctl retry-restart salt-syndic.service >/dev/null 2>&1 || : + systemctl try-restart salt-syndic.service >/dev/null 2>&1 || : else # Initial installation systemctl preset salt-syndic.service >/dev/null 2>&1 || : @@ -514,7 +514,7 @@ ln -s -f /opt/saltstack/salt/salt-syndic %{_bindir}/salt-syndic # %%systemd_post salt-minion.service if [ $1 -gt 1 ] ; then # Upgrade - systemctl retry-restart salt-minion.service >/dev/null 2>&1 || : + systemctl try-restart salt-minion.service >/dev/null 2>&1 || : else # Initial installation systemctl preset salt-minion.service >/dev/null 2>&1 || : @@ -543,7 +543,7 @@ ln -s -f /opt/saltstack/salt/salt-ssh %{_bindir}/salt-ssh # %%systemd_post salt-api.service if [ $1 -gt 1 ] ; then # Upgrade - systemctl retry-restart salt-api.service >/dev/null 2>&1 || : + systemctl try-restart salt-api.service >/dev/null 2>&1 || : else # Initial installation systemctl preset salt-api.service >/dev/null 2>&1 || : diff --git a/tests/pytests/pkg/downgrade/test_salt_downgrade.py b/tests/pytests/pkg/downgrade/test_salt_downgrade.py index ec090d17dda..43fe1cc30dd 100644 --- a/tests/pytests/pkg/downgrade/test_salt_downgrade.py +++ b/tests/pytests/pkg/downgrade/test_salt_downgrade.py @@ -36,17 +36,25 @@ def test_salt_downgrade(salt_call_cli, install_salt): assert "Authentication information could" in use_lib.stderr # Verify there is a running minion by getting its PID + salt_name = "salt" if platform.is_windows(): process_name = "salt-minion.exe" else: process_name = "salt-minion" - old_pid = None + + old_pid = [] + + # 
psutil process name only returning first part of the command '/opt/saltstack/' + # need to check all of command line for salt-minion + # ['/opt/saltstack/salt/bin/python3.10 /usr/bin/salt-minion MultiMinionProcessManager MinionProcessManager'] + # and psutil is only returning the salt-minion once for proc in psutil.process_iter(): - if process_name in proc.name(): - if psutil.Process(proc.ppid()).name() != process_name: - old_pid = proc.pid - break - assert old_pid is not None + if salt_name in proc.name(): + cmdl_strg = " ".join(str(element) for element in proc.cmdline()) + if process_name in cmdl_strg: + old_pid.append(proc.pid) + + assert old_pid # Downgrade Salt to the previous version and test install_salt.install(downgrade=True) @@ -61,13 +69,14 @@ def test_salt_downgrade(salt_call_cli, install_salt): # Verify there is a new running minion by getting its PID and comparing it # with the PID from before the upgrade - new_pid = None + new_pid = [] for proc in psutil.process_iter(): - if process_name in proc.name(): - if psutil.Process(proc.ppid()).name() != process_name: - new_pid = proc.pid - break - assert new_pid is not None + if salt_name in proc.name(): + cmdl_strg = " ".join(str(element) for element in proc.cmdline()) + if process_name in cmdl_strg: + new_pid.append(proc.pid) + + assert new_pid assert new_pid != old_pid ret = install_salt.proc.run(bin_file, "--version") diff --git a/tests/pytests/pkg/upgrade/test_salt_upgrade.py b/tests/pytests/pkg/upgrade/test_salt_upgrade.py index 241a3c63d0f..d376d581adb 100644 --- a/tests/pytests/pkg/upgrade/test_salt_upgrade.py +++ b/tests/pytests/pkg/upgrade/test_salt_upgrade.py @@ -32,17 +32,25 @@ def test_salt_upgrade(salt_call_cli, install_salt): assert "Authentication information could" in use_lib.stderr # Verify there is a running minion by getting its PID + salt_name = "salt" if platform.is_windows(): process_name = "salt-minion.exe" else: process_name = "salt-minion" - old_pid = None + + old_pid = [] + + # psutil process name only returning first part of the command '/opt/saltstack/' + # need to check all of command line for salt-minion + # ['/opt/saltstack/salt/bin/python3.10 /usr/bin/salt-minion MultiMinionProcessManager MinionProcessManager'] + # and psutil is only returning the salt-minion once for proc in psutil.process_iter(): - if process_name in proc.name(): - if psutil.Process(proc.ppid()).name() != process_name: - old_pid = proc.pid - break - assert old_pid is not None + if salt_name in proc.name(): + cmdl_strg = " ".join(str(element) for element in proc.cmdline()) + if process_name in cmdl_strg: + old_pid.append(proc.pid) + + assert old_pid # Upgrade Salt from previous version and test install_salt.install(upgrade=True) @@ -54,13 +62,14 @@ def test_salt_upgrade(salt_call_cli, install_salt): # Verify there is a new running minion by getting its PID and comparing it # with the PID from before the upgrade - new_pid = None + new_pid = [] for proc in psutil.process_iter(): - if process_name in proc.name(): - if psutil.Process(proc.ppid()).name() != process_name: - new_pid = proc.pid - break - assert new_pid is not None + if salt_name in proc.name(): + cmdl_strg = " ".join(str(element) for element in proc.cmdline()) + if process_name in cmdl_strg: + new_pid.append(proc.pid) + + assert new_pid assert new_pid != old_pid if install_salt.relenv: From 61df4d8c869d021e472f1d214dc5831b0da2b73f Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Tue, 26 Mar 2024 15:45:24 -0600 Subject: [PATCH 073/102] Ensure path 
for systemctl to stop spoofing (security fix moved from classic spec) and debug helper scripts --- pkg/rpm/salt.spec | 46 +++++++++++++++++++++++----------------------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/pkg/rpm/salt.spec b/pkg/rpm/salt.spec index 540f2fb42fe..53f739f2481 100644 --- a/pkg/rpm/salt.spec +++ b/pkg/rpm/salt.spec @@ -176,7 +176,7 @@ cd $RPM_BUILD_DIR # Fix any hardcoded paths to the relenv python binary on any of the scripts installed in # the /bin directory - find $RPM_BUILD_DIR/build/salt/bin/ -type f -exec sed -i 's:#!/\(.*\)salt/bin/python3:#!/bin/sh\n"exec" "$(dirname $(readlink -f $0))/python3" "$0" "$@":g' {} \; + find $RPM_BUILD_DIR/build/salt/bin/ -type f -exec sed -i 's:#!/\(.*\)salt/bin/python3:#!/bin/sh -x\n"exec" "$(dirname $(readlink -f $0))/python3" "$0" "$@":g' {} \; $RPM_BUILD_DIR/build/venv/bin/tools pkg build salt-onedir . --package-name $RPM_BUILD_DIR/build/salt --platform linux $RPM_BUILD_DIR/build/venv/bin/tools pkg pre-archive-cleanup --pkg $RPM_BUILD_DIR/build/salt @@ -439,16 +439,16 @@ find /etc/salt /opt/saltstack/salt /var/log/salt /var/cache/salt /var/run/salt \ # %%systemd_preun salt-syndic.service > /dev/null 2>&1 if [ $1 -eq 0 ] ; then # Package removal, not upgrade - systemctl --no-reload disable salt-syndic.service > /dev/null 2>&1 || : - systemctl stop salt-syndic.service > /dev/null 2>&1 || : + /bin/systemctl --no-reload disable salt-syndic.service > /dev/null 2>&1 || : + /bin/systemctl stop salt-syndic.service > /dev/null 2>&1 || : fi %preun minion # %%systemd_preun salt-minion.service if [ $1 -eq 0 ] ; then # Package removal, not upgrade - systemctl --no-reload disable salt-minion.service > /dev/null 2>&1 || : - systemctl stop salt-minion.service > /dev/null 2>&1 || : + /bin/systemctl --no-reload disable salt-minion.service > /dev/null 2>&1 || : + /bin/systemctl stop salt-minion.service > /dev/null 2>&1 || : fi @@ -456,8 +456,8 @@ fi # %%systemd_preun salt-api.service if [ $1 -eq 0 ] ; then # Package removal, not upgrade - systemctl --no-reload disable salt-api.service > /dev/null 2>&1 || : - systemctl stop salt-api.service > /dev/null 2>&1 || : + /bin/systemctl --no-reload disable salt-api.service > /dev/null 2>&1 || : + /bin/systemctl stop salt-api.service > /dev/null 2>&1 || : fi @@ -475,10 +475,10 @@ ln -s -f /opt/saltstack/salt/salt-cloud %{_bindir}/salt-cloud # %%systemd_post salt-master.service if [ $1 -gt 1 ] ; then # Upgrade - systemctl try-restart salt-master.service >/dev/null 2>&1 || : + /bin/systemctl try-restart salt-master.service >/dev/null 2>&1 || : else # Initial installation - systemctl preset salt-master.service >/dev/null 2>&1 || : + /bin/systemctl preset salt-master.service >/dev/null 2>&1 || : fi ln -s -f /opt/saltstack/salt/salt %{_bindir}/salt ln -s -f /opt/saltstack/salt/salt-cp %{_bindir}/salt-cp @@ -503,10 +503,10 @@ fi # %%systemd_post salt-syndic.service if [ $1 -gt 1 ] ; then # Upgrade - systemctl try-restart salt-syndic.service >/dev/null 2>&1 || : + /bin/systemctl try-restart salt-syndic.service >/dev/null 2>&1 || : else # Initial installation - systemctl preset salt-syndic.service >/dev/null 2>&1 || : + /bin/systemctl preset salt-syndic.service >/dev/null 2>&1 || : fi ln -s -f /opt/saltstack/salt/salt-syndic %{_bindir}/salt-syndic @@ -514,10 +514,10 @@ ln -s -f /opt/saltstack/salt/salt-syndic %{_bindir}/salt-syndic # %%systemd_post salt-minion.service if [ $1 -gt 1 ] ; then # Upgrade - systemctl try-restart salt-minion.service >/dev/null 2>&1 || : + /bin/systemctl try-restart 
salt-minion.service >/dev/null 2>&1 || : else # Initial installation - systemctl preset salt-minion.service >/dev/null 2>&1 || : + /bin/systemctl preset salt-minion.service >/dev/null 2>&1 || : fi ln -s -f /opt/saltstack/salt/salt-minion %{_bindir}/salt-minion ln -s -f /opt/saltstack/salt/salt-call %{_bindir}/salt-call @@ -543,10 +543,10 @@ ln -s -f /opt/saltstack/salt/salt-ssh %{_bindir}/salt-ssh # %%systemd_post salt-api.service if [ $1 -gt 1 ] ; then # Upgrade - systemctl try-restart salt-api.service >/dev/null 2>&1 || : + /bin/systemctl try-restart salt-api.service >/dev/null 2>&1 || : else # Initial installation - systemctl preset salt-api.service >/dev/null 2>&1 || : + /bin/systemctl preset salt-api.service >/dev/null 2>&1 || : fi ln -s -f /opt/saltstack/salt/salt-api %{_bindir}/salt-api @@ -589,10 +589,10 @@ fi %postun master # %%systemd_postun_with_restart salt-master.service -systemctl daemon-reload >/dev/null 2>&1 || : +/bin/systemctl daemon-reload >/dev/null 2>&1 || : if [ $1 -ge 1 ] ; then # Package upgrade, not uninstall - systemctl try-restart salt-master.service >/dev/null 2>&1 || : + /bin/systemctl try-restart salt-master.service >/dev/null 2>&1 || : fi if [ $1 -eq 0 ]; then if [ $(cat /etc/os-release | grep VERSION_ID | cut -d '=' -f 2 | sed 's/\"//g' | cut -d '.' -f 1) = "8" ]; then @@ -610,18 +610,18 @@ fi %postun syndic # %%systemd_postun_with_restart salt-syndic.service -systemctl daemon-reload >/dev/null 2>&1 || : +/bin/systemctl daemon-reload >/dev/null 2>&1 || : if [ $1 -ge 1 ] ; then # Package upgrade, not uninstall - systemctl try-restart salt-syndic.service >/dev/null 2>&1 || : + /bin/systemctl try-restart salt-syndic.service >/dev/null 2>&1 || : fi %postun minion # %%systemd_postun_with_restart salt-minion.service -systemctl daemon-reload >/dev/null 2>&1 || : +/bin/systemctl daemon-reload >/dev/null 2>&1 || : if [ $1 -ge 1 ] ; then # Package upgrade, not uninstall - systemctl try-restart salt-minion.service >/dev/null 2>&1 || : + /bin/systemctl try-restart salt-minion.service >/dev/null 2>&1 || : fi if [ $1 -eq 0 ]; then if [ $(cat /etc/os-release | grep VERSION_ID | cut -d '=' -f 2 | sed 's/\"//g' | cut -d '.' 
-f 1) = "8" ]; then @@ -639,10 +639,10 @@ fi %postun api # %%systemd_postun_with_restart salt-api.service -systemctl daemon-reload >/dev/null 2>&1 || : +/bin/systemctl daemon-reload >/dev/null 2>&1 || : if [ $1 -ge 1 ] ; then # Package upgrade, not uninstall - systemctl try-restart salt-api.service >/dev/null 2>&1 || : + /bin/systemctl try-restart salt-api.service >/dev/null 2>&1 || : fi %changelog From 93dadb211608a52ea52f1fe209a0879bb0cde212 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Tue, 26 Mar 2024 15:53:58 -0600 Subject: [PATCH 074/102] Ensure symbolic links are created before using systemctl --- pkg/rpm/salt.spec | 38 +++++++++++++++++++------------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/pkg/rpm/salt.spec b/pkg/rpm/salt.spec index 53f739f2481..df5730116d5 100644 --- a/pkg/rpm/salt.spec +++ b/pkg/rpm/salt.spec @@ -176,7 +176,7 @@ cd $RPM_BUILD_DIR # Fix any hardcoded paths to the relenv python binary on any of the scripts installed in # the /bin directory - find $RPM_BUILD_DIR/build/salt/bin/ -type f -exec sed -i 's:#!/\(.*\)salt/bin/python3:#!/bin/sh -x\n"exec" "$(dirname $(readlink -f $0))/python3" "$0" "$@":g' {} \; + find $RPM_BUILD_DIR/build/salt/bin/ -type f -exec sed -i 's:#!/\(.*\)salt/bin/python3:#!/bin/sh\n"exec" "$(dirname $(readlink -f $0))/python3" "$0" "$@":g' {} \; $RPM_BUILD_DIR/build/venv/bin/tools pkg build salt-onedir . --package-name $RPM_BUILD_DIR/build/salt --platform linux $RPM_BUILD_DIR/build/venv/bin/tools pkg pre-archive-cleanup --pkg $RPM_BUILD_DIR/build/salt @@ -472,14 +472,6 @@ ln -s -f /opt/saltstack/salt/salt-cloud %{_bindir}/salt-cloud %post master -# %%systemd_post salt-master.service -if [ $1 -gt 1 ] ; then - # Upgrade - /bin/systemctl try-restart salt-master.service >/dev/null 2>&1 || : -else - # Initial installation - /bin/systemctl preset salt-master.service >/dev/null 2>&1 || : -fi ln -s -f /opt/saltstack/salt/salt %{_bindir}/salt ln -s -f /opt/saltstack/salt/salt-cp %{_bindir}/salt-cp ln -s -f /opt/saltstack/salt/salt-key %{_bindir}/salt-key @@ -498,8 +490,17 @@ if [ $1 -lt 2 ]; then fi fi fi +# %%systemd_post salt-master.service +if [ $1 -gt 1 ] ; then + # Upgrade + /bin/systemctl try-restart salt-master.service >/dev/null 2>&1 || : +else + # Initial installation + /bin/systemctl preset salt-master.service >/dev/null 2>&1 || : +fi %post syndic +ln -s -f /opt/saltstack/salt/salt-syndic %{_bindir}/salt-syndic # %%systemd_post salt-syndic.service if [ $1 -gt 1 ] ; then # Upgrade @@ -508,17 +509,8 @@ else # Initial installation /bin/systemctl preset salt-syndic.service >/dev/null 2>&1 || : fi -ln -s -f /opt/saltstack/salt/salt-syndic %{_bindir}/salt-syndic %post minion -# %%systemd_post salt-minion.service -if [ $1 -gt 1 ] ; then - # Upgrade - /bin/systemctl try-restart salt-minion.service >/dev/null 2>&1 || : -else - # Initial installation - /bin/systemctl preset salt-minion.service >/dev/null 2>&1 || : -fi ln -s -f /opt/saltstack/salt/salt-minion %{_bindir}/salt-minion ln -s -f /opt/saltstack/salt/salt-call %{_bindir}/salt-call ln -s -f /opt/saltstack/salt/salt-proxy %{_bindir}/salt-proxy @@ -535,11 +527,20 @@ if [ $1 -lt 2 ]; then fi fi fi +# %%systemd_post salt-minion.service +if [ $1 -gt 1 ] ; then + # Upgrade + /bin/systemctl try-restart salt-minion.service >/dev/null 2>&1 || : +else + # Initial installation + /bin/systemctl preset salt-minion.service >/dev/null 2>&1 || : +fi %post ssh ln -s -f /opt/saltstack/salt/salt-ssh %{_bindir}/salt-ssh %post api +ln -s -f 
/opt/saltstack/salt/salt-api %{_bindir}/salt-api # %%systemd_post salt-api.service if [ $1 -gt 1 ] ; then # Upgrade @@ -548,7 +549,6 @@ else # Initial installation /bin/systemctl preset salt-api.service >/dev/null 2>&1 || : fi -ln -s -f /opt/saltstack/salt/salt-api %{_bindir}/salt-api %posttrans cloud From 78625d8db7d09053ce12d46c23da410f80d0b912 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Wed, 27 Mar 2024 10:36:41 -0600 Subject: [PATCH 075/102] Add leading slash to salt helper file paths as per dh_links requirement --- changelog/66280.fixed.md | 1 + pkg/debian/salt-master.links | 10 +++++----- 2 files changed, 6 insertions(+), 5 deletions(-) create mode 100644 changelog/66280.fixed.md diff --git a/changelog/66280.fixed.md b/changelog/66280.fixed.md new file mode 100644 index 00000000000..dafd798295b --- /dev/null +++ b/changelog/66280.fixed.md @@ -0,0 +1 @@ +Add leading slash to salt helper file paths as per dh_links requirement diff --git a/pkg/debian/salt-master.links b/pkg/debian/salt-master.links index 6e593930462..e6c0ef2446a 100644 --- a/pkg/debian/salt-master.links +++ b/pkg/debian/salt-master.links @@ -1,6 +1,6 @@ opt/saltstack/salt/salt-master /usr/bin/salt-master -opt/saltstack/salt/salt usr/bin/salt -opt/saltstack/salt/salt-cp usr/bin/salt-cp -opt/saltstack/salt/salt-key usr/bin/salt-key -opt/saltstack/salt/salt-run usr/bin/salt-run -opt/saltstack/salt/spm usr/bin/spm +opt/saltstack/salt/salt /usr/bin/salt +opt/saltstack/salt/salt-cp /usr/bin/salt-cp +opt/saltstack/salt/salt-key /usr/bin/salt-key +opt/saltstack/salt/salt-run /usr/bin/salt-run +opt/saltstack/salt/spm /usr/bin/spm From 5c4e810836a3d1437fdaddef44c09580e9060f09 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 27 Mar 2024 14:20:50 +0000 Subject: [PATCH 076/102] Make jinja rendering strict. Undefined variable use throws error. 
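With the default jinja2 Undefined, a reference to a missing template variable
silently renders as an empty string, which can let typos in the workflow
templates go unnoticed; StrictUndefined turns any use of an undefined variable
into a render-time error. A minimal sketch of the difference (assumes only that
jinja2 is installed; illustration, not part of the change itself):

    from jinja2 import Environment, StrictUndefined
    from jinja2.exceptions import UndefinedError

    # Default behaviour: the undefined name disappears silently.
    print(Environment().from_string("value: {{ missing }}").render())  # -> "value: "

    # Strict behaviour: the same template now fails loudly.
    try:
        Environment(undefined=StrictUndefined).from_string("value: {{ missing }}").render()
    except UndefinedError as exc:
        print(f"undefined variable: {exc}")  # -> 'missing' is undefined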
--- .github/workflows/templates/build-packages.yml.jinja | 2 +- .github/workflows/templates/ci.yml.jinja | 2 ++ tools/precommit/workflows.py | 3 ++- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/.github/workflows/templates/build-packages.yml.jinja b/.github/workflows/templates/build-packages.yml.jinja index 91adfb1c6e6..745bcc3c9ca 100644 --- a/.github/workflows/templates/build-packages.yml.jinja +++ b/.github/workflows/templates/build-packages.yml.jinja @@ -17,7 +17,7 @@ relenv-version: "<{ relenv_version }>" python-version: "<{ python_version }>" source: "<{ backend }>" - <%- if gh_environment %> + <%- if gh_environment != "ci" %> environment: <{ gh_environment }> sign-macos-packages: true sign-windows-packages: <% if gh_environment == 'nightly' -%> false <%- else -%> ${{ inputs.sign-windows-packages }} <%- endif %> diff --git a/.github/workflows/templates/ci.yml.jinja b/.github/workflows/templates/ci.yml.jinja index ec15f70ca74..0553e799c89 100644 --- a/.github/workflows/templates/ci.yml.jinja +++ b/.github/workflows/templates/ci.yml.jinja @@ -1,3 +1,5 @@ +<%- set gh_environment = gh_environment|default("ci") %> + <%- extends 'layout.yml.jinja' %> <%- set pre_commit_version = "3.0.4" %> diff --git a/tools/precommit/workflows.py b/tools/precommit/workflows.py index d08567dca9c..27219a90749 100644 --- a/tools/precommit/workflows.py +++ b/tools/precommit/workflows.py @@ -11,7 +11,7 @@ import shutil from typing import TYPE_CHECKING, cast import yaml -from jinja2 import Environment, FileSystemLoader +from jinja2 import Environment, FileSystemLoader, StrictUndefined from ptscripts import Context, command_group import tools.utils @@ -292,6 +292,7 @@ def generate_workflows(ctx: Context): "jinja2.ext.do", ], loader=FileSystemLoader(str(TEMPLATES)), + undefined=StrictUndefined, ) for workflow_name, details in workflows.items(): if TYPE_CHECKING: From b5568458504ad8cdd3047651db518230577433cf Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Tue, 26 Mar 2024 18:36:20 +0000 Subject: [PATCH 077/102] Bump to ``dorny/paths-filter@v3`` --- .github/workflows/ci.yml | 2 +- .github/workflows/nightly.yml | 2 +- .github/workflows/scheduled.yml | 2 +- .github/workflows/staging.yml | 2 +- .github/workflows/templates/layout.yml.jinja | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 98dd746b9d8..36a4c6f2222 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -59,7 +59,7 @@ jobs: - name: Get Changed Files if: ${{ github.event_name == 'pull_request'}} id: changed-files - uses: dorny/paths-filter@v2 + uses: dorny/paths-filter@v3 with: token: ${{ github.token }} list-files: json diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 8f611d84b08..5d540e60e67 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -103,7 +103,7 @@ jobs: - name: Get Changed Files if: ${{ github.event_name == 'pull_request'}} id: changed-files - uses: dorny/paths-filter@v2 + uses: dorny/paths-filter@v3 with: token: ${{ github.token }} list-files: json diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index 35d595505d6..3369ff77ae1 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -93,7 +93,7 @@ jobs: - name: Get Changed Files if: ${{ github.event_name == 'pull_request'}} id: changed-files - uses: dorny/paths-filter@v2 + uses: dorny/paths-filter@v3 with: token: ${{ github.token }} list-files: json diff --git 
a/.github/workflows/staging.yml b/.github/workflows/staging.yml index de37f95809f..a48504995b7 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -89,7 +89,7 @@ jobs: - name: Get Changed Files if: ${{ github.event_name == 'pull_request'}} id: changed-files - uses: dorny/paths-filter@v2 + uses: dorny/paths-filter@v3 with: token: ${{ github.token }} list-files: json diff --git a/.github/workflows/templates/layout.yml.jinja b/.github/workflows/templates/layout.yml.jinja index fba66a9dc1a..9d65b40d60c 100644 --- a/.github/workflows/templates/layout.yml.jinja +++ b/.github/workflows/templates/layout.yml.jinja @@ -107,7 +107,7 @@ jobs: - name: Get Changed Files if: ${{ github.event_name == 'pull_request'}} id: changed-files - uses: dorny/paths-filter@v2 + uses: dorny/paths-filter@v3 with: token: ${{ github.token }} list-files: json From 62f13b32ecaf45edcb575c5a62f0e3c5f336b9b9 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 27 Mar 2024 08:28:37 +0000 Subject: [PATCH 078/102] Just "grep" what we're after Signed-off-by: Pedro Algarvio --- pkg/windows/build.ps1 | 6 ++---- pkg/windows/build_python.ps1 | 6 ++---- 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/pkg/windows/build.ps1 b/pkg/windows/build.ps1 index ecd18832786..72e29fc1d13 100644 --- a/pkg/windows/build.ps1 +++ b/pkg/windows/build.ps1 @@ -107,10 +107,8 @@ if ( [String]::IsNullOrEmpty($Version) ) { $yaml = Get-Content -Path "$PROJECT_DIR\cicd\shared-gh-workflows-context.yml" $dict_versions = @{} -$yaml | ForEach-Object { - $val1, $val2 = $_ -split ": " - $dict_versions[$val1] = $val2.Trim("""") -} +$dict_versions["python_version"]=($yaml | Select-String -Pattern "python_version: (.*)").matches.groups[1].Value.Trim("""") +$dict_versions["relenv_version"]=($yaml | Select-String -Pattern "relenv_version: (.*)").matches.groups[1].Value.Trim("""") if ( [String]::IsNullOrEmpty($PythonVersion) ) { $PythonVersion = $dict_versions["python_version"] diff --git a/pkg/windows/build_python.ps1 b/pkg/windows/build_python.ps1 index 28aee58fbd2..9257ae79456 100644 --- a/pkg/windows/build_python.ps1 +++ b/pkg/windows/build_python.ps1 @@ -77,10 +77,8 @@ function Write-Result($result, $ForegroundColor="Green") { $yaml = Get-Content -Path "$PROJECT_DIR\cicd\shared-gh-workflows-context.yml" $dict_versions = @{} -$yaml | ForEach-Object { - $val1, $val2 = $_ -split ": " - $dict_versions[$val1] = $val2.Trim("""") -} +$dict_versions["python_version"]=($yaml | Select-String -Pattern "python_version: (.*)").matches.groups[1].Value.Trim("""") +$dict_versions["relenv_version"]=($yaml | Select-String -Pattern "relenv_version: (.*)").matches.groups[1].Value.Trim("""") if ( [String]::IsNullOrEmpty($Version) ) { $Version = $dict_versions["python_version"] From 0d27cf6c1f089313c279d7e049ef682462200685 Mon Sep 17 00:00:00 2001 From: ScriptAutomate Date: Mon, 25 Mar 2024 12:37:32 -0500 Subject: [PATCH 079/102] Reduce test range in PRs and branch merges --- .github/workflows/ci.yml | 1133 ---------------------------------- tools/precommit/workflows.py | 39 +- 2 files changed, 33 insertions(+), 1139 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 36a4c6f2222..6804a525f66 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -488,48 +488,6 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}" - almalinux-8-pkg-tests: - name: Alma Linux 8 Package Test - if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-pkgs-onedir - - build-ci-deps - uses: ./.github/workflows/test-packages-action-linux.yml - with: - distro-slug: almalinux-8 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - - almalinux-8-arm64-pkg-tests: - name: Alma Linux 8 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-pkgs-onedir - - build-ci-deps - uses: ./.github/workflows/test-packages-action-linux.yml - with: - distro-slug: almalinux-8-arm64 - nox-session: ci-test-onedir - platform: linux - arch: arm64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - almalinux-9-pkg-tests: name: Alma Linux 9 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -551,90 +509,6 @@ jobs: skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - almalinux-9-arm64-pkg-tests: - name: Alma Linux 9 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-pkgs-onedir - - build-ci-deps - uses: ./.github/workflows/test-packages-action-linux.yml - with: - distro-slug: almalinux-9-arm64 - nox-session: ci-test-onedir - platform: linux - arch: arm64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - - amazonlinux-2-pkg-tests: - name: Amazon Linux 2 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-pkgs-onedir - - build-ci-deps - uses: ./.github/workflows/test-packages-action-linux.yml - with: - distro-slug: amazonlinux-2 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - testing-releases: ${{ 
needs.prepare-workflow.outputs.testing-releases }} - - amazonlinux-2-arm64-pkg-tests: - name: Amazon Linux 2 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-pkgs-onedir - - build-ci-deps - uses: ./.github/workflows/test-packages-action-linux.yml - with: - distro-slug: amazonlinux-2-arm64 - nox-session: ci-test-onedir - platform: linux - arch: arm64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - - amazonlinux-2023-pkg-tests: - name: Amazon Linux 2023 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-pkgs-onedir - - build-ci-deps - uses: ./.github/workflows/test-packages-action-linux.yml - with: - distro-slug: amazonlinux-2023 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - amazonlinux-2023-arm64-pkg-tests: name: Amazon Linux 2023 Arm64 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -656,283 +530,6 @@ jobs: skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - centos-7-pkg-tests: - name: CentOS 7 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-pkgs-onedir - - build-ci-deps - uses: ./.github/workflows/test-packages-action-linux.yml - with: - distro-slug: centos-7 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - - debian-10-pkg-tests: - name: Debian 10 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-pkgs-onedir - - build-ci-deps - uses: ./.github/workflows/test-packages-action-linux.yml - with: - distro-slug: debian-10 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: deb - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - 
skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - - debian-11-pkg-tests: - name: Debian 11 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-pkgs-onedir - - build-ci-deps - uses: ./.github/workflows/test-packages-action-linux.yml - with: - distro-slug: debian-11 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: deb - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - - debian-11-arm64-pkg-tests: - name: Debian 11 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-pkgs-onedir - - build-ci-deps - uses: ./.github/workflows/test-packages-action-linux.yml - with: - distro-slug: debian-11-arm64 - nox-session: ci-test-onedir - platform: linux - arch: arm64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: deb - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - - debian-12-pkg-tests: - name: Debian 12 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-pkgs-onedir - - build-ci-deps - uses: ./.github/workflows/test-packages-action-linux.yml - with: - distro-slug: debian-12 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: deb - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - - debian-12-arm64-pkg-tests: - name: Debian 12 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-pkgs-onedir - - build-ci-deps - uses: ./.github/workflows/test-packages-action-linux.yml - with: - distro-slug: debian-12-arm64 - nox-session: ci-test-onedir - platform: linux - arch: arm64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: deb - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - - photonos-4-pkg-tests: - name: Photon OS 4 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-pkgs-onedir - - build-ci-deps - uses: ./.github/workflows/test-packages-action-linux.yml - with: - distro-slug: photonos-4 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - fips: true - - photonos-4-arm64-pkg-tests: - name: Photon OS 4 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-pkgs-onedir - - build-ci-deps - uses: ./.github/workflows/test-packages-action-linux.yml - with: - distro-slug: photonos-4-arm64 - nox-session: ci-test-onedir - platform: linux - arch: arm64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - fips: true - - photonos-5-pkg-tests: - name: Photon OS 5 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-pkgs-onedir - - build-ci-deps - uses: ./.github/workflows/test-packages-action-linux.yml - with: - distro-slug: photonos-5 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - fips: true - - photonos-5-arm64-pkg-tests: - name: Photon OS 5 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-pkgs-onedir - - build-ci-deps - uses: ./.github/workflows/test-packages-action-linux.yml - with: - distro-slug: photonos-5-arm64 - nox-session: ci-test-onedir - platform: linux - arch: arm64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - fips: true - - ubuntu-2004-pkg-tests: - name: Ubuntu 20.04 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-pkgs-onedir - - build-ci-deps - uses: ./.github/workflows/test-packages-action-linux.yml - with: - distro-slug: ubuntu-20.04 - nox-session: 
ci-test-onedir - platform: linux - arch: x86_64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: deb - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - - ubuntu-2004-arm64-pkg-tests: - name: Ubuntu 20.04 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-pkgs-onedir - - build-ci-deps - uses: ./.github/workflows/test-packages-action-linux.yml - with: - distro-slug: ubuntu-20.04-arm64 - nox-session: ci-test-onedir - platform: linux - arch: arm64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: deb - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - - ubuntu-2204-pkg-tests: - name: Ubuntu 22.04 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-pkgs-onedir - - build-ci-deps - uses: ./.github/workflows/test-packages-action-linux.yml - with: - distro-slug: ubuntu-22.04 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: deb - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - ubuntu-2204-arm64-pkg-tests: name: Ubuntu 22.04 Arm64 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -954,27 +551,6 @@ jobs: skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - macos-12-pkg-tests: - name: macOS 12 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - needs: - - prepare-workflow - - build-pkgs-onedir - - build-ci-deps - uses: ./.github/workflows/test-packages-action-macos.yml - with: - distro-slug: macos-12 - nox-session: ci-test-onedir - platform: macos - arch: x86_64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: macos - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - macos-13-pkg-tests: name: macOS 13 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} @@ -996,111 +572,6 @@ jobs: skip-code-coverage: ${{ 
fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - macos-13-xlarge-pkg-tests: - name: macOS 13 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - needs: - - prepare-workflow - - build-pkgs-onedir - - build-ci-deps - uses: ./.github/workflows/test-packages-action-macos.yml - with: - distro-slug: macos-13-xlarge - nox-session: ci-test-onedir - platform: macos - arch: arm64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: macos - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - - windows-2016-nsis-pkg-tests: - name: Windows 2016 NSIS Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-pkgs-onedir - - build-ci-deps - uses: ./.github/workflows/test-packages-action-windows.yml - with: - distro-slug: windows-2016 - nox-session: ci-test-onedir - platform: windows - arch: amd64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: NSIS - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - - windows-2016-msi-pkg-tests: - name: Windows 2016 MSI Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-pkgs-onedir - - build-ci-deps - uses: ./.github/workflows/test-packages-action-windows.yml - with: - distro-slug: windows-2016 - nox-session: ci-test-onedir - platform: windows - arch: amd64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: MSI - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - - windows-2019-nsis-pkg-tests: - name: Windows 2019 NSIS Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-pkgs-onedir - - build-ci-deps - uses: ./.github/workflows/test-packages-action-windows.yml - with: - distro-slug: windows-2019 - nox-session: ci-test-onedir - platform: windows - arch: amd64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: NSIS - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - - windows-2019-msi-pkg-tests: - name: Windows 2019 MSI Package Test - if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-pkgs-onedir - - build-ci-deps - uses: ./.github/workflows/test-packages-action-windows.yml - with: - distro-slug: windows-2019 - nox-session: ci-test-onedir - platform: windows - arch: amd64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: MSI - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - windows-2022-nsis-pkg-tests: name: Windows 2022 NSIS Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -1143,48 +614,6 @@ jobs: skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - windows-2016: - name: Windows 2016 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-ci-deps - uses: ./.github/workflows/test-action-windows.yml - with: - distro-slug: windows-2016 - nox-session: ci-test-onedir - platform: windows - arch: amd64 - nox-version: 2022.8.7 - gh-actions-python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - workflow-slug: ci - default-timeout: 180 - - windows-2019: - name: Windows 2019 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-ci-deps - uses: ./.github/workflows/test-action-windows.yml - with: - distro-slug: windows-2019 - nox-session: ci-test-onedir - platform: windows - arch: amd64 - nox-version: 2022.8.7 - gh-actions-python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - workflow-slug: ci - default-timeout: 180 - windows-2022: name: Windows 2022 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -1206,27 +635,6 @@ jobs: workflow-slug: ci default-timeout: 180 - macos-12: - name: macOS 12 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - needs: - - prepare-workflow - - build-ci-deps - uses: ./.github/workflows/test-action-macos.yml - with: - distro-slug: macos-12 - nox-session: ci-test-onedir - platform: macos - arch: x86_64 - nox-version: 2022.8.7 - gh-actions-python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed 
}}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - workflow-slug: ci - default-timeout: 180 - macos-13: name: macOS 13 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} @@ -1248,69 +656,6 @@ jobs: workflow-slug: ci default-timeout: 180 - macos-13-xlarge: - name: macOS 13 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - needs: - - prepare-workflow - - build-ci-deps - uses: ./.github/workflows/test-action-macos.yml - with: - distro-slug: macos-13-xlarge - nox-session: ci-test-onedir - platform: macos - arch: arm64 - nox-version: 2022.8.7 - gh-actions-python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - workflow-slug: ci - default-timeout: 180 - - almalinux-8: - name: Alma Linux 8 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-ci-deps - uses: ./.github/workflows/test-action-linux.yml - with: - distro-slug: almalinux-8 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - nox-version: 2022.8.7 - gh-actions-python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - workflow-slug: ci - default-timeout: 180 - - almalinux-8-arm64: - name: Alma Linux 8 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-ci-deps - uses: ./.github/workflows/test-action-linux.yml - with: - distro-slug: almalinux-8-arm64 - nox-session: ci-test-onedir - platform: linux - arch: arm64 - nox-version: 2022.8.7 - gh-actions-python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - workflow-slug: ci - default-timeout: 180 - almalinux-9: name: Alma Linux 9 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -1332,90 +677,6 @@ jobs: workflow-slug: ci default-timeout: 180 - almalinux-9-arm64: - name: Alma Linux 9 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-ci-deps - uses: ./.github/workflows/test-action-linux.yml - with: - distro-slug: almalinux-9-arm64 - nox-session: ci-test-onedir - platform: linux - arch: arm64 - nox-version: 2022.8.7 - gh-actions-python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - 
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - workflow-slug: ci - default-timeout: 180 - - amazonlinux-2: - name: Amazon Linux 2 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-ci-deps - uses: ./.github/workflows/test-action-linux.yml - with: - distro-slug: amazonlinux-2 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - nox-version: 2022.8.7 - gh-actions-python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - workflow-slug: ci - default-timeout: 180 - - amazonlinux-2-arm64: - name: Amazon Linux 2 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-ci-deps - uses: ./.github/workflows/test-action-linux.yml - with: - distro-slug: amazonlinux-2-arm64 - nox-session: ci-test-onedir - platform: linux - arch: arm64 - nox-version: 2022.8.7 - gh-actions-python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - workflow-slug: ci - default-timeout: 180 - - amazonlinux-2023: - name: Amazon Linux 2023 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-ci-deps - uses: ./.github/workflows/test-action-linux.yml - with: - distro-slug: amazonlinux-2023 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - nox-version: 2022.8.7 - gh-actions-python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - workflow-slug: ci - default-timeout: 180 - amazonlinux-2023-arm64: name: Amazon Linux 2023 Arm64 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -1458,325 +719,6 @@ jobs: workflow-slug: ci default-timeout: 180 - centos-7: - name: CentOS 7 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-ci-deps - uses: ./.github/workflows/test-action-linux.yml - with: - distro-slug: centos-7 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - nox-version: 2022.8.7 - gh-actions-python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ 
fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - workflow-slug: ci - default-timeout: 180 - - debian-10: - name: Debian 10 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-ci-deps - uses: ./.github/workflows/test-action-linux.yml - with: - distro-slug: debian-10 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - nox-version: 2022.8.7 - gh-actions-python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - workflow-slug: ci - default-timeout: 180 - - debian-11: - name: Debian 11 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-ci-deps - uses: ./.github/workflows/test-action-linux.yml - with: - distro-slug: debian-11 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - nox-version: 2022.8.7 - gh-actions-python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - workflow-slug: ci - default-timeout: 180 - - debian-11-arm64: - name: Debian 11 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-ci-deps - uses: ./.github/workflows/test-action-linux.yml - with: - distro-slug: debian-11-arm64 - nox-session: ci-test-onedir - platform: linux - arch: arm64 - nox-version: 2022.8.7 - gh-actions-python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - workflow-slug: ci - default-timeout: 180 - - debian-12: - name: Debian 12 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-ci-deps - uses: ./.github/workflows/test-action-linux.yml - with: - distro-slug: debian-12 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - nox-version: 2022.8.7 - gh-actions-python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - workflow-slug: ci - default-timeout: 180 - - debian-12-arm64: - name: Debian 12 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-ci-deps - uses: ./.github/workflows/test-action-linux.yml - with: - distro-slug: debian-12-arm64 - nox-session: 
ci-test-onedir - platform: linux - arch: arm64 - nox-version: 2022.8.7 - gh-actions-python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - workflow-slug: ci - default-timeout: 180 - - fedora-39: - name: Fedora 39 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-ci-deps - uses: ./.github/workflows/test-action-linux.yml - with: - distro-slug: fedora-39 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - nox-version: 2022.8.7 - gh-actions-python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - workflow-slug: ci - default-timeout: 180 - - opensuse-15: - name: Opensuse 15 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-ci-deps - uses: ./.github/workflows/test-action-linux.yml - with: - distro-slug: opensuse-15 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - nox-version: 2022.8.7 - gh-actions-python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - workflow-slug: ci - default-timeout: 180 - - photonos-4: - name: Photon OS 4 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-ci-deps - uses: ./.github/workflows/test-action-linux.yml - with: - distro-slug: photonos-4 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - nox-version: 2022.8.7 - gh-actions-python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - workflow-slug: ci - default-timeout: 180 - fips: true - - photonos-4-arm64: - name: Photon OS 4 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-ci-deps - uses: ./.github/workflows/test-action-linux.yml - with: - distro-slug: photonos-4-arm64 - nox-session: ci-test-onedir - platform: linux - arch: arm64 - nox-version: 2022.8.7 - gh-actions-python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - workflow-slug: ci - 
default-timeout: 180 - fips: true - - photonos-5: - name: Photon OS 5 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-ci-deps - uses: ./.github/workflows/test-action-linux.yml - with: - distro-slug: photonos-5 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - nox-version: 2022.8.7 - gh-actions-python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - workflow-slug: ci - default-timeout: 180 - fips: true - - photonos-5-arm64: - name: Photon OS 5 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-ci-deps - uses: ./.github/workflows/test-action-linux.yml - with: - distro-slug: photonos-5-arm64 - nox-session: ci-test-onedir - platform: linux - arch: arm64 - nox-version: 2022.8.7 - gh-actions-python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - workflow-slug: ci - default-timeout: 180 - fips: true - - ubuntu-2004: - name: Ubuntu 20.04 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-ci-deps - uses: ./.github/workflows/test-action-linux.yml - with: - distro-slug: ubuntu-20.04 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - nox-version: 2022.8.7 - gh-actions-python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - workflow-slug: ci - default-timeout: 180 - - ubuntu-2004-arm64: - name: Ubuntu 20.04 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-ci-deps - uses: ./.github/workflows/test-action-linux.yml - with: - distro-slug: ubuntu-20.04-arm64 - nox-session: ci-test-onedir - platform: linux - arch: arm64 - nox-version: 2022.8.7 - gh-actions-python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - workflow-slug: ci - default-timeout: 180 - - ubuntu-2204: - name: Ubuntu 22.04 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-ci-deps - uses: ./.github/workflows/test-action-linux.yml - with: - distro-slug: ubuntu-22.04 - nox-session: ci-test-onedir - platform: linux - 
arch: x86_64 - nox-version: 2022.8.7 - gh-actions-python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - workflow-slug: ci - default-timeout: 180 - ubuntu-2204-arm64: name: Ubuntu 22.04 Arm64 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -1805,36 +747,11 @@ jobs: needs: - prepare-workflow - build-ci-deps - - windows-2016 - - windows-2019 - windows-2022 - - macos-12 - macos-13 - - macos-13-xlarge - - almalinux-8 - - almalinux-8-arm64 - almalinux-9 - - almalinux-9-arm64 - - amazonlinux-2 - - amazonlinux-2-arm64 - - amazonlinux-2023 - amazonlinux-2023-arm64 - archlinux-lts - - centos-7 - - debian-10 - - debian-11 - - debian-11-arm64 - - debian-12 - - debian-12-arm64 - - fedora-39 - - opensuse-15 - - photonos-4 - - photonos-4-arm64 - - photonos-5 - - photonos-5-arm64 - - ubuntu-2004 - - ubuntu-2004-arm64 - - ubuntu-2204 - ubuntu-2204-arm64 steps: - uses: actions/checkout@v4 @@ -1959,66 +876,16 @@ jobs: - build-pkgs-src - combine-all-code-coverage - build-ci-deps - - windows-2016 - - windows-2019 - windows-2022 - - macos-12 - macos-13 - - macos-13-xlarge - - almalinux-8 - - almalinux-8-arm64 - almalinux-9 - - almalinux-9-arm64 - - amazonlinux-2 - - amazonlinux-2-arm64 - - amazonlinux-2023 - amazonlinux-2023-arm64 - archlinux-lts - - centos-7 - - debian-10 - - debian-11 - - debian-11-arm64 - - debian-12 - - debian-12-arm64 - - fedora-39 - - opensuse-15 - - photonos-4 - - photonos-4-arm64 - - photonos-5 - - photonos-5-arm64 - - ubuntu-2004 - - ubuntu-2004-arm64 - - ubuntu-2204 - ubuntu-2204-arm64 - - almalinux-8-pkg-tests - - almalinux-8-arm64-pkg-tests - almalinux-9-pkg-tests - - almalinux-9-arm64-pkg-tests - - amazonlinux-2-pkg-tests - - amazonlinux-2-arm64-pkg-tests - - amazonlinux-2023-pkg-tests - amazonlinux-2023-arm64-pkg-tests - - centos-7-pkg-tests - - debian-10-pkg-tests - - debian-11-pkg-tests - - debian-11-arm64-pkg-tests - - debian-12-pkg-tests - - debian-12-arm64-pkg-tests - - photonos-4-pkg-tests - - photonos-4-arm64-pkg-tests - - photonos-5-pkg-tests - - photonos-5-arm64-pkg-tests - - ubuntu-2004-pkg-tests - - ubuntu-2004-arm64-pkg-tests - - ubuntu-2204-pkg-tests - ubuntu-2204-arm64-pkg-tests - - macos-12-pkg-tests - macos-13-pkg-tests - - macos-13-xlarge-pkg-tests - - windows-2016-nsis-pkg-tests - - windows-2016-msi-pkg-tests - - windows-2019-nsis-pkg-tests - - windows-2019-msi-pkg-tests - windows-2022-nsis-pkg-tests - windows-2022-msi-pkg-tests steps: diff --git a/tools/precommit/workflows.py b/tools/precommit/workflows.py index 27219a90749..40490229f4b 100644 --- a/tools/precommit/workflows.py +++ b/tools/precommit/workflows.py @@ -305,12 +305,11 @@ def generate_workflows(ctx: Context): f"Generating '{workflow_path.relative_to(tools.utils.REPO_ROOT)}' from " f"template '{template_path.relative_to(tools.utils.REPO_ROOT)}' ..." 
) + workflow_slug = details.get("slug") or workflow_name.lower().replace(" ", "-") context = { "template": template_path.relative_to(tools.utils.REPO_ROOT), "workflow_name": workflow_name, - "workflow_slug": ( - details.get("slug") or workflow_name.lower().replace(" ", "-") - ), + "workflow_slug": workflow_slug, "includes": includes, "conclusion_needs": NeedsTracker(), "test_salt_needs": NeedsTracker(), @@ -318,10 +317,16 @@ def generate_workflows(ctx: Context): "test_repo_needs": NeedsTracker(), "prepare_workflow_needs": NeedsTracker(), "build_repo_needs": NeedsTracker(), - "test_salt_listing": test_salt_listing, - "test_salt_pkg_listing": test_salt_pkg_listing, + "test_salt_listing": _filter_jobs_by_workflow( + workflow_slug, test_salt_listing + ), + "test_salt_pkg_listing": _filter_jobs_by_workflow( + workflow_slug, test_salt_pkg_listing + ), "build_ci_deps_listing": build_ci_deps_listing, - "test_salt_pkg_downloads_listing": test_salt_pkg_downloads_listing, + "test_salt_pkg_downloads_listing": _filter_jobs_by_workflow( + workflow_slug, test_salt_pkg_downloads_listing + ), "test_salt_pkg_downloads_needs_slugs": sorted( test_salt_pkg_downloads_needs_slugs ), @@ -339,6 +344,28 @@ def generate_workflows(ctx: Context): workflow_path.write_text(rendered_template.rstrip() + "\n") +def _filter_jobs_by_workflow( + workflow_slug: str, jobs: dict +) -> dict[str, list[tuple[str, str, str]]]: + short_worflow_os = ( + "almalinux-9", + "amazonlinux-2023-arm64", + "ubuntu-22.04-arm64", + "windows-2022", + "macos-13", + "archlinux-lts", + ) + if workflow_slug != "ci": + return jobs + + selected: dict[str, list[tuple[str, str, str]]] = {} + for platform in jobs: + for entry in jobs[platform]: + if entry[0] in short_worflow_os: + selected.setdefault(platform, []).append(entry) + return selected + + @cgroup.command( name="actionlint", arguments={ From fb58df01f4b1fd5d224c6e5ef7aa127d06bcd12c Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Tue, 26 Mar 2024 08:55:37 +0000 Subject: [PATCH 080/102] Allow choosing which OS to run tests against using labels --- .github/workflows/ci.yml | 1136 ++++++++++++++++- .github/workflows/nightly.yml | 3 +- .github/workflows/scheduled.yml | 3 +- .github/workflows/staging.yml | 3 +- .github/workflows/templates/layout.yml.jinja | 3 +- .../templates/test-salt-pkg.yml.jinja | 12 + .../workflows/templates/test-salt.yml.jinja | 12 + tools/ci.py | 10 +- tools/precommit/workflows.py | 42 +- 9 files changed, 1186 insertions(+), 38 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6804a525f66..35ba2f32933 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -43,7 +43,8 @@ jobs: jobs: ${{ steps.define-jobs.outputs.jobs }} runners: ${{ steps.runner-types.outputs.runners }} changed-files: ${{ steps.process-changed-files.outputs.changed-files }} - pull-labels: ${{ steps.get-pull-labels.outputs.labels }} + os-labels: ${{ steps.get-pull-labels.outputs.os-labels }} + pull-labels: ${{ steps.get-pull-labels.outputs.test-labels }} testrun: ${{ steps.define-testrun.outputs.testrun }} salt-version: ${{ steps.setup-salt-version.outputs.salt-version }} cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} @@ -488,6 +489,48 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}" + almalinux-8-pkg-tests: + name: Alma Linux 8 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && 
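Note on the _filter_jobs_by_workflow helper added above: it only trims the test matrices for the "ci" workflow; any other workflow slug (nightly, scheduled, staging) gets the full listings back unchanged, which matches the label-gated jobs re-added to ci.yml below. A minimal sketch of that behaviour, assuming the listing dicts map a platform name to (distro-slug, display-name, arch) tuples -- the helper only inspects the first element, and the real entry layout is not shown in this hunk:

    # Illustrative only: entry layout assumed as (distro-slug, display-name, arch).
    test_salt_listing = {
        "linux": [
            ("almalinux-9", "Alma Linux 9", "x86_64"),
            ("debian-12", "Debian 12", "x86_64"),
        ],
        "windows": [
            ("windows-2022", "Windows 2022", "amd64"),
        ],
    }

    # For the "ci" workflow only the short OS list is kept per platform ...
    assert _filter_jobs_by_workflow("ci", test_salt_listing) == {
        "linux": [("almalinux-9", "Alma Linux 9", "x86_64")],
        "windows": [("windows-2022", "Windows 2022", "amd64")],
    }

    # ... while every other workflow slug keeps the full matrix.
    assert _filter_jobs_by_workflow("nightly", test_salt_listing) == test_salt_listing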
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["almalinux-8", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-pkgs-onedir + - build-ci-deps + uses: ./.github/workflows/test-packages-action-linux.yml + with: + distro-slug: almalinux-8 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + almalinux-8-arm64-pkg-tests: + name: Alma Linux 8 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["almalinux-8-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-pkgs-onedir + - build-ci-deps + uses: ./.github/workflows/test-packages-action-linux.yml + with: + distro-slug: almalinux-8-arm64 + nox-session: ci-test-onedir + platform: linux + arch: arm64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + almalinux-9-pkg-tests: name: Alma Linux 9 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -509,6 +552,90 @@ jobs: skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + almalinux-9-arm64-pkg-tests: + name: Alma Linux 9 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["almalinux-9-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-pkgs-onedir + - build-ci-deps + uses: ./.github/workflows/test-packages-action-linux.yml + with: + distro-slug: almalinux-9-arm64 + nox-session: ci-test-onedir + platform: linux + arch: arm64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2-pkg-tests: + name: Amazon Linux 2 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["amazonlinux-2", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-pkgs-onedir + - build-ci-deps + uses: ./.github/workflows/test-packages-action-linux.yml + with: + distro-slug: amazonlinux-2 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2-arm64-pkg-tests: + name: Amazon Linux 2 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["amazonlinux-2-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-pkgs-onedir + - build-ci-deps + uses: ./.github/workflows/test-packages-action-linux.yml + with: + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir + platform: linux + arch: arm64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2023-pkg-tests: + name: Amazon Linux 2023 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["amazonlinux-2023", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-pkgs-onedir + - build-ci-deps + uses: ./.github/workflows/test-packages-action-linux.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + amazonlinux-2023-arm64-pkg-tests: name: Amazon Linux 2023 Arm64 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -530,6 +657,283 @@ jobs: skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + centos-7-pkg-tests: + name: CentOS 7 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["centos-7", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-pkgs-onedir + - build-ci-deps + uses: ./.github/workflows/test-packages-action-linux.yml + with: + distro-slug: centos-7 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + debian-10-pkg-tests: + name: Debian 10 
Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["debian-10", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-pkgs-onedir + - build-ci-deps + uses: ./.github/workflows/test-packages-action-linux.yml + with: + distro-slug: debian-10 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + debian-11-pkg-tests: + name: Debian 11 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["debian-11", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-pkgs-onedir + - build-ci-deps + uses: ./.github/workflows/test-packages-action-linux.yml + with: + distro-slug: debian-11 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + debian-11-arm64-pkg-tests: + name: Debian 11 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["debian-11-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-pkgs-onedir + - build-ci-deps + uses: ./.github/workflows/test-packages-action-linux.yml + with: + distro-slug: debian-11-arm64 + nox-session: ci-test-onedir + platform: linux + arch: arm64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + debian-12-pkg-tests: + name: Debian 12 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["debian-12", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-pkgs-onedir + - build-ci-deps + uses: ./.github/workflows/test-packages-action-linux.yml + with: + distro-slug: debian-12 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + 
debian-12-arm64-pkg-tests: + name: Debian 12 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["debian-12-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-pkgs-onedir + - build-ci-deps + uses: ./.github/workflows/test-packages-action-linux.yml + with: + distro-slug: debian-12-arm64 + nox-session: ci-test-onedir + platform: linux + arch: arm64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + photonos-4-pkg-tests: + name: Photon OS 4 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["photonos-4", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-pkgs-onedir + - build-ci-deps + uses: ./.github/workflows/test-packages-action-linux.yml + with: + distro-slug: photonos-4 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true + + photonos-4-arm64-pkg-tests: + name: Photon OS 4 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["photonos-4-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-pkgs-onedir + - build-ci-deps + uses: ./.github/workflows/test-packages-action-linux.yml + with: + distro-slug: photonos-4-arm64 + nox-session: ci-test-onedir + platform: linux + arch: arm64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true + + photonos-5-pkg-tests: + name: Photon OS 5 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["photonos-5", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-pkgs-onedir + - build-ci-deps + uses: ./.github/workflows/test-packages-action-linux.yml + with: + distro-slug: photonos-5 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ 
fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true + + photonos-5-arm64-pkg-tests: + name: Photon OS 5 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["photonos-5-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-pkgs-onedir + - build-ci-deps + uses: ./.github/workflows/test-packages-action-linux.yml + with: + distro-slug: photonos-5-arm64 + nox-session: ci-test-onedir + platform: linux + arch: arm64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true + + ubuntu-2004-pkg-tests: + name: Ubuntu 20.04 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["ubuntu-20.04", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-pkgs-onedir + - build-ci-deps + uses: ./.github/workflows/test-packages-action-linux.yml + with: + distro-slug: ubuntu-20.04 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + ubuntu-2004-arm64-pkg-tests: + name: Ubuntu 20.04 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["ubuntu-20.04-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-pkgs-onedir + - build-ci-deps + uses: ./.github/workflows/test-packages-action-linux.yml + with: + distro-slug: ubuntu-20.04-arm64 + nox-session: ci-test-onedir + platform: linux + arch: arm64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + ubuntu-2204-pkg-tests: + name: Ubuntu 22.04 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["ubuntu-22.04", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-pkgs-onedir + - build-ci-deps + uses: ./.github/workflows/test-packages-action-linux.yml + with: + distro-slug: ubuntu-22.04 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + 
python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + ubuntu-2204-arm64-pkg-tests: name: Ubuntu 22.04 Arm64 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -551,6 +955,27 @@ jobs: skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + macos-12-pkg-tests: + name: macOS 12 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["macos-12", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-pkgs-onedir + - build-ci-deps + uses: ./.github/workflows/test-packages-action-macos.yml + with: + distro-slug: macos-12 + nox-session: ci-test-onedir + platform: macos + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: macos + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + macos-13-pkg-tests: name: macOS 13 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} @@ -572,6 +997,111 @@ jobs: skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + macos-13-xlarge-pkg-tests: + name: macOS 13 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["macos-13-xlarge", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-pkgs-onedir + - build-ci-deps + uses: ./.github/workflows/test-packages-action-macos.yml + with: + distro-slug: macos-13-xlarge + nox-session: ci-test-onedir + platform: macos + arch: arm64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: macos + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + windows-2016-nsis-pkg-tests: + name: Windows 2016 NSIS Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["windows-2016", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-pkgs-onedir + - build-ci-deps + uses: ./.github/workflows/test-packages-action-windows.yml + with: + distro-slug: windows-2016 + nox-session: ci-test-onedir + platform: windows + arch: amd64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: NSIS + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + windows-2016-msi-pkg-tests: + name: Windows 2016 MSI Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["windows-2016", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-pkgs-onedir + - build-ci-deps + uses: ./.github/workflows/test-packages-action-windows.yml + with: + distro-slug: windows-2016 + nox-session: ci-test-onedir + platform: windows + arch: amd64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: MSI + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + windows-2019-nsis-pkg-tests: + name: Windows 2019 NSIS Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["windows-2019", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-pkgs-onedir + - build-ci-deps + uses: ./.github/workflows/test-packages-action-windows.yml + with: + distro-slug: windows-2019 + nox-session: ci-test-onedir + platform: windows + arch: amd64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: NSIS + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + windows-2019-msi-pkg-tests: + name: Windows 2019 MSI Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["windows-2019", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-pkgs-onedir + - build-ci-deps + uses: ./.github/workflows/test-packages-action-windows.yml + with: + distro-slug: windows-2019 + nox-session: ci-test-onedir + platform: windows + arch: amd64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: MSI + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + windows-2022-nsis-pkg-tests: name: Windows 2022 NSIS Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -614,6 +1144,48 @@ jobs: skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + windows-2016: + name: Windows 2016 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && 
contains(fromJSON('["windows-2016", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-ci-deps + uses: ./.github/workflows/test-action-windows.yml + with: + distro-slug: windows-2016 + nox-session: ci-test-onedir + platform: windows + arch: amd64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + workflow-slug: ci + default-timeout: 180 + + windows-2019: + name: Windows 2019 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["windows-2019", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-ci-deps + uses: ./.github/workflows/test-action-windows.yml + with: + distro-slug: windows-2019 + nox-session: ci-test-onedir + platform: windows + arch: amd64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + workflow-slug: ci + default-timeout: 180 + windows-2022: name: Windows 2022 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -635,6 +1207,27 @@ jobs: workflow-slug: ci default-timeout: 180 + macos-12: + name: macOS 12 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["macos-12", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-ci-deps + uses: ./.github/workflows/test-action-macos.yml + with: + distro-slug: macos-12 + nox-session: ci-test-onedir + platform: macos + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + workflow-slug: ci + default-timeout: 180 + macos-13: name: macOS 13 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} @@ -656,6 +1249,69 @@ jobs: workflow-slug: ci default-timeout: 180 + macos-13-xlarge: + name: macOS 13 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["macos-13-xlarge", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-ci-deps + uses: ./.github/workflows/test-action-macos.yml + with: + distro-slug: macos-13-xlarge + nox-session: ci-test-onedir + platform: macos + arch: arm64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + 
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + workflow-slug: ci + default-timeout: 180 + + almalinux-8: + name: Alma Linux 8 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["almalinux-8", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-ci-deps + uses: ./.github/workflows/test-action-linux.yml + with: + distro-slug: almalinux-8 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + workflow-slug: ci + default-timeout: 180 + + almalinux-8-arm64: + name: Alma Linux 8 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["almalinux-8-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-ci-deps + uses: ./.github/workflows/test-action-linux.yml + with: + distro-slug: almalinux-8-arm64 + nox-session: ci-test-onedir + platform: linux + arch: arm64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + workflow-slug: ci + default-timeout: 180 + almalinux-9: name: Alma Linux 9 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -677,6 +1333,90 @@ jobs: workflow-slug: ci default-timeout: 180 + almalinux-9-arm64: + name: Alma Linux 9 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["almalinux-9-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-ci-deps + uses: ./.github/workflows/test-action-linux.yml + with: + distro-slug: almalinux-9-arm64 + nox-session: ci-test-onedir + platform: linux + arch: arm64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + workflow-slug: ci + default-timeout: 180 + + amazonlinux-2: + name: Amazon Linux 2 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["amazonlinux-2", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-ci-deps + uses: ./.github/workflows/test-action-linux.yml + with: + distro-slug: amazonlinux-2 + nox-session: ci-test-onedir + platform: linux + arch: 
x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + workflow-slug: ci + default-timeout: 180 + + amazonlinux-2-arm64: + name: Amazon Linux 2 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["amazonlinux-2-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-ci-deps + uses: ./.github/workflows/test-action-linux.yml + with: + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir + platform: linux + arch: arm64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + workflow-slug: ci + default-timeout: 180 + + amazonlinux-2023: + name: Amazon Linux 2023 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["amazonlinux-2023", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-ci-deps + uses: ./.github/workflows/test-action-linux.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + workflow-slug: ci + default-timeout: 180 + amazonlinux-2023-arm64: name: Amazon Linux 2023 Arm64 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -719,6 +1459,325 @@ jobs: workflow-slug: ci default-timeout: 180 + centos-7: + name: CentOS 7 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["centos-7", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-ci-deps + uses: ./.github/workflows/test-action-linux.yml + with: + distro-slug: centos-7 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + workflow-slug: ci + default-timeout: 180 + + debian-10: + name: Debian 10 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["debian-10", "all"]'), 
needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-ci-deps + uses: ./.github/workflows/test-action-linux.yml + with: + distro-slug: debian-10 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + workflow-slug: ci + default-timeout: 180 + + debian-11: + name: Debian 11 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["debian-11", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-ci-deps + uses: ./.github/workflows/test-action-linux.yml + with: + distro-slug: debian-11 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + workflow-slug: ci + default-timeout: 180 + + debian-11-arm64: + name: Debian 11 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["debian-11-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-ci-deps + uses: ./.github/workflows/test-action-linux.yml + with: + distro-slug: debian-11-arm64 + nox-session: ci-test-onedir + platform: linux + arch: arm64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + workflow-slug: ci + default-timeout: 180 + + debian-12: + name: Debian 12 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["debian-12", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-ci-deps + uses: ./.github/workflows/test-action-linux.yml + with: + distro-slug: debian-12 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + workflow-slug: ci + default-timeout: 180 + + debian-12-arm64: + name: Debian 12 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["debian-12-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - 
build-ci-deps + uses: ./.github/workflows/test-action-linux.yml + with: + distro-slug: debian-12-arm64 + nox-session: ci-test-onedir + platform: linux + arch: arm64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + workflow-slug: ci + default-timeout: 180 + + fedora-39: + name: Fedora 39 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["fedora-39", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-ci-deps + uses: ./.github/workflows/test-action-linux.yml + with: + distro-slug: fedora-39 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + workflow-slug: ci + default-timeout: 180 + + opensuse-15: + name: Opensuse 15 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["opensuse-15", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-ci-deps + uses: ./.github/workflows/test-action-linux.yml + with: + distro-slug: opensuse-15 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + workflow-slug: ci + default-timeout: 180 + + photonos-4: + name: Photon OS 4 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["photonos-4", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-ci-deps + uses: ./.github/workflows/test-action-linux.yml + with: + distro-slug: photonos-4 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + workflow-slug: ci + default-timeout: 180 + fips: true + + photonos-4-arm64: + name: Photon OS 4 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["photonos-4-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-ci-deps + uses: ./.github/workflows/test-action-linux.yml + with: + 
distro-slug: photonos-4-arm64 + nox-session: ci-test-onedir + platform: linux + arch: arm64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + workflow-slug: ci + default-timeout: 180 + fips: true + + photonos-5: + name: Photon OS 5 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["photonos-5", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-ci-deps + uses: ./.github/workflows/test-action-linux.yml + with: + distro-slug: photonos-5 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + workflow-slug: ci + default-timeout: 180 + fips: true + + photonos-5-arm64: + name: Photon OS 5 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["photonos-5-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-ci-deps + uses: ./.github/workflows/test-action-linux.yml + with: + distro-slug: photonos-5-arm64 + nox-session: ci-test-onedir + platform: linux + arch: arm64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + workflow-slug: ci + default-timeout: 180 + fips: true + + ubuntu-2004: + name: Ubuntu 20.04 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["ubuntu-20.04", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-ci-deps + uses: ./.github/workflows/test-action-linux.yml + with: + distro-slug: ubuntu-20.04 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + workflow-slug: ci + default-timeout: 180 + + ubuntu-2004-arm64: + name: Ubuntu 20.04 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["ubuntu-20.04-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-ci-deps + uses: ./.github/workflows/test-action-linux.yml + with: + distro-slug: 
ubuntu-20.04-arm64 + nox-session: ci-test-onedir + platform: linux + arch: arm64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + workflow-slug: ci + default-timeout: 180 + + ubuntu-2204: + name: Ubuntu 22.04 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["ubuntu-22.04", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + needs: + - prepare-workflow + - build-ci-deps + uses: ./.github/workflows/test-action-linux.yml + with: + distro-slug: ubuntu-22.04 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + workflow-slug: ci + default-timeout: 180 + ubuntu-2204-arm64: name: Ubuntu 22.04 Arm64 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -747,11 +1806,36 @@ jobs: needs: - prepare-workflow - build-ci-deps + - windows-2016 + - windows-2019 - windows-2022 + - macos-12 - macos-13 + - macos-13-xlarge + - almalinux-8 + - almalinux-8-arm64 - almalinux-9 + - almalinux-9-arm64 + - amazonlinux-2 + - amazonlinux-2-arm64 + - amazonlinux-2023 - amazonlinux-2023-arm64 - archlinux-lts + - centos-7 + - debian-10 + - debian-11 + - debian-11-arm64 + - debian-12 + - debian-12-arm64 + - fedora-39 + - opensuse-15 + - photonos-4 + - photonos-4-arm64 + - photonos-5 + - photonos-5-arm64 + - ubuntu-2004 + - ubuntu-2004-arm64 + - ubuntu-2204 - ubuntu-2204-arm64 steps: - uses: actions/checkout@v4 @@ -876,16 +1960,66 @@ jobs: - build-pkgs-src - combine-all-code-coverage - build-ci-deps + - windows-2016 + - windows-2019 - windows-2022 + - macos-12 - macos-13 + - macos-13-xlarge + - almalinux-8 + - almalinux-8-arm64 - almalinux-9 + - almalinux-9-arm64 + - amazonlinux-2 + - amazonlinux-2-arm64 + - amazonlinux-2023 - amazonlinux-2023-arm64 - archlinux-lts + - centos-7 + - debian-10 + - debian-11 + - debian-11-arm64 + - debian-12 + - debian-12-arm64 + - fedora-39 + - opensuse-15 + - photonos-4 + - photonos-4-arm64 + - photonos-5 + - photonos-5-arm64 + - ubuntu-2004 + - ubuntu-2004-arm64 + - ubuntu-2204 - ubuntu-2204-arm64 + - almalinux-8-pkg-tests + - almalinux-8-arm64-pkg-tests - almalinux-9-pkg-tests + - almalinux-9-arm64-pkg-tests + - amazonlinux-2-pkg-tests + - amazonlinux-2-arm64-pkg-tests + - amazonlinux-2023-pkg-tests - amazonlinux-2023-arm64-pkg-tests + - centos-7-pkg-tests + - debian-10-pkg-tests + - debian-11-pkg-tests + - debian-11-arm64-pkg-tests + - debian-12-pkg-tests + - debian-12-arm64-pkg-tests + - photonos-4-pkg-tests + - photonos-4-arm64-pkg-tests + - photonos-5-pkg-tests + - photonos-5-arm64-pkg-tests + - ubuntu-2004-pkg-tests + - ubuntu-2004-arm64-pkg-tests + - ubuntu-2204-pkg-tests - ubuntu-2204-arm64-pkg-tests + - macos-12-pkg-tests - macos-13-pkg-tests + - macos-13-xlarge-pkg-tests + - windows-2016-nsis-pkg-tests + - 
windows-2016-msi-pkg-tests + - windows-2019-nsis-pkg-tests + - windows-2019-msi-pkg-tests - windows-2022-nsis-pkg-tests - windows-2022-msi-pkg-tests steps: diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 5d540e60e67..95c0332458e 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -87,7 +87,8 @@ jobs: jobs: ${{ steps.define-jobs.outputs.jobs }} runners: ${{ steps.runner-types.outputs.runners }} changed-files: ${{ steps.process-changed-files.outputs.changed-files }} - pull-labels: ${{ steps.get-pull-labels.outputs.labels }} + os-labels: ${{ steps.get-pull-labels.outputs.os-labels }} + pull-labels: ${{ steps.get-pull-labels.outputs.test-labels }} testrun: ${{ steps.define-testrun.outputs.testrun }} salt-version: ${{ steps.setup-salt-version.outputs.salt-version }} cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index 3369ff77ae1..c1108976d7b 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -77,7 +77,8 @@ jobs: jobs: ${{ steps.define-jobs.outputs.jobs }} runners: ${{ steps.runner-types.outputs.runners }} changed-files: ${{ steps.process-changed-files.outputs.changed-files }} - pull-labels: ${{ steps.get-pull-labels.outputs.labels }} + os-labels: ${{ steps.get-pull-labels.outputs.os-labels }} + pull-labels: ${{ steps.get-pull-labels.outputs.test-labels }} testrun: ${{ steps.define-testrun.outputs.testrun }} salt-version: ${{ steps.setup-salt-version.outputs.salt-version }} cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index a48504995b7..0961f55aa8c 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -73,7 +73,8 @@ jobs: jobs: ${{ steps.define-jobs.outputs.jobs }} runners: ${{ steps.runner-types.outputs.runners }} changed-files: ${{ steps.process-changed-files.outputs.changed-files }} - pull-labels: ${{ steps.get-pull-labels.outputs.labels }} + os-labels: ${{ steps.get-pull-labels.outputs.os-labels }} + pull-labels: ${{ steps.get-pull-labels.outputs.test-labels }} testrun: ${{ steps.define-testrun.outputs.testrun }} salt-version: ${{ steps.setup-salt-version.outputs.salt-version }} cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} diff --git a/.github/workflows/templates/layout.yml.jinja b/.github/workflows/templates/layout.yml.jinja index 9d65b40d60c..211828c4386 100644 --- a/.github/workflows/templates/layout.yml.jinja +++ b/.github/workflows/templates/layout.yml.jinja @@ -91,7 +91,8 @@ jobs: jobs: ${{ steps.define-jobs.outputs.jobs }} runners: ${{ steps.runner-types.outputs.runners }} changed-files: ${{ steps.process-changed-files.outputs.changed-files }} - pull-labels: ${{ steps.get-pull-labels.outputs.labels }} + os-labels: ${{ steps.get-pull-labels.outputs.os-labels }} + pull-labels: ${{ steps.get-pull-labels.outputs.test-labels }} testrun: ${{ steps.define-testrun.outputs.testrun }} salt-version: ${{ steps.setup-salt-version.outputs.salt-version }} cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} diff --git a/.github/workflows/templates/test-salt-pkg.yml.jinja b/.github/workflows/templates/test-salt-pkg.yml.jinja index be07a1e6bca..7c37e539d75 100644 --- a/.github/workflows/templates/test-salt-pkg.yml.jinja +++ b/.github/workflows/templates/test-salt-pkg.yml.jinja @@ -4,7 +4,11 @@ <{ job_name }>: <%- do test_salt_pkg_needs.append(job_name) %> name: <{ display_name }> 
Package Test + <%- if workflow_slug != "ci" or slug in mandatory_os %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + <%- else %> + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["<{ slug }>", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + <%- endif %> needs: - prepare-workflow - build-pkgs-onedir @@ -36,7 +40,11 @@ <{ job_name }>: <%- do test_salt_pkg_needs.append(job_name) %> name: <{ display_name }> Package Test + <%- if workflow_slug != "ci" or slug in mandatory_os %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + <%- else %> + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["<{ slug }>", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + <%- endif %> needs: - prepare-workflow - build-pkgs-onedir @@ -65,7 +73,11 @@ <{ job_name }>: <%- do test_salt_pkg_needs.append(job_name) %> name: <{ display_name }> <{ pkg_type }> Package Test + <%- if workflow_slug != "ci" or slug in mandatory_os %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + <%- else %> + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["<{ slug }>", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + <%- endif %> needs: - prepare-workflow - build-pkgs-onedir diff --git a/.github/workflows/templates/test-salt.yml.jinja b/.github/workflows/templates/test-salt.yml.jinja index e35e363156a..349438c362a 100644 --- a/.github/workflows/templates/test-salt.yml.jinja +++ b/.github/workflows/templates/test-salt.yml.jinja @@ -9,7 +9,11 @@ <{ slug.replace(".", "") }>: <%- do test_salt_needs.append(slug.replace(".", "")) %> name: <{ display_name }> Test + <%- if workflow_slug != "ci" or slug in mandatory_os %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + <%- else %> + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["<{ slug }>", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + <%- endif %> needs: - prepare-workflow - build-ci-deps @@ -36,7 +40,11 @@ <{ slug.replace(".", "") }>: <%- do test_salt_needs.append(slug.replace(".", "")) %> name: <{ display_name }> Test + <%- if workflow_slug != "ci" or slug in mandatory_os %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + <%- else %> + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["<{ slug }>", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + <%- endif %> needs: - prepare-workflow - build-ci-deps @@ -62,7 +70,11 @@ <{ slug.replace(".", "") }>: <%- do test_salt_needs.append(slug.replace(".", "")) %> name: <{ display_name }> Test + <%- if workflow_slug != "ci" or slug in mandatory_os %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + <%- else %> + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["<{ slug }>", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + <%- endif %> needs: - prepare-workflow - build-ci-deps diff --git a/tools/ci.py b/tools/ci.py index 192cfee48f1..491df8019ed 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -949,10 +949,15 @@ def get_pr_test_labels( pr = gh_event["pull_request"]["number"] labels = _get_pr_test_labels_from_event_payload(gh_event) + os_labels = [] + test_labels = [] if labels: ctx.info(f"Test labels for pull-request #{pr} on {repository}:") - for name, description in labels: + for name, description in sorted(labels): ctx.info(f" * [yellow]{name}[/yellow]: {description}") + test_labels.append(name) + if name.startswith("test:os:"): + os_labels.append(name.split("test:os:", 1)[-1]) else: ctx.info(f"No test labels for pull-request #{pr} on {repository}") @@ -965,7 +970,8 @@ def get_pr_test_labels( ctx.info("Writing 'labels' to the github outputs file") with open(github_output, "a", encoding="utf-8") as wfh: - wfh.write(f"labels={json.dumps([label[0] for label in labels])}\n") + wfh.write(f"os-labels={json.dumps([label for label in os_labels])}\n") + wfh.write(f"test-labels={json.dumps([label for label in test_labels])}\n") ctx.exit(0) diff --git a/tools/precommit/workflows.py b/tools/precommit/workflows.py index 40490229f4b..e2748a06512 100644 --- a/tools/precommit/workflows.py +++ b/tools/precommit/workflows.py @@ -317,21 +317,23 @@ def generate_workflows(ctx: Context): "test_repo_needs": NeedsTracker(), "prepare_workflow_needs": NeedsTracker(), "build_repo_needs": NeedsTracker(), - "test_salt_listing": _filter_jobs_by_workflow( - workflow_slug, test_salt_listing - ), - "test_salt_pkg_listing": _filter_jobs_by_workflow( - workflow_slug, test_salt_pkg_listing - ), + "test_salt_listing": test_salt_listing, + "test_salt_pkg_listing": test_salt_pkg_listing, "build_ci_deps_listing": build_ci_deps_listing, - "test_salt_pkg_downloads_listing": _filter_jobs_by_workflow( - workflow_slug, test_salt_pkg_downloads_listing - ), + "test_salt_pkg_downloads_listing": test_salt_pkg_downloads_listing, "test_salt_pkg_downloads_needs_slugs": sorted( test_salt_pkg_downloads_needs_slugs ), "build_rpms_listing": build_rpms_listing, "build_debs_listing": build_debs_listing, + "mandatory_os": ( + "almalinux-9", + "amazonlinux-2023-arm64", + "archlinux-lts", + "macos-13", + "ubuntu-22.04-arm64", + "windows-2022", + ), } shared_context_file = ( tools.utils.REPO_ROOT / "cicd" / "shared-gh-workflows-context.yml" @@ -344,28 +346,6 @@ def generate_workflows(ctx: Context): workflow_path.write_text(rendered_template.rstrip() + "\n") -def _filter_jobs_by_workflow( - workflow_slug: str, jobs: dict -) -> dict[str, list[tuple[str, str, str]]]: - short_worflow_os = ( - "almalinux-9", - "amazonlinux-2023-arm64", - "ubuntu-22.04-arm64", - "windows-2022", - "macos-13", - "archlinux-lts", - ) - if workflow_slug != "ci": - return jobs - - selected: dict[str, list[tuple[str, str, str]]] = {} - for platform in jobs: - for entry in jobs[platform]: - if entry[0] in short_worflow_os: - selected.setdefault(platform, []).append(entry) - return selected - - @cgroup.command( name="actionlint", arguments={ From 6592ccba523f71b3d5618bb604520fb46872db33 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 27 Mar 2024 14:21:56 +0000 Subject: [PATCH 081/102] 
Create `get_cicd_shared_context()` and `get_golden_images()` --- tools/pkg/build.py | 20 ++++++-------------- tools/precommit/workflows.py | 17 ++++------------- tools/testsuite/__init__.py | 8 +------- tools/testsuite/download.py | 8 +------- tools/utils/__init__.py | 29 ++++++++++++++++++++++------- tools/vm.py | 9 +++------ 6 files changed, 37 insertions(+), 54 deletions(-) diff --git a/tools/pkg/build.py b/tools/pkg/build.py index b6b18ba09a6..75173ac4898 100644 --- a/tools/pkg/build.py +++ b/tools/pkg/build.py @@ -14,7 +14,6 @@ import tarfile import zipfile from typing import TYPE_CHECKING -import yaml from ptscripts import Context, command_group import tools.utils @@ -30,13 +29,6 @@ build = command_group( ) -def _get_shared_constants(): - shared_constants = ( - tools.utils.REPO_ROOT / "cicd" / "shared-gh-workflows-context.yml" - ) - return yaml.safe_load(shared_constants.read_text()) - - @build.command( name="deb", arguments={ @@ -80,7 +72,7 @@ def debian( ) ctx.exit(1) ctx.info("Building the package from the source files") - shared_constants = _get_shared_constants() + shared_constants = tools.utils.get_cicd_shared_context() if not python_version: python_version = shared_constants["python_version"] if not relenv_version: @@ -152,7 +144,7 @@ def rpm( ) ctx.exit(1) ctx.info("Building the package from the source files") - shared_constants = _get_shared_constants() + shared_constants = tools.utils.get_cicd_shared_context() if not python_version: python_version = shared_constants["python_version"] if not relenv_version: @@ -237,7 +229,7 @@ def macos( if not onedir: # Prep the salt onedir if not building from an existing one - shared_constants = _get_shared_constants() + shared_constants = tools.utils.get_cicd_shared_context() if not python_version: python_version = shared_constants["python_version"] if not relenv_version: @@ -326,7 +318,7 @@ def windows( assert salt_version is not None assert arch is not None - shared_constants = _get_shared_constants() + shared_constants = tools.utils.get_cicd_shared_context() if not python_version: python_version = shared_constants["python_version"] if not relenv_version: @@ -493,7 +485,7 @@ def onedir_dependencies( if platform != "macos" and arch == "arm64": arch = "aarch64" - shared_constants = _get_shared_constants() + shared_constants = tools.utils.get_cicd_shared_context() if not python_version: python_version = shared_constants["python_version"] if not relenv_version: @@ -632,7 +624,7 @@ def salt_onedir( if platform == "darwin": platform = "macos" - shared_constants = _get_shared_constants() + shared_constants = tools.utils.get_cicd_shared_context() if not relenv_version: relenv_version = shared_constants["relenv_version"] if TYPE_CHECKING: diff --git a/tools/precommit/workflows.py b/tools/precommit/workflows.py index e2748a06512..27bbac95ca5 100644 --- a/tools/precommit/workflows.py +++ b/tools/precommit/workflows.py @@ -5,12 +5,10 @@ These commands are used for our GitHub Actions workflows. 
# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated from __future__ import annotations -import json import logging import shutil from typing import TYPE_CHECKING, cast -import yaml from jinja2 import Environment, FileSystemLoader, StrictUndefined from ptscripts import Context, command_group @@ -20,10 +18,6 @@ log = logging.getLogger(__name__) WORKFLOWS = tools.utils.REPO_ROOT / ".github" / "workflows" TEMPLATES = WORKFLOWS / "templates" -with tools.utils.REPO_ROOT.joinpath("cicd", "golden-images.json").open( - "r", encoding="utf-8" -) as rfh: - AMIS = json.load(rfh) # Define the command group @@ -216,7 +210,7 @@ def generate_workflows(ctx: Context): "opensuse-15", "windows", ) - for slug in sorted(AMIS): + for slug in sorted(tools.utils.get_golden_images()): if slug.startswith(linux_skip_pkg_download_tests): continue if "arm64" in slug: @@ -251,7 +245,7 @@ def generate_workflows(ctx: Context): "photon": [], "redhat": [], } - for slug in sorted(AMIS): + for slug in sorted(tools.utils.get_golden_images()): if slug.endswith("-arm64"): continue if not slug.startswith( @@ -274,7 +268,7 @@ def generate_workflows(ctx: Context): build_rpms_listing.append((distro, release, arch)) build_debs_listing = [] - for slug in sorted(AMIS): + for slug in sorted(tools.utils.get_golden_images()): if not slug.startswith(("debian-", "ubuntu-")): continue if slug.endswith("-arm64"): @@ -335,10 +329,7 @@ def generate_workflows(ctx: Context): "windows-2022", ), } - shared_context_file = ( - tools.utils.REPO_ROOT / "cicd" / "shared-gh-workflows-context.yml" - ) - shared_context = yaml.safe_load(shared_context_file.read_text()) + shared_context = tools.utils.get_cicd_shared_context() for key, value in shared_context.items(): context[key] = value loaded_template = env.get_template(template_path.name) diff --git a/tools/testsuite/__init__.py b/tools/testsuite/__init__.py index 27ebe572465..f8928f68931 100644 --- a/tools/testsuite/__init__.py +++ b/tools/testsuite/__init__.py @@ -5,7 +5,6 @@ These commands are related to the test suite. # pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated from __future__ import annotations -import json import logging from typing import TYPE_CHECKING @@ -15,11 +14,6 @@ import tools.utils import tools.utils.gh from tools.utils import ExitCode -with tools.utils.REPO_ROOT.joinpath("cicd", "golden-images.json").open( - "r", encoding="utf-8" -) as rfh: - OS_SLUGS = sorted(json.load(rfh)) - log = logging.getLogger(__name__) # Define the command group @@ -57,7 +51,7 @@ ts = command_group(name="ts", help="Test Suite Related Commands", description=__ "slug": { "help": "The OS slug", "required": True, - "choices": OS_SLUGS, + "choices": sorted(tools.utils.get_golden_images()), }, "pkg": { "help": "Also download package test artifacts", diff --git a/tools/testsuite/download.py b/tools/testsuite/download.py index 8c5572b07e2..7626c5d10d0 100644 --- a/tools/testsuite/download.py +++ b/tools/testsuite/download.py @@ -5,7 +5,6 @@ These commands are related to downloading test suite CI artifacts. 
# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated from __future__ import annotations -import json import logging import pathlib from typing import TYPE_CHECKING @@ -15,11 +14,6 @@ from ptscripts import Context, command_group import tools.utils import tools.utils.gh -with tools.utils.REPO_ROOT.joinpath("cicd", "golden-images.json").open( - "r", encoding="utf-8" -) as rfh: - OS_SLUGS = sorted(json.load(rfh)) - log = logging.getLogger(__name__) @@ -135,7 +129,7 @@ def download_nox_artifact( "slug": { "help": "The OS slug", "required": True, - "choices": OS_SLUGS, + "choices": sorted(tools.utils.get_golden_images()), }, "repository": { "help": "The repository to query, e.g. saltstack/salt", diff --git a/tools/utils/__init__.py b/tools/utils/__init__.py index 721b7670e17..3cac1a88876 100644 --- a/tools/utils/__init__.py +++ b/tools/utils/__init__.py @@ -1,22 +1,17 @@ # pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated,bad-whitespace from __future__ import annotations -import fnmatch import hashlib import json import os import pathlib import shutil import sys -import tempfile -import zipfile -from datetime import datetime from enum import IntEnum -from typing import Any +from functools import cache -import boto3 import packaging.version -from botocore.exceptions import ClientError +import yaml from ptscripts import Context from rich.progress import ( BarColumn, @@ -284,3 +279,23 @@ def get_platform_and_arch_from_slug(slug: str) -> tuple[str, str]: else: arch = "x86_64" return platform, arch + + +@cache +def get_cicd_shared_context(): + """ + Return the CI/CD shared context file contents. + """ + shared_context_file = REPO_ROOT / "cicd" / "shared-gh-workflows-context.yml" + return yaml.safe_load(shared_context_file.read_text()) + + +@cache +def get_golden_images(): + """ + Return the golden images information stored on file. + """ + with REPO_ROOT.joinpath("cicd", "golden-images.json").open( + "r", encoding="utf-8" + ) as rfh: + return json.load(rfh) diff --git a/tools/vm.py b/tools/vm.py index 85aed8e0afd..b320cd10e36 100644 --- a/tools/vm.py +++ b/tools/vm.py @@ -47,10 +47,6 @@ if TYPE_CHECKING: log = logging.getLogger(__name__) STATE_DIR = tools.utils.REPO_ROOT / ".vms-state" -with tools.utils.REPO_ROOT.joinpath("cicd", "golden-images.json").open( - "r", encoding="utf-8" -) as rfh: - AMIS = json.load(rfh) REPO_CHECKOUT_ID = hashlib.sha256( "|".join(list(platform.uname()) + [str(tools.utils.REPO_ROOT)]).encode() ).hexdigest() @@ -67,7 +63,7 @@ vm.add_argument("--region", help="The AWS region.", default=AWS_REGION) "name": { "help": "The VM Name", "metavar": "VM_NAME", - "choices": list(AMIS), + "choices": sorted(tools.utils.get_golden_images()), }, "key_name": { "help": "The SSH key name. 
Will default to TOOLS_KEY_NAME in environment", @@ -791,10 +787,11 @@ class VM: @config.default def _config_default(self): + golden_images = tools.utils.get_golden_images() config = AMIConfig( **{ key: value - for (key, value) in AMIS[self.name].items() + for (key, value) in golden_images[self.name].items() if key in AMIConfig.__annotations__ } ) From 7cc7595167df429b73428987e8a7cad4d1b9bcef Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Tue, 26 Mar 2024 11:56:41 +0000 Subject: [PATCH 082/102] Mandatory OS slugs are now defined in ``cicd/shared-gh-workflows-context.yml`` --- .github/workflows/ci.yml | 24 ++++++++++++------------ cicd/shared-gh-workflows-context.yml | 7 +++++++ tools/precommit/workflows.py | 8 -------- 3 files changed, 19 insertions(+), 20 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 35ba2f32933..59d15389a09 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -533,7 +533,7 @@ jobs: almalinux-9-pkg-tests: name: Alma Linux 9 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["almalinux-9", "all"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -638,7 +638,7 @@ jobs: amazonlinux-2023-arm64-pkg-tests: name: Amazon Linux 2023 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["amazonlinux-2023-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -936,7 +936,7 @@ jobs: ubuntu-2204-arm64-pkg-tests: name: Ubuntu 22.04 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["ubuntu-22.04-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -978,7 +978,7 @@ jobs: macos-13-pkg-tests: name: macOS 13 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["macos-13", "all"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -1104,7 +1104,7 @@ jobs: windows-2022-nsis-pkg-tests: name: Windows 2022 NSIS Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["windows-2022", "all"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -1125,7 +1125,7 @@ jobs: windows-2022-msi-pkg-tests: name: Windows 2022 MSI 
Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["windows-2022", "all"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -1188,7 +1188,7 @@ jobs: windows-2022: name: Windows 2022 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["windows-2022", "all"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1230,7 +1230,7 @@ jobs: macos-13: name: macOS 13 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["macos-13", "all"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1314,7 +1314,7 @@ jobs: almalinux-9: name: Alma Linux 9 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["almalinux-9", "all"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1419,7 +1419,7 @@ jobs: amazonlinux-2023-arm64: name: Amazon Linux 2023 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["amazonlinux-2023-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1440,7 +1440,7 @@ jobs: archlinux-lts: name: Arch Linux LTS Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["archlinux-lts", "all"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1780,7 +1780,7 @@ jobs: ubuntu-2204-arm64: name: Ubuntu 22.04 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["ubuntu-22.04-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps diff --git a/cicd/shared-gh-workflows-context.yml b/cicd/shared-gh-workflows-context.yml index 42dc9883c7c..7c562fc0891 100644 --- a/cicd/shared-gh-workflows-context.yml +++ b/cicd/shared-gh-workflows-context.yml @@ -1,3 +1,10 @@ nox_version: "2022.8.7" python_version: "3.10.13" 
relenv_version: "0.15.1" +mandatory_os_slugs: + - almalinux-9 + - amazonlinux-2023-arm64 + - archlinux-lts + - macos-13 + - ubuntu-22.04-arm64 + - windows-2022 diff --git a/tools/precommit/workflows.py b/tools/precommit/workflows.py index 27bbac95ca5..14cc98022b6 100644 --- a/tools/precommit/workflows.py +++ b/tools/precommit/workflows.py @@ -320,14 +320,6 @@ def generate_workflows(ctx: Context): ), "build_rpms_listing": build_rpms_listing, "build_debs_listing": build_debs_listing, - "mandatory_os": ( - "almalinux-9", - "amazonlinux-2023-arm64", - "archlinux-lts", - "macos-13", - "ubuntu-22.04-arm64", - "windows-2022", - ), } shared_context = tools.utils.get_cicd_shared_context() for key, value in shared_context.items(): From 8d51ca91f2d01e98e65c0137bb3fc7532b6d3004 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Tue, 26 Mar 2024 11:55:58 +0000 Subject: [PATCH 083/102] Add tools command to sync known OS'es with GH test labels --- .github/workflows/ci.yml | 12 +- .../templates/test-salt-pkg.yml.jinja | 2 +- .../workflows/templates/test-salt.yml.jinja | 2 +- tools/__init__.py | 1 + tools/gh.py | 190 ++++++++++++++++++ 5 files changed, 199 insertions(+), 8 deletions(-) create mode 100644 tools/gh.py diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 59d15389a09..4afcf1c8041 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -957,7 +957,7 @@ jobs: macos-12-pkg-tests: name: macOS 12 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["macos-12", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["macos-12", "macos-12", "all"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -978,7 +978,7 @@ jobs: macos-13-pkg-tests: name: macOS 13 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["macos-13", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["macos-13", "macos-13", "all"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -999,7 +999,7 @@ jobs: macos-13-xlarge-pkg-tests: name: macOS 13 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["macos-13-xlarge", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["macos-13-xlarge", "macos-13-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -1209,7 +1209,7 @@ jobs: macos-12: name: macOS 12 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["macos-12", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && 
fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["macos-12", "macos-12", "all"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1230,7 +1230,7 @@ jobs: macos-13: name: macOS 13 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["macos-13", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["macos-13", "macos-13", "all"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1251,7 +1251,7 @@ jobs: macos-13-xlarge: name: macOS 13 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["macos-13-xlarge", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["macos-13-xlarge", "macos-13-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps diff --git a/.github/workflows/templates/test-salt-pkg.yml.jinja b/.github/workflows/templates/test-salt-pkg.yml.jinja index 7c37e539d75..124beaab517 100644 --- a/.github/workflows/templates/test-salt-pkg.yml.jinja +++ b/.github/workflows/templates/test-salt-pkg.yml.jinja @@ -43,7 +43,7 @@ <%- if workflow_slug != "ci" or slug in mandatory_os %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} <%- else %> - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["<{ slug }>", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["<{ slug }>", "<{ slug.replace('xlarge', 'arm64') }>", "all"]'), needs.prepare-workflow.outputs.os-labels) }} <%- endif %> needs: - prepare-workflow diff --git a/.github/workflows/templates/test-salt.yml.jinja b/.github/workflows/templates/test-salt.yml.jinja index 349438c362a..1f3bbf7960a 100644 --- a/.github/workflows/templates/test-salt.yml.jinja +++ b/.github/workflows/templates/test-salt.yml.jinja @@ -43,7 +43,7 @@ <%- if workflow_slug != "ci" or slug in mandatory_os %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} <%- else %> - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["<{ slug }>", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["<{ slug }>", "<{ slug.replace('xlarge', 'arm64') }>", "all"]'), needs.prepare-workflow.outputs.os-labels) }} <%- endif %> needs: - prepare-workflow diff --git a/tools/__init__.py b/tools/__init__.py index f5131d0e7f9..af50a06ef47 100644 --- a/tools/__init__.py +++ b/tools/__init__.py @@ -32,6 +32,7 
@@ ptscripts.set_default_config(DEFAULT_REQS_CONFIG) ptscripts.register_tools_module("tools.changelog") ptscripts.register_tools_module("tools.ci") ptscripts.register_tools_module("tools.docs") +ptscripts.register_tools_module("tools.gh") ptscripts.register_tools_module("tools.pkg") ptscripts.register_tools_module("tools.pkg.repo") ptscripts.register_tools_module("tools.pkg.build") diff --git a/tools/gh.py b/tools/gh.py new file mode 100644 index 00000000000..fa32942263e --- /dev/null +++ b/tools/gh.py @@ -0,0 +1,190 @@ +""" +These commands are used to interact and make changes to GitHub. +""" + +# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated +from __future__ import annotations + +import logging + +from ptscripts import Context, command_group + +import tools.utils +import tools.utils.gh + +log = logging.getLogger(__name__) + +WORKFLOWS = tools.utils.REPO_ROOT / ".github" / "workflows" +TEMPLATES = WORKFLOWS / "templates" + +# Define the command group +cgroup = command_group( + name="gh", + help="GitHub Related Commands", + description=__doc__, +) + + +@cgroup.command( + name="sync-os-labels", + arguments={ + "repository": { + "help": "Github repository.", + }, + }, +) +def sync_os_labels( + ctx: Context, repository: str = "saltstack/salt", color: str = "C2E0C6" +): + """ + Synchronize the GitHub labels to the OS known to be tested. + """ + description_prefix = "Run Tests Against" + known_os = { + "test:os:all": { + "name": "test:os:all", + "color": color, + "description": f"{description_prefix} ALL OS'es", + }, + "test:os:macos-12": { + "name": "test:os:macos-12", + "color": color, + "description": f"{description_prefix} MacOS 12", + }, + "test:os:macos-13": { + "name": "test:os:macos-13", + "color": color, + "description": f"{description_prefix} MacOS 13", + }, + "test:os:macos-13-arm64": { + "name": "test:os:macos-13-arm64", + "color": color, + "description": f"{description_prefix} MacOS 13 Arm64", + }, + } + for slug, details in tools.utils.get_golden_images().items(): + name = f"test:os:{slug}" + ami_description = ( + details["ami_description"] + .replace("CI Image of ", "") + .replace("arm64", "Arm64") + ) + known_os[name] = { + "name": name, + "color": color, + "description": f"{description_prefix} {ami_description}", + } + + ctx.info(known_os) + + github_token = tools.utils.gh.get_github_token(ctx) + if github_token is None: + ctx.error("Querying labels requires being authenticated to GitHub.") + ctx.info( + "Either set 'GITHUB_TOKEN' to a valid token, or configure the 'gh' tool such that " + "'gh auth token' returns a token." 
+ ) + ctx.exit(1) + + existing_labels = set() + labels_to_update = [] + labels_to_delete = set() + shared_context = tools.utils.get_cicd_shared_context() + for slug in shared_context["mandatory_os_slugs"]: + label = f"test:os:{slug}" + labels_to_delete.add(label) + + headers = { + "Accept": "application/vnd.github+json", + "Authorization": f"Bearer {github_token}", + "X-GitHub-Api-Version": "2022-11-28", + } + with ctx.web as web: + web.headers.update(headers) + page = 0 + while True: + page += 1 + params = { + "per_page": 100, + "page": page, + } + ret = web.get( + f"https://api.github.com/repos/{repository}/labels", + params=params, + ) + if ret.status_code != 200: + ctx.error( + f"Failed to get the labels for repository {repository!r}: {ret.reason}" + ) + ctx.exit(1) + data = ret.json() + if not data: + break + for details in data: + label = details["name"] + if not label.startswith("test:os:"): + continue + + existing_labels.add(label) + + if label not in known_os: + labels_to_delete.add(details["name"]) + continue + + if label in known_os: + update_details = known_os.pop(label) + if label in labels_to_delete: + continue + for key, value in update_details.items(): + if details[key] != value: + labels_to_update.append(update_details) + break + continue + + for slug in shared_context["mandatory_os_slugs"]: + label = f"test:os:{slug}" + if label in known_os: + labels_to_delete.add(label) + known_os.pop(label) + + if label in labels_to_update: + labels_to_delete.add(label) + known_os.pop(label) + + for label in labels_to_delete: + if label not in existing_labels: + continue + ctx.info(f"Deleting label '{label}' ...") + ret = web.delete( + f"https://api.github.com/repos/{repository}/labels/{label}", + ) + if ret.status_code != 204: + ctx.error( + f"Failed to delete label '{label}' for repository {repository!r}: {ret.reason}" + ) + + ctx.info("Updating OS Labels in GitHub...") + for details in labels_to_update: + label = details["name"] + ctx.info(f"Updating label '{label}' ...") + ret = web.patch( + f"https://api.github.com/repos/{repository}/labels/{label}", + params=details, + ) + if ret.status_code != 200: + ctx.error( + f"Failed to update label '{details['name']}' for repository {repository!r}: {ret.reason}" + ) + + for label, details in known_os.items(): + details["name"] = label + ctx.info(f"Creating label: {details} ...") + ret = web.post( + f"https://api.github.com/repos/{repository}/labels", + json=details, + ) + if ret.status_code != 201: + ctx.error( + f"Failed to create label '{details['name']}' for repository {repository!r}: {ret.reason}" + ) + print(ret.content) From d6ff5005da97f619eb60c20db53db8a67e54173e Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Tue, 26 Mar 2024 18:07:35 +0000 Subject: [PATCH 084/102] Use ``attrs`` to simplify OS definitions --- .github/workflows/build-deps-ci-action.yml | 4 +- .github/workflows/ci.yml | 144 +++--- .github/workflows/nightly.yml | 20 +- .github/workflows/scheduled.yml | 20 +- .github/workflows/staging.yml | 18 +- .../templates/build-deps-ci-action.yml.jinja | 2 +- .../test-package-downloads-action.yml.jinja | 26 +- .../templates/test-salt-pkg.yml.jinja | 51 +- .../workflows/templates/test-salt.yml.jinja | 49 +- .github/workflows/test-action-macos.yml | 6 +- .../test-package-downloads-action.yml | 6 +- .../workflows/test-packages-action-macos.yml | 6 +- .pre-commit-config.yaml | 2 +- cicd/shared-gh-workflows-context.yml | 2 +- tools/ci.py | 8 +- tools/precommit/workflows.py | 471 ++++++++++++++---- tools/utils/__init__.py | 31 ++ 17 
files changed, 597 insertions(+), 269 deletions(-) diff --git a/.github/workflows/build-deps-ci-action.yml b/.github/workflows/build-deps-ci-action.yml index b16ec5e6ee8..58ef83be4f1 100644 --- a/.github/workflows/build-deps-ci-action.yml +++ b/.github/workflows/build-deps-ci-action.yml @@ -163,7 +163,7 @@ jobs: macos-dependencies: name: MacOS - runs-on: ${{ matrix.distro-slug }} + runs-on: ${{ matrix.distro-slug == 'macos-13-arm64' && 'macos-13-xlarge' || matrix.distro-slug }} timeout-minutes: 90 strategy: fail-fast: false @@ -171,7 +171,7 @@ jobs: include: - distro-slug: macos-12 arch: x86_64 - - distro-slug: macos-13-xlarge + - distro-slug: macos-13-arm64 arch: arm64 steps: diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4afcf1c8041..0b5b9a5f8bc 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -491,7 +491,7 @@ jobs: almalinux-8-pkg-tests: name: Alma Linux 8 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["almalinux-8", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "almalinux-8"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -512,7 +512,7 @@ jobs: almalinux-8-arm64-pkg-tests: name: Alma Linux 8 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["almalinux-8-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "almalinux-8-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -533,7 +533,7 @@ jobs: almalinux-9-pkg-tests: name: Alma Linux 9 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["almalinux-9", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - build-pkgs-onedir @@ -554,7 +554,7 @@ jobs: almalinux-9-arm64-pkg-tests: name: Alma Linux 9 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["almalinux-9-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "almalinux-9-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -575,7 +575,7 @@ jobs: amazonlinux-2-pkg-tests: name: Amazon Linux 2 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["amazonlinux-2", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "amazonlinux-2"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -596,7 +596,7 @@ jobs: amazonlinux-2-arm64-pkg-tests: name: Amazon Linux 2 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["amazonlinux-2-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "amazonlinux-2-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -617,7 +617,7 @@ jobs: amazonlinux-2023-pkg-tests: name: Amazon Linux 2023 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["amazonlinux-2023", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "amazonlinux-2023"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -638,7 +638,7 @@ jobs: amazonlinux-2023-arm64-pkg-tests: name: Amazon Linux 2023 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["amazonlinux-2023-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - build-pkgs-onedir @@ -659,7 +659,7 @@ jobs: centos-7-pkg-tests: name: CentOS 7 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["centos-7", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "centos-7"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -680,7 +680,7 @@ jobs: debian-10-pkg-tests: name: Debian 10 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["debian-10", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "debian-10"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -701,7 +701,7 @@ jobs: debian-11-pkg-tests: name: Debian 11 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["debian-11", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && 
contains(fromJSON('["all", "debian-11"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -722,7 +722,7 @@ jobs: debian-11-arm64-pkg-tests: name: Debian 11 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["debian-11-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "debian-11-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -743,7 +743,7 @@ jobs: debian-12-pkg-tests: name: Debian 12 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["debian-12", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "debian-12"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -764,7 +764,7 @@ jobs: debian-12-arm64-pkg-tests: name: Debian 12 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["debian-12-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "debian-12-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -785,7 +785,7 @@ jobs: photonos-4-pkg-tests: name: Photon OS 4 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["photonos-4", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "photonos-4"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -807,7 +807,7 @@ jobs: photonos-4-arm64-pkg-tests: name: Photon OS 4 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["photonos-4-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "photonos-4-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -829,7 +829,7 @@ jobs: photonos-5-pkg-tests: name: Photon OS 5 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["photonos-5", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && 
contains(fromJSON('["all", "photonos-5"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -851,7 +851,7 @@ jobs: photonos-5-arm64-pkg-tests: name: Photon OS 5 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["photonos-5-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "photonos-5-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -873,7 +873,7 @@ jobs: ubuntu-2004-pkg-tests: name: Ubuntu 20.04 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["ubuntu-20.04", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "ubuntu-20.04"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -894,7 +894,7 @@ jobs: ubuntu-2004-arm64-pkg-tests: name: Ubuntu 20.04 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["ubuntu-20.04-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "ubuntu-20.04-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -915,7 +915,7 @@ jobs: ubuntu-2204-pkg-tests: name: Ubuntu 22.04 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["ubuntu-22.04", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "ubuntu-22.04"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -936,7 +936,7 @@ jobs: ubuntu-2204-arm64-pkg-tests: name: Ubuntu 22.04 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["ubuntu-22.04-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - build-pkgs-onedir @@ -957,7 +957,7 @@ jobs: macos-12-pkg-tests: name: macOS 12 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["macos-12", "macos-12", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["all", "macos-12"]'), 
needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -965,6 +965,7 @@ jobs: uses: ./.github/workflows/test-packages-action-macos.yml with: distro-slug: macos-12 + runner: macos-12 nox-session: ci-test-onedir platform: macos arch: x86_64 @@ -978,7 +979,7 @@ jobs: macos-13-pkg-tests: name: macOS 13 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["macos-13", "macos-13", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["all", "macos-13"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -986,6 +987,7 @@ jobs: uses: ./.github/workflows/test-packages-action-macos.yml with: distro-slug: macos-13 + runner: macos-13 nox-session: ci-test-onedir platform: macos arch: x86_64 @@ -997,16 +999,17 @@ jobs: skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - macos-13-xlarge-pkg-tests: + macos-13-arm64-pkg-tests: name: macOS 13 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["macos-13-xlarge", "macos-13-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: - prepare-workflow - build-pkgs-onedir - build-ci-deps uses: ./.github/workflows/test-packages-action-macos.yml with: - distro-slug: macos-13-xlarge + distro-slug: macos-13-arm64 + runner: macos-13-xlarge nox-session: ci-test-onedir platform: macos arch: arm64 @@ -1020,7 +1023,7 @@ jobs: windows-2016-nsis-pkg-tests: name: Windows 2016 NSIS Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["windows-2016", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "windows-2016"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -1041,7 +1044,7 @@ jobs: windows-2016-msi-pkg-tests: name: Windows 2016 MSI Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["windows-2016", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "windows-2016"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -1062,7 +1065,7 @@ jobs: windows-2019-nsis-pkg-tests: name: Windows 2019 NSIS Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["windows-2019", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "windows-2019"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -1083,7 +1086,7 @@ jobs: windows-2019-msi-pkg-tests: name: Windows 2019 MSI Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["windows-2019", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "windows-2019"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-pkgs-onedir @@ -1104,7 +1107,7 @@ jobs: windows-2022-nsis-pkg-tests: name: Windows 2022 NSIS Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["windows-2022", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - build-pkgs-onedir @@ -1125,7 +1128,7 @@ jobs: windows-2022-msi-pkg-tests: name: Windows 2022 MSI Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["windows-2022", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - build-pkgs-onedir @@ -1146,7 +1149,7 @@ jobs: windows-2016: name: Windows 2016 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["windows-2016", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "windows-2016"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1167,7 +1170,7 @@ jobs: windows-2019: name: Windows 2019 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["windows-2019", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "windows-2019"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1188,7 +1191,7 @@ jobs: windows-2022: name: Windows 2022 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["windows-2022", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - build-ci-deps @@ -1209,13 +1212,14 @@ jobs: macos-12: name: macOS 12 Test - if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["macos-12", "macos-12", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["all", "macos-12"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps uses: ./.github/workflows/test-action-macos.yml with: distro-slug: macos-12 + runner: macos-12 nox-session: ci-test-onedir platform: macos arch: x86_64 @@ -1230,13 +1234,14 @@ jobs: macos-13: name: macOS 13 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["macos-13", "macos-13", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["all", "macos-13"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps uses: ./.github/workflows/test-action-macos.yml with: distro-slug: macos-13 + runner: macos-13 nox-session: ci-test-onedir platform: macos arch: x86_64 @@ -1249,15 +1254,16 @@ jobs: workflow-slug: ci default-timeout: 180 - macos-13-xlarge: + macos-13-arm64: name: macOS 13 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["macos-13-xlarge", "macos-13-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: - prepare-workflow - build-ci-deps uses: ./.github/workflows/test-action-macos.yml with: - distro-slug: macos-13-xlarge + distro-slug: macos-13-arm64 + runner: macos-13-xlarge nox-session: ci-test-onedir platform: macos arch: arm64 @@ -1272,7 +1278,7 @@ jobs: almalinux-8: name: Alma Linux 8 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["almalinux-8", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "almalinux-8"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1293,7 +1299,7 @@ jobs: almalinux-8-arm64: name: Alma Linux 8 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["almalinux-8-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "almalinux-8-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1314,7 +1320,7 @@ jobs: almalinux-9: name: Alma Linux 9 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["almalinux-9", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - build-ci-deps @@ -1335,7 +1341,7 @@ jobs: almalinux-9-arm64: name: Alma Linux 9 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["almalinux-9-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "almalinux-9-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1356,7 +1362,7 @@ jobs: amazonlinux-2: name: Amazon Linux 2 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["amazonlinux-2", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "amazonlinux-2"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1377,7 +1383,7 @@ jobs: amazonlinux-2-arm64: name: Amazon Linux 2 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["amazonlinux-2-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "amazonlinux-2-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1398,7 +1404,7 @@ jobs: amazonlinux-2023: name: Amazon Linux 2023 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["amazonlinux-2023", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "amazonlinux-2023"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1419,7 +1425,7 @@ jobs: amazonlinux-2023-arm64: name: Amazon Linux 2023 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["amazonlinux-2023-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - build-ci-deps @@ -1440,7 +1446,7 @@ jobs: archlinux-lts: name: Arch Linux LTS Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["archlinux-lts", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - build-ci-deps @@ -1461,7 +1467,7 @@ jobs: centos-7: name: CentOS 7 Test - if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["centos-7", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "centos-7"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1482,7 +1488,7 @@ jobs: debian-10: name: Debian 10 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["debian-10", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "debian-10"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1503,7 +1509,7 @@ jobs: debian-11: name: Debian 11 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["debian-11", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "debian-11"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1524,7 +1530,7 @@ jobs: debian-11-arm64: name: Debian 11 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["debian-11-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "debian-11-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1545,7 +1551,7 @@ jobs: debian-12: name: Debian 12 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["debian-12", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "debian-12"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1566,7 +1572,7 @@ jobs: debian-12-arm64: name: Debian 12 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["debian-12-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "debian-12-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1587,7 +1593,7 @@ jobs: fedora-39: name: Fedora 39 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["fedora-39", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "fedora-39"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1608,7 +1614,7 @@ jobs: opensuse-15: name: Opensuse 15 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["opensuse-15", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "opensuse-15"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1629,7 +1635,7 @@ jobs: photonos-4: name: Photon OS 4 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["photonos-4", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "photonos-4"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1651,7 +1657,7 @@ jobs: photonos-4-arm64: name: Photon OS 4 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["photonos-4-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "photonos-4-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1673,7 +1679,7 @@ jobs: photonos-5: name: Photon OS 5 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["photonos-5", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "photonos-5"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1695,7 +1701,7 @@ jobs: photonos-5-arm64: name: Photon OS 5 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["photonos-5-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "photonos-5-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1717,7 +1723,7 @@ jobs: ubuntu-2004: name: Ubuntu 20.04 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["ubuntu-20.04", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "ubuntu-20.04"]'), 
needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1738,7 +1744,7 @@ jobs: ubuntu-2004-arm64: name: Ubuntu 20.04 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["ubuntu-20.04-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "ubuntu-20.04-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1759,7 +1765,7 @@ jobs: ubuntu-2204: name: Ubuntu 22.04 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["ubuntu-22.04", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "ubuntu-22.04"]'), needs.prepare-workflow.outputs.os-labels) }} needs: - prepare-workflow - build-ci-deps @@ -1780,7 +1786,7 @@ jobs: ubuntu-2204-arm64: name: Ubuntu 22.04 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["ubuntu-22.04-arm64", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - build-ci-deps @@ -1811,7 +1817,7 @@ jobs: - windows-2022 - macos-12 - macos-13 - - macos-13-xlarge + - macos-13-arm64 - almalinux-8 - almalinux-8-arm64 - almalinux-9 @@ -1965,7 +1971,7 @@ jobs: - windows-2022 - macos-12 - macos-13 - - macos-13-xlarge + - macos-13-arm64 - almalinux-8 - almalinux-8-arm64 - almalinux-9 @@ -2015,7 +2021,7 @@ jobs: - ubuntu-2204-arm64-pkg-tests - macos-12-pkg-tests - macos-13-pkg-tests - - macos-13-xlarge-pkg-tests + - macos-13-arm64-pkg-tests - windows-2016-nsis-pkg-tests - windows-2016-msi-pkg-tests - windows-2019-nsis-pkg-tests diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 95c0332458e..1ff3ccb7db5 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -1022,6 +1022,7 @@ jobs: uses: ./.github/workflows/test-packages-action-macos.yml with: distro-slug: macos-12 + runner: macos-12 nox-session: ci-test-onedir platform: macos arch: x86_64 @@ -1043,6 +1044,7 @@ jobs: uses: ./.github/workflows/test-packages-action-macos.yml with: distro-slug: macos-13 + runner: macos-13 nox-session: ci-test-onedir platform: macos arch: x86_64 @@ -1054,7 +1056,7 @@ jobs: skip-code-coverage: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - macos-13-xlarge-pkg-tests: + macos-13-arm64-pkg-tests: name: macOS 13 Arm64 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: @@ -1063,7 +1065,8 @@ jobs: - build-ci-deps uses: ./.github/workflows/test-packages-action-macos.yml with: - distro-slug: macos-13-xlarge + distro-slug: macos-13-arm64 + runner: macos-13-xlarge nox-session: ci-test-onedir platform: macos arch: arm64 @@ -1273,6 +1276,7 @@ jobs: uses: ./.github/workflows/test-action-macos.yml with: distro-slug: macos-12 + runner: 
macos-12 nox-session: ci-test-onedir platform: macos arch: x86_64 @@ -1294,6 +1298,7 @@ jobs: uses: ./.github/workflows/test-action-macos.yml with: distro-slug: macos-13 + runner: macos-13 nox-session: ci-test-onedir platform: macos arch: x86_64 @@ -1306,7 +1311,7 @@ jobs: workflow-slug: nightly default-timeout: 360 - macos-13-xlarge: + macos-13-arm64: name: macOS 13 Arm64 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: @@ -1314,7 +1319,8 @@ jobs: - build-ci-deps uses: ./.github/workflows/test-action-macos.yml with: - distro-slug: macos-13-xlarge + distro-slug: macos-13-arm64 + runner: macos-13-xlarge nox-session: ci-test-onedir platform: macos arch: arm64 @@ -1868,7 +1874,7 @@ jobs: - windows-2022 - macos-12 - macos-13 - - macos-13-xlarge + - macos-13-arm64 - almalinux-8 - almalinux-8-arm64 - almalinux-9 @@ -2778,7 +2784,7 @@ jobs: - windows-2022 - macos-12 - macos-13 - - macos-13-xlarge + - macos-13-arm64 - almalinux-8 - almalinux-8-arm64 - almalinux-9 @@ -2886,7 +2892,7 @@ jobs: - ubuntu-2204-arm64-pkg-tests - macos-12-pkg-tests - macos-13-pkg-tests - - macos-13-xlarge-pkg-tests + - macos-13-arm64-pkg-tests - windows-2016-nsis-pkg-tests - windows-2016-msi-pkg-tests - windows-2019-nsis-pkg-tests diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index c1108976d7b..524ffc4613b 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -999,6 +999,7 @@ jobs: uses: ./.github/workflows/test-packages-action-macos.yml with: distro-slug: macos-12 + runner: macos-12 nox-session: ci-test-onedir platform: macos arch: x86_64 @@ -1020,6 +1021,7 @@ jobs: uses: ./.github/workflows/test-packages-action-macos.yml with: distro-slug: macos-13 + runner: macos-13 nox-session: ci-test-onedir platform: macos arch: x86_64 @@ -1031,7 +1033,7 @@ jobs: skip-code-coverage: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - macos-13-xlarge-pkg-tests: + macos-13-arm64-pkg-tests: name: macOS 13 Arm64 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: @@ -1040,7 +1042,8 @@ jobs: - build-ci-deps uses: ./.github/workflows/test-packages-action-macos.yml with: - distro-slug: macos-13-xlarge + distro-slug: macos-13-arm64 + runner: macos-13-xlarge nox-session: ci-test-onedir platform: macos arch: arm64 @@ -1250,6 +1253,7 @@ jobs: uses: ./.github/workflows/test-action-macos.yml with: distro-slug: macos-12 + runner: macos-12 nox-session: ci-test-onedir platform: macos arch: x86_64 @@ -1271,6 +1275,7 @@ jobs: uses: ./.github/workflows/test-action-macos.yml with: distro-slug: macos-13 + runner: macos-13 nox-session: ci-test-onedir platform: macos arch: x86_64 @@ -1283,7 +1288,7 @@ jobs: workflow-slug: scheduled default-timeout: 360 - macos-13-xlarge: + macos-13-arm64: name: macOS 13 Arm64 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: @@ -1291,7 +1296,8 @@ jobs: - build-ci-deps uses: ./.github/workflows/test-action-macos.yml with: - distro-slug: macos-13-xlarge + distro-slug: macos-13-arm64 + runner: macos-13-xlarge nox-session: ci-test-onedir platform: macos arch: arm64 @@ -1845,7 +1851,7 @@ jobs: - windows-2022 - macos-12 - macos-13 - - macos-13-xlarge + - macos-13-arm64 - almalinux-8 - almalinux-8-arm64 - almalinux-9 @@ -2001,7 +2007,7 @@ 
jobs: - windows-2022 - macos-12 - macos-13 - - macos-13-xlarge + - macos-13-arm64 - almalinux-8 - almalinux-8-arm64 - almalinux-9 @@ -2051,7 +2057,7 @@ jobs: - ubuntu-2204-arm64-pkg-tests - macos-12-pkg-tests - macos-13-pkg-tests - - macos-13-xlarge-pkg-tests + - macos-13-arm64-pkg-tests - windows-2016-nsis-pkg-tests - windows-2016-msi-pkg-tests - windows-2019-nsis-pkg-tests diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 0961f55aa8c..3812b70ea9e 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -1012,6 +1012,7 @@ jobs: uses: ./.github/workflows/test-packages-action-macos.yml with: distro-slug: macos-12 + runner: macos-12 nox-session: ci-test-onedir platform: macos arch: x86_64 @@ -1033,6 +1034,7 @@ jobs: uses: ./.github/workflows/test-packages-action-macos.yml with: distro-slug: macos-13 + runner: macos-13 nox-session: ci-test-onedir platform: macos arch: x86_64 @@ -1044,7 +1046,7 @@ jobs: skip-code-coverage: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - macos-13-xlarge-pkg-tests: + macos-13-arm64-pkg-tests: name: macOS 13 Arm64 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: @@ -1053,7 +1055,8 @@ jobs: - build-ci-deps uses: ./.github/workflows/test-packages-action-macos.yml with: - distro-slug: macos-13-xlarge + distro-slug: macos-13-arm64 + runner: macos-13-xlarge nox-session: ci-test-onedir platform: macos arch: arm64 @@ -1263,6 +1266,7 @@ jobs: uses: ./.github/workflows/test-action-macos.yml with: distro-slug: macos-12 + runner: macos-12 nox-session: ci-test-onedir platform: macos arch: x86_64 @@ -1284,6 +1288,7 @@ jobs: uses: ./.github/workflows/test-action-macos.yml with: distro-slug: macos-13 + runner: macos-13 nox-session: ci-test-onedir platform: macos arch: x86_64 @@ -1296,7 +1301,7 @@ jobs: workflow-slug: staging default-timeout: 180 - macos-13-xlarge: + macos-13-arm64: name: macOS 13 Arm64 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: @@ -1304,7 +1309,8 @@ jobs: - build-ci-deps uses: ./.github/workflows/test-action-macos.yml with: - distro-slug: macos-13-xlarge + distro-slug: macos-13-arm64 + runner: macos-13-xlarge nox-session: ci-test-onedir platform: macos arch: arm64 @@ -2757,7 +2763,7 @@ jobs: - windows-2022 - macos-12 - macos-13 - - macos-13-xlarge + - macos-13-arm64 - almalinux-8 - almalinux-8-arm64 - almalinux-9 @@ -2807,7 +2813,7 @@ jobs: - ubuntu-2204-arm64-pkg-tests - macos-12-pkg-tests - macos-13-pkg-tests - - macos-13-xlarge-pkg-tests + - macos-13-arm64-pkg-tests - windows-2016-nsis-pkg-tests - windows-2016-msi-pkg-tests - windows-2019-nsis-pkg-tests diff --git a/.github/workflows/templates/build-deps-ci-action.yml.jinja b/.github/workflows/templates/build-deps-ci-action.yml.jinja index de6dc03ba3c..a08f02b0d3d 100644 --- a/.github/workflows/templates/build-deps-ci-action.yml.jinja +++ b/.github/workflows/templates/build-deps-ci-action.yml.jinja @@ -163,7 +163,7 @@ jobs: macos-dependencies: name: MacOS - runs-on: ${{ matrix.distro-slug }} + runs-on: ${{ matrix.distro-slug == 'macos-13-arm64' && 'macos-13-xlarge' || matrix.distro-slug }} timeout-minutes: 90 strategy: fail-fast: false diff --git a/.github/workflows/templates/test-package-downloads-action.yml.jinja b/.github/workflows/templates/test-package-downloads-action.yml.jinja index ee5f5fbfc1b..469509e979f 
100644 --- a/.github/workflows/templates/test-package-downloads-action.yml.jinja +++ b/.github/workflows/templates/test-package-downloads-action.yml.jinja @@ -69,10 +69,10 @@ jobs: fail-fast: false matrix: include: - <%- for slug, arch, pkg_type in test_salt_pkg_downloads_listing["linux"] %> - - distro-slug: <{ slug }> - arch: <{ arch }> - pkg-type: <{ pkg_type }> + <%- for os in test_salt_pkg_downloads_listing["linux"] %> + - distro-slug: <{ os.slug }> + arch: <{ os.arch }> + pkg-type: <{ os.pkg_type }> <%- endfor %> steps: @@ -271,7 +271,7 @@ jobs: macos: name: MacOS - runs-on: ${{ matrix.distro-slug }} + runs-on: ${{ matrix.distro-slug == 'macos-13-arm64' && 'macos-13-xlarge' || matrix.distro-slug }} env: USE_S3_CACHE: 'false' environment: ${{ inputs.environment }} @@ -280,10 +280,10 @@ jobs: fail-fast: false matrix: include: - <%- for slug, arch, pkg_type in test_salt_pkg_downloads_listing["macos"] %> - - distro-slug: <{ slug }> - arch: <{ arch }> - pkg-type: <{ pkg_type }> + <%- for os in test_salt_pkg_downloads_listing["macos"] %> + - distro-slug: <{ os.slug }> + arch: <{ os.arch }> + pkg-type: <{ os.pkg_type }> <%- endfor %> steps: @@ -485,10 +485,10 @@ jobs: fail-fast: false matrix: include: - <%- for slug, arch, pkg_type in test_salt_pkg_downloads_listing["windows"] %> - - distro-slug: <{ slug }> - arch: <{ arch }> - pkg-type: <{ pkg_type }> + <%- for os in test_salt_pkg_downloads_listing["windows"] %> + - distro-slug: <{ os.slug }> + arch: <{ os.arch }> + pkg-type: <{ os.pkg_type }> <%- endfor %> steps: diff --git a/.github/workflows/templates/test-salt-pkg.yml.jinja b/.github/workflows/templates/test-salt-pkg.yml.jinja index 124beaab517..d42155e552f 100644 --- a/.github/workflows/templates/test-salt-pkg.yml.jinja +++ b/.github/workflows/templates/test-salt-pkg.yml.jinja @@ -1,13 +1,13 @@ - <%- for slug, display_name, arch, pkg_type, fips in test_salt_pkg_listing["linux"] %> - <%- set job_name = "{}-pkg-tests".format(slug.replace(".", "")) %> + <%- for os in test_salt_pkg_listing["linux"] %> + <%- set job_name = "{}-pkg-tests".format(os.slug.replace(".", "")) %> <{ job_name }>: <%- do test_salt_pkg_needs.append(job_name) %> - name: <{ display_name }> Package Test - <%- if workflow_slug != "ci" or slug in mandatory_os %> + name: <{ os.display_name }> Package Test + <%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} <%- else %> - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["<{ slug }>", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('<{ os.os_labels_json() }>'), needs.prepare-workflow.outputs.os-labels) }} <%- endif %> needs: - prepare-workflow @@ -15,18 +15,18 @@ - build-ci-deps uses: ./.github/workflows/test-packages-action-linux.yml with: - distro-slug: <{ slug }> + distro-slug: <{ os.slug }> nox-session: ci-test-onedir platform: linux - arch: <{ arch }> + arch: <{ os.arch }> salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: <{ pkg_type }> + pkg-type: <{ os.pkg_type }> nox-version: <{ nox_version }> python-version: "<{ gh_actions_workflows_python_version }>" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ 
python_version }> skip-code-coverage: <{ skip_test_coverage_check }> testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - <%- if fips == "fips" %> + <%- if os.fips %> fips: true <%- endif %> @@ -34,16 +34,16 @@ - <%- for slug, display_name, arch in test_salt_pkg_listing["macos"] %> - <%- set job_name = "{}-pkg-tests".format(slug.replace(".", "")) %> + <%- for os in test_salt_pkg_listing["macos"] %> + <%- set job_name = "{}-pkg-tests".format(os.slug.replace(".", "")) %> <{ job_name }>: <%- do test_salt_pkg_needs.append(job_name) %> - name: <{ display_name }> Package Test - <%- if workflow_slug != "ci" or slug in mandatory_os %> + name: <{ os.display_name }> Package Test + <%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} <%- else %> - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["<{ slug }>", "<{ slug.replace('xlarge', 'arm64') }>", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('<{ os.os_labels_json() }>'), needs.prepare-workflow.outputs.os-labels) }} <%- endif %> needs: - prepare-workflow @@ -51,10 +51,11 @@ - build-ci-deps uses: ./.github/workflows/test-packages-action-macos.yml with: - distro-slug: <{ slug }> + distro-slug: <{ os.slug }> + runner: <{ os.runner }> nox-session: ci-test-onedir platform: macos - arch: <{ arch }> + arch: <{ os.arch }> salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: macos nox-version: <{ nox_version }> @@ -66,17 +67,16 @@ <%- endfor %> - <%- for slug, display_name, arch in test_salt_pkg_listing["windows"] %> - <%- for pkg_type in ("NSIS", "MSI") %> - <%- set job_name = "{}-{}-pkg-tests".format(slug.replace(".", ""), pkg_type.lower()) %> + <%- for os in test_salt_pkg_listing["windows"] %> + <%- set job_name = "{}-{}-pkg-tests".format(os.slug.replace(".", ""), os.pkg_type.lower()) %> <{ job_name }>: <%- do test_salt_pkg_needs.append(job_name) %> - name: <{ display_name }> <{ pkg_type }> Package Test - <%- if workflow_slug != "ci" or slug in mandatory_os %> + name: <{ os.display_name }> <{ os.pkg_type }> Package Test + <%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} <%- else %> - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["<{ slug }>", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('<{ os.os_labels_json() }>'), needs.prepare-workflow.outputs.os-labels) }} <%- endif %> needs: - prepare-workflow @@ -84,17 +84,16 @@ - build-ci-deps uses: ./.github/workflows/test-packages-action-windows.yml with: - distro-slug: <{ slug }> + distro-slug: <{ os.slug }> nox-session: ci-test-onedir platform: windows - arch: <{ arch }> + arch: <{ os.arch }> salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: <{ pkg_type }> + pkg-type: <{ os.pkg_type }> nox-version: 
<{ nox_version }> python-version: "<{ gh_actions_workflows_python_version }>" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }> skip-code-coverage: <{ skip_test_coverage_check }> testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - <%- endfor %> <%- endfor %> diff --git a/.github/workflows/templates/test-salt.yml.jinja b/.github/workflows/templates/test-salt.yml.jinja index 1f3bbf7960a..c3ea4eee146 100644 --- a/.github/workflows/templates/test-salt.yml.jinja +++ b/.github/workflows/templates/test-salt.yml.jinja @@ -4,22 +4,22 @@ <%- set timeout_value = 180 %> <%- endif %> - <%- for slug, display_name, arch in test_salt_listing["windows"] %> + <%- for os in test_salt_listing["windows"] %> - <{ slug.replace(".", "") }>: - <%- do test_salt_needs.append(slug.replace(".", "")) %> - name: <{ display_name }> Test - <%- if workflow_slug != "ci" or slug in mandatory_os %> + <{ os.slug.replace(".", "") }>: + <%- do test_salt_needs.append(os.slug.replace(".", "")) %> + name: <{ os.display_name }> Test + <%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} <%- else %> - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["<{ slug }>", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('<{ os.os_labels_json() }>'), needs.prepare-workflow.outputs.os-labels) }} <%- endif %> needs: - prepare-workflow - build-ci-deps uses: ./.github/workflows/test-action-windows.yml with: - distro-slug: <{ slug }> + distro-slug: <{ os.slug }> nox-session: ci-test-onedir platform: windows arch: amd64 @@ -35,25 +35,26 @@ <%- endfor %> - <%- for slug, display_name, arch in test_salt_listing["macos"] %> + <%- for os in test_salt_listing["macos"] %> - <{ slug.replace(".", "") }>: - <%- do test_salt_needs.append(slug.replace(".", "")) %> - name: <{ display_name }> Test - <%- if workflow_slug != "ci" or slug in mandatory_os %> + <{ os.slug.replace(".", "") }>: + <%- do test_salt_needs.append(os.slug.replace(".", "")) %> + name: <{ os.display_name }> Test + <%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} <%- else %> - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["<{ slug }>", "<{ slug.replace('xlarge', 'arm64') }>", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('<{ os.os_labels_json() }>'), needs.prepare-workflow.outputs.os-labels) }} <%- endif %> needs: - prepare-workflow - build-ci-deps uses: ./.github/workflows/test-action-macos.yml with: - distro-slug: <{ slug }> + distro-slug: <{ os.slug }> + runner: <{ os.runner }> nox-session: ci-test-onedir platform: macos - arch: <{ arch }> + arch: <{ os.arch }> nox-version: <{ nox_version }> gh-actions-python-version: "<{ gh_actions_workflows_python_version }>" testrun: ${{ needs.prepare-workflow.outputs.testrun }} @@ -65,25 
+66,25 @@ <%- endfor %> - <%- for slug, display_name, arch, fips in test_salt_listing["linux"] %> + <%- for os in test_salt_listing["linux"] %> - <{ slug.replace(".", "") }>: - <%- do test_salt_needs.append(slug.replace(".", "")) %> - name: <{ display_name }> Test - <%- if workflow_slug != "ci" or slug in mandatory_os %> + <{ os.slug.replace(".", "") }>: + <%- do test_salt_needs.append(os.slug.replace(".", "")) %> + name: <{ os.display_name }> Test + <%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} <%- else %> - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["<{ slug }>", "all"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('<{ os.os_labels_json() }>'), needs.prepare-workflow.outputs.os-labels) }} <%- endif %> needs: - prepare-workflow - build-ci-deps uses: ./.github/workflows/test-action-linux.yml with: - distro-slug: <{ slug }> + distro-slug: <{ os.slug }> nox-session: ci-test-onedir platform: linux - arch: <{ arch }> + arch: <{ os.arch }> nox-version: <{ nox_version }> gh-actions-python-version: "<{ gh_actions_workflows_python_version }>" testrun: ${{ needs.prepare-workflow.outputs.testrun }} @@ -92,7 +93,7 @@ skip-code-coverage: <{ skip_test_coverage_check }> workflow-slug: <{ workflow_slug }> default-timeout: <{ timeout_value }> - <%- if fips == "fips" %> + <%- if os.fips %> fips: true <%- endif %> diff --git a/.github/workflows/test-action-macos.yml b/.github/workflows/test-action-macos.yml index 24465a724c0..6b03a6cb558 100644 --- a/.github/workflows/test-action-macos.yml +++ b/.github/workflows/test-action-macos.yml @@ -8,6 +8,10 @@ on: required: true type: string description: The OS slug to run tests against + runner: + required: true + type: string + description: The GitHub runner name nox-session: required: true type: string @@ -98,7 +102,7 @@ jobs: test: name: Test - runs-on: ${{ inputs.distro-slug }} + runs-on: ${{ inputs.runner }} # Full test runs. Each chunk should never take more than 2 hours. 
# Partial test runs(no chunk parallelization), 6 Hours timeout-minutes: ${{ fromJSON(inputs.testrun)['type'] == 'full' && inputs.default-timeout || 360 }} diff --git a/.github/workflows/test-package-downloads-action.yml b/.github/workflows/test-package-downloads-action.yml index dc3fb12a661..e4bd6af0fd8 100644 --- a/.github/workflows/test-package-downloads-action.yml +++ b/.github/workflows/test-package-downloads-action.yml @@ -380,7 +380,7 @@ jobs: macos: name: MacOS - runs-on: ${{ matrix.distro-slug }} + runs-on: ${{ matrix.distro-slug == 'macos-13-arm64' && 'macos-13-xlarge' || matrix.distro-slug }} env: USE_S3_CACHE: 'false' environment: ${{ inputs.environment }} @@ -395,10 +395,10 @@ jobs: - distro-slug: macos-13 arch: x86_64 pkg-type: package - - distro-slug: macos-13-xlarge + - distro-slug: macos-13-arm64 arch: arm64 pkg-type: package - - distro-slug: macos-13-xlarge + - distro-slug: macos-13-arm64 arch: arm64 pkg-type: onedir diff --git a/.github/workflows/test-packages-action-macos.yml b/.github/workflows/test-packages-action-macos.yml index 2b6b2e9ca90..611d84e22e7 100644 --- a/.github/workflows/test-packages-action-macos.yml +++ b/.github/workflows/test-packages-action-macos.yml @@ -7,6 +7,10 @@ on: required: true type: string description: The OS slug to run tests against + runner: + required: true + type: string + description: The GitHub runner name platform: required: true type: string @@ -98,7 +102,7 @@ jobs: test: name: Test - runs-on: ${{ inputs.distro-slug }} + runs-on: ${{ inputs.runner }} timeout-minutes: 120 # 2 Hours - More than this and something is wrong needs: - generate-matrix diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5d338177f40..68821fd8ba5 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -59,7 +59,7 @@ repos: - id: tools alias: generate-workflows name: Generate GitHub Workflow Templates - files: ^(cicd/shared-gh-workflows-context\.yml|tools/precommit/workflows\.py|.github/workflows/templates/.*)$ + files: ^(cicd/shared-gh-workflows-context\.yml|tools/precommit/workflows\.py|.github/workflows/.*)$ pass_filenames: false args: - pre-commit diff --git a/cicd/shared-gh-workflows-context.yml b/cicd/shared-gh-workflows-context.yml index 7c562fc0891..2966a250604 100644 --- a/cicd/shared-gh-workflows-context.yml +++ b/cicd/shared-gh-workflows-context.yml @@ -5,6 +5,6 @@ mandatory_os_slugs: - almalinux-9 - amazonlinux-2023-arm64 - archlinux-lts - - macos-13 + - macos-13-arm64 - ubuntu-22.04-arm64 - windows-2022 diff --git a/tools/ci.py b/tools/ci.py index 491df8019ed..ed1c0416630 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -968,7 +968,13 @@ def get_pr_test_labels( if TYPE_CHECKING: assert github_output is not None - ctx.info("Writing 'labels' to the github outputs file") + ctx.info("Writing 'labels' to the github outputs file...") + ctx.info("Test Labels:") + for label in sorted(test_labels): + ctx.info(f" * [yellow]{label}[/yellow]") + ctx.info("* OS Labels:") + for slug in sorted(selected): + ctx.info(f" * [yellow]{slug}[/yellow]") with open(github_output, "a", encoding="utf-8") as wfh: wfh.write(f"os-labels={json.dumps([label for label in os_labels])}\n") wfh.write(f"test-labels={json.dumps([label for label in test_labels])}\n") diff --git a/tools/precommit/workflows.py b/tools/precommit/workflows.py index 14cc98022b6..3c8b032ad05 100644 --- a/tools/precommit/workflows.py +++ b/tools/precommit/workflows.py @@ -7,12 +7,19 @@ from __future__ import annotations import logging import shutil +import sys from typing import 
TYPE_CHECKING, cast from jinja2 import Environment, FileSystemLoader, StrictUndefined from ptscripts import Context, command_group import tools.utils +from tools.utils import Linux, MacOS, Windows + +if sys.version_info < (3, 11): + from typing_extensions import TypedDict +else: + from typing import TypedDict # pylint: disable=no-name-in-module log = logging.getLogger(__name__) @@ -20,6 +27,12 @@ WORKFLOWS = tools.utils.REPO_ROOT / ".github" / "workflows" TEMPLATES = WORKFLOWS / "templates" +class PlatformDefinitions(TypedDict): + linux: list[Linux] + macos: list[MacOS] + windows: list[Windows] + + # Define the command group cgroup = command_group( name="workflows", @@ -91,94 +104,299 @@ def generate_workflows(ctx: Context): "template": "build-deps-ci-action.yml", }, } - test_salt_listing: dict[str, list[tuple[str, ...]]] = { - "linux": [ - ("almalinux-8", "Alma Linux 8", "x86_64", "no-fips"), - ("almalinux-8-arm64", "Alma Linux 8 Arm64", "arm64", "no-fips"), - ("almalinux-9", "Alma Linux 9", "x86_64", "no-fips"), - ("almalinux-9-arm64", "Alma Linux 9 Arm64", "arm64", "no-fips"), - ("amazonlinux-2", "Amazon Linux 2", "x86_64", "no-fips"), - ("amazonlinux-2-arm64", "Amazon Linux 2 Arm64", "arm64", "no-fips"), - ("amazonlinux-2023", "Amazon Linux 2023", "x86_64", "no-fips"), - ("amazonlinux-2023-arm64", "Amazon Linux 2023 Arm64", "arm64", "no-fips"), - ("archlinux-lts", "Arch Linux LTS", "x86_64", "no-fips"), - ("centos-7", "CentOS 7", "x86_64", "no-fips"), - ("debian-10", "Debian 10", "x86_64", "no-fips"), - ("debian-11", "Debian 11", "x86_64", "no-fips"), - ("debian-11-arm64", "Debian 11 Arm64", "arm64", "no-fips"), - ("debian-12", "Debian 12", "x86_64", "no-fips"), - ("debian-12-arm64", "Debian 12 Arm64", "arm64", "no-fips"), - ("fedora-39", "Fedora 39", "x86_64", "no-fips"), - ("opensuse-15", "Opensuse 15", "x86_64", "no-fips"), - ("photonos-4", "Photon OS 4", "x86_64", "fips"), - ("photonos-4-arm64", "Photon OS 4 Arm64", "arm64", "fips"), - ("photonos-5", "Photon OS 5", "x86_64", "fips"), - ("photonos-5-arm64", "Photon OS 5 Arm64", "arm64", "fips"), - ("ubuntu-20.04", "Ubuntu 20.04", "x86_64", "no-fips"), - ("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "arm64", "no-fips"), - ("ubuntu-22.04", "Ubuntu 22.04", "x86_64", "no-fips"), - ("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "arm64", "no-fips"), - ], - "macos": [ - ("macos-12", "macOS 12", "x86_64"), - ("macos-13", "macOS 13", "x86_64"), - ("macos-13-xlarge", "macOS 13 Arm64", "arm64"), - ], - "windows": [ - ("windows-2016", "Windows 2016", "amd64"), - ("windows-2019", "Windows 2019", "amd64"), - ("windows-2022", "Windows 2022", "amd64"), - ], - } + test_salt_listing = PlatformDefinitions( + { + "linux": [ + Linux(slug="almalinux-8", display_name="Alma Linux 8", arch="x86_64"), + Linux( + slug="almalinux-8-arm64", + display_name="Alma Linux 8 Arm64", + arch="arm64", + ), + Linux(slug="almalinux-9", display_name="Alma Linux 9", arch="x86_64"), + Linux( + slug="almalinux-9-arm64", + display_name="Alma Linux 9 Arm64", + arch="arm64", + ), + Linux( + slug="amazonlinux-2", display_name="Amazon Linux 2", arch="x86_64" + ), + Linux( + slug="amazonlinux-2-arm64", + display_name="Amazon Linux 2 Arm64", + arch="arm64", + ), + Linux( + slug="amazonlinux-2023", + display_name="Amazon Linux 2023", + arch="x86_64", + ), + Linux( + slug="amazonlinux-2023-arm64", + display_name="Amazon Linux 2023 Arm64", + arch="arm64", + ), + Linux( + slug="archlinux-lts", display_name="Arch Linux LTS", arch="x86_64" + ), + Linux(slug="centos-7", display_name="CentOS 
7", arch="x86_64"), + Linux(slug="debian-10", display_name="Debian 10", arch="x86_64"), + Linux(slug="debian-11", display_name="Debian 11", arch="x86_64"), + Linux( + slug="debian-11-arm64", display_name="Debian 11 Arm64", arch="arm64" + ), + Linux(slug="debian-12", display_name="Debian 12", arch="x86_64"), + Linux( + slug="debian-12-arm64", display_name="Debian 12 Arm64", arch="arm64" + ), + Linux(slug="fedora-39", display_name="Fedora 39", arch="x86_64"), + Linux(slug="opensuse-15", display_name="Opensuse 15", arch="x86_64"), + Linux( + slug="photonos-4", + display_name="Photon OS 4", + arch="x86_64", + fips=True, + ), + Linux( + slug="photonos-4-arm64", + display_name="Photon OS 4 Arm64", + arch="arm64", + fips=True, + ), + Linux( + slug="photonos-5", + display_name="Photon OS 5", + arch="x86_64", + fips=True, + ), + Linux( + slug="photonos-5-arm64", + display_name="Photon OS 5 Arm64", + arch="arm64", + fips=True, + ), + Linux(slug="ubuntu-20.04", display_name="Ubuntu 20.04", arch="x86_64"), + Linux( + slug="ubuntu-20.04-arm64", + display_name="Ubuntu 20.04 Arm64", + arch="arm64", + ), + Linux(slug="ubuntu-22.04", display_name="Ubuntu 22.04", arch="x86_64"), + Linux( + slug="ubuntu-22.04-arm64", + display_name="Ubuntu 22.04 Arm64", + arch="arm64", + ), + ], + "macos": [ + MacOS(slug="macos-12", display_name="macOS 12", arch="x86_64"), + MacOS(slug="macos-13", display_name="macOS 13", arch="x86_64"), + MacOS( + slug="macos-13-arm64", + display_name="macOS 13 Arm64", + arch="arm64", + runner="macos-13-xlarge", + ), + ], + "windows": [ + Windows(slug="windows-2016", display_name="Windows 2016", arch="amd64"), + Windows(slug="windows-2019", display_name="Windows 2019", arch="amd64"), + Windows(slug="windows-2022", display_name="Windows 2022", arch="amd64"), + ], + } + ) - test_salt_pkg_listing = { - "linux": [ - ("almalinux-8", "Alma Linux 8", "x86_64", "rpm", "no-fips"), - ("almalinux-8-arm64", "Alma Linux 8 Arm64", "arm64", "rpm", "no-fips"), - ("almalinux-9", "Alma Linux 9", "x86_64", "rpm", "no-fips"), - ("almalinux-9-arm64", "Alma Linux 9 Arm64", "arm64", "rpm", "no-fips"), - ("amazonlinux-2", "Amazon Linux 2", "x86_64", "rpm", "no-fips"), - ( - "amazonlinux-2-arm64", - "Amazon Linux 2 Arm64", - "arm64", - "rpm", - "no-fips", - ), - ("amazonlinux-2023", "Amazon Linux 2023", "x86_64", "rpm", "no-fips"), - ( - "amazonlinux-2023-arm64", - "Amazon Linux 2023 Arm64", - "arm64", - "rpm", - "no-fips", - ), - ("centos-7", "CentOS 7", "x86_64", "rpm", "no-fips"), - ("debian-10", "Debian 10", "x86_64", "deb", "no-fips"), - ("debian-11", "Debian 11", "x86_64", "deb", "no-fips"), - ("debian-11-arm64", "Debian 11 Arm64", "arm64", "deb", "no-fips"), - ("debian-12", "Debian 12", "x86_64", "deb", "no-fips"), - ("debian-12-arm64", "Debian 12 Arm64", "arm64", "deb", "no-fips"), - ("photonos-4", "Photon OS 4", "x86_64", "rpm", "fips"), - ("photonos-4-arm64", "Photon OS 4 Arm64", "arm64", "rpm", "fips"), - ("photonos-5", "Photon OS 5", "x86_64", "rpm", "fips"), - ("photonos-5-arm64", "Photon OS 5 Arm64", "arm64", "rpm", "fips"), - ("ubuntu-20.04", "Ubuntu 20.04", "x86_64", "deb", "no-fips"), - ("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "arm64", "deb", "no-fips"), - ("ubuntu-22.04", "Ubuntu 22.04", "x86_64", "deb", "no-fips"), - ("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "arm64", "deb", "no-fips"), - ], - "macos": [ - ("macos-12", "macOS 12", "x86_64"), - ("macos-13", "macOS 13", "x86_64"), - ("macos-13-xlarge", "macOS 13 Arm64", "arm64"), - ], - "windows": [ - ("windows-2016", "Windows 2016", 
"amd64"), - ("windows-2019", "Windows 2019", "amd64"), - ("windows-2022", "Windows 2022", "amd64"), - ], - } + test_salt_pkg_listing = PlatformDefinitions( + { + "linux": [ + Linux( + slug="almalinux-8", + display_name="Alma Linux 8", + arch="x86_64", + pkg_type="rpm", + ), + Linux( + slug="almalinux-8-arm64", + display_name="Alma Linux 8 Arm64", + arch="arm64", + pkg_type="rpm", + ), + Linux( + slug="almalinux-9", + display_name="Alma Linux 9", + arch="x86_64", + pkg_type="rpm", + ), + Linux( + slug="almalinux-9-arm64", + display_name="Alma Linux 9 Arm64", + arch="arm64", + pkg_type="rpm", + ), + Linux( + slug="amazonlinux-2", + display_name="Amazon Linux 2", + arch="x86_64", + pkg_type="rpm", + ), + Linux( + slug="amazonlinux-2-arm64", + display_name="Amazon Linux 2 Arm64", + arch="arm64", + pkg_type="rpm", + ), + Linux( + slug="amazonlinux-2023", + display_name="Amazon Linux 2023", + arch="x86_64", + pkg_type="rpm", + ), + Linux( + slug="amazonlinux-2023-arm64", + display_name="Amazon Linux 2023 Arm64", + arch="arm64", + pkg_type="rpm", + ), + Linux( + slug="centos-7", + display_name="CentOS 7", + arch="x86_64", + pkg_type="rpm", + ), + Linux( + slug="debian-10", + display_name="Debian 10", + arch="x86_64", + pkg_type="deb", + ), + Linux( + slug="debian-11", + display_name="Debian 11", + arch="x86_64", + pkg_type="deb", + ), + Linux( + slug="debian-11-arm64", + display_name="Debian 11 Arm64", + arch="arm64", + pkg_type="deb", + ), + Linux( + slug="debian-12", + display_name="Debian 12", + arch="x86_64", + pkg_type="deb", + ), + Linux( + slug="debian-12-arm64", + display_name="Debian 12 Arm64", + arch="arm64", + pkg_type="deb", + ), + Linux( + slug="photonos-4", + display_name="Photon OS 4", + arch="x86_64", + pkg_type="rpm", + fips=True, + ), + Linux( + slug="photonos-4-arm64", + display_name="Photon OS 4 Arm64", + arch="arm64", + pkg_type="rpm", + fips=True, + ), + Linux( + slug="photonos-5", + display_name="Photon OS 5", + arch="x86_64", + pkg_type="rpm", + fips=True, + ), + Linux( + slug="photonos-5-arm64", + display_name="Photon OS 5 Arm64", + arch="arm64", + pkg_type="rpm", + fips=True, + ), + Linux( + slug="ubuntu-20.04", + display_name="Ubuntu 20.04", + arch="x86_64", + pkg_type="deb", + ), + Linux( + slug="ubuntu-20.04-arm64", + display_name="Ubuntu 20.04 Arm64", + arch="arm64", + pkg_type="deb", + ), + Linux( + slug="ubuntu-22.04", + display_name="Ubuntu 22.04", + arch="x86_64", + pkg_type="deb", + ), + Linux( + slug="ubuntu-22.04-arm64", + display_name="Ubuntu 22.04 Arm64", + arch="arm64", + pkg_type="deb", + ), + ], + "macos": [ + MacOS(slug="macos-12", display_name="macOS 12", arch="x86_64"), + MacOS(slug="macos-13", display_name="macOS 13", arch="x86_64"), + MacOS( + slug="macos-13-arm64", + display_name="macOS 13 Arm64", + arch="arm64", + runner="macos-13-xlarge", + ), + ], + "windows": [ + Windows( + slug="windows-2016", + display_name="Windows 2016", + arch="amd64", + pkg_type="NSIS", + ), + Windows( + slug="windows-2016", + display_name="Windows 2016", + arch="amd64", + pkg_type="MSI", + ), + Windows( + slug="windows-2019", + display_name="Windows 2019", + arch="amd64", + pkg_type="NSIS", + ), + Windows( + slug="windows-2019", + display_name="Windows 2019", + arch="amd64", + pkg_type="MSI", + ), + Windows( + slug="windows-2022", + display_name="Windows 2022", + arch="amd64", + pkg_type="NSIS", + ), + Windows( + slug="windows-2022", + display_name="Windows 2022", + arch="amd64", + pkg_type="MSI", + ), + ], + } + ) build_ci_deps_listing = { "linux": [ @@ -187,17 +405,19 
@@ def generate_workflows(ctx: Context): ], "macos": [ ("x86_64", "macos-12"), - ("arm64", "macos-13-xlarge"), + ("arm64", "macos-13-arm64"), ], "windows": [ ("amd64", "windows-2022"), ], } - test_salt_pkg_downloads_listing: dict[str, list[tuple[str, str, str]]] = { - "linux": [], - "macos": [], - "windows": [], - } + test_salt_pkg_downloads_listing = PlatformDefinitions( + { + "linux": [], + "macos": [], + "windows": [], + } + ) rpm_slugs = ( "almalinux", "amazonlinux", @@ -220,23 +440,62 @@ def generate_workflows(ctx: Context): if slug.startswith(rpm_slugs) and arch == "arm64": # While we maintain backwards compatible urls test_salt_pkg_downloads_listing["linux"].append( - (slug, "aarch64", "package") + Linux( + slug=slug, + arch="aarch64", + pkg_type="package", + ) ) - test_salt_pkg_downloads_listing["linux"].append((slug, arch, "package")) + test_salt_pkg_downloads_listing["linux"].append( + Linux( + slug=slug, + arch=arch, + pkg_type="package", + ) + ) if slug.startswith("ubuntu-22"): - test_salt_pkg_downloads_listing["linux"].append((slug, arch, "onedir")) - for slug, display_name, arch in test_salt_listing["macos"]: - test_salt_pkg_downloads_listing["macos"].append((slug, arch, "package")) - for slug, display_name, arch in test_salt_listing["macos"][-1:]: - test_salt_pkg_downloads_listing["macos"].append((slug, arch, "onedir")) - for slug, display_name, arch in test_salt_listing["windows"][-1:]: + test_salt_pkg_downloads_listing["linux"].append( + Linux( + slug=slug, + arch=arch, + pkg_type="onedir", + ) + ) + for mac in test_salt_listing["macos"]: + test_salt_pkg_downloads_listing["macos"].append( + MacOS( + slug=mac.slug, + arch=mac.arch, + display_name=mac.display_name, + pkg_type="package", + runner=mac.runner, + ) + ) + for mac in test_salt_listing["macos"][-1:]: + test_salt_pkg_downloads_listing["macos"].append( + MacOS( + slug=mac.slug, + arch=mac.arch, + display_name=mac.display_name, + pkg_type="onedir", + runner=mac.runner, + ) + ) + for win in test_salt_listing["windows"][-1:]: for pkg_type in ("nsis", "msi", "onedir"): - test_salt_pkg_downloads_listing["windows"].append((slug, arch, pkg_type)) + test_salt_pkg_downloads_listing["windows"].append( + Windows( + slug=win.slug, + arch=win.arch, + display_name=win.display_name, + pkg_type=pkg_type, + ) + ) - test_salt_pkg_downloads_needs_slugs = set() - for platform in test_salt_pkg_downloads_listing: - for _, arch, _ in test_salt_pkg_downloads_listing[platform]: - test_salt_pkg_downloads_needs_slugs.add("build-ci-deps") + test_salt_pkg_downloads_needs_slugs = {"build-ci-deps"} + # for platform in test_salt_pkg_downloads_listing: + # for _, arch, _ in test_salt_pkg_downloads_listing[platform]: + # test_salt_pkg_downloads_needs_slugs.add("build-ci-deps") build_rpms_listing = [] rpm_os_versions: dict[str, list[str]] = { diff --git a/tools/utils/__init__.py b/tools/utils/__init__.py index 3cac1a88876..3635c82d05b 100644 --- a/tools/utils/__init__.py +++ b/tools/utils/__init__.py @@ -10,6 +10,7 @@ import sys from enum import IntEnum from functools import cache +import attr import packaging.version import yaml from ptscripts import Context @@ -37,6 +38,36 @@ class ExitCode(IntEnum): SOFT_FAIL = 2 +@attr.s(frozen=True, slots=True) +class OS: + platform: str = attr.ib() + slug: str = attr.ib() + display_name: str = attr.ib(default=None) + arch: str = attr.ib(default=None) + pkg_type: str = attr.ib(default=None) + + +@attr.s(frozen=True, slots=True) +class Linux(OS): + platform: str = attr.ib(default="linux") + fips: bool = 
attr.ib(default=False) + + +@attr.s(frozen=True, slots=True) +class MacOS(OS): + runner: str = attr.ib() + platform: str = attr.ib(default="macos") + + @runner.default + def _default_runner(self): + return self.slug + + +@attr.s(frozen=True, slots=True) +class Windows(OS): + platform: str = attr.ib(default="windows") + + def create_progress_bar(file_progress: bool = False, **kwargs): if file_progress: return Progress( From 7c5125a8e3760426992ca56b8ed3f9fe6af130fe Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 27 Mar 2024 16:50:05 +0000 Subject: [PATCH 085/102] Add what OS'es run tests as a step summary --- .github/workflows/ci.yml | 100 +++++++++--------- .../templates/test-salt-pkg.yml.jinja | 6 +- .../workflows/templates/test-salt.yml.jinja | 6 +- tools/ci.py | 65 ++++++++++-- 4 files changed, 113 insertions(+), 64 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0b5b9a5f8bc..4765edb40d5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -491,7 +491,7 @@ jobs: almalinux-8-pkg-tests: name: Alma Linux 8 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "almalinux-8"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'almalinux-8') }} needs: - prepare-workflow - build-pkgs-onedir @@ -512,7 +512,7 @@ jobs: almalinux-8-arm64-pkg-tests: name: Alma Linux 8 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "almalinux-8-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'almalinux-8-arm64') }} needs: - prepare-workflow - build-pkgs-onedir @@ -554,7 +554,7 @@ jobs: almalinux-9-arm64-pkg-tests: name: Alma Linux 9 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "almalinux-9-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'almalinux-9-arm64') }} needs: - prepare-workflow - build-pkgs-onedir @@ -575,7 +575,7 @@ jobs: amazonlinux-2-pkg-tests: name: Amazon Linux 2 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "amazonlinux-2"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'amazonlinux-2') }} needs: - prepare-workflow - build-pkgs-onedir @@ -596,7 +596,7 @@ jobs: amazonlinux-2-arm64-pkg-tests: name: Amazon Linux 2 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] 
&& contains(fromJSON('["all", "amazonlinux-2-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'amazonlinux-2-arm64') }} needs: - prepare-workflow - build-pkgs-onedir @@ -617,7 +617,7 @@ jobs: amazonlinux-2023-pkg-tests: name: Amazon Linux 2023 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "amazonlinux-2023"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'amazonlinux-2023') }} needs: - prepare-workflow - build-pkgs-onedir @@ -659,7 +659,7 @@ jobs: centos-7-pkg-tests: name: CentOS 7 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "centos-7"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'centos-7') }} needs: - prepare-workflow - build-pkgs-onedir @@ -680,7 +680,7 @@ jobs: debian-10-pkg-tests: name: Debian 10 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "debian-10"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'debian-10') }} needs: - prepare-workflow - build-pkgs-onedir @@ -701,7 +701,7 @@ jobs: debian-11-pkg-tests: name: Debian 11 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "debian-11"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'debian-11') }} needs: - prepare-workflow - build-pkgs-onedir @@ -722,7 +722,7 @@ jobs: debian-11-arm64-pkg-tests: name: Debian 11 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "debian-11-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'debian-11-arm64') }} needs: - prepare-workflow - build-pkgs-onedir @@ -743,7 +743,7 @@ jobs: debian-12-pkg-tests: name: Debian 12 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "debian-12"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'debian-12') }} needs: - prepare-workflow - build-pkgs-onedir @@ -764,7 +764,7 @@ jobs: debian-12-arm64-pkg-tests: name: Debian 12 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "debian-12-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'debian-12-arm64') }} needs: - prepare-workflow - build-pkgs-onedir @@ -785,7 +785,7 @@ jobs: photonos-4-pkg-tests: name: Photon OS 4 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "photonos-4"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'photonos-4') }} needs: - prepare-workflow - build-pkgs-onedir @@ -807,7 +807,7 @@ jobs: photonos-4-arm64-pkg-tests: name: Photon OS 4 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "photonos-4-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'photonos-4-arm64') }} needs: - prepare-workflow - build-pkgs-onedir @@ -829,7 +829,7 @@ jobs: photonos-5-pkg-tests: name: Photon OS 5 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "photonos-5"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'photonos-5') }} needs: - prepare-workflow - build-pkgs-onedir @@ -851,7 +851,7 @@ jobs: photonos-5-arm64-pkg-tests: name: Photon OS 5 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "photonos-5-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'photonos-5-arm64') }} needs: - prepare-workflow - build-pkgs-onedir @@ -873,7 +873,7 @@ jobs: ubuntu-2004-pkg-tests: name: Ubuntu 20.04 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "ubuntu-20.04"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'ubuntu-20.04') }} 
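# A rough sketch of what the simplified ``if:`` expressions above evaluate,
# assuming ``os-labels`` is the JSON-encoded list that
# ``tools ci get-pr-test-labels`` writes to GITHUB_OUTPUT later in this
# series. When the first argument of GitHub's contains() is a string rather
# than an array, the check is a plain substring test, which is what the
# helper below mimics.
import json

os_labels_output = json.dumps(["debian-11", "photonos-5-arm64"])


def gha_contains(search: str, item: str) -> bool:
    # Substring semantics, mirroring contains() on string inputs.
    return item in search


assert gha_contains(os_labels_output, "debian-11")
assert not gha_contains(os_labels_output, "windows-2022")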
needs: - prepare-workflow - build-pkgs-onedir @@ -894,7 +894,7 @@ jobs: ubuntu-2004-arm64-pkg-tests: name: Ubuntu 20.04 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "ubuntu-20.04-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'ubuntu-20.04-arm64') }} needs: - prepare-workflow - build-pkgs-onedir @@ -915,7 +915,7 @@ jobs: ubuntu-2204-pkg-tests: name: Ubuntu 22.04 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "ubuntu-22.04"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'ubuntu-22.04') }} needs: - prepare-workflow - build-pkgs-onedir @@ -957,7 +957,7 @@ jobs: macos-12-pkg-tests: name: macOS 12 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["all", "macos-12"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'macos-12') }} needs: - prepare-workflow - build-pkgs-onedir @@ -979,7 +979,7 @@ jobs: macos-13-pkg-tests: name: macOS 13 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["all", "macos-13"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'macos-13') }} needs: - prepare-workflow - build-pkgs-onedir @@ -1023,7 +1023,7 @@ jobs: windows-2016-nsis-pkg-tests: name: Windows 2016 NSIS Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "windows-2016"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'windows-2016') }} needs: - prepare-workflow - build-pkgs-onedir @@ -1044,7 +1044,7 @@ jobs: windows-2016-msi-pkg-tests: name: Windows 2016 MSI Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "windows-2016"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'windows-2016') }} needs: - prepare-workflow - build-pkgs-onedir @@ -1065,7 +1065,7 @@ jobs: windows-2019-nsis-pkg-tests: name: Windows 2019 NSIS Package Test - 
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "windows-2019"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'windows-2019') }} needs: - prepare-workflow - build-pkgs-onedir @@ -1086,7 +1086,7 @@ jobs: windows-2019-msi-pkg-tests: name: Windows 2019 MSI Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "windows-2019"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'windows-2019') }} needs: - prepare-workflow - build-pkgs-onedir @@ -1149,7 +1149,7 @@ jobs: windows-2016: name: Windows 2016 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "windows-2016"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'windows-2016') }} needs: - prepare-workflow - build-ci-deps @@ -1170,7 +1170,7 @@ jobs: windows-2019: name: Windows 2019 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "windows-2019"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'windows-2019') }} needs: - prepare-workflow - build-ci-deps @@ -1212,7 +1212,7 @@ jobs: macos-12: name: macOS 12 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["all", "macos-12"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'macos-12') }} needs: - prepare-workflow - build-ci-deps @@ -1234,7 +1234,7 @@ jobs: macos-13: name: macOS 13 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('["all", "macos-13"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'macos-13') }} needs: - prepare-workflow - build-ci-deps @@ -1278,7 +1278,7 @@ jobs: almalinux-8: name: Alma Linux 8 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "almalinux-8"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'almalinux-8') }} needs: - prepare-workflow - build-ci-deps @@ -1299,7 +1299,7 @@ jobs: almalinux-8-arm64: name: Alma Linux 8 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "almalinux-8-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'almalinux-8-arm64') }} needs: - prepare-workflow - build-ci-deps @@ -1341,7 +1341,7 @@ jobs: almalinux-9-arm64: name: Alma Linux 9 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "almalinux-9-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'almalinux-9-arm64') }} needs: - prepare-workflow - build-ci-deps @@ -1362,7 +1362,7 @@ jobs: amazonlinux-2: name: Amazon Linux 2 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "amazonlinux-2"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'amazonlinux-2') }} needs: - prepare-workflow - build-ci-deps @@ -1383,7 +1383,7 @@ jobs: amazonlinux-2-arm64: name: Amazon Linux 2 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "amazonlinux-2-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'amazonlinux-2-arm64') }} needs: - prepare-workflow - build-ci-deps @@ -1404,7 +1404,7 @@ jobs: amazonlinux-2023: name: Amazon Linux 2023 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "amazonlinux-2023"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'amazonlinux-2023') }} needs: - prepare-workflow - build-ci-deps @@ -1467,7 +1467,7 @@ jobs: centos-7: name: CentOS 7 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "centos-7"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'centos-7') }} needs: - prepare-workflow - build-ci-deps @@ -1488,7 +1488,7 @@ jobs: debian-10: name: Debian 10 Test - if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "debian-10"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'debian-10') }} needs: - prepare-workflow - build-ci-deps @@ -1509,7 +1509,7 @@ jobs: debian-11: name: Debian 11 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "debian-11"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'debian-11') }} needs: - prepare-workflow - build-ci-deps @@ -1530,7 +1530,7 @@ jobs: debian-11-arm64: name: Debian 11 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "debian-11-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'debian-11-arm64') }} needs: - prepare-workflow - build-ci-deps @@ -1551,7 +1551,7 @@ jobs: debian-12: name: Debian 12 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "debian-12"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'debian-12') }} needs: - prepare-workflow - build-ci-deps @@ -1572,7 +1572,7 @@ jobs: debian-12-arm64: name: Debian 12 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "debian-12-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'debian-12-arm64') }} needs: - prepare-workflow - build-ci-deps @@ -1593,7 +1593,7 @@ jobs: fedora-39: name: Fedora 39 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "fedora-39"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'fedora-39') }} needs: - prepare-workflow - build-ci-deps @@ -1614,7 +1614,7 @@ jobs: opensuse-15: name: Opensuse 15 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "opensuse-15"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && 
contains(needs.prepare-workflow.outputs.os-labels, 'opensuse-15') }} needs: - prepare-workflow - build-ci-deps @@ -1635,7 +1635,7 @@ jobs: photonos-4: name: Photon OS 4 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "photonos-4"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'photonos-4') }} needs: - prepare-workflow - build-ci-deps @@ -1657,7 +1657,7 @@ jobs: photonos-4-arm64: name: Photon OS 4 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "photonos-4-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'photonos-4-arm64') }} needs: - prepare-workflow - build-ci-deps @@ -1679,7 +1679,7 @@ jobs: photonos-5: name: Photon OS 5 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "photonos-5"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'photonos-5') }} needs: - prepare-workflow - build-ci-deps @@ -1701,7 +1701,7 @@ jobs: photonos-5-arm64: name: Photon OS 5 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "photonos-5-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'photonos-5-arm64') }} needs: - prepare-workflow - build-ci-deps @@ -1723,7 +1723,7 @@ jobs: ubuntu-2004: name: Ubuntu 20.04 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "ubuntu-20.04"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'ubuntu-20.04') }} needs: - prepare-workflow - build-ci-deps @@ -1744,7 +1744,7 @@ jobs: ubuntu-2004-arm64: name: Ubuntu 20.04 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "ubuntu-20.04-arm64"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'ubuntu-20.04-arm64') }} needs: - prepare-workflow - build-ci-deps @@ -1765,7 +1765,7 @@ jobs: ubuntu-2204: name: Ubuntu 22.04 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('["all", "ubuntu-22.04"]'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'ubuntu-22.04') }} needs: - prepare-workflow - build-ci-deps diff --git a/.github/workflows/templates/test-salt-pkg.yml.jinja b/.github/workflows/templates/test-salt-pkg.yml.jinja index d42155e552f..971ebadf51d 100644 --- a/.github/workflows/templates/test-salt-pkg.yml.jinja +++ b/.github/workflows/templates/test-salt-pkg.yml.jinja @@ -7,7 +7,7 @@ <%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} <%- else %> - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('<{ os.os_labels_json() }>'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, '<{ os.slug }>') }} <%- endif %> needs: - prepare-workflow @@ -43,7 +43,7 @@ <%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} <%- else %> - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('<{ os.os_labels_json() }>'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, '<{ os.slug }>') }} <%- endif %> needs: - prepare-workflow @@ -76,7 +76,7 @@ <%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} <%- else %> - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('<{ os.os_labels_json() }>'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, '<{ os.slug }>') }} <%- endif %> needs: - prepare-workflow diff --git a/.github/workflows/templates/test-salt.yml.jinja b/.github/workflows/templates/test-salt.yml.jinja index c3ea4eee146..65583ec9f72 100644 --- a/.github/workflows/templates/test-salt.yml.jinja +++ b/.github/workflows/templates/test-salt.yml.jinja @@ -12,7 +12,7 @@ <%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} <%- else %> - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('<{ os.os_labels_json() }>'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, '<{ os.slug }>') }} <%- endif %> needs: - prepare-workflow @@ -43,7 +43,7 @@ <%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} <%- else %> - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON('<{ os.os_labels_json() }>'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, '<{ os.slug }>') }} <%- endif %> needs: - prepare-workflow @@ -74,7 +74,7 @@ <%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} <%- else %> - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON('<{ os.os_labels_json() }>'), needs.prepare-workflow.outputs.os-labels) }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, '<{ os.slug }>') }} <%- endif %> needs: - prepare-workflow diff --git a/tools/ci.py b/tools/ci.py index ed1c0416630..e470fd666d0 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -949,18 +949,39 @@ def get_pr_test_labels( pr = gh_event["pull_request"]["number"] labels = _get_pr_test_labels_from_event_payload(gh_event) - os_labels = [] + shared_context = tools.utils.get_cicd_shared_context() + mandatory_os_slugs = set(shared_context["mandatory_os_slugs"]) + available = set(tools.utils.get_golden_images()) + # Add MacOS provided by GitHub + available.update({"macos-12", "macos-13", "macos-13-arm64"}) + # Remove mandatory OS'ss + available.difference_update(mandatory_os_slugs) + select_all = set(available) + selected = set() test_labels = [] if labels: ctx.info(f"Test labels for pull-request #{pr} on {repository}:") for name, description in sorted(labels): ctx.info(f" * [yellow]{name}[/yellow]: {description}") - test_labels.append(name) if name.startswith("test:os:"): - os_labels.append(name.split("test:os:", 1)[-1]) + slug = name.split("test:os:", 1)[-1] + if slug not in available and name != "test:os:all": + ctx.warn( + f"The '{slug}' slug exists as a label but not as an available OS." 
+ ) + selected.add(slug) + if slug != "all": + available.remove(slug) + continue + test_labels.append(name) + else: ctx.info(f"No test labels for pull-request #{pr} on {repository}") + if "all" in selected: + selected = select_all + available.clear() + github_output = os.environ.get("GITHUB_OUTPUT") if github_output is None: ctx.exit(0) @@ -970,14 +991,42 @@ def get_pr_test_labels( ctx.info("Writing 'labels' to the github outputs file...") ctx.info("Test Labels:") - for label in sorted(test_labels): - ctx.info(f" * [yellow]{label}[/yellow]") + if not test_labels: + ctx.info(" * None") + else: + for label in sorted(test_labels): + ctx.info(f" * [yellow]{label}[/yellow]") ctx.info("* OS Labels:") - for slug in sorted(selected): - ctx.info(f" * [yellow]{slug}[/yellow]") + if not selected: + ctx.info(" * None") + else: + for slug in sorted(selected): + ctx.info(f" * [yellow]{slug}[/yellow]") with open(github_output, "a", encoding="utf-8") as wfh: - wfh.write(f"os-labels={json.dumps([label for label in os_labels])}\n") + wfh.write(f"os-labels={json.dumps([label for label in selected])}\n") wfh.write(f"test-labels={json.dumps([label for label in test_labels])}\n") + + github_step_summary = os.environ.get("GITHUB_STEP_SUMMARY") + if github_step_summary is not None: + with open(github_step_summary, "a", encoding="utf-8") as wfh: + wfh.write("Mandatory OS Test Runs:\n") + for slug in sorted(mandatory_os_slugs): + wfh.write(f"* `{slug}`\n") + + wfh.write("\nOptional OS Test Runs(selected by label):\n") + if not selected: + wfh.write("* None\n") + else: + for slug in sorted(selected): + wfh.write(f"* `{slug}`\n") + + wfh.write("\nSkipped OS Tests Runs(NOT selected by label):\n") + if not available: + wfh.write("* None\n") + else: + for slug in sorted(available): + wfh.write(f"* `{slug}`\n") + ctx.exit(0) From b62203973cad900e51885aab3ffcaef40f514b04 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 27 Mar 2024 09:18:23 +0000 Subject: [PATCH 086/102] Always print out the list of jobs to run --- .github/workflows/ci.yml | 6 +----- .github/workflows/nightly.yml | 6 +----- .github/workflows/scheduled.yml | 6 +----- .github/workflows/staging.yml | 6 +----- .github/workflows/templates/layout.yml.jinja | 6 +----- tools/ci.py | 7 ++++++- 6 files changed, 11 insertions(+), 26 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 4765edb40d5..05f3b003960 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -202,15 +202,11 @@ jobs: run: | echo '${{ steps.runner-types.outputs.runners }}' | jq -C '.' - - name: Define Jobs + - name: Define Jobs To Run id: define-jobs run: | tools ci define-jobs ${{ github.event_name }} changed-files.json - - name: Check Defined Jobs - run: | - echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.' - - name: Get Salt Releases id: get-salt-releases env: diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 1ff3ccb7db5..2055c3cf41f 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -246,15 +246,11 @@ jobs: run: | echo '${{ steps.runner-types.outputs.runners }}' | jq -C '.' - - name: Define Jobs + - name: Define Jobs To Run id: define-jobs run: | tools ci define-jobs${{ inputs.skip-salt-test-suite && ' --skip-tests' || '' }}${{ inputs.skip-salt-pkg-test-suite && ' --skip-pkg-tests' || '' }} ${{ github.event_name }} changed-files.json - - name: Check Defined Jobs - run: | - echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.' 
- - name: Get Salt Releases id: get-salt-releases env: diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index 524ffc4613b..584bdc3739e 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -236,15 +236,11 @@ jobs: run: | echo '${{ steps.runner-types.outputs.runners }}' | jq -C '.' - - name: Define Jobs + - name: Define Jobs To Run id: define-jobs run: | tools ci define-jobs ${{ github.event_name }} changed-files.json - - name: Check Defined Jobs - run: | - echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.' - - name: Get Salt Releases id: get-salt-releases env: diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 3812b70ea9e..bf66b82a99f 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -241,15 +241,11 @@ jobs: run: | echo '${{ steps.runner-types.outputs.runners }}' | jq -C '.' - - name: Define Jobs + - name: Define Jobs To Run id: define-jobs run: | tools ci define-jobs${{ inputs.skip-salt-test-suite && ' --skip-tests' || '' }}${{ inputs.skip-salt-pkg-test-suite && ' --skip-pkg-tests' || '' }}${{ inputs.skip-salt-pkg-download-test-suite && ' --skip-pkg-download-tests' || '' }} ${{ github.event_name }} changed-files.json - - name: Check Defined Jobs - run: | - echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.' - - name: Get Salt Releases id: get-salt-releases env: diff --git a/.github/workflows/templates/layout.yml.jinja b/.github/workflows/templates/layout.yml.jinja index 211828c4386..5bc899f9902 100644 --- a/.github/workflows/templates/layout.yml.jinja +++ b/.github/workflows/templates/layout.yml.jinja @@ -264,17 +264,13 @@ jobs: run: | echo '${{ steps.runner-types.outputs.runners }}' | jq -C '.' - - name: Define Jobs + - name: Define Jobs To Run id: define-jobs run: | tools ci define-jobs<{ prepare_workflow_skip_test_suite }><{ prepare_workflow_skip_pkg_test_suite }><{ prepare_workflow_skip_pkg_download_test_suite }> ${{ github.event_name }} changed-files.json - - name: Check Defined Jobs - run: | - echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.' 
- - name: Get Salt Releases id: get-salt-releases env: diff --git a/tools/ci.py b/tools/ci.py index e470fd666d0..c9e516f8bd8 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -311,6 +311,11 @@ def define_jobs( if event_name != "pull_request": # In this case, all defined jobs should run + with open(github_step_summary, "a", encoding="utf-8") as wfh: + wfh.write("Selected Jobs:\n") + for name, value in sorted(jobs.items()): + wfh.write(f" - `{name}`: {value}\n") + ctx.info("Writing 'jobs' to the github outputs file") with open(github_output, "a", encoding="utf-8") as wfh: wfh.write(f"jobs={json.dumps(jobs)}\n") @@ -422,7 +427,7 @@ def define_jobs( with open(github_step_summary, "a", encoding="utf-8") as wfh: wfh.write("Selected Jobs:\n") for name, value in sorted(jobs.items()): - wfh.write(f" - {name}: {value}\n") + wfh.write(f" - `{name}`: {value}\n") ctx.info("Writing 'jobs' to the github outputs file") with open(github_output, "a", encoding="utf-8") as wfh: From fd82bcab9cb29b577ec39eb477988e9b2e8234f3 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 27 Mar 2024 09:21:54 +0000 Subject: [PATCH 087/102] Always print out the defined testrun --- .github/workflows/ci.yml | 4 ---- .github/workflows/nightly.yml | 4 ---- .github/workflows/scheduled.yml | 4 ---- .github/workflows/staging.yml | 4 ---- .github/workflows/templates/layout.yml.jinja | 4 ---- tools/ci.py | 2 +- 6 files changed, 1 insertion(+), 21 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 05f3b003960..0c62780cdcf 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -226,10 +226,6 @@ jobs: run: | tools ci define-testrun ${{ github.event_name }} changed-files.json - - name: Check Defined Test Run - run: | - echo '${{ steps.define-testrun.outputs.testrun }}' | jq -C '.' - - name: Check Contents of generated testrun-changed-files.txt if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }} run: | diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 2055c3cf41f..2517b9bcb3e 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -270,10 +270,6 @@ jobs: run: | tools ci define-testrun ${{ github.event_name }} changed-files.json - - name: Check Defined Test Run - run: | - echo '${{ steps.define-testrun.outputs.testrun }}' | jq -C '.' - - name: Check Contents of generated testrun-changed-files.txt if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }} run: | diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index 584bdc3739e..012a99b8d00 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -260,10 +260,6 @@ jobs: run: | tools ci define-testrun ${{ github.event_name }} changed-files.json - - name: Check Defined Test Run - run: | - echo '${{ steps.define-testrun.outputs.testrun }}' | jq -C '.' - - name: Check Contents of generated testrun-changed-files.txt if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }} run: | diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index bf66b82a99f..584cd06797c 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -265,10 +265,6 @@ jobs: run: | tools ci define-testrun ${{ github.event_name }} changed-files.json - - name: Check Defined Test Run - run: | - echo '${{ steps.define-testrun.outputs.testrun }}' | jq -C '.' 
- - name: Check Contents of generated testrun-changed-files.txt if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }} run: | diff --git a/.github/workflows/templates/layout.yml.jinja b/.github/workflows/templates/layout.yml.jinja index 5bc899f9902..fd738b7b4a4 100644 --- a/.github/workflows/templates/layout.yml.jinja +++ b/.github/workflows/templates/layout.yml.jinja @@ -290,10 +290,6 @@ jobs: run: | tools ci define-testrun ${{ github.event_name }} changed-files.json - - name: Check Defined Test Run - run: | - echo '${{ steps.define-testrun.outputs.testrun }}' | jq -C '.' - - name: Check Contents of generated testrun-changed-files.txt if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['type'] != 'full' }} run: | diff --git a/tools/ci.py b/tools/ci.py index c9e516f8bd8..f4be8586340 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -626,7 +626,7 @@ def define_testrun(ctx: Context, event_name: str, changed_files: pathlib.Path): wfh.write(f"{path}\n") wfh.write("\n\n") - ctx.info("Writing 'testrun' to the github outputs file") + ctx.info("Writing 'testrun' to the github outputs file:\n", testrun) with open(github_output, "a", encoding="utf-8") as wfh: wfh.write(f"testrun={json.dumps(testrun)}\n") From 895b761592ef187de6f2225b42ddc2e86e01f6cd Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 27 Mar 2024 09:24:35 +0000 Subject: [PATCH 088/102] Always print the collected runners --- .github/workflows/ci.yml | 4 ---- .github/workflows/nightly.yml | 4 ---- .github/workflows/scheduled.yml | 4 ---- .github/workflows/staging.yml | 4 ---- .github/workflows/templates/layout.yml.jinja | 4 ---- tools/ci.py | 8 ++++---- 6 files changed, 4 insertions(+), 24 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 0c62780cdcf..55b612aa9ff 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -198,10 +198,6 @@ jobs: run: | tools ci runner-types ${{ github.event_name }} - - name: Check Defined Runners - run: | - echo '${{ steps.runner-types.outputs.runners }}' | jq -C '.' - - name: Define Jobs To Run id: define-jobs run: | diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 2517b9bcb3e..67b658a81cb 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -242,10 +242,6 @@ jobs: run: | tools ci runner-types ${{ github.event_name }} - - name: Check Defined Runners - run: | - echo '${{ steps.runner-types.outputs.runners }}' | jq -C '.' - - name: Define Jobs To Run id: define-jobs run: | diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index 012a99b8d00..e92b3eecc57 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -232,10 +232,6 @@ jobs: run: | tools ci runner-types ${{ github.event_name }} - - name: Check Defined Runners - run: | - echo '${{ steps.runner-types.outputs.runners }}' | jq -C '.' - - name: Define Jobs To Run id: define-jobs run: | diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 584cd06797c..d3ed1920952 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -237,10 +237,6 @@ jobs: run: | tools ci runner-types ${{ github.event_name }} - - name: Check Defined Runners - run: | - echo '${{ steps.runner-types.outputs.runners }}' | jq -C '.' 
- - name: Define Jobs To Run id: define-jobs run: | diff --git a/.github/workflows/templates/layout.yml.jinja b/.github/workflows/templates/layout.yml.jinja index fd738b7b4a4..244877ec68b 100644 --- a/.github/workflows/templates/layout.yml.jinja +++ b/.github/workflows/templates/layout.yml.jinja @@ -260,10 +260,6 @@ jobs: run: | tools ci runner-types ${{ github.event_name }} - - name: Check Defined Runners - run: | - echo '${{ steps.runner-types.outputs.runners }}' | jq -C '.' - - name: Define Jobs To Run id: define-jobs run: | diff --git a/tools/ci.py b/tools/ci.py index f4be8586340..1f0ca25f810 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -202,7 +202,7 @@ def runner_types(ctx: Context, event_name: str): # If this is a pull request coming from the same repository, don't run anything ctx.info("Pull request is coming from the same repository.") ctx.info("Not running any jobs since they will run against the branch") - ctx.info("Writing 'runners' to the github outputs file") + ctx.info("Writing 'runners' to the github outputs file:\n", runners) with open(github_output, "a", encoding="utf-8") as wfh: wfh.write(f"runners={json.dumps(runners)}\n") ctx.exit(0) @@ -210,7 +210,7 @@ def runner_types(ctx: Context, event_name: str): # This is a PR from a forked repository ctx.info("Pull request is not comming from the same repository") runners["github-hosted"] = runners["self-hosted"] = True - ctx.info("Writing 'runners' to the github outputs file") + ctx.info("Writing 'runners' to the github outputs file:\n", runners) with open(github_output, "a", encoding="utf-8") as wfh: wfh.write(f"runners={json.dumps(runners)}\n") ctx.exit(0) @@ -224,7 +224,7 @@ def runner_types(ctx: Context, event_name: str): # This is running on a forked repository, don't run tests ctx.info("The push event is on a forked repository") runners["github-hosted"] = True - ctx.info("Writing 'runners' to the github outputs file") + ctx.info("Writing 'runners' to the github outputs file:\n", runners) with open(github_output, "a", encoding="utf-8") as wfh: wfh.write(f"runners={json.dumps(runners)}\n") ctx.exit(0) @@ -232,7 +232,7 @@ def runner_types(ctx: Context, event_name: str): # Not running on a fork, or the fork has self hosted runners, run everything ctx.info(f"The {event_name!r} event is from the main repository") runners["github-hosted"] = runners["self-hosted"] = True - ctx.info("Writing 'runners' to the github outputs file") + ctx.info("Writing 'runners' to the github outputs file:\n", runners) with open(github_output, "a", encoding="utf-8") as wfh: wfh.write(f"runners={json.dumps(runners)}") ctx.exit(0) From 027d95a3bca688bc3ee488e5e1f06158cccbcc49 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 27 Mar 2024 22:54:39 +0000 Subject: [PATCH 089/102] Fix cache prefixes --- .github/workflows/build-deps-onedir.yml | 6 +++--- .github/workflows/build-salt-onedir.yml | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/build-deps-onedir.yml b/.github/workflows/build-deps-onedir.yml index 8a13c7e3777..df4d699a87c 100644 --- a/.github/workflows/build-deps-onedir.yml +++ b/.github/workflows/build-deps-onedir.yml @@ -65,7 +65,7 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts with: - cache-prefix: ${{ inputs.cache-seed }}-build-deps-linux-${{ matrix.arch }} + cache-prefix: ${{ inputs.cache-seed }}|build-deps|linux|${{ matrix.arch }} - name: Setup Relenv id: setup-relenv @@ -116,7 +116,7 @@ jobs: - name: Setup Python Tools Scripts uses: 
./.github/actions/setup-python-tools-scripts with: - cache-prefix: ${{ inputs.cache-seed }}-build-deps-macos + cache-prefix: ${{ inputs.cache-seed }}|build-deps|macos - name: Setup Relenv id: setup-relenv @@ -166,7 +166,7 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts with: - cache-prefix: ${{ inputs.cache-seed }}-build-deps-windows-${{ matrix.arch }} + cache-prefix: ${{ inputs.cache-seed }}|build-deps|windows|${{ matrix.arch }} - name: Setup Relenv id: setup-relenv diff --git a/.github/workflows/build-salt-onedir.yml b/.github/workflows/build-salt-onedir.yml index 7913860cf7d..ff4b7a3857f 100644 --- a/.github/workflows/build-salt-onedir.yml +++ b/.github/workflows/build-salt-onedir.yml @@ -65,7 +65,7 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts with: - cache-prefix: ${{ inputs.cache-seed }}-build-salt-onedir-windows + cache-prefix: ${{ inputs.cache-seed }}|build-salt-onedir|linux - name: Setup Salt Version id: setup-salt-version @@ -130,7 +130,7 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts with: - cache-prefix: ${{ inputs.cache-seed }}-build-salt-onedir-macos + cache-prefix: ${{ inputs.cache-seed }}|build-salt-onedir|macos - name: Setup Salt Version id: setup-salt-version @@ -185,7 +185,7 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts with: - cache-prefix: ${{ inputs.cache-seed }}-build-salt-onedir-macos + cache-prefix: ${{ inputs.cache-seed }}|build-salt-onedir|windows - name: Setup Salt Version id: setup-salt-version From 4eccba4f2dcd9c8b2972dddbb49e433fb608a65c Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 28 Mar 2024 11:27:48 +0000 Subject: [PATCH 090/102] Check against an array, not a string --- .github/workflows/ci.yml | 100 +++++++++--------- .../templates/test-salt-pkg.yml.jinja | 6 +- .../workflows/templates/test-salt.yml.jinja | 6 +- 3 files changed, 56 insertions(+), 56 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 55b612aa9ff..8b86865fd21 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -479,7 +479,7 @@ jobs: almalinux-8-pkg-tests: name: Alma Linux 8 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'almalinux-8') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'almalinux-8') }} needs: - prepare-workflow - build-pkgs-onedir @@ -500,7 +500,7 @@ jobs: almalinux-8-arm64-pkg-tests: name: Alma Linux 8 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'almalinux-8-arm64') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'almalinux-8-arm64') }} needs: - prepare-workflow - build-pkgs-onedir @@ -542,7 +542,7 @@ jobs: almalinux-9-arm64-pkg-tests: name: Alma Linux 9 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && 
contains(needs.prepare-workflow.outputs.os-labels, 'almalinux-9-arm64') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'almalinux-9-arm64') }} needs: - prepare-workflow - build-pkgs-onedir @@ -563,7 +563,7 @@ jobs: amazonlinux-2-pkg-tests: name: Amazon Linux 2 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'amazonlinux-2') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'amazonlinux-2') }} needs: - prepare-workflow - build-pkgs-onedir @@ -584,7 +584,7 @@ jobs: amazonlinux-2-arm64-pkg-tests: name: Amazon Linux 2 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'amazonlinux-2-arm64') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'amazonlinux-2-arm64') }} needs: - prepare-workflow - build-pkgs-onedir @@ -605,7 +605,7 @@ jobs: amazonlinux-2023-pkg-tests: name: Amazon Linux 2023 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'amazonlinux-2023') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'amazonlinux-2023') }} needs: - prepare-workflow - build-pkgs-onedir @@ -647,7 +647,7 @@ jobs: centos-7-pkg-tests: name: CentOS 7 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'centos-7') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'centos-7') }} needs: - prepare-workflow - build-pkgs-onedir @@ -668,7 +668,7 @@ jobs: debian-10-pkg-tests: name: Debian 10 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'debian-10') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-10') }} needs: - prepare-workflow - build-pkgs-onedir @@ -689,7 +689,7 @@ jobs: debian-11-pkg-tests: name: Debian 11 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'debian-11') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] 
&& contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-11') }} needs: - prepare-workflow - build-pkgs-onedir @@ -710,7 +710,7 @@ jobs: debian-11-arm64-pkg-tests: name: Debian 11 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'debian-11-arm64') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-11-arm64') }} needs: - prepare-workflow - build-pkgs-onedir @@ -731,7 +731,7 @@ jobs: debian-12-pkg-tests: name: Debian 12 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'debian-12') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-12') }} needs: - prepare-workflow - build-pkgs-onedir @@ -752,7 +752,7 @@ jobs: debian-12-arm64-pkg-tests: name: Debian 12 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'debian-12-arm64') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-12-arm64') }} needs: - prepare-workflow - build-pkgs-onedir @@ -773,7 +773,7 @@ jobs: photonos-4-pkg-tests: name: Photon OS 4 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'photonos-4') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-4') }} needs: - prepare-workflow - build-pkgs-onedir @@ -795,7 +795,7 @@ jobs: photonos-4-arm64-pkg-tests: name: Photon OS 4 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'photonos-4-arm64') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-4-arm64') }} needs: - prepare-workflow - build-pkgs-onedir @@ -817,7 +817,7 @@ jobs: photonos-5-pkg-tests: name: Photon OS 5 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'photonos-5') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-5') }} needs: - prepare-workflow - build-pkgs-onedir @@ -839,7 +839,7 @@ jobs: photonos-5-arm64-pkg-tests: name: Photon OS 5 Arm64 Package 
Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'photonos-5-arm64') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-5-arm64') }} needs: - prepare-workflow - build-pkgs-onedir @@ -861,7 +861,7 @@ jobs: ubuntu-2004-pkg-tests: name: Ubuntu 20.04 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'ubuntu-20.04') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-20.04') }} needs: - prepare-workflow - build-pkgs-onedir @@ -882,7 +882,7 @@ jobs: ubuntu-2004-arm64-pkg-tests: name: Ubuntu 20.04 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'ubuntu-20.04-arm64') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-20.04-arm64') }} needs: - prepare-workflow - build-pkgs-onedir @@ -903,7 +903,7 @@ jobs: ubuntu-2204-pkg-tests: name: Ubuntu 22.04 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'ubuntu-22.04') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-22.04') }} needs: - prepare-workflow - build-pkgs-onedir @@ -945,7 +945,7 @@ jobs: macos-12-pkg-tests: name: macOS 12 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'macos-12') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'macos-12') }} needs: - prepare-workflow - build-pkgs-onedir @@ -967,7 +967,7 @@ jobs: macos-13-pkg-tests: name: macOS 13 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'macos-13') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'macos-13') }} needs: - prepare-workflow - build-pkgs-onedir @@ -1011,7 +1011,7 @@ jobs: windows-2016-nsis-pkg-tests: name: Windows 2016 NSIS Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'windows-2016') }} 
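A note on why these `if:` expressions change from `contains(needs.prepare-workflow.outputs.os-labels, '<slug>')` to `contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<slug>')`: when its first argument is a plain string, the GitHub Actions `contains()` function performs a substring search, whereas on a decoded array it checks for an exact element. The Python analogy below shows the difference; the sample output value is hypothetical.

import json

# The raw step output is a JSON-encoded string (hypothetical example value).
os_labels_output = '["almalinux-8-arm64"]'

# String search (old expression): substring match, so 'almalinux-8' is found
# inside 'almalinux-8-arm64' and the job would run when it should not.
print("almalinux-8" in os_labels_output)              # True

# Array membership (new expression, contains(fromJSON(...), ...)): the decoded
# list has no exact 'almalinux-8' element, so the job is correctly skipped.
print("almalinux-8" in json.loads(os_labels_output))  # False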
+ if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'windows-2016') }} needs: - prepare-workflow - build-pkgs-onedir @@ -1032,7 +1032,7 @@ jobs: windows-2016-msi-pkg-tests: name: Windows 2016 MSI Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'windows-2016') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'windows-2016') }} needs: - prepare-workflow - build-pkgs-onedir @@ -1053,7 +1053,7 @@ jobs: windows-2019-nsis-pkg-tests: name: Windows 2019 NSIS Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'windows-2019') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'windows-2019') }} needs: - prepare-workflow - build-pkgs-onedir @@ -1074,7 +1074,7 @@ jobs: windows-2019-msi-pkg-tests: name: Windows 2019 MSI Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'windows-2019') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'windows-2019') }} needs: - prepare-workflow - build-pkgs-onedir @@ -1137,7 +1137,7 @@ jobs: windows-2016: name: Windows 2016 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'windows-2016') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'windows-2016') }} needs: - prepare-workflow - build-ci-deps @@ -1158,7 +1158,7 @@ jobs: windows-2019: name: Windows 2019 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'windows-2019') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'windows-2019') }} needs: - prepare-workflow - build-ci-deps @@ -1200,7 +1200,7 @@ jobs: macos-12: name: macOS 12 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'macos-12') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'macos-12') }} needs: - prepare-workflow - build-ci-deps @@ -1222,7 +1222,7 @@ jobs: 
macos-13: name: macOS 13 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'macos-13') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'macos-13') }} needs: - prepare-workflow - build-ci-deps @@ -1266,7 +1266,7 @@ jobs: almalinux-8: name: Alma Linux 8 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'almalinux-8') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'almalinux-8') }} needs: - prepare-workflow - build-ci-deps @@ -1287,7 +1287,7 @@ jobs: almalinux-8-arm64: name: Alma Linux 8 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'almalinux-8-arm64') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'almalinux-8-arm64') }} needs: - prepare-workflow - build-ci-deps @@ -1329,7 +1329,7 @@ jobs: almalinux-9-arm64: name: Alma Linux 9 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'almalinux-9-arm64') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'almalinux-9-arm64') }} needs: - prepare-workflow - build-ci-deps @@ -1350,7 +1350,7 @@ jobs: amazonlinux-2: name: Amazon Linux 2 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'amazonlinux-2') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'amazonlinux-2') }} needs: - prepare-workflow - build-ci-deps @@ -1371,7 +1371,7 @@ jobs: amazonlinux-2-arm64: name: Amazon Linux 2 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'amazonlinux-2-arm64') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'amazonlinux-2-arm64') }} needs: - prepare-workflow - build-ci-deps @@ -1392,7 +1392,7 @@ jobs: amazonlinux-2023: name: Amazon Linux 2023 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'amazonlinux-2023') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'amazonlinux-2023') }} needs: - prepare-workflow - build-ci-deps @@ -1455,7 +1455,7 @@ jobs: centos-7: name: CentOS 7 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'centos-7') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'centos-7') }} needs: - prepare-workflow - build-ci-deps @@ -1476,7 +1476,7 @@ jobs: debian-10: name: Debian 10 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'debian-10') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-10') }} needs: - prepare-workflow - build-ci-deps @@ -1497,7 +1497,7 @@ jobs: debian-11: name: Debian 11 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'debian-11') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-11') }} needs: - prepare-workflow - build-ci-deps @@ -1518,7 +1518,7 @@ jobs: debian-11-arm64: name: Debian 11 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'debian-11-arm64') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-11-arm64') }} needs: - prepare-workflow - build-ci-deps @@ -1539,7 +1539,7 @@ jobs: debian-12: name: Debian 12 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'debian-12') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-12') }} needs: - prepare-workflow - build-ci-deps @@ -1560,7 +1560,7 @@ jobs: debian-12-arm64: name: Debian 12 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'debian-12-arm64') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'debian-12-arm64') }} needs: - prepare-workflow - build-ci-deps @@ -1581,7 +1581,7 @@ jobs: fedora-39: name: Fedora 39 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && 
contains(needs.prepare-workflow.outputs.os-labels, 'fedora-39') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'fedora-39') }} needs: - prepare-workflow - build-ci-deps @@ -1602,7 +1602,7 @@ jobs: opensuse-15: name: Opensuse 15 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'opensuse-15') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'opensuse-15') }} needs: - prepare-workflow - build-ci-deps @@ -1623,7 +1623,7 @@ jobs: photonos-4: name: Photon OS 4 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'photonos-4') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-4') }} needs: - prepare-workflow - build-ci-deps @@ -1645,7 +1645,7 @@ jobs: photonos-4-arm64: name: Photon OS 4 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'photonos-4-arm64') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-4-arm64') }} needs: - prepare-workflow - build-ci-deps @@ -1667,7 +1667,7 @@ jobs: photonos-5: name: Photon OS 5 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'photonos-5') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-5') }} needs: - prepare-workflow - build-ci-deps @@ -1689,7 +1689,7 @@ jobs: photonos-5-arm64: name: Photon OS 5 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'photonos-5-arm64') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'photonos-5-arm64') }} needs: - prepare-workflow - build-ci-deps @@ -1711,7 +1711,7 @@ jobs: ubuntu-2004: name: Ubuntu 20.04 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'ubuntu-20.04') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-20.04') }} needs: - prepare-workflow - build-ci-deps @@ -1732,7 +1732,7 @@ jobs: ubuntu-2004-arm64: name: Ubuntu 
20.04 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'ubuntu-20.04-arm64') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-20.04-arm64') }} needs: - prepare-workflow - build-ci-deps @@ -1753,7 +1753,7 @@ jobs: ubuntu-2204: name: Ubuntu 22.04 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, 'ubuntu-22.04') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'ubuntu-22.04') }} needs: - prepare-workflow - build-ci-deps diff --git a/.github/workflows/templates/test-salt-pkg.yml.jinja b/.github/workflows/templates/test-salt-pkg.yml.jinja index 971ebadf51d..1a34ad8e7fd 100644 --- a/.github/workflows/templates/test-salt-pkg.yml.jinja +++ b/.github/workflows/templates/test-salt-pkg.yml.jinja @@ -7,7 +7,7 @@ <%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} <%- else %> - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, '<{ os.slug }>') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }} <%- endif %> needs: - prepare-workflow @@ -43,7 +43,7 @@ <%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} <%- else %> - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, '<{ os.slug }>') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }} <%- endif %> needs: - prepare-workflow @@ -76,7 +76,7 @@ <%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} <%- else %> - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, '<{ os.slug }>') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }} <%- endif %> needs: - prepare-workflow diff --git a/.github/workflows/templates/test-salt.yml.jinja b/.github/workflows/templates/test-salt.yml.jinja index 65583ec9f72..7e89bf57994 100644 --- 
a/.github/workflows/templates/test-salt.yml.jinja +++ b/.github/workflows/templates/test-salt.yml.jinja @@ -12,7 +12,7 @@ <%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} <%- else %> - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, '<{ os.slug }>') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }} <%- endif %> needs: - prepare-workflow @@ -43,7 +43,7 @@ <%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} <%- else %> - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, '<{ os.slug }>') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }} <%- endif %> needs: - prepare-workflow @@ -74,7 +74,7 @@ <%- if workflow_slug != "ci" or os.slug in mandatory_os_slugs %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} <%- else %> - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(needs.prepare-workflow.outputs.os-labels, '<{ os.slug }>') }} + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), '<{ os.slug }>') }} <%- endif %> needs: - prepare-workflow From 4166a8ba925abaaa63ec18dcd89ea68e061cee87 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 28 Mar 2024 13:53:50 +0000 Subject: [PATCH 091/102] For now, ignore the host keys --- tests/pytests/integration/netapi/test_ssh_client.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/pytests/integration/netapi/test_ssh_client.py b/tests/pytests/integration/netapi/test_ssh_client.py index 11f25d9bd47..189ba0a8265 100644 --- a/tests/pytests/integration/netapi/test_ssh_client.py +++ b/tests/pytests/integration/netapi/test_ssh_client.py @@ -198,6 +198,7 @@ def test_shell_inject_tgt(client, salt_ssh_roster_file, tmp_path, salt_auto_acco "eauth": "auto", "username": salt_auto_account.username, "password": salt_auto_account.password, + "ignore_host_keys": True, } ret = client.run(low) assert path.exists() is False @@ -252,6 +253,7 @@ def test_shell_inject_ssh_port( "roster_file": str(salt_ssh_roster_file), "rosters": "/", "ssh_port": f"hhhhh|id>{path} #", + "ignore_host_keys": True, } ret = client.run(low) assert path.exists() is False From 5cab9dff7dd5f96b873dbaddce01d235285e8876 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 28 Mar 2024 18:23:30 +0000 Subject: [PATCH 092/102] We still need VC Redist on 3007.x --- pkg/windows/msi/Product.wxs | 10 ++ pkg/windows/msi/README-how-to-build.md | 2 + pkg/windows/msi/build_pkg.ps1 | 31 ++++ 
.../nsis/installer/Salt-Minion-Setup.nsi | 166 ++++++++++++++++++ pkg/windows/nsis/tests/setup.ps1 | 17 +- pkg/windows/prep_salt.ps1 | 24 +++ 6 files changed, 249 insertions(+), 1 deletion(-) diff --git a/pkg/windows/msi/Product.wxs b/pkg/windows/msi/Product.wxs index 9893a32ab0f..121788db9a0 100644 --- a/pkg/windows/msi/Product.wxs +++ b/pkg/windows/msi/Product.wxs @@ -246,6 +246,15 @@ IMCAC - Immediate Custom Action - It's immediate + + + + + + + + + @@ -258,6 +267,7 @@ IMCAC - Immediate Custom Action - It's immediate + diff --git a/pkg/windows/msi/README-how-to-build.md b/pkg/windows/msi/README-how-to-build.md index 34327ba3ab6..1e84dc3a35d 100644 --- a/pkg/windows/msi/README-how-to-build.md +++ b/pkg/windows/msi/README-how-to-build.md @@ -10,6 +10,8 @@ You need - .Net 3.5 SDK (for WiX)* - [Wix 3](http://wixtoolset.org/releases/)** - [Build tools 2015](https://www.microsoft.com/en-US/download/confirmation.aspx?id=48159)** +- Microsoft_VC143_CRT_x64.msm from Visual Studio 2015** +- Microsoft_VC143_CRT_x86.msm from Visual Studio 2015** Notes: - * `build.cmd` will open `optionalfeatures` if necessary. diff --git a/pkg/windows/msi/build_pkg.ps1 b/pkg/windows/msi/build_pkg.ps1 index 15a1778d455..11c531590ea 100644 --- a/pkg/windows/msi/build_pkg.ps1 +++ b/pkg/windows/msi/build_pkg.ps1 @@ -76,6 +76,8 @@ function VerifyOrDownload ($local_file, $URL, $SHA256) { # Script Variables #------------------------------------------------------------------------------- +$WEBCACHE_DIR = "$env:TEMP\msi_build_cache_dir" +$DEPS_URL = "https://repo.saltproject.io/windows/dependencies" $PROJECT_DIR = $(git rev-parse --show-toplevel) $BUILD_DIR = "$PROJECT_DIR\pkg\windows\build" $BUILDENV_DIR = "$PROJECT_DIR\pkg\windows\buildenv" @@ -122,6 +124,21 @@ Write-Host "- Architecture: $BUILD_ARCH" Write-Host "- Salt Version: $Version" Write-Host $("-" * 80) +#------------------------------------------------------------------------------- +# Ensure cache dir exists +#------------------------------------------------------------------------------- + +if ( ! (Test-Path -Path $WEBCACHE_DIR) ) { + Write-Host "Creating cache directory: " -NoNewline + New-Item -ItemType directory -Path $WEBCACHE_DIR | Out-Null + if ( Test-Path -Path $WEBCACHE_DIR ) { + Write-Result "Success" -ForegroundColor Green + } else { + Write-Result "Failed" -ForegroundColor Red + exit 1 + } +} + #------------------------------------------------------------------------------- # Ensure WIX environment variable is set, if not refresh and check again #------------------------------------------------------------------------------- @@ -142,6 +159,19 @@ if ( ! 
"$env:WIX" ) { } } +#------------------------------------------------------------------------------- +# Caching VC++ Runtimes +#------------------------------------------------------------------------------- + +$RUNTIMES = @( + ("Microsoft_VC143_CRT_x64.msm", "64", "F209B8906063A79B0DFFBB55D3C20AC0A676252DD4F5377CFCD148C409C859EC"), + ("Microsoft_VC143_CRT_x86.msm", "32", "B187BD73C7DC0BA353C5D3A6D9D4E63EF72435F8E68273466F30E5496C1A86F7") +) +$RUNTIMES | ForEach-Object { + $name, $arch, $hash = $_ + VerifyOrDownload "$WEBCACHE_DIR\$name" "$DEPS_URL/$arch/$name" "$hash" +} + #------------------------------------------------------------------------------- # Converting to MSI Version #------------------------------------------------------------------------------- @@ -578,6 +608,7 @@ Push-Location $SCRIPT_DIR -dDisplayVersion="$Version" ` -dInternalVersion="$INTERNAL_VERSION" ` -dDISCOVER_INSTALLDIR="$($DISCOVER_INSTALLDIR[$i])" ` + -dWEBCACHE_DIR="$WEBCACHE_DIR" ` -dDISCOVER_CONFDIR="$DISCOVER_CONFDIR" ` -ext "$($ENV:WIX)bin\WixUtilExtension.dll" ` -ext "$($ENV:WIX)bin\WixUIExtension.dll" ` diff --git a/pkg/windows/nsis/installer/Salt-Minion-Setup.nsi b/pkg/windows/nsis/installer/Salt-Minion-Setup.nsi index 59ca96c76f6..72a66811080 100644 --- a/pkg/windows/nsis/installer/Salt-Minion-Setup.nsi +++ b/pkg/windows/nsis/installer/Salt-Minion-Setup.nsi @@ -524,6 +524,171 @@ InstallDirRegKey HKLM "${PRODUCT_DIR_REGKEY}" "" ShowInstDetails show ShowUnInstDetails show + +Section -copy_prereqs + # Copy prereqs to the Plugins Directory + # These files are downloaded by build_pkg.bat + # This directory gets removed upon completion + SetOutPath "$PLUGINSDIR\" + File /r "..\..\prereqs\" +SectionEnd + +# Check if the Windows 10 Universal C Runtime (KB2999226) is installed. Python +# 3 needs the updated ucrt on Windows 8.1/2012R2 and lower. They are installed +# via KB2999226, but we're not going to patch the system here. Instead, we're +# going to copy the .dll files to the \salt\bin directory +Section -install_ucrt + + Var /GLOBAL UcrtFileName + + # Get the Major.Minor version Number + # Windows 10 introduced CurrentMajorVersionNumber + ReadRegStr $R0 HKLM "SOFTWARE\Microsoft\Windows NT\CurrentVersion" \ + CurrentMajorVersionNumber + + # Windows 10/2016 will return a value here, skip to the end if returned + StrCmp $R0 '' lbl_needs_ucrt 0 + + # Found Windows 10 + detailPrint "KB2999226 does not apply to this machine" + goto lbl_done + + lbl_needs_ucrt: + # UCRT only needed on Windows Server 2012R2/Windows 8.1 and below. The + # first ReadRegStr command above should have skipped to lbl_done if on + # Windows 10 box + + # Is the update already installed + ClearErrors + + # Use WMI to check if it's installed + detailPrint "Checking for existing UCRT (KB2999226) installation" + nsExec::ExecToStack 'cmd /q /c wmic qfe get hotfixid | findstr "^KB2999226"' + # Clean up the stack + Pop $R0 # Gets the ErrorCode + Pop $R1 # Gets the stdout, which should be KB2999226 if it's installed + + # If it returned KB2999226 it's already installed + StrCmp $R1 'KB2999226' lbl_done + + detailPrint "UCRT (KB2999226) not found" + + # Use RunningX64 here to get the Architecture for the system running the + # installer. 
+ ${If} ${RunningX64} + StrCpy $UcrtFileName "ucrt_x64.zip" + ${Else} + StrCpy $UcrtFileName "ucrt_x86.zip" + ${EndIf} + + ClearErrors + + detailPrint "Unzipping UCRT dll files to $INSTDIR\Scripts" + CreateDirectory $INSTDIR\Scripts + nsisunz::UnzipToLog "$PLUGINSDIR\$UcrtFileName" "$INSTDIR\Scripts" + + # Clean up the stack + Pop $R0 # Get Error + + ${IfNot} $R0 == "success" + detailPrint "error: $R0" + Sleep 3000 + ${Else} + detailPrint "UCRT dll files copied successfully" + ${EndIf} + + lbl_done: + +SectionEnd + + +# Check and install Visual C++ redist 2022 packages +# Hidden section (-) to install VCRedist +Section -install_vcredist_2022 + + Var /GLOBAL VcRedistName + Var /GLOBAL VcRedistReg + + # Only install 64bit VCRedist on 64bit machines + # Use RunningX64 here to get the Architecture for the system running the + # installer. + ${If} ${RunningX64} + StrCpy $VcRedistName "vcredist_x64_2022" + StrCpy $VcRedistReg "SOFTWARE\WOW6432Node\Microsoft\VisualStudio\14.0\VC\Runtimes\x64" + ${Else} + StrCpy $VcRedistName "vcredist_x86_2022" + StrCpy $VcRedistReg "SOFTWARE\Microsoft\VisualStudio\14.0\VC\Runtimes\x86" + ${EndIf} + + # Detecting VCRedist Installation + detailPrint "Checking for $VcRedistName..." + ReadRegDword $0 HKLM $VcRedistReg "Installed" + StrCmp $0 "1" +2 0 + Call InstallVCRedist + +SectionEnd + + +Function InstallVCRedist + detailPrint "System requires $VcRedistName" + MessageBox MB_ICONQUESTION|MB_YESNO|MB_DEFBUTTON2 \ + "$VcRedistName is currently not installed. Would you like to \ + install?" \ + /SD IDYES IDYES InstallVcRedist + + detailPrint "$VcRedistName not installed" + detailPrint ">>>Installation aborted by user<<<" + MessageBox MB_ICONEXCLAMATION \ + "$VcRedistName not installed. Aborted by user.$\n$\n\ + Installer will now close." \ + /SD IDOK + Quit + + InstallVcRedist: + + # If an output variable is specified ($0 in the case below), ExecWait + # sets the variable with the exit code (and only sets the error flag if + # an error occurs; if an error occurs, the contents of the user + # variable are undefined). + # http://nsis.sourceforge.net/Reference/ExecWait + ClearErrors + detailPrint "Installing $VcRedistName..." + ExecWait '"$PLUGINSDIR\$VcRedistName.exe" /install /quiet /norestart' $0 + + IfErrors 0 CheckVcRedistErrorCode + + detailPrint "An error occurred during installation of $VcRedistName" + MessageBox MB_OK|MB_ICONEXCLAMATION \ + "$VcRedistName failed to install. Try installing the package \ + manually.$\n$\n\ + The installer will now close." \ + /SD IDOK + Quit + + CheckVcRedistErrorCode: + # Check for Reboot Error Code (3010) + ${If} $0 == 3010 + detailPrint "$VcRedistName installed but requires a restart to complete." + detailPrint "Reboot and run Salt install again" + MessageBox MB_OK|MB_ICONINFORMATION \ + "$VcRedistName installed but requires a restart to complete." \ + /SD IDOK + + # Check for any other errors + ${ElseIfNot} $0 == 0 + detailPrint "An error occurred during installation of $VcRedistName" + detailPrint "Error: $0" + MessageBox MB_OK|MB_ICONEXCLAMATION \ + "$VcRedistName failed to install. Try installing the package \ + mnually.$\n\ + ErrorCode: $0$\n\ + The installer will now close." 
\ + /SD IDOK + ${EndIf} + +FunctionEnd + + Section "MainSection" SEC01 ${If} $MoveExistingConfig == 1 @@ -598,6 +763,7 @@ Function .onInit ${EndIf} ${EndIf} + InitPluginsDir Call parseInstallerCommandLineSwitches # Uninstall msi-installed salt diff --git a/pkg/windows/nsis/tests/setup.ps1 b/pkg/windows/nsis/tests/setup.ps1 index c5d8b7459a6..37ca0f74640 100644 --- a/pkg/windows/nsis/tests/setup.ps1 +++ b/pkg/windows/nsis/tests/setup.ps1 @@ -35,6 +35,7 @@ $SCRIPT_DIR = (Get-ChildItem "$($myInvocation.MyCommand.Definition)").Directo $WINDOWS_DIR = "$PROJECT_DIR\pkg\windows" $NSIS_DIR = "$WINDOWS_DIR\nsis" $BUILDENV_DIR = "$WINDOWS_DIR\buildenv" +$PREREQS_DIR = "$WINDOWS_DIR\prereqs" $NSIS_BIN = "$( ${env:ProgramFiles(x86)} )\NSIS\makensis.exe" #------------------------------------------------------------------------------- @@ -49,7 +50,8 @@ Write-Host $("-" * 80) # Setup Directories #------------------------------------------------------------------------------- -$directories = "$BUILDENV_DIR", +$directories = "$PREREQS_DIR", + "$BUILDENV_DIR", "$BUILDENV_DIR\configs" $directories | ForEach-Object { if ( ! (Test-Path -Path "$_") ) { @@ -68,6 +70,19 @@ $directories | ForEach-Object { # Create binaries #------------------------------------------------------------------------------- +$prereq_files = "vcredist_x86_2022.exe", + "vcredist_x64_2022.exe", +$prereq_files | ForEach-Object { + Write-Host "Creating $_`: " -NoNewline + Set-Content -Path "$PREREQS_DIR\$_" -Value "binary" + if ( Test-Path -Path "$PREREQS_DIR\$_" ) { + Write-Result "Success" + } else { + Write-Result "Failed" -ForegroundColor Red + exit 1 + } +} + $binary_files = "ssm.exe", "python.exe" $binary_files | ForEach-Object { diff --git a/pkg/windows/prep_salt.ps1 b/pkg/windows/prep_salt.ps1 index ed3f079713d..a583b17c985 100644 --- a/pkg/windows/prep_salt.ps1 +++ b/pkg/windows/prep_salt.ps1 @@ -62,6 +62,7 @@ if ( $BuildDir ) { } else { $BUILD_DIR = "$SCRIPT_DIR\buildenv" } +$PREREQ_DIR = "$SCRIPT_DIR\prereqs" $SCRIPTS_DIR = "$BUILD_DIR\Scripts" $BUILD_CONF_DIR = "$BUILD_DIR\configs" $SITE_PKGS_DIR = "$BUILD_DIR\Lib\site-packages" @@ -125,6 +126,17 @@ if ( Test-Path -Path $BUILD_CONF_DIR) { } } +if ( Test-Path -Path $PREREQ_DIR ) { + Write-Host "Removing PreReq Directory: " -NoNewline + Remove-Item -Path $PREREQ_DIR -Recurse -Force + if ( ! 
(Test-Path -Path $PREREQ_DIR) ) { + Write-Result "Success" -ForegroundColor Green + } else { + Write-Result "Failed" -ForegroundColor Red + exit 1 + } +} + #------------------------------------------------------------------------------- # Staging the Build Environment #------------------------------------------------------------------------------- @@ -171,6 +183,18 @@ $scripts | ForEach-Object { } } +# Copy VCRedist 2022 to the prereqs directory +New-Item -Path $PREREQ_DIR -ItemType Directory | Out-Null +Write-Host "Copying VCRedist 2022 $ARCH_X to prereqs: " -NoNewline +$file = "vcredist_$ARCH_X`_2022.exe" +Invoke-WebRequest -Uri "$SALT_DEP_URL/$file" -OutFile "$PREREQ_DIR\$file" +if ( Test-Path -Path "$PREREQ_DIR\$file" ) { + Write-Result "Success" -ForegroundColor Green +} else { + Write-Result "Failed" -ForegroundColor Red + exit 1 +} + #------------------------------------------------------------------------------- # Remove binaries not needed by Salt #------------------------------------------------------------------------------- From ed345d06e7ebf225e54f815fdae6a4417ecc456f Mon Sep 17 00:00:00 2001 From: nicholasmhughes Date: Thu, 28 Mar 2024 12:40:03 -0400 Subject: [PATCH 093/102] fixes saltstack/salt#66284 x509.certificate_managed - ca_server did not return a certificate --- changelog/66284.fixed.md | 1 + salt/utils/x509.py | 4 +- tests/pytests/functional/utils/test_x509.py | 114 ++++++++++++++++++++ 3 files changed, 118 insertions(+), 1 deletion(-) create mode 100644 changelog/66284.fixed.md create mode 100644 tests/pytests/functional/utils/test_x509.py diff --git a/changelog/66284.fixed.md b/changelog/66284.fixed.md new file mode 100644 index 00000000000..a8299a70161 --- /dev/null +++ b/changelog/66284.fixed.md @@ -0,0 +1 @@ +Fixed x509.certificate_managed - ca_server did not return a certificate diff --git a/salt/utils/x509.py b/salt/utils/x509.py index e9575875c10..ad8bb30fb04 100644 --- a/salt/utils/x509.py +++ b/salt/utils/x509.py @@ -1051,7 +1051,9 @@ def load_file_or_bytes(fob): with salt.utils.files.fopen(fob, "rb") as f: fob = f.read() if isinstance(fob, str): - if PEM_BEGIN.decode() in fob: + if fob.startswith("b64:"): + fob = base64.b64decode(fob[4:]) + elif PEM_BEGIN.decode() in fob: fob = fob.encode() else: try: diff --git a/tests/pytests/functional/utils/test_x509.py b/tests/pytests/functional/utils/test_x509.py new file mode 100644 index 00000000000..87b1e53c491 --- /dev/null +++ b/tests/pytests/functional/utils/test_x509.py @@ -0,0 +1,114 @@ +from textwrap import dedent + +import pytest + +import salt.utils.x509 as x509 + +try: + import cryptography.x509 as cx509 + + HAS_LIBS = True +except ImportError: + HAS_LIBS = False + +pytestmark = [ + pytest.mark.skipif(HAS_LIBS is False, reason="Needs cryptography library") +] + + +@pytest.fixture +def b64cert_with_prefix(): + return ( + "b64:MIIF6jCCA9KgAwIBAgIUHkYQ5opY8AXgK7RNSqUtMcltnqMwDQYJKoZIhvcNAQELBQAwSTELMAkGA1UEBhMCVV" + "MxCzAJBgNVBAgMAk1EMRMwEQYDVQQHDApTeWtlc3ZpbGxlMRgwFgYDVQQDDA9jYS5jZHguZWl0ci5kZXYwHhcNMjQw" + "MzI3MTg0MzU0WhcNMjQwNDI2MTg0MzU0WjBLMQswCQYDVQQGEwJVUzELMAkGA1UECAwCTUQxEzARBgNVBAcMClN5a2" + "VzdmlsbGUxGjAYBgNVBAMMEW5pZmkuY2R4LmVpdHIuZGV2MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA" + "zEhNiCogpOdh6kK+wkh+rBe8/zyE6O0XjcWaEm+i/dhG35KU/c6zZhmkNObtrEwvrqIIKpca2h3IaRb6FAp2VpedGy" + "4/bVihEVRymZOtGo8Yex74THmokkngTfnxyfyZdULc7YL7Pi/FPejcCy8lWypcnLzpTnw0qx2GmRmENyrXvqrB429L" + "HzefZv/FCDPZixqkUuaK3iPqhJd83HXb9BOyi8BtF6b7qrnds0KlivIO/zCUZnfOn2610Dja82eSFASkgDbNJsJn37" + 
"ktEhbHGtkkCVD6zBH0p0dgXnjQ8Ml0+QJIoSl8RBe2EkZ0ZIMKHIOfleOBOI6Cd2CYyDWjRxD3nFqcRnNGhLNBspm8" + "s8C+3e1iyZQ224fy6BA5FHp3M0UX6ct1+M3JzxxLAbSuG8pc4MC3DLGDK4OlLbAnpFYqBAALs5OKTptxU4eEZqdFfj" + "9PFNknU1lFVrqGFbaE/oRrORsznNFZm3gxRSIvNtDuBJOYUl4KsYHjOjM/G3jRzc1+1K7wVpMoO/kdjIo2zhMEbBTw" + "Lx0xrgBQzzVLLmsib4cFts8zELFkB5nGl1mv2+KSOjQ+gpQtn0lkYSY7iVfVSt13JRY7mIOTnmjHj5mRguvgbr3dNa" + "VfQMCJD7pOMBaxO5O0aiwVE8KjNz9WEDqrzW0BG+ei3fLosDIvbIkCAwEAAaOBxzCBxDAMBgNVHRMBAf8EAjAAMA4G" + "A1UdDwEB/wQEAwIFIDAdBgNVHQ4EFgQUTOTqSBdqbMm4lLxIupUhsTeYPXMwgYQGA1UdIwR9MHuAFBN3hzb/2SCZZl" + "BiHUIZYTJXQZIMoU2kSzBJMQswCQYDVQQGEwJVUzELMAkGA1UECAwCTUQxEzARBgNVBAcMClN5a2VzdmlsbGUxGDAW" + "BgNVBAMMD2NhLmNkeC5laXRyLmRldoIUDVzffz0J8C716U6jXZszcredC1owDQYJKoZIhvcNAQELBQADggIBAGSS/d" + "iai+Imm2559MzTYK5qvCVWCDaizAgH6JZeLZGf9Mk7IEZrS3I9UtjnVH9q4VON5KJtz+CvYU/t+el0AsEfns8Tw/Ff" + "MBTD7cBFBBPtIPxpYh0nzpEvxI8sxKkFt1vmDMuYiBGkPx1OTLwTbL6EbAJznooiWIg0n59Wd1Jn3U8Q4O6/yLy23x" + "ZA/xUSjgIbTXOctBzYC47FwNyjcaQ70gLZJC/pCd+hUoojBaAUHNfuzK0RqF7eP6W67nGVyA1h/B87FG0y6tmuRWWl" + "jwyAz/Nvjb2SXWkgxxkS4ZPZt6z+R8FsRSbMuIR5CeOyMeKUbQfc3hWvII9c7mZkZRYnxUuFqpwUlOWnNX1ufikBQE" + "OOyta3n/Lbj59+QBmPU8ok+RBfyCEKDVw5DAhu95gj6rdxUeWrGLteR8o0O/n6JGnM0B5kJ7y2NnaLa06QYzJUmSs5" + "/icBRwyGSL3Gw9GkkRpGNViRIMpcrqGvr5bYxFeNkQGqiB+0vxiD6s1DOz7djY4K03ZUGYLe3X73CKu+AxbhC95sz6" + "hWURdotqO4CUb9Nd82sY2HCDBFPEFnT1RD+Xi6nkULvHkquhYVV3eHC4LtvhlHjF1LufZ7xOYoteScZL5WvumvrdNS" + "9naI8BZkWtsTl98Z2GhuZPKpOQtMOPXC38qEuNc5UPJhb3Oa" + ) + + +@pytest.fixture +def b64cert(b64cert_with_prefix): + return b64cert_with_prefix[4:] + + +@pytest.fixture +def pemcert(): + return dedent( + """-----BEGIN CERTIFICATE----- + MIIF6jCCA9KgAwIBAgIUHkYQ5opY8AXgK7RNSqUtMcltnqMwDQYJKoZIhvcNAQEL + BQAwSTELMAkGA1UEBhMCVVMxCzAJBgNVBAgMAk1EMRMwEQYDVQQHDApTeWtlc3Zp + bGxlMRgwFgYDVQQDDA9jYS5jZHguZWl0ci5kZXYwHhcNMjQwMzI3MTg0MzU0WhcN + MjQwNDI2MTg0MzU0WjBLMQswCQYDVQQGEwJVUzELMAkGA1UECAwCTUQxEzARBgNV + BAcMClN5a2VzdmlsbGUxGjAYBgNVBAMMEW5pZmkuY2R4LmVpdHIuZGV2MIICIjAN + BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAzEhNiCogpOdh6kK+wkh+rBe8/zyE + 6O0XjcWaEm+i/dhG35KU/c6zZhmkNObtrEwvrqIIKpca2h3IaRb6FAp2VpedGy4/ + bVihEVRymZOtGo8Yex74THmokkngTfnxyfyZdULc7YL7Pi/FPejcCy8lWypcnLzp + Tnw0qx2GmRmENyrXvqrB429LHzefZv/FCDPZixqkUuaK3iPqhJd83HXb9BOyi8Bt + F6b7qrnds0KlivIO/zCUZnfOn2610Dja82eSFASkgDbNJsJn37ktEhbHGtkkCVD6 + zBH0p0dgXnjQ8Ml0+QJIoSl8RBe2EkZ0ZIMKHIOfleOBOI6Cd2CYyDWjRxD3nFqc + RnNGhLNBspm8s8C+3e1iyZQ224fy6BA5FHp3M0UX6ct1+M3JzxxLAbSuG8pc4MC3 + DLGDK4OlLbAnpFYqBAALs5OKTptxU4eEZqdFfj9PFNknU1lFVrqGFbaE/oRrORsz + nNFZm3gxRSIvNtDuBJOYUl4KsYHjOjM/G3jRzc1+1K7wVpMoO/kdjIo2zhMEbBTw + Lx0xrgBQzzVLLmsib4cFts8zELFkB5nGl1mv2+KSOjQ+gpQtn0lkYSY7iVfVSt13 + JRY7mIOTnmjHj5mRguvgbr3dNaVfQMCJD7pOMBaxO5O0aiwVE8KjNz9WEDqrzW0B + G+ei3fLosDIvbIkCAwEAAaOBxzCBxDAMBgNVHRMBAf8EAjAAMA4GA1UdDwEB/wQE + AwIFIDAdBgNVHQ4EFgQUTOTqSBdqbMm4lLxIupUhsTeYPXMwgYQGA1UdIwR9MHuA + FBN3hzb/2SCZZlBiHUIZYTJXQZIMoU2kSzBJMQswCQYDVQQGEwJVUzELMAkGA1UE + CAwCTUQxEzARBgNVBAcMClN5a2VzdmlsbGUxGDAWBgNVBAMMD2NhLmNkeC5laXRy + LmRldoIUDVzffz0J8C716U6jXZszcredC1owDQYJKoZIhvcNAQELBQADggIBAGSS + /diai+Imm2559MzTYK5qvCVWCDaizAgH6JZeLZGf9Mk7IEZrS3I9UtjnVH9q4VON + 5KJtz+CvYU/t+el0AsEfns8Tw/FfMBTD7cBFBBPtIPxpYh0nzpEvxI8sxKkFt1vm + DMuYiBGkPx1OTLwTbL6EbAJznooiWIg0n59Wd1Jn3U8Q4O6/yLy23xZA/xUSjgIb + TXOctBzYC47FwNyjcaQ70gLZJC/pCd+hUoojBaAUHNfuzK0RqF7eP6W67nGVyA1h + /B87FG0y6tmuRWWljwyAz/Nvjb2SXWkgxxkS4ZPZt6z+R8FsRSbMuIR5CeOyMeKU + bQfc3hWvII9c7mZkZRYnxUuFqpwUlOWnNX1ufikBQEOOyta3n/Lbj59+QBmPU8ok + +RBfyCEKDVw5DAhu95gj6rdxUeWrGLteR8o0O/n6JGnM0B5kJ7y2NnaLa06QYzJU + 
mSs5/icBRwyGSL3Gw9GkkRpGNViRIMpcrqGvr5bYxFeNkQGqiB+0vxiD6s1DOz7d + jY4K03ZUGYLe3X73CKu+AxbhC95sz6hWURdotqO4CUb9Nd82sY2HCDBFPEFnT1RD + +Xi6nkULvHkquhYVV3eHC4LtvhlHjF1LufZ7xOYoteScZL5WvumvrdNS9naI8BZk + WtsTl98Z2GhuZPKpOQtMOPXC38qEuNc5UPJhb3Oa + -----END CERTIFICATE-----""" + ) + + +def test_load_file_or_bytes_base64_der_with_b64_prefix(b64cert_with_prefix): + der = x509.load_file_or_bytes(b64cert_with_prefix) + cert = cx509.load_der_x509_certificate(der) + assert ( + cert.subject.rfc4514_string() == "CN=nifi.cdx.eitr.dev,L=Sykesville,ST=MD,C=US" + ) + + +def test_load_file_or_bytes_base64_der(b64cert): + der = x509.load_file_or_bytes(b64cert) + cert = cx509.load_der_x509_certificate(der) + assert ( + cert.subject.rfc4514_string() == "CN=nifi.cdx.eitr.dev,L=Sykesville,ST=MD,C=US" + ) + + +def test_load_file_or_bytes_pem(pemcert): + pem = x509.load_file_or_bytes(pemcert) + cert = cx509.load_pem_x509_certificate(pem) + assert ( + cert.subject.rfc4514_string() == "CN=nifi.cdx.eitr.dev,L=Sykesville,ST=MD,C=US" + ) From 025b3f073e3961f5f43ca3fb69cd6ef35668304f Mon Sep 17 00:00:00 2001 From: nicholasmhughes Date: Sun, 31 Mar 2024 11:05:03 -0400 Subject: [PATCH 094/102] use importorskip --- tests/pytests/functional/utils/test_x509.py | 11 +---------- 1 file changed, 1 insertion(+), 10 deletions(-) diff --git a/tests/pytests/functional/utils/test_x509.py b/tests/pytests/functional/utils/test_x509.py index 87b1e53c491..43d7832b494 100644 --- a/tests/pytests/functional/utils/test_x509.py +++ b/tests/pytests/functional/utils/test_x509.py @@ -4,16 +4,7 @@ import pytest import salt.utils.x509 as x509 -try: - import cryptography.x509 as cx509 - - HAS_LIBS = True -except ImportError: - HAS_LIBS = False - -pytestmark = [ - pytest.mark.skipif(HAS_LIBS is False, reason="Needs cryptography library") -] +cx509 = pytest.importorskip("cryptography.x509") @pytest.fixture From 0690b15be6ff4750424b0f3a81e9c4904e84419c Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 1 Apr 2024 20:41:20 +0100 Subject: [PATCH 095/102] Windows needs more complex passwords. Use the auto generated one. Reduce fixture duplication. 
--- tests/pytests/conftest.py | 2 +- tests/pytests/integration/client/conftest.py | 6 ------ tests/pytests/integration/conftest.py | 6 ++++++ tests/pytests/integration/netapi/test_ssh_client.py | 6 ------ tests/pytests/integration/wheel/conftest.py | 6 ------ 5 files changed, 7 insertions(+), 19 deletions(-) diff --git a/tests/pytests/conftest.py b/tests/pytests/conftest.py index ad90853d370..591f1baafce 100644 --- a/tests/pytests/conftest.py +++ b/tests/pytests/conftest.py @@ -54,7 +54,7 @@ def salt_eauth_account_factory(): @pytest.fixture(scope="session") def salt_auto_account_factory(): - return TestAccount(username="saltdev_auto", password="saltdev") + return TestAccount(username="saltdev-auto") @pytest.fixture(scope="session") diff --git a/tests/pytests/integration/client/conftest.py b/tests/pytests/integration/client/conftest.py index b6f42b734a4..71156af257f 100644 --- a/tests/pytests/integration/client/conftest.py +++ b/tests/pytests/integration/client/conftest.py @@ -13,12 +13,6 @@ def client_config(salt_master): return config -@pytest.fixture(scope="module") -def salt_auto_account(salt_auto_account_factory): - with salt_auto_account_factory as account: - yield account - - @pytest.fixture def auth_creds(salt_auto_account): return { diff --git a/tests/pytests/integration/conftest.py b/tests/pytests/integration/conftest.py index de99d98bf3d..a1b88d18020 100644 --- a/tests/pytests/integration/conftest.py +++ b/tests/pytests/integration/conftest.py @@ -107,3 +107,9 @@ def salt_ssh_cli(salt_master, salt_ssh_roster_file, sshd_config_dir): client_key=str(sshd_config_dir / "client_key"), base_script_args=["--ignore-host-keys"], ) + + +@pytest.fixture(scope="module") +def salt_auto_account(salt_auto_account_factory): + with salt_auto_account_factory as account: + yield account diff --git a/tests/pytests/integration/netapi/test_ssh_client.py b/tests/pytests/integration/netapi/test_ssh_client.py index 53c5910b476..9b834512fb8 100644 --- a/tests/pytests/integration/netapi/test_ssh_client.py +++ b/tests/pytests/integration/netapi/test_ssh_client.py @@ -62,12 +62,6 @@ def salt_auth_account_1(salt_auth_account_1_factory): yield account -@pytest.fixture(scope="module") -def salt_auto_account(salt_auto_account_factory): - with salt_auto_account_factory as account: - yield account - - def test_ssh(client, auth_creds, salt_ssh_roster_file, rosters_dir, ssh_priv_key): low = { "client": "ssh", diff --git a/tests/pytests/integration/wheel/conftest.py b/tests/pytests/integration/wheel/conftest.py index 3244fa05bfb..267a8c43674 100644 --- a/tests/pytests/integration/wheel/conftest.py +++ b/tests/pytests/integration/wheel/conftest.py @@ -18,12 +18,6 @@ def client(client_config): return salt.wheel.Wheel(client_config) -@pytest.fixture(scope="module") -def salt_auto_account(salt_auto_account_factory): - with salt_auto_account_factory as account: - yield account - - @pytest.fixture def auth_creds(salt_auto_account): return { From 42dd9fea5db29df4bdd4bd3f6a2eb94f03bd4518 Mon Sep 17 00:00:00 2001 From: ScriptAutomate Date: Thu, 28 Mar 2024 16:52:20 -0500 Subject: [PATCH 096/102] Replace AlmaLinux with Rocky Linux --- .github/workflows/ci.yml | 84 ++++---- .github/workflows/nightly.yml | 72 +++---- .github/workflows/scheduled.yml | 72 +++---- .github/workflows/staging.yml | 64 +++--- .../test-package-downloads-action.yml | 36 ++-- cicd/golden-images.json | 192 +++++++++--------- cicd/shared-gh-workflows-context.yml | 2 +- tests/integration/pillar/test_git_pillar.py | 6 +- 
.../functional/states/pkgrepo/test_centos.py | 2 +- tests/pytests/functional/states/test_pkg.py | 2 +- .../pkg/integration/test_enabled_disabled.py | 9 +- .../pytests/pkg/integration/test_salt_user.py | 1 + .../pkg/integration/test_systemd_config.py | 2 + tests/pytests/pkg/integration/test_version.py | 1 + tests/support/pkg.py | 7 +- tools/ci.py | 2 +- tools/precommit/workflows.py | 32 +-- tools/utils/gh.py | 2 +- 18 files changed, 302 insertions(+), 286 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8b86865fd21..0d23bbba778 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -477,16 +477,16 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}" - almalinux-8-pkg-tests: - name: Alma Linux 8 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'almalinux-8') }} + rockylinux-8-pkg-tests: + name: Rocky Linux 8 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'rockylinux-8') }} needs: - prepare-workflow - build-pkgs-onedir - build-ci-deps uses: ./.github/workflows/test-packages-action-linux.yml with: - distro-slug: almalinux-8 + distro-slug: rockylinux-8 nox-session: ci-test-onedir platform: linux arch: x86_64 @@ -498,16 +498,16 @@ jobs: skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - almalinux-8-arm64-pkg-tests: - name: Alma Linux 8 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'almalinux-8-arm64') }} + rockylinux-8-arm64-pkg-tests: + name: Rocky Linux 8 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'rockylinux-8-arm64') }} needs: - prepare-workflow - build-pkgs-onedir - build-ci-deps uses: ./.github/workflows/test-packages-action-linux.yml with: - distro-slug: almalinux-8-arm64 + distro-slug: rockylinux-8-arm64 nox-session: ci-test-onedir platform: linux arch: arm64 @@ -519,8 +519,8 @@ jobs: skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - almalinux-9-pkg-tests: - name: Alma Linux 9 Package Test + rockylinux-9-pkg-tests: + name: Rocky Linux 9 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -528,7 +528,7 @@ jobs: - build-ci-deps uses: ./.github/workflows/test-packages-action-linux.yml with: - distro-slug: almalinux-9 + distro-slug: rockylinux-9 nox-session: ci-test-onedir platform: linux arch: x86_64 @@ -540,16 +540,16 @@ jobs: skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - 
almalinux-9-arm64-pkg-tests: - name: Alma Linux 9 Arm64 Package Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'almalinux-9-arm64') }} + rockylinux-9-arm64-pkg-tests: + name: Rocky Linux 9 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'rockylinux-9-arm64') }} needs: - prepare-workflow - build-pkgs-onedir - build-ci-deps uses: ./.github/workflows/test-packages-action-linux.yml with: - distro-slug: almalinux-9-arm64 + distro-slug: rockylinux-9-arm64 nox-session: ci-test-onedir platform: linux arch: arm64 @@ -1264,15 +1264,15 @@ jobs: workflow-slug: ci default-timeout: 180 - almalinux-8: - name: Alma Linux 8 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'almalinux-8') }} + rockylinux-8: + name: Rocky Linux 8 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'rockylinux-8') }} needs: - prepare-workflow - build-ci-deps uses: ./.github/workflows/test-action-linux.yml with: - distro-slug: almalinux-8 + distro-slug: rockylinux-8 nox-session: ci-test-onedir platform: linux arch: x86_64 @@ -1285,15 +1285,15 @@ jobs: workflow-slug: ci default-timeout: 180 - almalinux-8-arm64: - name: Alma Linux 8 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'almalinux-8-arm64') }} + rockylinux-8-arm64: + name: Rocky Linux 8 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'rockylinux-8-arm64') }} needs: - prepare-workflow - build-ci-deps uses: ./.github/workflows/test-action-linux.yml with: - distro-slug: almalinux-8-arm64 + distro-slug: rockylinux-8-arm64 nox-session: ci-test-onedir platform: linux arch: arm64 @@ -1306,15 +1306,15 @@ jobs: workflow-slug: ci default-timeout: 180 - almalinux-9: - name: Alma Linux 9 Test + rockylinux-9: + name: Rocky Linux 9 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - build-ci-deps uses: ./.github/workflows/test-action-linux.yml with: - distro-slug: almalinux-9 + distro-slug: rockylinux-9 nox-session: ci-test-onedir platform: linux arch: x86_64 @@ -1327,15 +1327,15 @@ jobs: workflow-slug: ci default-timeout: 180 - almalinux-9-arm64: - name: Alma Linux 9 Arm64 Test - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 'almalinux-9-arm64') }} + rockylinux-9-arm64: + name: Rocky Linux 9 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] && contains(fromJSON(needs.prepare-workflow.outputs.os-labels), 
'rockylinux-9-arm64') }} needs: - prepare-workflow - build-ci-deps uses: ./.github/workflows/test-action-linux.yml with: - distro-slug: almalinux-9-arm64 + distro-slug: rockylinux-9-arm64 nox-session: ci-test-onedir platform: linux arch: arm64 @@ -1806,10 +1806,10 @@ jobs: - macos-12 - macos-13 - macos-13-arm64 - - almalinux-8 - - almalinux-8-arm64 - - almalinux-9 - - almalinux-9-arm64 + - rockylinux-8 + - rockylinux-8-arm64 + - rockylinux-9 + - rockylinux-9-arm64 - amazonlinux-2 - amazonlinux-2-arm64 - amazonlinux-2023 @@ -1960,10 +1960,10 @@ jobs: - macos-12 - macos-13 - macos-13-arm64 - - almalinux-8 - - almalinux-8-arm64 - - almalinux-9 - - almalinux-9-arm64 + - rockylinux-8 + - rockylinux-8-arm64 + - rockylinux-9 + - rockylinux-9-arm64 - amazonlinux-2 - amazonlinux-2-arm64 - amazonlinux-2023 @@ -1985,10 +1985,10 @@ jobs: - ubuntu-2004-arm64 - ubuntu-2204 - ubuntu-2204-arm64 - - almalinux-8-pkg-tests - - almalinux-8-arm64-pkg-tests - - almalinux-9-pkg-tests - - almalinux-9-arm64-pkg-tests + - rockylinux-8-pkg-tests + - rockylinux-8-arm64-pkg-tests + - rockylinux-9-pkg-tests + - rockylinux-9-arm64-pkg-tests - amazonlinux-2-pkg-tests - amazonlinux-2-arm64-pkg-tests - amazonlinux-2023-pkg-tests diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 67b658a81cb..5414064ab00 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -534,8 +534,8 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}" - almalinux-8-pkg-tests: - name: Alma Linux 8 Package Test + rockylinux-8-pkg-tests: + name: Rocky Linux 8 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -543,7 +543,7 @@ jobs: - build-ci-deps uses: ./.github/workflows/test-packages-action-linux.yml with: - distro-slug: almalinux-8 + distro-slug: rockylinux-8 nox-session: ci-test-onedir platform: linux arch: x86_64 @@ -555,8 +555,8 @@ jobs: skip-code-coverage: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - almalinux-8-arm64-pkg-tests: - name: Alma Linux 8 Arm64 Package Test + rockylinux-8-arm64-pkg-tests: + name: Rocky Linux 8 Arm64 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -564,7 +564,7 @@ jobs: - build-ci-deps uses: ./.github/workflows/test-packages-action-linux.yml with: - distro-slug: almalinux-8-arm64 + distro-slug: rockylinux-8-arm64 nox-session: ci-test-onedir platform: linux arch: arm64 @@ -576,8 +576,8 @@ jobs: skip-code-coverage: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - almalinux-9-pkg-tests: - name: Alma Linux 9 Package Test + rockylinux-9-pkg-tests: + name: Rocky Linux 9 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -585,7 +585,7 @@ jobs: - build-ci-deps uses: ./.github/workflows/test-packages-action-linux.yml with: - distro-slug: almalinux-9 + distro-slug: rockylinux-9 nox-session: ci-test-onedir platform: linux arch: x86_64 @@ -597,8 +597,8 @@ jobs: skip-code-coverage: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - almalinux-9-arm64-pkg-tests: - name: Alma Linux 9 Arm64 Package Test + 
rockylinux-9-arm64-pkg-tests: + name: Rocky Linux 9 Arm64 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -606,7 +606,7 @@ jobs: - build-ci-deps uses: ./.github/workflows/test-packages-action-linux.yml with: - distro-slug: almalinux-9-arm64 + distro-slug: rockylinux-9-arm64 nox-session: ci-test-onedir platform: linux arch: arm64 @@ -1321,15 +1321,15 @@ jobs: workflow-slug: nightly default-timeout: 360 - almalinux-8: - name: Alma Linux 8 Test + rockylinux-8: + name: Rocky Linux 8 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - build-ci-deps uses: ./.github/workflows/test-action-linux.yml with: - distro-slug: almalinux-8 + distro-slug: rockylinux-8 nox-session: ci-test-onedir platform: linux arch: x86_64 @@ -1342,15 +1342,15 @@ jobs: workflow-slug: nightly default-timeout: 360 - almalinux-8-arm64: - name: Alma Linux 8 Arm64 Test + rockylinux-8-arm64: + name: Rocky Linux 8 Arm64 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - build-ci-deps uses: ./.github/workflows/test-action-linux.yml with: - distro-slug: almalinux-8-arm64 + distro-slug: rockylinux-8-arm64 nox-session: ci-test-onedir platform: linux arch: arm64 @@ -1363,15 +1363,15 @@ jobs: workflow-slug: nightly default-timeout: 360 - almalinux-9: - name: Alma Linux 9 Test + rockylinux-9: + name: Rocky Linux 9 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - build-ci-deps uses: ./.github/workflows/test-action-linux.yml with: - distro-slug: almalinux-9 + distro-slug: rockylinux-9 nox-session: ci-test-onedir platform: linux arch: x86_64 @@ -1384,15 +1384,15 @@ jobs: workflow-slug: nightly default-timeout: 360 - almalinux-9-arm64: - name: Alma Linux 9 Arm64 Test + rockylinux-9-arm64: + name: Rocky Linux 9 Arm64 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - build-ci-deps uses: ./.github/workflows/test-action-linux.yml with: - distro-slug: almalinux-9-arm64 + distro-slug: rockylinux-9-arm64 nox-session: ci-test-onedir platform: linux arch: arm64 @@ -1863,10 +1863,10 @@ jobs: - macos-12 - macos-13 - macos-13-arm64 - - almalinux-8 - - almalinux-8-arm64 - - almalinux-9 - - almalinux-9-arm64 + - rockylinux-8 + - rockylinux-8-arm64 + - rockylinux-9 + - rockylinux-9-arm64 - amazonlinux-2 - amazonlinux-2-arm64 - amazonlinux-2023 @@ -2773,10 +2773,10 @@ jobs: - macos-12 - macos-13 - macos-13-arm64 - - almalinux-8 - - almalinux-8-arm64 - - almalinux-9 - - almalinux-9-arm64 + - rockylinux-8 + - rockylinux-8-arm64 + - rockylinux-9 + - rockylinux-9-arm64 - amazonlinux-2 - amazonlinux-2-arm64 - amazonlinux-2023 @@ -2856,10 +2856,10 @@ jobs: - build-pkgs-src - combine-all-code-coverage - publish-repositories - - almalinux-8-pkg-tests - - almalinux-8-arm64-pkg-tests - - almalinux-9-pkg-tests - - almalinux-9-arm64-pkg-tests + - rockylinux-8-pkg-tests + - rockylinux-8-arm64-pkg-tests + - rockylinux-9-pkg-tests + - rockylinux-9-arm64-pkg-tests - amazonlinux-2-pkg-tests - amazonlinux-2-arm64-pkg-tests - amazonlinux-2023-pkg-tests diff --git a/.github/workflows/scheduled.yml 
b/.github/workflows/scheduled.yml index e92b3eecc57..b1ad18f9295 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -511,8 +511,8 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}" - almalinux-8-pkg-tests: - name: Alma Linux 8 Package Test + rockylinux-8-pkg-tests: + name: Rocky Linux 8 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -520,7 +520,7 @@ jobs: - build-ci-deps uses: ./.github/workflows/test-packages-action-linux.yml with: - distro-slug: almalinux-8 + distro-slug: rockylinux-8 nox-session: ci-test-onedir platform: linux arch: x86_64 @@ -532,8 +532,8 @@ jobs: skip-code-coverage: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - almalinux-8-arm64-pkg-tests: - name: Alma Linux 8 Arm64 Package Test + rockylinux-8-arm64-pkg-tests: + name: Rocky Linux 8 Arm64 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -541,7 +541,7 @@ jobs: - build-ci-deps uses: ./.github/workflows/test-packages-action-linux.yml with: - distro-slug: almalinux-8-arm64 + distro-slug: rockylinux-8-arm64 nox-session: ci-test-onedir platform: linux arch: arm64 @@ -553,8 +553,8 @@ jobs: skip-code-coverage: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - almalinux-9-pkg-tests: - name: Alma Linux 9 Package Test + rockylinux-9-pkg-tests: + name: Rocky Linux 9 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -562,7 +562,7 @@ jobs: - build-ci-deps uses: ./.github/workflows/test-packages-action-linux.yml with: - distro-slug: almalinux-9 + distro-slug: rockylinux-9 nox-session: ci-test-onedir platform: linux arch: x86_64 @@ -574,8 +574,8 @@ jobs: skip-code-coverage: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - almalinux-9-arm64-pkg-tests: - name: Alma Linux 9 Arm64 Package Test + rockylinux-9-arm64-pkg-tests: + name: Rocky Linux 9 Arm64 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -583,7 +583,7 @@ jobs: - build-ci-deps uses: ./.github/workflows/test-packages-action-linux.yml with: - distro-slug: almalinux-9-arm64 + distro-slug: rockylinux-9-arm64 nox-session: ci-test-onedir platform: linux arch: arm64 @@ -1298,15 +1298,15 @@ jobs: workflow-slug: scheduled default-timeout: 360 - almalinux-8: - name: Alma Linux 8 Test + rockylinux-8: + name: Rocky Linux 8 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - build-ci-deps uses: ./.github/workflows/test-action-linux.yml with: - distro-slug: almalinux-8 + distro-slug: rockylinux-8 nox-session: ci-test-onedir platform: linux arch: x86_64 @@ -1319,15 +1319,15 @@ jobs: workflow-slug: scheduled default-timeout: 360 - almalinux-8-arm64: - name: Alma Linux 8 Arm64 Test + rockylinux-8-arm64: + name: Rocky Linux 8 Arm64 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - build-ci-deps uses: ./.github/workflows/test-action-linux.yml with: - distro-slug: almalinux-8-arm64 + distro-slug: rockylinux-8-arm64 nox-session: ci-test-onedir platform: linux arch: arm64 @@ -1340,15 +1340,15 @@ jobs: workflow-slug: scheduled default-timeout: 360 - almalinux-9: - name: Alma Linux 9 Test + rockylinux-9: + name: Rocky Linux 9 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - build-ci-deps uses: ./.github/workflows/test-action-linux.yml with: - distro-slug: almalinux-9 + distro-slug: rockylinux-9 nox-session: ci-test-onedir platform: linux arch: x86_64 @@ -1361,15 +1361,15 @@ jobs: workflow-slug: scheduled default-timeout: 360 - almalinux-9-arm64: - name: Alma Linux 9 Arm64 Test + rockylinux-9-arm64: + name: Rocky Linux 9 Arm64 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - build-ci-deps uses: ./.github/workflows/test-action-linux.yml with: - distro-slug: almalinux-9-arm64 + distro-slug: rockylinux-9-arm64 nox-session: ci-test-onedir platform: linux arch: arm64 @@ -1840,10 +1840,10 @@ jobs: - macos-12 - macos-13 - macos-13-arm64 - - almalinux-8 - - almalinux-8-arm64 - - almalinux-9 - - almalinux-9-arm64 + - rockylinux-8 + - rockylinux-8-arm64 + - rockylinux-9 + - rockylinux-9-arm64 - amazonlinux-2 - amazonlinux-2-arm64 - amazonlinux-2023 @@ -1996,10 +1996,10 @@ jobs: - macos-12 - macos-13 - macos-13-arm64 - - almalinux-8 - - almalinux-8-arm64 - - almalinux-9 - - almalinux-9-arm64 + - rockylinux-8 + - rockylinux-8-arm64 + - rockylinux-9 + - rockylinux-9-arm64 - amazonlinux-2 - amazonlinux-2-arm64 - amazonlinux-2023 @@ -2021,10 +2021,10 @@ jobs: - ubuntu-2004-arm64 - ubuntu-2204 - ubuntu-2204-arm64 - - almalinux-8-pkg-tests - - almalinux-8-arm64-pkg-tests - - almalinux-9-pkg-tests - - almalinux-9-arm64-pkg-tests + - rockylinux-8-pkg-tests + - rockylinux-8-arm64-pkg-tests + - rockylinux-9-pkg-tests + - rockylinux-9-arm64-pkg-tests - amazonlinux-2-pkg-tests - amazonlinux-2-arm64-pkg-tests - amazonlinux-2023-pkg-tests diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index d3ed1920952..3270e64ba8c 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -524,8 +524,8 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 nox-archive-hash: "${{ needs.prepare-workflow.outputs.nox-archive-hash }}" - almalinux-8-pkg-tests: - name: Alma Linux 8 Package Test + rockylinux-8-pkg-tests: + name: Rocky Linux 8 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -533,7 +533,7 @@ jobs: - build-ci-deps uses: ./.github/workflows/test-packages-action-linux.yml with: - distro-slug: almalinux-8 + distro-slug: rockylinux-8 nox-session: ci-test-onedir platform: linux arch: x86_64 @@ -545,8 +545,8 @@ jobs: skip-code-coverage: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - almalinux-8-arm64-pkg-tests: - name: Alma Linux 8 Arm64 Package Test + rockylinux-8-arm64-pkg-tests: + name: Rocky Linux 8 Arm64 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} 
needs: - prepare-workflow @@ -554,7 +554,7 @@ jobs: - build-ci-deps uses: ./.github/workflows/test-packages-action-linux.yml with: - distro-slug: almalinux-8-arm64 + distro-slug: rockylinux-8-arm64 nox-session: ci-test-onedir platform: linux arch: arm64 @@ -566,8 +566,8 @@ jobs: skip-code-coverage: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - almalinux-9-pkg-tests: - name: Alma Linux 9 Package Test + rockylinux-9-pkg-tests: + name: Rocky Linux 9 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -575,7 +575,7 @@ jobs: - build-ci-deps uses: ./.github/workflows/test-packages-action-linux.yml with: - distro-slug: almalinux-9 + distro-slug: rockylinux-9 nox-session: ci-test-onedir platform: linux arch: x86_64 @@ -587,8 +587,8 @@ jobs: skip-code-coverage: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - almalinux-9-arm64-pkg-tests: - name: Alma Linux 9 Arm64 Package Test + rockylinux-9-arm64-pkg-tests: + name: Rocky Linux 9 Arm64 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -596,7 +596,7 @@ jobs: - build-ci-deps uses: ./.github/workflows/test-packages-action-linux.yml with: - distro-slug: almalinux-9-arm64 + distro-slug: rockylinux-9-arm64 nox-session: ci-test-onedir platform: linux arch: arm64 @@ -1311,15 +1311,15 @@ jobs: workflow-slug: staging default-timeout: 180 - almalinux-8: - name: Alma Linux 8 Test + rockylinux-8: + name: Rocky Linux 8 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - build-ci-deps uses: ./.github/workflows/test-action-linux.yml with: - distro-slug: almalinux-8 + distro-slug: rockylinux-8 nox-session: ci-test-onedir platform: linux arch: x86_64 @@ -1332,15 +1332,15 @@ jobs: workflow-slug: staging default-timeout: 180 - almalinux-8-arm64: - name: Alma Linux 8 Arm64 Test + rockylinux-8-arm64: + name: Rocky Linux 8 Arm64 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - build-ci-deps uses: ./.github/workflows/test-action-linux.yml with: - distro-slug: almalinux-8-arm64 + distro-slug: rockylinux-8-arm64 nox-session: ci-test-onedir platform: linux arch: arm64 @@ -1353,15 +1353,15 @@ jobs: workflow-slug: staging default-timeout: 180 - almalinux-9: - name: Alma Linux 9 Test + rockylinux-9: + name: Rocky Linux 9 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - build-ci-deps uses: ./.github/workflows/test-action-linux.yml with: - distro-slug: almalinux-9 + distro-slug: rockylinux-9 nox-session: ci-test-onedir platform: linux arch: x86_64 @@ -1374,15 +1374,15 @@ jobs: workflow-slug: staging default-timeout: 180 - almalinux-9-arm64: - name: Alma Linux 9 Arm64 Test + rockylinux-9-arm64: + name: Rocky Linux 9 Arm64 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - build-ci-deps uses: ./.github/workflows/test-action-linux.yml with: - distro-slug: almalinux-9-arm64 + distro-slug: rockylinux-9-arm64 nox-session: 
ci-test-onedir platform: linux arch: arm64 @@ -2752,10 +2752,10 @@ jobs: - macos-12 - macos-13 - macos-13-arm64 - - almalinux-8 - - almalinux-8-arm64 - - almalinux-9 - - almalinux-9-arm64 + - rockylinux-8 + - rockylinux-8-arm64 + - rockylinux-9 + - rockylinux-9-arm64 - amazonlinux-2 - amazonlinux-2-arm64 - amazonlinux-2023 @@ -2777,10 +2777,10 @@ jobs: - ubuntu-2004-arm64 - ubuntu-2204 - ubuntu-2204-arm64 - - almalinux-8-pkg-tests - - almalinux-8-arm64-pkg-tests - - almalinux-9-pkg-tests - - almalinux-9-arm64-pkg-tests + - rockylinux-8-pkg-tests + - rockylinux-8-arm64-pkg-tests + - rockylinux-9-pkg-tests + - rockylinux-9-arm64-pkg-tests - amazonlinux-2-pkg-tests - amazonlinux-2-arm64-pkg-tests - amazonlinux-2023-pkg-tests diff --git a/.github/workflows/test-package-downloads-action.yml b/.github/workflows/test-package-downloads-action.yml index e4bd6af0fd8..68c954025c9 100644 --- a/.github/workflows/test-package-downloads-action.yml +++ b/.github/workflows/test-package-downloads-action.yml @@ -69,24 +69,6 @@ jobs: fail-fast: false matrix: include: - - distro-slug: almalinux-8 - arch: x86_64 - pkg-type: package - - distro-slug: almalinux-8-arm64 - arch: aarch64 - pkg-type: package - - distro-slug: almalinux-8-arm64 - arch: arm64 - pkg-type: package - - distro-slug: almalinux-9 - arch: x86_64 - pkg-type: package - - distro-slug: almalinux-9-arm64 - arch: aarch64 - pkg-type: package - - distro-slug: almalinux-9-arm64 - arch: arm64 - pkg-type: package - distro-slug: amazonlinux-2 arch: x86_64 pkg-type: package @@ -159,6 +141,24 @@ jobs: - distro-slug: photonos-5-arm64 arch: arm64 pkg-type: package + - distro-slug: rockylinux-8 + arch: x86_64 + pkg-type: package + - distro-slug: rockylinux-8-arm64 + arch: aarch64 + pkg-type: package + - distro-slug: rockylinux-8-arm64 + arch: arm64 + pkg-type: package + - distro-slug: rockylinux-9 + arch: x86_64 + pkg-type: package + - distro-slug: rockylinux-9-arm64 + arch: aarch64 + pkg-type: package + - distro-slug: rockylinux-9-arm64 + arch: arm64 + pkg-type: package - distro-slug: ubuntu-20.04 arch: x86_64 pkg-type: package diff --git a/cicd/golden-images.json b/cicd/golden-images.json index 7c6e621bc9f..f55c5b18fa5 100644 --- a/cicd/golden-images.json +++ b/cicd/golden-images.json @@ -1,48 +1,8 @@ { - "almalinux-8-arm64": { - "ami": "ami-045a65c1d4ceb04a5", - "ami_description": "CI Image of AlmaLinux 8 arm64", - "ami_name": "salt-project/ci/almalinux/8/arm64/20240209.1843", - "arch": "arm64", - "cloudwatch-agent-available": "true", - "instance_type": "m6g.large", - "is_windows": "false", - "ssh_username": "ec2-user" - }, - "almalinux-8": { - "ami": "ami-0b9c2b7ba679e691d", - "ami_description": "CI Image of AlmaLinux 8 x86_64", - "ami_name": "salt-project/ci/almalinux/8/x86_64/20240209.1843", - "arch": "x86_64", - "cloudwatch-agent-available": "true", - "instance_type": "t3a.large", - "is_windows": "false", - "ssh_username": "ec2-user" - }, - "almalinux-9-arm64": { - "ami": "ami-0cbdf762adc955d47", - "ami_description": "CI Image of AlmaLinux 9 arm64", - "ami_name": "salt-project/ci/almalinux/9/arm64/20240209.1844", - "arch": "arm64", - "cloudwatch-agent-available": "true", - "instance_type": "m6g.large", - "is_windows": "false", - "ssh_username": "ec2-user" - }, - "almalinux-9": { - "ami": "ami-086c8ef0ef6951a8f", - "ami_description": "CI Image of AlmaLinux 9 x86_64", - "ami_name": "salt-project/ci/almalinux/9/x86_64/20240209.1843", - "arch": "x86_64", - "cloudwatch-agent-available": "true", - "instance_type": "t3a.large", - "is_windows": "false", - 
"ssh_username": "ec2-user" - }, "amazonlinux-2-arm64": { - "ami": "ami-07bc422e281c67f10", + "ami": "ami-0fa1d515b17aa5832", "ami_description": "CI Image of AmazonLinux 2 arm64", - "ami_name": "salt-project/ci/amazonlinux/2/arm64/20240209.1843", + "ami_name": "salt-project/ci/amazonlinux/2/arm64/20240325.2133", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -50,9 +10,9 @@ "ssh_username": "ec2-user" }, "amazonlinux-2": { - "ami": "ami-01ecdeb9a8251824e", + "ami": "ami-0c9a41917d788911e", "ami_description": "CI Image of AmazonLinux 2 x86_64", - "ami_name": "salt-project/ci/amazonlinux/2/x86_64/20240209.1843", + "ami_name": "salt-project/ci/amazonlinux/2/x86_64/20240325.2133", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -60,9 +20,9 @@ "ssh_username": "ec2-user" }, "amazonlinux-2023-arm64": { - "ami": "ami-0b8a0efa9ea7ebfa4", + "ami": "ami-00644e6cc81cb8fc0", "ami_description": "CI Image of AmazonLinux 2023 arm64", - "ami_name": "salt-project/ci/amazonlinux/2023/arm64/20240209.1844", + "ami_name": "salt-project/ci/amazonlinux/2023/arm64/20240325.2133", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -70,9 +30,9 @@ "ssh_username": "ec2-user" }, "amazonlinux-2023": { - "ami": "ami-0f013d0d9dbaf3b06", + "ami": "ami-01ba1cac2a9ba4845", "ami_description": "CI Image of AmazonLinux 2023 x86_64", - "ami_name": "salt-project/ci/amazonlinux/2023/x86_64/20240209.1844", + "ami_name": "salt-project/ci/amazonlinux/2023/x86_64/20240325.2133", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -90,9 +50,9 @@ "ssh_username": "arch" }, "centos-7-arm64": { - "ami": "ami-09c1e87fa7a2be337", + "ami": "ami-0a0c4ce5d61416643", "ami_description": "CI Image of CentOS 7 arm64", - "ami_name": "salt-project/ci/centos/7/arm64/20240209.1843", + "ami_name": "salt-project/ci/centos/7/arm64/20240325.2134", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -100,9 +60,9 @@ "ssh_username": "centos" }, "centos-7": { - "ami": "ami-010faf67fdabfbcdf", + "ami": "ami-06fec7a8fe157fe7d", "ami_description": "CI Image of CentOS 7 x86_64", - "ami_name": "salt-project/ci/centos/7/x86_64/20240209.1843", + "ami_name": "salt-project/ci/centos/7/x86_64/20240325.2134", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -110,9 +70,9 @@ "ssh_username": "centos" }, "debian-10-arm64": { - "ami": "ami-018ff5d81815e307f", + "ami": "ami-0a4d0583945dba7a7", "ami_description": "CI Image of Debian 10 arm64", - "ami_name": "salt-project/ci/debian/10/arm64/20240209.1843", + "ami_name": "salt-project/ci/debian/10/arm64/20240325.2134", "arch": "arm64", "cloudwatch-agent-available": "false", "instance_type": "m6g.large", @@ -120,9 +80,9 @@ "ssh_username": "admin" }, "debian-10": { - "ami": "ami-033f768666c97d386", + "ami": "ami-0f474f6f3b4f1a981", "ami_description": "CI Image of Debian 10 x86_64", - "ami_name": "salt-project/ci/debian/10/x86_64/20240209.1843", + "ami_name": "salt-project/ci/debian/10/x86_64/20240325.2134", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -130,9 +90,9 @@ "ssh_username": "admin" }, "debian-11-arm64": { - "ami": "ami-02dfec8b374ad8fc8", + "ami": "ami-0e1d6f34aaeba1e58", "ami_description": "CI Image of Debian 11 arm64", - "ami_name": "salt-project/ci/debian/11/arm64/20240209.1843", + "ami_name": "salt-project/ci/debian/11/arm64/20240325.2134", "arch": 
"arm64", "cloudwatch-agent-available": "false", "instance_type": "m6g.large", @@ -140,9 +100,9 @@ "ssh_username": "admin" }, "debian-11": { - "ami": "ami-06e4f77ed230e2def", + "ami": "ami-012327dae48ce80ac", "ami_description": "CI Image of Debian 11 x86_64", - "ami_name": "salt-project/ci/debian/11/x86_64/20240209.1843", + "ami_name": "salt-project/ci/debian/11/x86_64/20240325.2134", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -150,9 +110,9 @@ "ssh_username": "admin" }, "debian-12-arm64": { - "ami": "ami-05c7771a9ec62dfb2", + "ami": "ami-0527ef47cece68f54", "ami_description": "CI Image of Debian 12 arm64", - "ami_name": "salt-project/ci/debian/12/arm64/20240209.1843", + "ami_name": "salt-project/ci/debian/12/arm64/20240325.2134", "arch": "arm64", "cloudwatch-agent-available": "false", "instance_type": "m6g.large", @@ -160,9 +120,9 @@ "ssh_username": "admin" }, "debian-12": { - "ami": "ami-0ac4b96d4ae5faa23", + "ami": "ami-0d9d685ae10656958", "ami_description": "CI Image of Debian 12 x86_64", - "ami_name": "salt-project/ci/debian/12/x86_64/20240209.1843", + "ami_name": "salt-project/ci/debian/12/x86_64/20240325.2134", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -170,9 +130,9 @@ "ssh_username": "admin" }, "fedora-39-arm64": { - "ami": "ami-04f69299edce3ff91", + "ami": "ami-00d2f2e1fccac457d", "ami_description": "CI Image of Fedora 39 arm64", - "ami_name": "salt-project/ci/fedora/39/arm64/20240209.1844", + "ami_name": "salt-project/ci/fedora/39/arm64/20240325.2133", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -180,9 +140,9 @@ "ssh_username": "fedora" }, "fedora-39": { - "ami": "ami-033bf14cad5d795a2", + "ami": "ami-072c01a40a6519153", "ami_description": "CI Image of Fedora 39 x86_64", - "ami_name": "salt-project/ci/fedora/39/x86_64/20240209.1844", + "ami_name": "salt-project/ci/fedora/39/x86_64/20240325.2133", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -190,9 +150,9 @@ "ssh_username": "fedora" }, "opensuse-15": { - "ami": "ami-023efb1898809e8fe", + "ami": "ami-04cfbfd3c214348bc", "ami_description": "CI Image of Opensuse 15 x86_64", - "ami_name": "salt-project/ci/opensuse/15/x86_64/20240209.1844", + "ami_name": "salt-project/ci/opensuse/15/x86_64/20240325.2133", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -200,9 +160,9 @@ "ssh_username": "ec2-user" }, "photonos-4-arm64": { - "ami": "ami-0d164263b5095dc45", + "ami": "ami-0bd76e6234ee685a7", "ami_description": "CI Image of PhotonOS 4 arm64", - "ami_name": "salt-project/ci/photonos/4/arm64/20240209.1844", + "ami_name": "salt-project/ci/photonos/4/arm64/20240325.2133", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -210,9 +170,9 @@ "ssh_username": "root" }, "photonos-4": { - "ami": "ami-0454d6f4e80b94412", + "ami": "ami-0b1947785de4b2a6e", "ami_description": "CI Image of PhotonOS 4 x86_64", - "ami_name": "salt-project/ci/photonos/4/x86_64/20240209.1844", + "ami_name": "salt-project/ci/photonos/4/x86_64/20240325.2134", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -220,9 +180,9 @@ "ssh_username": "root" }, "photonos-5-arm64": { - "ami": "ami-094f4c1e098fc2192", + "ami": "ami-0d02f34b9820752e4", "ami_description": "CI Image of PhotonOS 5 arm64", - "ami_name": "salt-project/ci/photonos/5/arm64/20240209.1844", + "ami_name": 
"salt-project/ci/photonos/5/arm64/20240325.2133", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -230,19 +190,59 @@ "ssh_username": "root" }, "photonos-5": { - "ami": "ami-037344b8f3d9e2650", + "ami": "ami-0fd58f07139e9622e", "ami_description": "CI Image of PhotonOS 5 x86_64", - "ami_name": "salt-project/ci/photonos/5/x86_64/20240209.1844", + "ami_name": "salt-project/ci/photonos/5/x86_64/20240325.2134", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", "is_windows": "false", "ssh_username": "root" }, + "rockylinux-8-arm64": { + "ami": "ami-0e5d23f57141e5ac4", + "ami_description": "CI Image of RockyLinux 8 arm64", + "ami_name": "salt-project/ci/rockylinux/8/arm64/20240325.2134", + "arch": "arm64", + "cloudwatch-agent-available": "true", + "instance_type": "m6g.large", + "is_windows": "false", + "ssh_username": "rocky" + }, + "rockylinux-8": { + "ami": "ami-0e2cac6a847d700aa", + "ami_description": "CI Image of RockyLinux 8 x86_64", + "ami_name": "salt-project/ci/rockylinux/8/x86_64/20240325.2134", + "arch": "x86_64", + "cloudwatch-agent-available": "true", + "instance_type": "t3a.large", + "is_windows": "false", + "ssh_username": "rocky" + }, + "rockylinux-9-arm64": { + "ami": "ami-0054d3b25a08d2b41", + "ami_description": "CI Image of RockyLinux 9 arm64", + "ami_name": "salt-project/ci/rockylinux/9/arm64/20240325.2134", + "arch": "arm64", + "cloudwatch-agent-available": "true", + "instance_type": "m6g.large", + "is_windows": "false", + "ssh_username": "rocky" + }, + "rockylinux-9": { + "ami": "ami-042d3b81138968bdb", + "ami_description": "CI Image of RockyLinux 9 x86_64", + "ami_name": "salt-project/ci/rockylinux/9/x86_64/20240325.2134", + "arch": "x86_64", + "cloudwatch-agent-available": "true", + "instance_type": "t3a.large", + "is_windows": "false", + "ssh_username": "rocky" + }, "ubuntu-20.04-arm64": { - "ami": "ami-0eb0f403664076b82", + "ami": "ami-0bd2e3ee99c5a2f52", "ami_description": "CI Image of Ubuntu 20.04 arm64", - "ami_name": "salt-project/ci/ubuntu/20.04/arm64/20240209.1843", + "ami_name": "salt-project/ci/ubuntu/20.04/arm64/20240325.2134", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -250,9 +250,9 @@ "ssh_username": "ubuntu" }, "ubuntu-20.04": { - "ami": "ami-07ad57960d22a8b65", + "ami": "ami-0fdc19cb94bc96db3", "ami_description": "CI Image of Ubuntu 20.04 x86_64", - "ami_name": "salt-project/ci/ubuntu/20.04/x86_64/20240209.1843", + "ami_name": "salt-project/ci/ubuntu/20.04/x86_64/20240325.2134", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -260,9 +260,9 @@ "ssh_username": "ubuntu" }, "ubuntu-22.04-arm64": { - "ami": "ami-05197331792cbc895", + "ami": "ami-0690e86bc116a6245", "ami_description": "CI Image of Ubuntu 22.04 arm64", - "ami_name": "salt-project/ci/ubuntu/22.04/arm64/20240209.1843", + "ami_name": "salt-project/ci/ubuntu/22.04/arm64/20240325.2134", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -270,9 +270,9 @@ "ssh_username": "ubuntu" }, "ubuntu-22.04": { - "ami": "ami-01d9f296a69eaec3c", + "ami": "ami-0285c21e3abc8b2b2", "ami_description": "CI Image of Ubuntu 22.04 x86_64", - "ami_name": "salt-project/ci/ubuntu/22.04/x86_64/20240209.1843", + "ami_name": "salt-project/ci/ubuntu/22.04/x86_64/20240325.2134", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -280,9 +280,9 @@ "ssh_username": "ubuntu" }, "ubuntu-23.04-arm64": { - 
"ami": "ami-0511f6146f198b645", + "ami": "ami-09e0eb04bbf2a2f35", "ami_description": "CI Image of Ubuntu 23.04 arm64", - "ami_name": "salt-project/ci/ubuntu/23.04/arm64/20240209.1843", + "ami_name": "salt-project/ci/ubuntu/23.04/arm64/20240325.2134", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -290,9 +290,9 @@ "ssh_username": "ubuntu" }, "ubuntu-23.04": { - "ami": "ami-0f09467d281f1a312", + "ami": "ami-029edca569b26d625", "ami_description": "CI Image of Ubuntu 23.04 x86_64", - "ami_name": "salt-project/ci/ubuntu/23.04/x86_64/20240209.1843", + "ami_name": "salt-project/ci/ubuntu/23.04/x86_64/20240325.2134", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -300,9 +300,9 @@ "ssh_username": "ubuntu" }, "windows-2016": { - "ami": "ami-015acc248b175fb3c", + "ami": "ami-0474d8e7e13c81883", "ami_description": "CI Image of Windows 2016 x86_64", - "ami_name": "salt-project/ci/windows/2016/x86_64/20240209.1844", + "ami_name": "salt-project/ci/windows/2016/x86_64/20240325.2133", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.xlarge", @@ -310,9 +310,9 @@ "ssh_username": "Administrator" }, "windows-2019": { - "ami": "ami-0318f14f8690bf17f", + "ami": "ami-07afee87d071123bf", "ami_description": "CI Image of Windows 2019 x86_64", - "ami_name": "salt-project/ci/windows/2019/x86_64/20240209.1844", + "ami_name": "salt-project/ci/windows/2019/x86_64/20240325.2133", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.xlarge", @@ -320,9 +320,9 @@ "ssh_username": "Administrator" }, "windows-2022": { - "ami": "ami-0a78a28f614a662ad", + "ami": "ami-08f69046343f92cc4", "ami_description": "CI Image of Windows 2022 x86_64", - "ami_name": "salt-project/ci/windows/2022/x86_64/20240209.1844", + "ami_name": "salt-project/ci/windows/2022/x86_64/20240325.2133", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.xlarge", diff --git a/cicd/shared-gh-workflows-context.yml b/cicd/shared-gh-workflows-context.yml index 2966a250604..fff98ba56b8 100644 --- a/cicd/shared-gh-workflows-context.yml +++ b/cicd/shared-gh-workflows-context.yml @@ -2,7 +2,7 @@ nox_version: "2022.8.7" python_version: "3.10.13" relenv_version: "0.15.1" mandatory_os_slugs: - - almalinux-9 + - rockylinux-9 - amazonlinux-2023-arm64 - archlinux-lts - macos-13-arm64 diff --git a/tests/integration/pillar/test_git_pillar.py b/tests/integration/pillar/test_git_pillar.py index 89835a069f8..e390da7aece 100644 --- a/tests/integration/pillar/test_git_pillar.py +++ b/tests/integration/pillar/test_git_pillar.py @@ -113,9 +113,9 @@ def _rand_key_name(length): def _check_skip(grains): - if grains["os"] == "CentOS Stream" and grains["osmajorrelease"] == 9: - return True - if grains["os"] == "AlmaLinux" and grains["osmajorrelease"] == 9: + if (grains["os"] in ("CentOS Stream", "AlmaLinux", "Rocky")) and grains[ + "osmajorrelease" + ] == 9: return True return False diff --git a/tests/pytests/functional/states/pkgrepo/test_centos.py b/tests/pytests/functional/states/pkgrepo/test_centos.py index c02da519d2f..0c57a114a0f 100644 --- a/tests/pytests/functional/states/pkgrepo/test_centos.py +++ b/tests/pytests/functional/states/pkgrepo/test_centos.py @@ -243,7 +243,7 @@ def copr_pkgrepo_with_comments_name(pkgrepo, grains): ): pytest.skip("copr plugin not installed on {} CI".format(grains["osfinger"])) if ( - grains["os"] in ("CentOS Stream", "AlmaLinux") + grains["os"] in ("CentOS Stream", "AlmaLinux", "Rocky") and 
grains["osmajorrelease"] == 9 or grains["osfinger"] == "Amazon Linux-2023" ): diff --git a/tests/pytests/functional/states/test_pkg.py b/tests/pytests/functional/states/test_pkg.py index 035cfb6e958..67ca2500fe3 100644 --- a/tests/pytests/functional/states/test_pkg.py +++ b/tests/pytests/functional/states/test_pkg.py @@ -55,7 +55,7 @@ def PKG_TARGETS(grains): else: _PKG_TARGETS = ["wget", "zsh-html"] elif ( - grains["os"] in ("CentOS Stream", "AlmaLinux") + grains["os"] in ("CentOS Stream", "Rocky", "AlmaLinux") and grains["osmajorrelease"] == 9 ): _PKG_TARGETS = ["units", "zsh"] diff --git a/tests/pytests/pkg/integration/test_enabled_disabled.py b/tests/pytests/pkg/integration/test_enabled_disabled.py index d6ee20c6fd1..99097b187ee 100644 --- a/tests/pytests/pkg/integration/test_enabled_disabled.py +++ b/tests/pytests/pkg/integration/test_enabled_disabled.py @@ -11,7 +11,14 @@ def test_services(install_salt, salt_cli, salt_minion): services_enabled = [] if install_salt.distro_id in ("ubuntu", "debian"): services_enabled = ["salt-master", "salt-minion", "salt-syndic", "salt-api"] - elif install_salt.distro_id in ("almalinux", "centos", "redhat", "amzn", "fedora"): + elif install_salt.distro_id in ( + "almalinux", + "rocky", + "centos", + "redhat", + "amzn", + "fedora", + ): services_disabled = ["salt-master", "salt-minion", "salt-syndic", "salt-api"] elif install_salt.distro_id == "photon": services_enabled = ["salt-master", "salt-minion", "salt-syndic", "salt-api"] diff --git a/tests/pytests/pkg/integration/test_salt_user.py b/tests/pytests/pkg/integration/test_salt_user.py index e1048b61cbf..74bf458a00b 100644 --- a/tests/pytests/pkg/integration/test_salt_user.py +++ b/tests/pytests/pkg/integration/test_salt_user.py @@ -190,6 +190,7 @@ def test_paths_log_rotation( if install_salt.distro_id not in ( "almalinux", + "rocky", "centos", "redhat", "amzn", diff --git a/tests/pytests/pkg/integration/test_systemd_config.py b/tests/pytests/pkg/integration/test_systemd_config.py index 8779827984c..1d016085e4c 100644 --- a/tests/pytests/pkg/integration/test_systemd_config.py +++ b/tests/pytests/pkg/integration/test_systemd_config.py @@ -16,6 +16,8 @@ def test_system_config(grains): if grains["osfinger"] in ( "AlmaLinux-8", "AlmaLinux-9", + "Rocky Linux-8", + "Rocky Linux-9", "CentOS Stream-8", "CentOS Linux-8", "CentOS Stream-9", diff --git a/tests/pytests/pkg/integration/test_version.py b/tests/pytests/pkg/integration/test_version.py index 1c193a882ab..24a665d4db7 100644 --- a/tests/pytests/pkg/integration/test_version.py +++ b/tests/pytests/pkg/integration/test_version.py @@ -127,6 +127,7 @@ def test_compare_pkg_versions_redhat_rc(version, install_salt): """ if install_salt.distro_id not in ( "almalinux", + "rocky", "centos", "redhat", "amzn", diff --git a/tests/support/pkg.py b/tests/support/pkg.py index 6163415d00b..d9c64877248 100644 --- a/tests/support/pkg.py +++ b/tests/support/pkg.py @@ -113,6 +113,7 @@ class SaltPkgInstall: def _default_pkg_mngr(self): if self.distro_id in ( "almalinux", + "rocky", "centos", "redhat", "amzn", @@ -129,6 +130,7 @@ class SaltPkgInstall: def _default_rm_pkg(self): if self.distro_id in ( "almalinux", + "rocky", "centos", "redhat", "amzn", @@ -144,6 +146,7 @@ class SaltPkgInstall: dbg_pkg = None if self.distro_id in ( "almalinux", + "rocky", "centos", "redhat", "amzn", @@ -167,6 +170,7 @@ class SaltPkgInstall: ] if self.distro_id in ( "almalinux", + "rocky", "centos", "redhat", "amzn", @@ -607,7 +611,7 @@ class SaltPkgInstall: "3006.0" ) distro_name = 
self.distro_name - if distro_name in ("almalinux", "centos", "fedora"): + if distro_name in ("almalinux", "rocky", "centos", "fedora"): distro_name = "redhat" root_url = "salt/py3/" if self.classic: @@ -615,6 +619,7 @@ class SaltPkgInstall: if self.distro_name in [ "almalinux", + "rocky", "redhat", "centos", "amazon", diff --git a/tools/ci.py b/tools/ci.py index 1f0ca25f810..7bdbc588fda 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -807,7 +807,7 @@ def pkg_matrix( if name == "amazonlinux": name = "amazon" - elif "centos" in name or name == "almalinux": + elif "centos" in name or name == "rockylinux": name = "redhat" elif "photon" in name: name = "photon" diff --git a/tools/precommit/workflows.py b/tools/precommit/workflows.py index 3c8b032ad05..719198cfd09 100644 --- a/tools/precommit/workflows.py +++ b/tools/precommit/workflows.py @@ -107,16 +107,16 @@ def generate_workflows(ctx: Context): test_salt_listing = PlatformDefinitions( { "linux": [ - Linux(slug="almalinux-8", display_name="Alma Linux 8", arch="x86_64"), + Linux(slug="rockylinux-8", display_name="Rocky Linux 8", arch="x86_64"), Linux( - slug="almalinux-8-arm64", - display_name="Alma Linux 8 Arm64", + slug="rockylinux-8-arm64", + display_name="Rocky Linux 8 Arm64", arch="arm64", ), - Linux(slug="almalinux-9", display_name="Alma Linux 9", arch="x86_64"), + Linux(slug="rockylinux-9", display_name="Rocky Linux 9", arch="x86_64"), Linux( - slug="almalinux-9-arm64", - display_name="Alma Linux 9 Arm64", + slug="rockylinux-9-arm64", + display_name="Rocky Linux 9 Arm64", arch="arm64", ), Linux( @@ -211,26 +211,26 @@ def generate_workflows(ctx: Context): { "linux": [ Linux( - slug="almalinux-8", - display_name="Alma Linux 8", + slug="rockylinux-8", + display_name="Rocky Linux 8", arch="x86_64", pkg_type="rpm", ), Linux( - slug="almalinux-8-arm64", - display_name="Alma Linux 8 Arm64", + slug="rockylinux-8-arm64", + display_name="Rocky Linux 8 Arm64", arch="arm64", pkg_type="rpm", ), Linux( - slug="almalinux-9", - display_name="Alma Linux 9", + slug="rockylinux-9", + display_name="Rocky Linux 9", arch="x86_64", pkg_type="rpm", ), Linux( - slug="almalinux-9-arm64", - display_name="Alma Linux 9 Arm64", + slug="rockylinux-9-arm64", + display_name="Rocky Linux 9 Arm64", arch="arm64", pkg_type="rpm", ), @@ -419,7 +419,7 @@ def generate_workflows(ctx: Context): } ) rpm_slugs = ( - "almalinux", + "rockylinux", "amazonlinux", "centos", "fedora", @@ -508,7 +508,7 @@ def generate_workflows(ctx: Context): if slug.endswith("-arm64"): continue if not slug.startswith( - ("amazonlinux", "almalinux", "centos", "fedora", "photonos") + ("amazonlinux", "rockylinux", "centos", "fedora", "photonos") ): continue os_name, os_version = slug.split("-") diff --git a/tools/utils/gh.py b/tools/utils/gh.py index 6a23c687267..060bd7f7288 100644 --- a/tools/utils/gh.py +++ b/tools/utils/gh.py @@ -219,7 +219,7 @@ def download_pkgs_artifact( if slug.startswith(("debian", "ubuntu")): artifact_name += f"{arch}-deb" elif slug.startswith( - ("almalinux", "amazonlinux", "centos", "fedora", "opensuse", "photonos") + ("rockylinux", "amazonlinux", "centos", "fedora", "opensuse", "photonos") ): artifact_name += f"{arch}-rpm" else: From 48f95f90ac82ad3d7e8363d4e3aa35c4e7d03dbf Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Tue, 2 Apr 2024 15:28:47 +0100 Subject: [PATCH 097/102] Do not run nightly or scheduled builds on private repos --- .github/workflows/nightly.yml | 5 +++++ .github/workflows/scheduled.yml | 5 +++++ .../templates/workflow-requirements-check.yml.jinja | 5 +++++ 
3 files changed, 15 insertions(+) diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 5414064ab00..376d6412784 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -57,6 +57,11 @@ jobs: echo "${MSG}" echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}" echo "requirements-met=false" >> "${GITHUB_OUTPUT}" + elif [ "${{ github.event.repository.private }}" = "true" ]; then + MSG="Not running workflow because ${{ github.repository }} is a private repository" + echo "${MSG}" + echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}" + echo "requirements-met=false" >> "${GITHUB_OUTPUT}" else MSG="Running workflow because ${{ github.repository }} is not a fork" echo "${MSG}" diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index b1ad18f9295..a7dc883125a 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -47,6 +47,11 @@ jobs: echo "${MSG}" echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}" echo "requirements-met=false" >> "${GITHUB_OUTPUT}" + elif [ "${{ github.event.repository.private }}" = "true" ]; then + MSG="Not running workflow because ${{ github.repository }} is a private repository" + echo "${MSG}" + echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}" + echo "requirements-met=false" >> "${GITHUB_OUTPUT}" else MSG="Running workflow because ${{ github.repository }} is not a fork" echo "${MSG}" diff --git a/.github/workflows/templates/workflow-requirements-check.yml.jinja b/.github/workflows/templates/workflow-requirements-check.yml.jinja index 419ee3f6f52..67e04eef3e7 100644 --- a/.github/workflows/templates/workflow-requirements-check.yml.jinja +++ b/.github/workflows/templates/workflow-requirements-check.yml.jinja @@ -21,6 +21,11 @@ echo "${MSG}" echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}" echo "requirements-met=false" >> "${GITHUB_OUTPUT}" + elif [ "${{ github.event.repository.private }}" = "true" ]; then + MSG="Not running workflow because ${{ github.repository }} is a private repository" + echo "${MSG}" + echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}" + echo "requirements-met=false" >> "${GITHUB_OUTPUT}" else MSG="Running workflow because ${{ github.repository }} is not a fork" echo "${MSG}" From 28814b9eab1fadf283e7a3f62f35276bf0113883 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Tue, 2 Apr 2024 15:52:09 +0100 Subject: [PATCH 098/102] Define additional `needs` for a few jobs --- .github/workflows/nightly.yml | 3 +++ .github/workflows/staging.yml | 3 +++ .github/workflows/templates/build-repos.yml.jinja | 1 + .github/workflows/templates/nightly.yml.jinja | 2 ++ 4 files changed, 9 insertions(+) diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 376d6412784..763a9926677 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -2012,6 +2012,7 @@ jobs: needs: - prepare-workflow - build-source-tarball + - build-pkgs-src strategy: fail-fast: false matrix: @@ -2765,6 +2766,7 @@ jobs: environment: nightly needs: - prepare-workflow + - build-docs - build-src-repo - build-deb-repo - build-rpm-repo @@ -2805,6 +2807,7 @@ jobs: - ubuntu-2204-arm64 steps: + - uses: actions/checkout@v4 - name: Get Salt Project GitHub Actions Bot Environment diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 3270e64ba8c..4d13d1fbc1b 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -1852,6 +1852,7 @@ jobs: needs: - prepare-workflow - build-source-tarball + - build-pkgs-src strategy: fail-fast: false matrix: @@ -2607,6 +2608,7 @@ 
jobs: environment: staging needs: - prepare-workflow + - build-docs - build-src-repo - build-deb-repo - build-rpm-repo @@ -2615,6 +2617,7 @@ jobs: - build-onedir-repo steps: + - uses: actions/checkout@v4 - name: Get Salt Project GitHub Actions Bot Environment diff --git a/.github/workflows/templates/build-repos.yml.jinja b/.github/workflows/templates/build-repos.yml.jinja index 6584158d063..92f621c857e 100644 --- a/.github/workflows/templates/build-repos.yml.jinja +++ b/.github/workflows/templates/build-repos.yml.jinja @@ -27,6 +27,7 @@ - build-salt-onedir <%- elif type == 'src' %> - build-source-tarball + - build-pkgs-src <%- endif %> <%- include "build-{}-repo.yml.jinja".format(type) %> diff --git a/.github/workflows/templates/nightly.yml.jinja b/.github/workflows/templates/nightly.yml.jinja index 097ccc7d2bb..313e7297150 100644 --- a/.github/workflows/templates/nightly.yml.jinja +++ b/.github/workflows/templates/nightly.yml.jinja @@ -146,6 +146,7 @@ concurrency: environment: <{ gh_environment }> needs: - prepare-workflow + - build-docs <%- for need in build_repo_needs.iter(consume=True) %> - <{ need }> <%- endfor %> @@ -156,6 +157,7 @@ concurrency: <%- endif %> steps: + - uses: actions/checkout@v4 - name: Get Salt Project GitHub Actions Bot Environment From 2d578e8ed0208991a04a40130c620488f1a07293 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Tue, 2 Apr 2024 18:14:38 +0100 Subject: [PATCH 099/102] Temporarily skip some tests on Rocky Linux 8 Arm64 --- .../functional/transport/ipc/test_pub_server_channel.py | 4 ++++ tests/pytests/functional/transport/server/test_req_channel.py | 4 ++++ .../functional/transport/zeromq/test_pub_server_channel.py | 4 ++++ tests/pytests/functional/utils/test_process.py | 4 ++++ tests/pytests/functional/utils/user/test_chugid_and_umask.py | 4 ++++ 5 files changed, 20 insertions(+) diff --git a/tests/pytests/functional/transport/ipc/test_pub_server_channel.py b/tests/pytests/functional/transport/ipc/test_pub_server_channel.py index d0310762283..2ea4470b24d 100644 --- a/tests/pytests/functional/transport/ipc/test_pub_server_channel.py +++ b/tests/pytests/functional/transport/ipc/test_pub_server_channel.py @@ -17,6 +17,10 @@ pytestmark = [ pytest.mark.skip_on_spawning_platform( reason="These tests are currently broken on spawning platforms. Need to be rewritten.", ), + pytest.mark.skipif( + "grains['osfinger'] == 'Rocky Linux-8' and grains['osarch'] == 'aarch64'", + reason="Temporarily skip on Rocky Linux 8 Arm64", + ), ] diff --git a/tests/pytests/functional/transport/server/test_req_channel.py b/tests/pytests/functional/transport/server/test_req_channel.py index b49e808f273..2eac76d352c 100644 --- a/tests/pytests/functional/transport/server/test_req_channel.py +++ b/tests/pytests/functional/transport/server/test_req_channel.py @@ -23,6 +23,10 @@ pytestmark = [ reason="These tests are currently broken on spawning platforms. 
Need to be rewritten.", ), pytest.mark.slow_test, + pytest.mark.skipif( + "grains['osfinger'] == 'Rocky Linux-8' and grains['osarch'] == 'aarch64'", + reason="Temporarily skip on Rocky Linux 8 Arm64", + ), ] diff --git a/tests/pytests/functional/transport/zeromq/test_pub_server_channel.py b/tests/pytests/functional/transport/zeromq/test_pub_server_channel.py index 4e851a52fd3..50c8ff1c817 100644 --- a/tests/pytests/functional/transport/zeromq/test_pub_server_channel.py +++ b/tests/pytests/functional/transport/zeromq/test_pub_server_channel.py @@ -22,6 +22,10 @@ pytestmark = [ pytest.mark.skip_on_spawning_platform( reason="These tests are currently broken on spawning platforms. Need to be rewritten.", ), + pytest.mark.skipif( + "grains['osfinger'] == 'Rocky Linux-8' and grains['osarch'] == 'aarch64'", + reason="Temporarily skip on Rocky Linux 8 Arm64", + ), ] diff --git a/tests/pytests/functional/utils/test_process.py b/tests/pytests/functional/utils/test_process.py index bbb83638761..ed165ea3e91 100644 --- a/tests/pytests/functional/utils/test_process.py +++ b/tests/pytests/functional/utils/test_process.py @@ -24,6 +24,10 @@ def process_manager(): _process_manager.terminate() +@pytest.mark.skipif( + "grains['osfinger'] == 'Rocky Linux-8' and grains['osarch'] == 'aarch64'", + reason="Temporarily skip on Rocky Linux 8 Arm64", +) def test_process_manager_60749(process_manager): """ Regression test for issue #60749 diff --git a/tests/pytests/functional/utils/user/test_chugid_and_umask.py b/tests/pytests/functional/utils/user/test_chugid_and_umask.py index 1a02efb528a..95a76519a94 100644 --- a/tests/pytests/functional/utils/user/test_chugid_and_umask.py +++ b/tests/pytests/functional/utils/user/test_chugid_and_umask.py @@ -20,6 +20,10 @@ pytestmark = [ pytest.mark.skip_if_not_root, pytest.mark.skip_on_windows, pytest.mark.skip_initial_gh_actions_failure(skip=_check_skip), + pytest.mark.skipif( + "grains['osfinger'] == 'Rocky Linux-8' and grains['osarch'] == 'aarch64'", + reason="Temporarily skip on Rocky Linux 8 Arm64", + ), ] From 1998ba3424a56c1fc00a5845f447ca6ea9f8a514 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 3 Apr 2024 08:59:43 +0100 Subject: [PATCH 100/102] Remove extra logic missed in 68131ce7ab248255feb50436a8b1c6c44813c762 This allowed the tests in https://github.com/saltstack/salt/pull/66169 to pass when they shouldn't --- .github/workflows/test-action-macos.yml | 2 +- .github/workflows/test-action-windows.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test-action-macos.yml b/.github/workflows/test-action-macos.yml index 6b03a6cb558..a57b4cb133d 100644 --- a/.github/workflows/test-action-macos.yml +++ b/.github/workflows/test-action-macos.yml @@ -179,7 +179,7 @@ jobs: - name: Run Changed Tests id: run-fast-changed-tests - if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] == false }} + if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }} env: SKIP_REQUIREMENTS_INSTALL: "1" PRINT_TEST_SELECTION: "0" diff --git a/.github/workflows/test-action-windows.yml b/.github/workflows/test-action-windows.yml index 92b048f570e..bc187bca036 100644 --- a/.github/workflows/test-action-windows.yml +++ b/.github/workflows/test-action-windows.yml @@ -202,7 +202,7 @@ jobs: - name: Run Changed Tests id: run-fast-changed-tests - if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] == false }} + if: ${{ fromJSON(inputs.testrun)['type'] != 'full' 
}} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ From f38c6bcbf7aa1ff1ffbfbfe10f82d21a9c3ff2e4 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Fri, 22 Mar 2024 08:51:50 +0000 Subject: [PATCH 101/102] Upgrade to ``pytest-salt-factories==1.0.1`` --- requirements/static/ci/py3.10/cloud.txt | 2 +- requirements/static/ci/py3.10/darwin.txt | 2 +- requirements/static/ci/py3.10/freebsd.txt | 2 +- requirements/static/ci/py3.10/linux.txt | 2 +- requirements/static/ci/py3.10/windows.txt | 2 +- requirements/static/ci/py3.11/cloud.txt | 2 +- requirements/static/ci/py3.11/darwin.txt | 2 +- requirements/static/ci/py3.11/freebsd.txt | 2 +- requirements/static/ci/py3.11/linux.txt | 2 +- requirements/static/ci/py3.11/windows.txt | 2 +- requirements/static/ci/py3.12/cloud.txt | 2 +- requirements/static/ci/py3.12/darwin.txt | 2 +- requirements/static/ci/py3.12/freebsd.txt | 2 +- requirements/static/ci/py3.12/linux.txt | 2 +- requirements/static/ci/py3.12/windows.txt | 2 +- requirements/static/ci/py3.8/cloud.txt | 2 +- requirements/static/ci/py3.8/freebsd.txt | 2 +- requirements/static/ci/py3.8/linux.txt | 2 +- requirements/static/ci/py3.8/windows.txt | 2 +- requirements/static/ci/py3.9/cloud.txt | 2 +- requirements/static/ci/py3.9/darwin.txt | 2 +- requirements/static/ci/py3.9/freebsd.txt | 2 +- requirements/static/ci/py3.9/linux.txt | 2 +- requirements/static/ci/py3.9/windows.txt | 2 +- 24 files changed, 24 insertions(+), 24 deletions(-) diff --git a/requirements/static/ci/py3.10/cloud.txt b/requirements/static/ci/py3.10/cloud.txt index 8b2b724ea02..d1c5c0003e5 100644 --- a/requirements/static/ci/py3.10/cloud.txt +++ b/requirements/static/ci/py3.10/cloud.txt @@ -47,7 +47,7 @@ pyspnego==0.9.0 # -r requirements/static/ci/cloud.in # requests-ntlm # smbprotocol -pywinrm==0.3.0 +pywinrm==0.4.3 # via -r requirements/static/ci/cloud.in requests-ntlm==1.2.0 # via pywinrm diff --git a/requirements/static/ci/py3.10/darwin.txt b/requirements/static/ci/py3.10/darwin.txt index a12c9dd1136..1b3ad3582f9 100644 --- a/requirements/static/ci/py3.10/darwin.txt +++ b/requirements/static/ci/py3.10/darwin.txt @@ -362,7 +362,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0 +pytest-salt-factories==1.0.1 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.10/freebsd.txt b/requirements/static/ci/py3.10/freebsd.txt index 9b4a26e9c4b..0765c8cfcad 100644 --- a/requirements/static/ci/py3.10/freebsd.txt +++ b/requirements/static/ci/py3.10/freebsd.txt @@ -366,7 +366,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0 +pytest-salt-factories==1.0.1 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.10/linux.txt b/requirements/static/ci/py3.10/linux.txt index cb91db86959..355e8707ddd 100644 --- a/requirements/static/ci/py3.10/linux.txt +++ b/requirements/static/ci/py3.10/linux.txt @@ -400,7 +400,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0 
+pytest-salt-factories==1.0.1 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.10/windows.txt b/requirements/static/ci/py3.10/windows.txt index 3ef697746c9..d3d317cc4f7 100644 --- a/requirements/static/ci/py3.10/windows.txt +++ b/requirements/static/ci/py3.10/windows.txt @@ -326,7 +326,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0 +pytest-salt-factories==1.0.1 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.11/cloud.txt b/requirements/static/ci/py3.11/cloud.txt index 6049488e0c8..ede2f925742 100644 --- a/requirements/static/ci/py3.11/cloud.txt +++ b/requirements/static/ci/py3.11/cloud.txt @@ -47,7 +47,7 @@ pyspnego==0.9.0 # -r requirements/static/ci/cloud.in # requests-ntlm # smbprotocol -pywinrm==0.3.0 +pywinrm==0.4.3 # via -r requirements/static/ci/cloud.in requests-ntlm==1.2.0 # via pywinrm diff --git a/requirements/static/ci/py3.11/darwin.txt b/requirements/static/ci/py3.11/darwin.txt index 5fe774ff19a..64a1d77d362 100644 --- a/requirements/static/ci/py3.11/darwin.txt +++ b/requirements/static/ci/py3.11/darwin.txt @@ -363,7 +363,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0 +pytest-salt-factories==1.0.1 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.11/freebsd.txt b/requirements/static/ci/py3.11/freebsd.txt index 95679fe7c16..448598d6bbc 100644 --- a/requirements/static/ci/py3.11/freebsd.txt +++ b/requirements/static/ci/py3.11/freebsd.txt @@ -367,7 +367,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0 +pytest-salt-factories==1.0.1 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.11/linux.txt b/requirements/static/ci/py3.11/linux.txt index 1f843813884..219a0a3aede 100644 --- a/requirements/static/ci/py3.11/linux.txt +++ b/requirements/static/ci/py3.11/linux.txt @@ -399,7 +399,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0 +pytest-salt-factories==1.0.1 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.11/windows.txt b/requirements/static/ci/py3.11/windows.txt index b4719ca0f76..d177283f91f 100644 --- a/requirements/static/ci/py3.11/windows.txt +++ b/requirements/static/ci/py3.11/windows.txt @@ -327,7 +327,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0 +pytest-salt-factories==1.0.1 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.12/cloud.txt b/requirements/static/ci/py3.12/cloud.txt index 3a53b608fef..3197ecf6c61 100644 --- a/requirements/static/ci/py3.12/cloud.txt +++ b/requirements/static/ci/py3.12/cloud.txt @@ -521,7 +521,7 @@ pytest-httpserver==1.0.8 # via # -c requirements/static/ci/py3.12/linux.txt # -r requirements/pytest.txt -pytest-salt-factories==1.0.0 
+pytest-salt-factories==1.0.1 # via # -c requirements/static/ci/py3.12/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.12/darwin.txt b/requirements/static/ci/py3.12/darwin.txt index 57e7821892f..9f1aef065e6 100644 --- a/requirements/static/ci/py3.12/darwin.txt +++ b/requirements/static/ci/py3.12/darwin.txt @@ -363,7 +363,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0 +pytest-salt-factories==1.0.1 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.12/freebsd.txt b/requirements/static/ci/py3.12/freebsd.txt index f64a4f56789..95a90096cb0 100644 --- a/requirements/static/ci/py3.12/freebsd.txt +++ b/requirements/static/ci/py3.12/freebsd.txt @@ -367,7 +367,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0 +pytest-salt-factories==1.0.1 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.12/linux.txt b/requirements/static/ci/py3.12/linux.txt index b5a06a73fb5..c09251291e4 100644 --- a/requirements/static/ci/py3.12/linux.txt +++ b/requirements/static/ci/py3.12/linux.txt @@ -399,7 +399,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0 +pytest-salt-factories==1.0.1 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.12/windows.txt b/requirements/static/ci/py3.12/windows.txt index b2943dffcb2..bc42b1c08b2 100644 --- a/requirements/static/ci/py3.12/windows.txt +++ b/requirements/static/ci/py3.12/windows.txt @@ -327,7 +327,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0 +pytest-salt-factories==1.0.1 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.8/cloud.txt b/requirements/static/ci/py3.8/cloud.txt index e8662fb128d..e6caac1247a 100644 --- a/requirements/static/ci/py3.8/cloud.txt +++ b/requirements/static/ci/py3.8/cloud.txt @@ -47,7 +47,7 @@ pyspnego==0.9.0 # -r requirements/static/ci/cloud.in # requests-ntlm # smbprotocol -pywinrm==0.3.0 +pywinrm==0.4.3 # via -r requirements/static/ci/cloud.in requests-ntlm==1.2.0 # via pywinrm diff --git a/requirements/static/ci/py3.8/freebsd.txt b/requirements/static/ci/py3.8/freebsd.txt index 94b016c1072..2d6d51c998e 100644 --- a/requirements/static/ci/py3.8/freebsd.txt +++ b/requirements/static/ci/py3.8/freebsd.txt @@ -370,7 +370,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0 +pytest-salt-factories==1.0.1 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.8/linux.txt b/requirements/static/ci/py3.8/linux.txt index 9604959fda4..c9578ffedae 100644 --- a/requirements/static/ci/py3.8/linux.txt +++ b/requirements/static/ci/py3.8/linux.txt @@ -397,7 +397,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0 
+pytest-salt-factories==1.0.1 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.8/windows.txt b/requirements/static/ci/py3.8/windows.txt index 91d99d13095..6b3442a945d 100644 --- a/requirements/static/ci/py3.8/windows.txt +++ b/requirements/static/ci/py3.8/windows.txt @@ -330,7 +330,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0 +pytest-salt-factories==1.0.1 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.9/cloud.txt b/requirements/static/ci/py3.9/cloud.txt index f2bbb436946..fbed2f95c05 100644 --- a/requirements/static/ci/py3.9/cloud.txt +++ b/requirements/static/ci/py3.9/cloud.txt @@ -47,7 +47,7 @@ pyspnego==0.9.0 # -r requirements/static/ci/cloud.in # requests-ntlm # smbprotocol -pywinrm==0.3.0 +pywinrm==0.4.3 # via -r requirements/static/ci/cloud.in requests-ntlm==1.2.0 # via pywinrm diff --git a/requirements/static/ci/py3.9/darwin.txt b/requirements/static/ci/py3.9/darwin.txt index f1a1c52e602..6b43e63a442 100644 --- a/requirements/static/ci/py3.9/darwin.txt +++ b/requirements/static/ci/py3.9/darwin.txt @@ -362,7 +362,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0 +pytest-salt-factories==1.0.1 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.9/freebsd.txt b/requirements/static/ci/py3.9/freebsd.txt index 69ff9ebcb6d..4c334b478ca 100644 --- a/requirements/static/ci/py3.9/freebsd.txt +++ b/requirements/static/ci/py3.9/freebsd.txt @@ -366,7 +366,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0 +pytest-salt-factories==1.0.1 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.9/linux.txt b/requirements/static/ci/py3.9/linux.txt index 10042421e25..7f33c8c1317 100644 --- a/requirements/static/ci/py3.9/linux.txt +++ b/requirements/static/ci/py3.9/linux.txt @@ -393,7 +393,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0 +pytest-salt-factories==1.0.1 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.9/windows.txt b/requirements/static/ci/py3.9/windows.txt index a0d9a9aad3a..9d5a83bb9b4 100644 --- a/requirements/static/ci/py3.9/windows.txt +++ b/requirements/static/ci/py3.9/windows.txt @@ -326,7 +326,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0 +pytest-salt-factories==1.0.1 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories From fe6ba8953e53deafc4dbb6d68b2ce1c9a22028d3 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 3 Apr 2024 14:41:26 +0100 Subject: [PATCH 102/102] Ignore the hosts key since it's not using the test SSHD server --- tests/pytests/integration/netapi/test_ssh_client.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/pytests/integration/netapi/test_ssh_client.py 
b/tests/pytests/integration/netapi/test_ssh_client.py index ecd2ac788d1..b84a63a2f8c 100644 --- a/tests/pytests/integration/netapi/test_ssh_client.py +++ b/tests/pytests/integration/netapi/test_ssh_client.py @@ -275,6 +275,7 @@ def test_shell_inject_remote_port_forwards( "eauth": "auto", "username": salt_auto_account.username, "password": salt_auto_account.password, + "ignore_host_keys": True, } ret = client.run(low) assert path.exists() is False
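
For context on that final hunk: the `ignore_host_keys` flag is passed straight through the netapi low data to the salt-ssh client, presumably so the test does not fail on SSH host key verification for a server it never actually reaches. Below is a minimal, hedged sketch of what such low data looks like end to end; the master config path, target id, function, roster, and credentials are hypothetical placeholders, and the exact options honoured by the ssh client should be confirmed against the installed Salt version rather than taken from this sketch.

    # Minimal sketch of netapi "ssh" low data with host key checking disabled.
    # Assumptions: Salt is installed, a master config exists at the path below,
    # and the target/credentials are placeholders, not values from the patch.
    import salt.config
    import salt.netapi

    opts = salt.config.client_config("/etc/salt/master")  # assumed config location
    client = salt.netapi.NetapiClient(opts)

    low = {
        "client": "ssh",
        "tgt": "test-minion",      # hypothetical roster target id
        "fun": "test.ping",
        "roster": "flat",
        "eauth": "auto",
        "username": "saltdev",     # hypothetical eauth credentials
        "password": "saltdev",
        # Mirrors the change in the hunk above: skip SSH host key verification.
        "ignore_host_keys": True,
    }

    ret = client.run(low)
    print(ret)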