From c9d18056e9c4e0c8490ab2f23c3ae4aa4262c8e7 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Mon, 23 Oct 2023 14:33:18 -0600 Subject: [PATCH 001/196] Initial port of unittest test_junos to pytest --- tests/pytests/unit/modules/test_junos.py | 2848 ++++++++++++++++++++++ tests/unit/modules/test_junos.py | 2766 --------------------- 2 files changed, 2848 insertions(+), 2766 deletions(-) create mode 100644 tests/pytests/unit/modules/test_junos.py delete mode 100644 tests/unit/modules/test_junos.py diff --git a/tests/pytests/unit/modules/test_junos.py b/tests/pytests/unit/modules/test_junos.py new file mode 100644 index 00000000000..fc6e0b92ec2 --- /dev/null +++ b/tests/pytests/unit/modules/test_junos.py @@ -0,0 +1,2848 @@ +""" + :codeauthor: Rajvi Dhimar +""" +import os + +import pytest +import yaml + +import salt.modules.junos as junos +from tests.support.mock import ANY, MagicMock, PropertyMock, call, mock_open, patch + +try: + from lxml import etree +except ImportError: + import xml.etree.ElementTree as etree + +try: + import jnpr.junos.op as tables_dir + import jxmlease # pylint: disable=unused-import + from jnpr.junos.device import Device + from jnpr.junos.exception import ConnectClosedError, LockError, UnlockError + from jnpr.junos.utils.config import Config + from jnpr.junos.utils.sw import SW + + HAS_JUNOS = True +except ImportError: + HAS_JUNOS = False + + +@pytest.mark.skipif( + not HAS_JUNOS, reason="The junos-eznc and jxmlease modules are required" +) +@pytest.fixture +def mock_cp(*args, **kwargs): + pass + + +@pytest.fixture +def get_facts(): + facts = { + "2RE": True, + "HOME": "/var/home/regress", + "RE0": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "master", + "model": "RE-VMX", + "status": "OK", + "up_time": "11 days, 23 hours, 16 minutes, 54 seconds", + }, + "RE1": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "backup", + "model": "RE-VMX", + "status": "OK", + 
"up_time": "11 days, 23 hours, 16 minutes, 41 seconds", + }, + "RE_hw_mi": False, + "current_re": ["re0", "master", "node", "fwdd", "member", "pfem"], + "domain": "englab.juniper.net", + "fqdn": "R1_re0.englab.juniper.net", + "hostname": "R1_re0", + "hostname_info": {"re0": "R1_re0", "re1": "R1_re01"}, + "ifd_style": "CLASSIC", + "junos_info": { + "re0": { + "object": { + "build": None, + "major": (16, 1), + "minor": "20160413_0837_aamish", + "type": "I", + }, + "text": "16.1I20160413_0837_aamish", + }, + "re1": { + "object": { + "build": None, + "major": (16, 1), + "minor": "20160413_0837_aamish", + "type": "I", + }, + "text": "16.1I20160413_0837_aamish", + }, + }, + "master": "RE0", + "model": "MX240", + "model_info": {"re0": "MX240", "re1": "MX240"}, + "personality": "MX", + "re_info": { + "default": { + "0": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "master", + "model": "RE-VMX", + "status": "OK", + }, + "1": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "backup", + "model": "RE-VMX", + "status": "OK", + }, + "default": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "master", + "model": "RE-VMX", + "status": "OK", + }, + } + }, + "re_master": {"default": "0"}, + "serialnumber": "VMX4eaf", + "srx_cluster": None, + "switch_style": "BRIDGE_DOMAIN", + "vc_capable": False, + "vc_fabric": None, + "vc_master": None, + "vc_mode": None, + "version": "16.1I20160413_0837_aamish", + "version_RE0": "16.1I20160413_0837_aamish", + "version_RE1": "16.1I20160413_0837_aamish", + "version_info": { + "build": None, + "major": (16, 1), + "minor": "20160413_0837_aamish", + "type": "I", + }, + "virtual": True, + } + return facts + + +@pytest.fixture +def make_connect(): + with patch("ncclient.manager.connect") as mock_connect: + dev = Device( + host="1.1.1.1", + user="test", + password="test123", + fact_style="old", + gather_facts=False, + ) + dev.open() + dev.timeout = 30 + dev.bind(cu=Config) + 
dev.bind(sw=SW) + yield dev + + +@pytest.fixture +def configure_loader_modules(mock_cp, get_facts, make_connect): + return { + junos: { + "__proxy__": { + "junos.conn": MagicMock(return_value=make_connect), + "junos.get_serialized_facts": MagicMock(return_value=get_facts), + "junos.reboot_active": MagicMock(return_value=True), + "junos.reboot_clear": MagicMock(return_value=True), + }, + "__salt__": { + "cp.get_template": MagicMock(return_value=mock_cp), + "cp.get_file": MagicMock(return_value=mock_cp), + "file.file_exists": MagicMock(return_value=True), + "slsutil.renderer": MagicMock( + return_value="set system host-name dummy" + ), + "event.fire_master": MagicMock(return_value=None), + }, + "_restart_connection": MagicMock(return_value=None), + }, + } + + +def raise_exception(*args, **kwargs): + raise Exception("Test exception") + + +def test__timeout_decorator(): + with patch("jnpr.junos.Device.timeout", new_callable=PropertyMock) as mock_timeout: + mock_timeout.return_value = 30 + + def function(x): + return x + + decorator = junos._timeout_decorator(function) + decorator("Test Mock", dev_timeout=10) + calls = [call(), call(10), call(30)] + mock_timeout.assert_has_calls(calls) + + +def test__timeout_cleankwargs_decorator(): + with patch("jnpr.junos.Device.timeout", new_callable=PropertyMock) as mock_timeout: + mock_timeout.return_value = 30 + + def function(x): + return x + + decorator = junos._timeout_decorator_cleankwargs(function) + decorator("Test Mock", dev_timeout=10, __pub_args="abc") + calls = [call(), call(10), call(30)] + mock_timeout.assert_has_calls(calls) + + +def test_facts_refresh(): + with patch("salt.modules.saltutil.sync_grains") as mock_sync_grains: + ret = dict() + ret["facts"] = { + "2RE": True, + "HOME": "/var/home/regress", + "RE0": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "master", + "model": "RE-VMX", + "status": "OK", + "up_time": "11 days, 23 hours, 16 minutes, 54 seconds", + }, + "RE1": { + 
"last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "backup", + "model": "RE-VMX", + "status": "OK", + "up_time": "11 days, 23 hours, 16 minutes, 41 seconds", + }, + "RE_hw_mi": False, + "current_re": ["re0", "master", "node", "fwdd", "member", "pfem"], + "domain": "englab.juniper.net", + "fqdn": "R1_re0.englab.juniper.net", + "hostname": "R1_re0", + "hostname_info": {"re0": "R1_re0", "re1": "R1_re01"}, + "ifd_style": "CLASSIC", + "junos_info": { + "re0": { + "object": { + "build": None, + "major": (16, 1), + "minor": "20160413_0837_aamish", + "type": "I", + }, + "text": "16.1I20160413_0837_aamish", + }, + "re1": { + "object": { + "build": None, + "major": (16, 1), + "minor": "20160413_0837_aamish", + "type": "I", + }, + "text": "16.1I20160413_0837_aamish", + }, + }, + "master": "RE0", + "model": "MX240", + "model_info": {"re0": "MX240", "re1": "MX240"}, + "personality": "MX", + "re_info": { + "default": { + "0": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "master", + "model": "RE-VMX", + "status": "OK", + }, + "1": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "backup", + "model": "RE-VMX", + "status": "OK", + }, + "default": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "master", + "model": "RE-VMX", + "status": "OK", + }, + } + }, + "re_master": {"default": "0"}, + "serialnumber": "VMX4eaf", + "srx_cluster": None, + "switch_style": "BRIDGE_DOMAIN", + "vc_capable": False, + "vc_fabric": None, + "vc_master": None, + "vc_mode": None, + "version": "16.1I20160413_0837_aamish", + "version_RE0": "16.1I20160413_0837_aamish", + "version_RE1": "16.1I20160413_0837_aamish", + "version_info": { + "build": None, + "major": (16, 1), + "minor": "20160413_0837_aamish", + "type": "I", + }, + "virtual": True, + } + ret["out"] = True + assert junos.facts_refresh() == ret + + +def test_facts_refresh_exception(): + with patch("jnpr.junos.device.Device.facts_refresh") as 
mock_facts_refresh: + mock_facts_refresh.side_effect = raise_exception + ret = dict() + ret["message"] = 'Execution failed due to "Test exception"' + ret["out"] = False + assert junos.facts_refresh() == ret + + +def test_facts(): + ret = dict() + ret["facts"] = { + "2RE": True, + "HOME": "/var/home/regress", + "RE0": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "master", + "model": "RE-VMX", + "status": "OK", + "up_time": "11 days, 23 hours, 16 minutes, 54 seconds", + }, + "RE1": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "backup", + "model": "RE-VMX", + "status": "OK", + "up_time": "11 days, 23 hours, 16 minutes, 41 seconds", + }, + "RE_hw_mi": False, + "current_re": ["re0", "master", "node", "fwdd", "member", "pfem"], + "domain": "englab.juniper.net", + "fqdn": "R1_re0.englab.juniper.net", + "hostname": "R1_re0", + "hostname_info": {"re0": "R1_re0", "re1": "R1_re01"}, + "ifd_style": "CLASSIC", + "junos_info": { + "re0": { + "object": { + "build": None, + "major": (16, 1), + "minor": "20160413_0837_aamish", + "type": "I", + }, + "text": "16.1I20160413_0837_aamish", + }, + "re1": { + "object": { + "build": None, + "major": (16, 1), + "minor": "20160413_0837_aamish", + "type": "I", + }, + "text": "16.1I20160413_0837_aamish", + }, + }, + "master": "RE0", + "model": "MX240", + "model_info": {"re0": "MX240", "re1": "MX240"}, + "personality": "MX", + "re_info": { + "default": { + "0": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "master", + "model": "RE-VMX", + "status": "OK", + }, + "1": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "backup", + "model": "RE-VMX", + "status": "OK", + }, + "default": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "master", + "model": "RE-VMX", + "status": "OK", + }, + } + }, + "re_master": {"default": "0"}, + "serialnumber": "VMX4eaf", + "srx_cluster": None, + "switch_style": "BRIDGE_DOMAIN", + 
"vc_capable": False, + "vc_fabric": None, + "vc_master": None, + "vc_mode": None, + "version": "16.1I20160413_0837_aamish", + "version_RE0": "16.1I20160413_0837_aamish", + "version_RE1": "16.1I20160413_0837_aamish", + "version_info": { + "build": None, + "major": (16, 1), + "minor": "20160413_0837_aamish", + "type": "I", + }, + "virtual": True, + } + ret["out"] = True + assert junos.facts() == ret + + +def test_facts_exception(): + with patch.dict(junos.__proxy__, {"junos.get_serialized_facts": raise_exception}): + ret = dict() + ret["message"] = 'Could not display facts due to "Test exception"' + ret["out"] = False + assert junos.facts() == ret + + +def test_set_hostname_without_args(): + ret = dict() + ret["message"] = "Please provide the hostname." + ret["out"] = False + assert junos.set_hostname() == ret + + +def test_set_hostname_load_called_with_valid_name(): + with patch("jnpr.junos.utils.config.Config.load") as mock_load: + junos.set_hostname("test-name") + mock_load.assert_called_with("set system host-name test-name", format="set") + + +def test_set_hostname_raise_exception_for_load(): + with patch("jnpr.junos.utils.config.Config.load") as mock_load: + mock_load.side_effect = raise_exception + ret = dict() + ret["message"] = 'Could not load configuration due to error "Test exception"' + ret["out"] = False + assert junos.set_hostname("Test-name") == ret + + +def test_set_hostname_raise_exception_for_commit_check(): + with patch("jnpr.junos.utils.config.Config.commit_check") as mock_commit_check: + mock_commit_check.side_effect = raise_exception + ret = dict() + ret["message"] = 'Could not commit check due to error "Test exception"' + ret["out"] = False + assert junos.set_hostname("test-name") == ret + + +def test_set_hostname_one_arg_parsed_correctly(): + with patch("jnpr.junos.utils.config.Config.load") as mock_load, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as 
mock_commit: + mock_commit_check.return_value = True + args = { + "comment": "Committed via salt", + "__pub_user": "root", + "__pub_arg": ["test-name", {"comment": "Committed via salt"}], + "__pub_fun": "junos.set_hostname", + "__pub_jid": "20170220210915624885", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + + junos.set_hostname("test-name", **args) + mock_commit.assert_called_with(comment="Committed via salt") + + +def test_set_hostname_more_than_one_args_parsed_correctly(): + with patch("jnpr.junos.utils.config.Config.load") as mock_load, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit: + mock_commit_check.return_value = True + args = { + "comment": "Committed via salt", + "__pub_user": "root", + "__pub_arg": [ + "test-name", + {"comment": "Committed via salt", "confirm": 5}, + ], + "__pub_fun": "junos.set_hostname", + "__pub_jid": "20170220210915624885", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + + junos.set_hostname("test-name", **args) + mock_commit.assert_called_with(comment="Committed via salt", confirm=5) + + +def test_set_hostname_successful_return_message(): + with patch("jnpr.junos.utils.config.Config.load") as mock_load, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit: + mock_commit_check.return_value = True + args = { + "comment": "Committed via salt", + "__pub_user": "root", + "__pub_arg": ["test-name", {"comment": "Committed via salt"}], + "__pub_fun": "junos.set_hostname", + "__pub_jid": "20170220210915624885", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + ret = dict() + ret["message"] = "Successfully changed hostname." 
+ ret["out"] = True + assert junos.set_hostname("test-name", **args) == ret + + +def test_set_hostname_raise_exception_for_commit(): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit: + mock_commit.side_effect = raise_exception + ret = dict() + ret[ + "message" + ] = 'Successfully loaded host-name but commit failed with "Test exception"' + ret["out"] = False + assert junos.set_hostname("test-name") == ret + + +def test_set_hostname_fail_commit_check(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch("salt.modules.junos.rollback") as mock_rollback: + mock_commit_check.return_value = False + ret = dict() + ret["out"] = False + ret["message"] = "Successfully loaded host-name but pre-commit check failed." + assert junos.set_hostname("test") == ret + + +def test_commit_without_args(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit: + mock_commit.return_value = True + mock_commit_check.return_value = True + ret = dict() + ret["message"] = "Commit Successful." 
+ ret["out"] = True + assert junos.commit() == ret + + +def test_commit_raise_commit_check_exception(): + with patch("jnpr.junos.utils.config.Config.commit_check") as mock_commit_check: + mock_commit_check.side_effect = raise_exception + ret = dict() + ret["message"] = 'Could not perform commit check due to "Test exception"' + ret["out"] = False + assert junos.commit() == ret + + +def test_commit_raise_commit_exception(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit: + mock_commit_check.return_value = True + mock_commit.side_effect = raise_exception + ret = dict() + ret["out"] = False + ret[ + "message" + ] = 'Commit check succeeded but actual commit failed with "Test exception"' + assert junos.commit() == ret + + +def test_commit_with_single_argument(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit: + mock_commit_check.return_value = True + args = { + "__pub_user": "root", + "__pub_arg": [{"sync": True}], + "sync": True, + "__pub_fun": "junos.commit", + "__pub_jid": "20170221182531323467", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.commit(**args) + mock_commit.assert_called_with(detail=False, sync=True) + + +def test_commit_with_multiple_arguments(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit: + mock_commit_check.return_value = True + args = { + "comment": "comitted via salt", + "__pub_user": "root", + "__pub_arg": [ + {"comment": "comitted via salt", "confirm": 3, "detail": True} + ], + "confirm": 3, + "detail": True, + "__pub_fun": "junos.commit", + "__pub_jid": "20170221182856987820", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.commit(**args) + 
mock_commit.assert_called_with( + comment="comitted via salt", detail=True, confirm=3 + ) + + +def test_commit_pyez_commit_returning_false(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit: + mock_commit.return_value = False + mock_commit_check.return_value = True + ret = dict() + ret["message"] = "Commit failed." + ret["out"] = False + assert junos.commit() == ret + + +def test_commit_pyez_commit_check_returns_false(): + with patch("jnpr.junos.utils.config.Config.commit_check") as mock_commit_check: + mock_commit_check.return_value = False + ret = dict() + ret["out"] = False + ret["message"] = "Pre-commit check failed." + assert junos.commit() == ret + + +def test_rollback_exception(): + with patch("jnpr.junos.utils.config.Config.rollback") as mock_rollback: + mock_rollback.side_effect = raise_exception + ret = dict() + ret["message"] = 'Rollback failed due to "Test exception"' + ret["out"] = False + assert junos.rollback() == ret + + +def test_rollback_without_args_success(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback: + mock_commit_check.return_value = True + mock_rollback.return_value = True + ret = dict() + ret["message"] = "Rollback successful" + ret["out"] = True + assert junos.rollback() == ret + + +def test_rollback_without_args_fail(): + with patch("jnpr.junos.utils.config.Config.rollback") as mock_rollback: + mock_rollback.return_value = False + ret = dict() + ret["message"] = "Rollback failed" + ret["out"] = False + assert junos.rollback() == ret + + +def test_rollback_with_id(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit, patch( + 
"jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback: + mock_commit_check.return_value = True + junos.rollback(id=5) + mock_rollback.assert_called_with(5) + + +def test_rollback_with_id_and_single_arg(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback: + mock_commit_check.return_value = True + args = { + "__pub_user": "root", + "__pub_arg": [2, {"confirm": 2}], + "confirm": 2, + "__pub_fun": "junos.rollback", + "__pub_jid": "20170221184518526067", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.rollback(id=2, **args) + mock_rollback.assert_called_with(2) + mock_commit.assert_called_with(confirm=2) + + +def test_rollback_with_id_and_multiple_args(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback: + mock_commit_check.return_value = True + args = { + "comment": "Comitted via salt", + "__pub_user": "root", + "__pub_arg": [ + 2, + {"comment": "Comitted via salt", "dev_timeout": 40, "confirm": 1}, + ], + "confirm": 1, + "__pub_fun": "junos.rollback", + "__pub_jid": "20170221192708251721", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.rollback(id=2, **args) + mock_rollback.assert_called_with(2) + mock_commit.assert_called_with( + comment="Comitted via salt", confirm=1, dev_timeout=40 + ) + + +def test_rollback_with_only_single_arg(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback: + mock_commit_check.return_value = True + args = { + "__pub_user": 
"root", + "__pub_arg": [{"sync": True}], + "sync": True, + "__pub_fun": "junos.rollback", + "__pub_jid": "20170221193615696475", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.rollback(**args) + mock_rollback.assert_called_once_with(0) + mock_commit.assert_called_once_with(sync=True) + + +def test_rollback_with_only_multiple_args_no_id(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback: + mock_commit_check.return_value = True + args = { + "comment": "Comitted via salt", + "__pub_user": "root", + "__pub_arg": [{"comment": "Comitted via salt", "confirm": 3, "sync": True}], + "confirm": 3, + "sync": True, + "__pub_fun": "junos.rollback", + "__pub_jid": "20170221193945996362", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.rollback(**args) + mock_rollback.assert_called_with(0) + mock_commit.assert_called_once_with( + sync=True, confirm=3, comment="Comitted via salt" + ) + + +def test_rollback_with_diffs_file_option_when_diff_is_None(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback, patch( + "salt.utils.files.fopen" + ) as mock_fopen, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff: + mock_commit_check.return_value = True + mock_diff.return_value = "diff" + args = { + "__pub_user": "root", + "__pub_arg": [{"diffs_file": "/home/regress/diff", "confirm": 2}], + "confirm": 2, + "__pub_fun": "junos.rollback", + "__pub_jid": "20170221205153884009", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + "diffs_file": "/home/regress/diff", + } + junos.rollback(**args) + 
mock_fopen.assert_called_with("/home/regress/diff", "w") + + +def test_rollback_with_diffs_file_option(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback, patch( + "salt.utils.files.fopen" + ) as mock_fopen, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff: + mock_commit_check.return_value = True + mock_diff.return_value = None + args = { + "__pub_user": "root", + "__pub_arg": [{"diffs_file": "/home/regress/diff", "confirm": 2}], + "confirm": 2, + "__pub_fun": "junos.rollback", + "__pub_jid": "20170221205153884009", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + "diffs_file": "/home/regress/diff", + } + junos.rollback(**args) + assert not mock_fopen.called + + +def test_rollback_commit_check_exception(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback: + mock_commit_check.side_effect = raise_exception + ret = dict() + ret["message"] = 'Could not commit check due to "Test exception"' + ret["out"] = False + assert junos.rollback() == ret + + +def test_rollback_commit_exception(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback: + mock_commit_check.return_value = True + mock_commit.side_effect = raise_exception + ret = dict() + ret[ + "message" + ] = 'Rollback successful but commit failed with error "Test exception"' + ret["out"] = False + assert junos.rollback() == ret + + +def test_rollback_commit_check_fails(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as 
mock_rollback: + mock_commit_check.return_value = False + ret = dict() + ret["message"] = "Rollback successful but pre-commit check failed." + ret["out"] = False + assert junos.rollback() == ret + + +def test_diff_without_args(): + with patch("jnpr.junos.utils.config.Config.diff") as mock_diff: + junos.diff() + mock_diff.assert_called_with(rb_id=0) + + +def test_diff_with_arg(): + with patch("jnpr.junos.utils.config.Config.diff") as mock_diff: + junos.diff(id=2) + mock_diff.assert_called_with(rb_id=2) + + +def test_diff_exception(): + with patch("jnpr.junos.utils.config.Config.diff") as mock_diff: + mock_diff.side_effect = raise_exception + ret = dict() + ret["message"] = 'Could not get diff with error "Test exception"' + ret["out"] = False + assert junos.diff() == ret + + +def test_ping_without_args(): + ret = dict() + ret["message"] = "Please specify the destination ip to ping." + ret["out"] = False + assert junos.ping() == ret + + +def test_ping(): + with patch("jnpr.junos.device.Device.execute") as mock_execute: + junos.ping("1.1.1.1") + args = mock_execute.call_args + rpc = b"1.1.1.15" + mydgm = etree.tostring(args[0][0]) + assert etree.tostring(args[0][0]) == rpc + + +def test_ping_ttl(): + with patch("jnpr.junos.device.Device.execute") as mock_execute: + args = { + "__pub_user": "sudo_drajvi", + "__pub_arg": ["1.1.1.1", {"ttl": 3}], + "__pub_fun": "junos.ping", + "__pub_jid": "20170306165237683279", + "__pub_tgt": "mac_min", + "ttl": 3, + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.ping("1.1.1.1", **args) + exec_args = mock_execute.call_args + rpc = b"1.1.1.135" + assert etree.tostring(exec_args[0][0]) == rpc + + +def test_ping_exception(): + with patch("jnpr.junos.device.Device.execute") as mock_execute: + mock_execute.side_effect = raise_exception + ret = dict() + ret["message"] = 'Execution failed due to "Test exception"' + ret["out"] = False + assert junos.ping("1.1.1.1") == ret + + +def test_cli_without_args(): + ret = dict() + 
ret["message"] = "Please provide the CLI command to be executed." + ret["out"] = False + assert junos.cli() == ret + + +def test_cli_with_format_as_empty_string(): + with patch("jnpr.junos.device.Device.cli") as mock_cli: + junos.cli("show version", format="") + mock_cli.assert_called_with("show version", "text", warning=False) + + +def test_cli(): + with patch("jnpr.junos.device.Device.cli") as mock_cli: + mock_cli.return_vale = "CLI result" + ret = dict() + ret["message"] = "CLI result" + ret["out"] = True + junos.cli("show version") + mock_cli.assert_called_with("show version", "text", warning=False) + + +def test_cli_format_xml(): + with patch("salt.modules.junos.jxmlease.parse") as mock_jxml, patch( + "salt.modules.junos.etree.tostring" + ) as mock_to_string, patch("jnpr.junos.device.Device.cli") as mock_cli: + mock_cli.return_value = "test" + mock_jxml.return_value = "test" + args = { + "__pub_user": "root", + "__pub_arg": [{"format": "xml"}], + "format": "xml", + "__pub_fun": "junos.cli", + "__pub_jid": "20170221182531323467", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + ret = dict() + ret["message"] = "test" + ret["out"] = True + assert junos.cli("show version", **args) == ret + mock_cli.assert_called_with("show version", "xml", warning=False) + mock_to_string.assert_called_once_with("test") + assert mock_jxml.called + + +def test_cli_exception_in_cli(): + with patch("jnpr.junos.device.Device.cli") as mock_cli: + mock_cli.side_effect = raise_exception + ret = dict() + ret["message"] = 'Execution failed due to "Test exception"' + ret["out"] = False + assert junos.cli("show version") == ret + + +def test_cli_output_save(): + with patch("jnpr.junos.device.Device.cli") as mock_cli, patch( + "salt.utils.files.fopen" + ) as mock_fopen: + mock_cli.return_value = "Test return" + args = { + "__pub_user": "root", + "__pub_arg": [{"format": "text", "dest": "/path/to/file"}], + "format": "text", + "dest": "/path/to/file", + 
"__pub_fun": "junos.cli", + "__pub_jid": "20170221182531323467", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + ret = dict() + ret["message"] = "Test return" + ret["out"] = True + assert junos.cli("show version", **args) == ret + mock_fopen.assert_called_with("/path/to/file", "w") + mock_cli.assert_called_with("show version", "text", warning=False) + + +def test_cli_output_save_ioexception(): + with patch("jnpr.junos.device.Device.cli") as mock_cli, patch( + "salt.utils.files.fopen" + ) as mock_fopen: + mock_cli.return_value = "Test return" + mock_fopen.side_effect = IOError() + args = { + "__pub_user": "root", + "__pub_arg": [{"format": "text", "dest": "/path/to/file"}], + "format": "text", + "dest": "/path/to/file", + "__pub_fun": "junos.cli", + "__pub_jid": "20170221182531323467", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + ret = dict() + ret["message"] = 'Unable to open "/path/to/file" to write' + ret["out"] = False + assert junos.cli("show version", **args) == ret + + +def test_shutdown_without_args(): + ret = dict() + ret["message"] = "Provide either one of the arguments: shutdown or reboot." + ret["out"] = False + assert junos.shutdown() == ret + + +def test_shutdown_with_reboot_args(): + with patch("salt.modules.junos.SW.reboot") as mock_reboot: + ret = dict() + ret["message"] = "Successfully powered off/rebooted." + ret["out"] = True + args = { + "__pub_user": "root", + "__pub_arg": [{"reboot": True}], + "reboot": True, + "__pub_fun": "junos.shutdown", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + assert junos.shutdown(**args) == ret + assert mock_reboot.called + + +def test_shutdown_with_poweroff_args(): + with patch("salt.modules.junos.SW.poweroff") as mock_poweroff: + ret = dict() + ret["message"] = "Successfully powered off/rebooted." 
+ ret["out"] = True + args = { + "__pub_user": "root", + "__pub_arg": [{"shutdown": True}], + "reboot": True, + "__pub_fun": "junos.shutdown", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + assert junos.shutdown(**args) == ret + assert mock_poweroff.called + + +def test_shutdown_with_shutdown_as_false(): + ret = dict() + ret["message"] = "Nothing to be done." + ret["out"] = False + args = { + "__pub_user": "root", + "__pub_arg": [{"shutdown": False}], + "reboot": True, + "__pub_fun": "junos.shutdown", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + assert junos.shutdown(**args) == ret + + +def test_shutdown_with_in_min_arg(): + with patch("salt.modules.junos.SW.poweroff") as mock_poweroff: + args = { + "__pub_user": "root", + "in_min": 10, + "__pub_arg": [{"in_min": 10, "shutdown": True}], + "reboot": True, + "__pub_fun": "junos.shutdown", + "__pub_jid": "20170222231445709212", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.shutdown(**args) + mock_poweroff.assert_called_with(in_min=10) + + +def test_shutdown_with_at_arg(): + with patch("salt.modules.junos.SW.reboot") as mock_reboot: + args = { + "__pub_user": "root", + "__pub_arg": [{"at": "12:00 pm", "reboot": True}], + "reboot": True, + "__pub_fun": "junos.shutdown", + "__pub_jid": "201702276857", + "at": "12:00 pm", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.shutdown(**args) + mock_reboot.assert_called_with(at="12:00 pm") + + +def test_shutdown_fail_with_exception(): + with patch("salt.modules.junos.SW.poweroff") as mock_poweroff: + mock_poweroff.side_effect = raise_exception + args = { + "__pub_user": "root", + "__pub_arg": [{"shutdown": True}], + "shutdown": True, + "__pub_fun": "junos.shutdown", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + 
"__pub_ret": "", + } + ret = dict() + ret["message"] = 'Could not poweroff/reboot because "Test exception"' + ret["out"] = False + assert junos.shutdown(**args) == ret + + +def test_install_config_without_args(): + ret = dict() + ret["message"] = "Please provide the salt path where the configuration is present" + ret["out"] = False + assert junos.install_config() == ret + + +def test_install_config_cp_fails(): + with patch.dict( + junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} + ): + ret = dict() + ret = dict() + ret["message"] = "Invalid file path." + ret["out"] = False + assert junos.install_config("path") == ret + + +def test_install_config_file_cp_fails(): + with patch.dict( + junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} + ): + ret = dict() + ret = dict() + ret["message"] = "Invalid file path." + ret["out"] = False + assert junos.install_config("path") == ret + + +def test_install_config(): + with patch.dict( + junos.__salt__, + { + "cp.is_cached": MagicMock(return_value="test/path/config"), + "cp.hash_file": MagicMock( + return_value={"hash_type": "sha256", "hsum": "a386e49c17"} + ), + "file.get_hash": MagicMock(return_value="a386e49c17"), + }, + ): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + + ret = dict() + 
ret["message"] = "Successfully loaded and committed!" + ret["out"] = True + assert junos.install_config("salt://actual/path/config.set") == ret + mock_load.assert_called_with(path="test/path/config", format="set") + + +def test_install_config_xml_file(): + with patch.dict( + junos.__salt__, + { + "cp.is_cached": MagicMock(return_value="test/path/config"), + "cp.hash_file": MagicMock( + return_value={"hash_type": "sha256", "hsum": "a386e49c17"} + ), + "file.get_hash": MagicMock(return_value="a386e49c17"), + }, + ): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + + ret = dict() + ret["message"] = "Successfully loaded and committed!" 
+ ret["out"] = True + assert junos.install_config("salt://actual/path/config.xml") == ret + mock_load.assert_called_with(path="test/path/config", format="xml") + + +def test_install_config_text_file(): + with patch.dict( + junos.__salt__, + { + "cp.is_cached": MagicMock(return_value="test/path/config"), + "cp.hash_file": MagicMock( + return_value={"hash_type": "sha256", "hsum": "a386e49c17"} + ), + "file.get_hash": MagicMock(return_value="a386e49c17"), + }, + ): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + + ret = dict() + ret["message"] = "Successfully loaded and committed!" 
+ ret["out"] = True + assert junos.install_config("salt://actual/path/config") == ret + mock_load.assert_called_with(path="test/path/config", format="text") + + +def test_install_config_cache_not_exists(): + with patch.dict( + junos.__salt__, + { + "cp.is_cached": MagicMock(return_value=None), + "file.rmdir": MagicMock(return_value="True"), + }, + ): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "tempfile.mkdtemp" + ) as mock_mkdtemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + mock_mkdtemp.return_value = "/tmp/argr5351afd" + + ret = dict() + ret["message"] = "Successfully loaded and committed!" 
+ ret["out"] = True + assert ( + junos.install_config("salt://actual/path/config", template_vars=True) + == ret + ) + mock_mkstemp.assert_called_with() + + +def test_install_config_replace(): + with patch.dict( + junos.__salt__, + { + "cp.is_cached": MagicMock(return_value="test/path/config"), + "cp.hash_file": MagicMock( + return_value={"hash_type": "sha256", "hsum": "a386e49c17"} + ), + "file.get_hash": MagicMock(return_value="a386e49c17"), + }, + ): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + + args = { + "__pub_user": "root", + "__pub_arg": [{"replace": True}], + "replace": True, + "__pub_fun": "junos.install_config", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + + ret = dict() + ret["message"] = "Successfully loaded and committed!" 
+ ret["out"] = True + assert junos.install_config("salt://actual/path/config.set", **args) == ret + mock_load.assert_called_with( + path="test/path/config", format="set", merge=False + ) + + +def test_install_config_overwrite(): + with patch.dict( + junos.__salt__, + { + "cp.is_cached": MagicMock(return_value="test/path/config"), + "cp.hash_file": MagicMock( + return_value={"hash_type": "sha256", "hsum": "a386e49c17"} + ), + "file.get_hash": MagicMock(return_value="a386e49c17"), + }, + ): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + + args = { + "__pub_user": "root", + "__pub_arg": [{"overwrite": True}], + "overwrite": True, + "__pub_fun": "junos.install_config", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + + ret = dict() + ret["message"] = "Successfully loaded and committed!" 
+ ret["out"] = True + assert junos.install_config("salt://actual/path/config.xml", **args) == ret + mock_load.assert_called_with( + path="test/path/config", format="xml", overwrite=True + ) + + +def test_install_config_overwrite_false(): + with patch.dict( + junos.__salt__, + { + "cp.is_cached": MagicMock(return_value="test/path/config"), + "cp.hash_file": MagicMock( + return_value={"hash_type": "sha256", "hsum": "a386e49c17"} + ), + "file.get_hash": MagicMock(return_value="a386e49c17"), + }, + ): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + + args = { + "__pub_user": "root", + "__pub_arg": [{"overwrite": False}], + "overwrite": False, + "__pub_fun": "junos.install_config", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + + ret = dict() + ret["message"] = "Successfully loaded and committed!" 
+ ret["out"] = True + assert junos.install_config("salt://actual/path/config", **args) == ret + mock_load.assert_called_with( + path="test/path/config", format="text", merge=True + ) + + +def test_install_config_load_causes_exception(): + with patch("jnpr.junos.utils.config.Config.diff") as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch("salt.utils.files.safe_rm") as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_load.side_effect = raise_exception + ret = dict() + ret["message"] = 'Could not load configuration due to : "Test exception"' + ret["format"] = "set" + ret["out"] = False + assert junos.install_config(path="actual/path/config.set") == ret + + +def test_install_config_no_diff(): + with patch("jnpr.junos.utils.config.Config.diff") as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch("salt.utils.files.safe_rm") as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = None + ret = dict() + ret["message"] = "Configuration already applied!" 
+ ret["out"] = True + assert junos.install_config("actual/path/config") == ret + + +def test_install_config_write_diff(): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen" + ) as mock_fopen, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + + args = { + "__pub_user": "root", + "__pub_arg": [{"diffs_file": "copy/config/here"}], + "diffs_file": "copy/config/here", + "__pub_fun": "junos.install_config", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + + ret = dict() + ret["message"] = "Successfully loaded and committed!" 
+ ret["out"] = True + assert junos.install_config("actual/path/config", **args) == ret + mock_fopen.assert_called_with("copy/config/here", "w") + + +def test_install_config_write_diff_exception(): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as mock_fopen, patch( + "salt.utils.stringutils.to_str" + ) as mock_strgutils, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + mock_strgutils.side_effect = raise_exception + + args = { + "__pub_user": "root", + "__pub_arg": [{"diffs_file": "copy/config/here"}], + "diffs_file": "copy/config/here", + "__pub_fun": "junos.install_config", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + + ret = dict() + ret["message"] = "Could not write into diffs_file due to: 'Test exception'" + ret["out"] = False + assert junos.install_config("actual/path/config", **args) == ret + + +def test_install_config_commit_params(): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + 
) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + args = { + "comment": "comitted via salt", + "__pub_user": "root", + "__pub_arg": [{"comment": "comitted via salt", "confirm": 3}], + "confirm": 3, + "__pub_fun": "junos.commit", + "__pub_jid": "20170221182856987820", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + ret = dict() + ret["message"] = "Successfully loaded and committed!" + ret["out"] = True + assert junos.install_config("actual/path/config", **args) == ret + mock_commit.assert_called_with(comment="comitted via salt", confirm=3) + + +def test_install_config_commit_check_fails(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = False + + ret = dict() + ret["message"] = ( + "Loaded configuration but commit check failed, hence rolling back" + " configuration." 
+ ) + ret["out"] = False + assert junos.install_config("actual/path/config.xml") == ret + + +def test_install_config_commit_exception(): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + mock_commit.side_effect = raise_exception + ret = dict() + ret[ + "message" + ] = 'Commit check successful but commit failed with "Test exception"' + ret["out"] = False + assert junos.install_config("actual/path/config") == ret + + +def test_install_config_test_mode(): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + ret = dict() + ret["message"] = ( + "Commit check passed, but skipping commit 
for dry-run and rolling back" + " configuration." + ) + ret["out"] = True + assert junos.install_config("actual/path/config", test=True) == ret + mock_commit.assert_not_called() + + +def test_install_config_write_diff_dynamic_mode(): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + ret = dict() + ret[ + "message" + ] = "Write diff is not supported with dynamic/ephemeral configuration mode" + ret["out"] = False + assert ( + junos.install_config( + "actual/path/config", mode="dynamic", diffs_file="/path/to/dif" + ) + == ret + ) + mock_commit.assert_not_called() + + +def test_install_config_unknown_mode(): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + 
mock_diff.return_value = "diff" + mock_commit_check.return_value = True + ret = dict() + ret["message"] = "install_config failed due to: unsupported action: abcdef" + ret["out"] = False + assert junos.install_config("actual/path/config", mode="abcdef") == ret + mock_commit.assert_not_called() + + +def test_zeroize(): + with patch("jnpr.junos.device.Device.cli") as mock_cli: + result = junos.zeroize() + ret = dict() + ret["out"] = True + ret["message"] = "Completed zeroize and rebooted" + mock_cli.assert_called_once_with("request system zeroize") + assert result == ret + + +def test_zeroize_throw_exception(): + with patch("jnpr.junos.device.Device.cli") as mock_cli: + mock_cli.side_effect = raise_exception + ret = dict() + ret["message"] = 'Could not zeroize due to : "Test exception"' + ret["out"] = False + assert junos.zeroize() == ret + + +def test_install_os_without_args(): + ret = dict() + ret["message"] = "Please provide the salt path where the junos image is present." + ret["out"] = False + assert junos.install_os() == ret + + +def test_install_os_cp_fails(): + with patch.dict( + junos.__salt__, + { + "cp.is_cached": MagicMock(return_value="/pat/to/tmp/file"), + "cp.hash_file": MagicMock( + return_value={"hash_type": "sha256", "hsum": "a386e49c17"} + ), + "file.get_hash": MagicMock(return_value="xxxx"), + "file.rmdir": MagicMock(return_value="True"), + }, + ): + with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "os.path.getsize" + ) as mock_getsize: + mock_getsize.return_value = 10 + mock_isfile.return_value = False + mock_install.return_value = ( + False, + "Invalid path. Please provide a valid image path", + ) + ret = dict() + ret["message"] = ( + "Installation failed. Reason: Invalid path. 
Please provide a valid" + " image path" + ) + ret["out"] = False + assert junos.install_os("salt://image/path/") == ret + + +def test_install_os_image_cp_fails(): + with patch.dict( + junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} + ): + ret = dict() + ret["message"] = "Invalid path. Please provide a valid image path" + ret["out"] = False + assert junos.install_os("/image/path/") == ret + + +def test_install_os(): + with patch.dict( + junos.__salt__, + { + "cp.is_cached": MagicMock(return_value="test/path/config"), + "cp.hash_file": MagicMock( + return_value={"hash_type": "sha256", "hsum": "a386e49c17"} + ), + "file.get_hash": MagicMock(return_value="a386e49c17"), + }, + ): + with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "os.path.getsize" + ) as mock_getsize: + mock_getsize.return_value = 10 + mock_isfile.return_value = True + mock_install.return_value = True, "installed" + ret = dict() + ret["out"] = True + ret["message"] = "Installed the os." + assert junos.install_os("path") == ret + + +def test_install_os_failure(): + with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "os.path.getsize" + ) as mock_getsize: + mock_getsize.return_value = 10 + mock_isfile.return_value = True + mock_install.return_value = False, "because we are testing failure" + ret = dict() + ret["out"] = False + ret["message"] = "Installation failed. 
Reason: because we are testing failure" + assert junos.install_os("path") == ret + + +def test_install_os_with_reboot_arg(): + with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( + "jnpr.junos.utils.sw.SW.reboot" + ) as mock_reboot, patch("salt.utils.files.safe_rm") as mock_safe_rm, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "os.path.getsize" + ) as mock_getsize: + mock_getsize.return_value = 10 + mock_isfile.return_value = True + mock_install.return_value = True, "installed" + args = { + "__pub_user": "root", + "__pub_arg": [{"reboot": True}], + "reboot": True, + "__pub_fun": "junos.install_os", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + ret = dict() + ret["message"] = "Successfully installed and rebooted!" + ret["out"] = True + assert junos.install_os("path", **args) == ret + + +def test_install_os_pyez_install_throws_exception(): + with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "os.path.getsize" + ) as mock_getsize: + mock_getsize.return_value = 10 + mock_isfile.return_value = True + mock_install.side_effect = raise_exception + ret = dict() + ret["message"] = 'Installation failed due to: "Test exception"' + ret["out"] = False + assert junos.install_os("path") == ret + + +def test_install_os_with_reboot_raises_exception(): + with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( + "jnpr.junos.utils.sw.SW.reboot" + ) as mock_reboot, patch("salt.utils.files.safe_rm") as mock_safe_rm, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + 
"salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "os.path.getsize" + ) as mock_getsize: + mock_getsize.return_value = 10 + mock_isfile.return_value = True + mock_install.return_value = True, "installed" + mock_reboot.side_effect = raise_exception + args = { + "__pub_user": "root", + "__pub_arg": [{"reboot": True}], + "reboot": True, + "__pub_fun": "junos.install_os", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + ret = dict() + ret[ + "message" + ] = 'Installation successful but reboot failed due to : "Test exception"' + ret["out"] = False + assert junos.install_os("path", **args) == ret + + +def test_install_os_no_copy(): + with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "os.path.getsize" + ) as mock_getsize: + mock_getsize.return_value = 10 + mock_isfile.return_value = True + mock_install.return_value = True, "installed" + ret = dict() + ret["out"] = True + ret["message"] = "Installed the os." 
+        assert junos.install_os("path", no_copy=True) == ret
+        mock_install.assert_called_with(
+            "path", no_copy=True, progress=True, timeout=1800
+        )
+        mock_mkstemp.assert_not_called()
+        mock_safe_rm.assert_not_called()
+
+
+def test_install_os_issu():
+    with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch(
+        "salt.utils.files.safe_rm"
+    ) as mock_safe_rm, patch(
+        "salt.utils.files.fopen", mock_open(), create=True
+    ) as m_open, patch(
+        "salt.utils.files.mkstemp"
+    ) as mock_mkstemp, patch(
+        "os.path.isfile"
+    ) as mock_isfile, patch(
+        "os.path.getsize"
+    ) as mock_getsize:
+        mock_getsize.return_value = 10
+        mock_isfile.return_value = True
+        mock_install.return_value = True, "installed"
+        ret = dict()
+        ret["out"] = True
+        ret["message"] = "Installed the os."
+        assert junos.install_os("path", issu=True) == ret
+        mock_install.assert_called_with(ANY, issu=True, progress=True, timeout=1800)
+
+
+def test_install_os_add_params():
+    with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch(
+        "salt.utils.files.safe_rm"
+    ) as mock_safe_rm, patch(
+        "salt.utils.files.fopen", mock_open(), create=True
+    ) as m_open, patch(
+        "salt.utils.files.mkstemp"
+    ) as mock_mkstemp, patch(
+        "os.path.isfile"
+    ) as mock_isfile, patch(
+        "os.path.getsize"
+    ) as mock_getsize:
+        mock_getsize.return_value = 10
+        mock_isfile.return_value = True
+        mock_install.return_value = True, "installed"
+        ret = dict()
+        ret["out"] = True
+        ret["message"] = "Installed the os."
+ remote_path = "/path/to/file" + assert ( + junos.install_os("path", remote_path=remote_path, nssu=True, validate=True) + == ret + ) + mock_install.assert_called_with( + ANY, + nssu=True, + remote_path=remote_path, + progress=True, + validate=True, + timeout=1800, + ) + + +def test_file_copy_without_args(): + pytest.raises(TypeError, junos.file_copy) + + +@patch("paramiko.SSHClient") +@patch("scp.SCPClient.put") +@patch("scp.SCPClient.__init__") +def test_file_copy_invalid_src(mock_scpclient, mock_put, mock_ssh): + mock_scpclient.return_value = None + invalid_path = "invalid/file/path" + mock_put.side_effect = Exception(invalid_path) + with patch("os.path.isfile") as mock_isfile: + mock_isfile.return_value = False + ret = dict() + ret["message"] = 'Could not copy file : "invalid/file/path"' + ret["out"] = False + assert junos.file_copy(invalid_path, "file") == ret + + +def test_file_copy_without_dest(): + pytest.raises(TypeError, junos.file_copy, src="/home/user/config.set") + + +def test_file_copy(): + with patch("salt.modules.junos.SCP") as mock_scp, patch( + "os.path.isfile" + ) as mock_isfile: + mock_isfile.return_value = True + ret = dict() + ret["message"] = "Successfully copied file from test/src/file to file" + ret["out"] = True + assert junos.file_copy(dest="file", src="test/src/file") == ret + + +def test_file_copy_exception(): + with patch("salt.modules.junos.SCP") as mock_scp, patch( + "os.path.isfile" + ) as mock_isfile: + mock_isfile.return_value = True + mock_scp.side_effect = raise_exception + ret = dict() + ret["message"] = 'Could not copy file : "Test exception"' + ret["out"] = False + assert junos.file_copy(dest="file", src="test/src/file") == ret + + +# These test cases test the __virtual__ function, used internally by salt +# to check if the given module is loadable. This function is not used by +# an external user. 
+ + +def test_virtual_proxy_unavailable(): + with patch.dict(junos.__opts__, {}): + res = ( + False, + "The junos or dependent module could not be loaded: " + "junos-eznc or jxmlease or yamlordereddictloader or " + "proxy could not be loaded.", + ) + assert junos.__virtual__() == res + + +def test_virtual_all_true(): + with patch.dict(junos.__opts__, {"proxy": "test"}): + assert junos.__virtual__() == "junos" + + +def test_rpc_without_args(): + ret = dict() + ret["message"] = "Please provide the rpc to execute." + ret["out"] = False + assert junos.rpc() == ret + + +def test_rpc_get_config_exception(): + with patch("jnpr.junos.device.Device.execute") as mock_execute: + mock_execute.side_effect = raise_exception + ret = dict() + ret["message"] = 'RPC execution failed due to "Test exception"' + ret["out"] = False + assert junos.rpc("get_config") == ret + + +def test_rpc_get_config_filter(): + with patch("jnpr.junos.device.Device.execute") as mock_execute: + mock_execute.return_value = etree.XML("") + args = { + "__pub_user": "root", + "__pub_arg": [ + "get-config", + {"filter": ""}, + ], + "__pub_fun": "junos.rpc", + "__pub_jid": "20170314162715866528", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "filter": "", + "__pub_ret": "", + } + junos.rpc("get-config", **args) + exec_args = mock_execute.call_args + expected_rpc = b'' + assert etree.tostring(exec_args[0][0]) == expected_rpc + + +def test_rpc_get_interface_information(): + with patch("jnpr.junos.device.Device.execute") as mock_execute: + junos.rpc("get-interface-information", format="json") + args = mock_execute.call_args + expected_rpc = b'' + assert etree.tostring(args[0][0]) == expected_rpc + + +def test_rpc_get_interface_information_with_kwargs(): + with patch("jnpr.junos.device.Device.execute") as mock_execute: + args = { + "__pub_user": "root", + "__pub_arg": [ + "get-interface-information", + "", + "text", + {"terse": True, "interface_name": "lo0", "format": "text"}, + ], + "format": "text", + 
"terse": True, + "__pub_fun": "junos.rpc", + "__pub_jid": "20170314160943363563", + "__pub_tgt": "mac_min", + "interface_name": "lo0", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.rpc("get-interface-information", **args) + args = mock_execute.call_args + expected_rpc = b'lo0' + assert etree.tostring(args[0][0]) == expected_rpc + + +def test_rpc_get_chassis_inventory_filter_as_arg(): + with patch("salt.modules.junos.jxmlease.parse") as mock_jxmlease, patch( + "salt.modules.junos.etree.tostring" + ) as mock_tostring, patch( + "salt.modules.junos.logging.Logger.warning" + ) as mock_warning, patch( + "jnpr.junos.device.Device.execute" + ) as mock_execute: + junos.rpc( + "get-chassis-inventory", + filter="", + ) + mock_warning.assert_called_with( + 'Filter ignored as it is only used with "get-config" rpc' + ) + + +def test_rpc_get_interface_information_exception(): + with patch("jnpr.junos.device.Device.execute") as mock_execute: + mock_execute.side_effect = raise_exception + ret = dict() + ret["message"] = 'RPC execution failed due to "Test exception"' + ret["out"] = False + assert junos.rpc("get_interface_information") == ret + + +def test_rpc_write_file_format_text(): + with patch("jnpr.junos.device.Device.execute") as mock_execute: + mock_execute.return_value = etree.XML("text rpc reply") + with patch("salt.utils.files.fopen", mock_open(), create=True) as m_open: + junos.rpc("get-chassis-inventory", dest="/path/to/file", format="text") + writes = m_open.write_calls() + assert writes == ["text rpc reply"], writes + + +def test_rpc_write_file_format_json(): + with patch("jnpr.junos.device.Device.execute") as mock_execute, patch( + "salt.utils.json.dumps" + ) as mock_dumps: + mock_dumps.return_value = "json rpc reply" + with patch("salt.utils.files.fopen", mock_open(), create=True) as m_open: + junos.rpc("get-chassis-inventory", dest="/path/to/file", format="json") + writes = m_open.write_calls() + assert writes == ["json rpc reply"], writes + + +def 
test_rpc_write_file(): + with patch("salt.modules.junos.jxmlease.parse") as mock_parse, patch( + "salt.modules.junos.etree.tostring" + ) as mock_tostring, patch("jnpr.junos.device.Device.execute") as mock_execute: + mock_tostring.return_value = "xml rpc reply" + with patch("salt.utils.files.fopen", mock_open(), create=True) as m_open: + junos.rpc("get-chassis-inventory", dest="/path/to/file") + writes = m_open.write_calls() + assert writes == ["xml rpc reply"], writes + + +def test_lock_success(): + ret_exp = {"out": True, "message": "Successfully locked the configuration."} + ret = junos.lock() + assert ret == ret_exp + + +def test_lock_error(): + ret_exp = {"out": False, "message": 'Could not gain lock due to : "LockError"'} + with patch("jnpr.junos.utils.config.Config.lock") as mock_lock: + mock_lock.side_effect = LockError(None) + ret = junos.lock() + assert ret == ret_exp + + +def test_unlock_success(): + ret_exp = {"out": True, "message": "Successfully unlocked the configuration."} + ret = junos.unlock() + assert ret == ret_exp + + +def test_unlock_error(): + ret_exp = { + "out": False, + "message": 'Could not unlock configuration due to : "UnlockError"', + } + with patch("jnpr.junos.utils.config.Config.unlock") as mock_unlock: + mock_unlock.side_effect = UnlockError(None) + ret = junos.unlock() + assert ret == ret_exp + + +def test_load_none_path(): + ret_exp = { + "out": False, + "message": ("Please provide the salt path where the configuration is present"), + } + ret = junos.load() + assert ret == ret_exp + + +def test_load_wrong_tmp_file(): + ret_exp = { + "out": False, + "message": ( + 'Could not load configuration due to : "[Errno 2] No such file or' + " directory: '/pat/to/tmp/file'\"" + ), + "format": "text", + } + with patch.dict( + junos.__salt__, + { + "cp.is_cached": MagicMock(return_value="/pat/to/tmp/file"), + "cp.hash_file": MagicMock( + return_value={"hash_type": "sha256", "hsum": "a386e49c17"} + ), + "file.get_hash": 
MagicMock(return_value="a386e49c17"), + }, + ): + with patch("salt.utils.files.fopen", mock_open(), create=True) as m_open, patch( + "os.path.getsize" + ) as mock_getsize, patch("salt.utils.files.mkstemp") as mock_mkstmp: + mock_mkstmp.return_value = "/pat/to/tmp/file" + mock_getsize.return_value = 1000 + ret = junos.load("salt://path/to/file") + assert ret == ret_exp + + +def test_load_invalid_path(): + with patch("salt.utils.files.mkstemp") as mock_mkstmp: + mock_mkstmp.return_value = "/path/to/file" + pytest.raises(FileNotFoundError, junos.load, path="/path/to/file") + + +def test_load_no_extension(): + ret_exp = {"out": True, "message": "Successfully loaded the configuration."} + with patch("os.path.getsize") as mock_getsize, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstmp, patch( + "os.path.isfile" + ) as mock_isfile: + mock_getsize.return_value = 1000 + mock_mkstmp.return_value = "/path/to/file" + mock_isfile.return_value = True + ret = junos.load("/path/to/file") + mock_load.assert_called_with(format="text", path="/path/to/file") + assert ret == ret_exp + + +def test_load_xml_extension(): + ret_exp = {"out": True, "message": "Successfully loaded the configuration."} + with patch("os.path.getsize") as mock_getsize, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch("os.path.isfile") as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstmp: + mock_getsize.return_value = 1000 + mock_mkstmp.return_value = "/path/to/file.xml" + mock_isfile.return_value = True + ret = junos.load("/path/to/file.xml") + mock_load.assert_called_with(format="xml", path="/path/to/file.xml") + assert ret == ret_exp + + +def test_load_xml_extension_with_kwargs(): + ret_exp = {"out": True, "message": "Successfully loaded the 
configuration."} + with patch("os.path.getsize") as mock_getsize, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen" + ) as fopen, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstmp: + mock_mkstmp.return_value = "/path/to/file" + mock_isfile.return_value = True + ret = junos.load("/path/to/file.xml", template_vars=dict(hostname="test")) + mock_load.assert_called_with( + format="xml", path="/path/to/file", template_vars={"hostname": "test"} + ) + assert ret == ret_exp + + +def test_load_set_extension(): + ret_exp = {"out": True, "message": "Successfully loaded the configuration."} + with patch("os.path.getsize") as mock_getsize, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.isfile" + ) as mock_isfile: + mock_getsize.return_value = 1000 + mock_mkstmp.return_value = "/path/to/file.set" + mock_isfile.return_value = True + ret = junos.load("/path/to/file.set") + mock_load.assert_called_with(format="set", path="/path/to/file.set") + assert ret == ret_exp + + +def test_load_replace_true(): + ret_exp = {"out": True, "message": "Successfully loaded the configuration."} + with patch("os.path.getsize") as mock_getsize, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.isfile" + ) as mock_isfile: + mock_getsize.return_value = 1000 + mock_mkstmp.return_value = "/path/to/file" + mock_isfile.return_value = True + ret = junos.load("/path/to/file", replace=True) + mock_load.assert_called_with(format="text", merge=False, path="/path/to/file") + assert ret == ret_exp + + +def test_load_replace_false(): + ret_exp = {"out": 
True, "message": "Successfully loaded the configuration."} + with patch("os.path.getsize") as mock_getsize, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.isfile" + ) as mock_isfile: + mock_getsize.return_value = 1000 + mock_mkstmp.return_value = "/path/to/file" + mock_isfile.return_value = True + ret = junos.load("/path/to/file", replace=False) + mock_load.assert_called_with(format="text", replace=False, path="/path/to/file") + assert ret == ret_exp + + +def test_load_overwrite_true(): + ret_exp = {"out": True, "message": "Successfully loaded the configuration."} + with patch("os.path.getsize") as mock_getsize, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.isfile" + ) as mock_isfile: + mock_getsize.return_value = 1000 + mock_mkstmp.return_value = "/path/to/file" + mock_isfile.return_value = True + ret = junos.load("/path/to/file", overwrite=True) + mock_load.assert_called_with( + format="text", overwrite=True, path="/path/to/file" + ) + assert ret == ret_exp + + +def test_load_overwrite_false(): + ret_exp = {"out": True, "message": "Successfully loaded the configuration."} + with patch("os.path.getsize") as mock_getsize, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstmp, patch( + "os.path.isfile" + ) as mock_isfile: + mock_getsize.return_value = 1000 + mock_mkstmp.return_value = "/path/to/file" + mock_isfile.return_value = True + ret = junos.load("/path/to/file", overwrite=False) + mock_load.assert_called_with(format="text", merge=True, path="/path/to/file") + assert ret == ret_exp + + +def 
test_load_error(): + ret_exp = { + "out": False, + "format": "text", + "message": 'Could not load configuration due to : "Test Error"', + } + with patch("os.path.getsize") as mock_getsize, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.isfile" + ) as mock_isfile: + mock_getsize.return_value = 1000 + mock_mkstmp.return_value = "/path/to/file" + mock_isfile.return_value = True + mock_load.side_effect = Exception("Test Error") + ret = junos.load("/path/to/file") + assert ret == ret_exp + + +def test_load_template(): + ret_exp = { + "out": True, + "message": "Successfully loaded the configuration.", + } + with patch("os.path.getsize") as mock_getsize, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load: + ret = junos.load("tests/unit/modules/templates/basic2.j2", test=True) + assert ret == ret_exp + + +def test_commit_check_success(): + ret_exp = {"out": True, "message": "Commit check succeeded."} + ret = junos.commit_check() + assert ret == ret_exp + + +def test_commit_check_error(): + ret_exp = {"out": False, "message": "Commit check failed with "} + with patch("jnpr.junos.utils.config.Config.commit_check") as mock_check: + mock_check.side_effect = Exception + ret = junos.commit_check() + assert ret == ret_exp + + +def test_get_table_wrong_path(): + table = "ModuleTable" + file = "sample.yml" + path = "/path/to/file" + ret_exp = { + "out": False, + "hostname": "1.1.1.1", + "tablename": "ModuleTable", + "message": "Given table file {} cannot be located".format(file), + } + with patch.dict( + junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} + ): + with patch("jnpr.junos.factory.FactoryLoader.load") as mock_load, patch( + "salt.utils.files.fopen" + ) as mock_fopen, patch("jnpr.junos.factory.FactoryLoader.load") as mock_load: + ret = junos.get_table(table, file, path) + assert ret 
== ret_exp + mock_load.assert_not_called() + + +def test_get_table_no_path_no_file(): + table = "ModuleTable" + file = "inventory.yml" + ret_exp = { + "out": False, + "hostname": "1.1.1.1", + "tablename": "ModuleTable", + "message": "Given table file {} cannot be located".format(file), + } + with patch.dict( + junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} + ): + with patch("jnpr.junos.factory.FactoryLoader.load") as mock_load, patch( + "glob.glob" + ) as mock_fopen: + mock_fopen.return_value = [] + ret = junos.get_table(table, file) + assert ret == ret_exp + mock_load.assert_not_called() + + +def test_get_table_yaml_load_error(): + table = "ModuleTable" + file = "inventory.yml" + path = "/path/to/file" + message = "File not located test" + ret_exp = { + "out": False, + "hostname": "1.1.1.1", + "tablename": "ModuleTable", + "message": "Uncaught exception during YAML Load - please report: {}".format( + message + ), + } + with patch("salt.utils.files.fopen", mock_open(), create=True) as mock_file, patch( + "glob.glob" + ) as mock_fopen, patch.object(yaml, "load") as mock_yamlload: + mock_fopen.return_value = ["/path/to/file"] + mock_yamlload.side_effect = OSError(message) + ret = junos.get_table(table, file, path) + assert ret == ret_exp + + +def test_get_table_api_error(): + table = "sample" + file = "inventory.yml" + table_yamlload = { + "ModuleTable": { + "item": ( + ".//chassis-sub-module|.//chassis-module|.//chassis-sub-sub-module" + ), + "key": "name", + "rpc": "get-chassis-inventory", + "view": "ModuleTableView", + }, + "ModuleTableView": { + "fields": { + "jname": "name", + "pn": "part-number", + "sn": "serial-number", + "type": "description", + "ver": "version", + }, + }, + } + ret_exp = { + "out": False, + "hostname": "1.1.1.1", + "tablename": "sample", + "message": ( + "Uncaught exception during get API call - please report: '{}'".format( + str(table) + ) + ), + } + with patch("jnpr.junos.device.Device.execute") as mock_execute, patch( 
+ "yaml.load" + ) as mock_yamlload, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open: + mock_yamlload.return_value = table_yamlload + ret = junos.get_table(table, file) + assert ret["out"] == ret_exp["out"] + assert ret["tablename"] == ret_exp["tablename"] + assert ret["message"] == ret_exp["message"] + + +def test_get_table_connect_closed_error(): + table = "ModuleTable" + file = "inventory.yml" + table_yamlload = { + "ModuleTable": { + "item": ( + ".//chassis-sub-module|.//chassis-module|.//chassis-sub-sub-module" + ), + "key": "name", + "rpc": "get-chassis-inventory", + "view": "ModuleTableView", + }, + "ModuleTableView": { + "fields": { + "jname": "name", + "pn": "part-number", + "sn": "serial-number", + "type": "description", + "ver": "version", + }, + }, + } + ret_exp = { + "out": False, + "hostname": "1.1.1.1", + "tablename": "ModuleTable", + "message": ( + "Got ConnectClosedError exception. Connection lost with Device(1.1.1.1)" + ), + } + with patch("jnpr.junos.factory.optable.OpTable.get") as mock_load, patch( + "yaml.load" + ) as mock_yamlload, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open: + dev = Device(host="1.1.1.1", user="rick") + mock_load.side_effect = ConnectClosedError(dev) + mock_yamlload.return_value = table_yamlload + ret = junos.get_table(table, file) + assert ret["out"] == ret_exp["out"] + assert ret["tablename"] == ret_exp["tablename"] + assert ret["message"] == ret_exp["message"] + + +def test_get_table_inventory(): + table = "ModuleTable" + file = "inventory.yml" + pyez_tables_path = os.path.dirname(os.path.abspath(tables_dir.__file__)) + path = pyez_tables_path + table_yamlload = { + "ModuleTable": { + "item": ( + ".//chassis-sub-module|.//chassis-module|.//chassis-sub-sub-module" + ), + "key": "name", + "rpc": "get-chassis-inventory", + "view": "ModuleTableView", + }, + "ModuleTableView": { + "fields": { + "jname": "name", + "pn": "part-number", + "sn": "serial-number", + "type": 
"description", + "ver": "version", + }, + }, + } + with patch("jnpr.junos.device.Device.execute") as mock_execute, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch("yaml.load") as mock_yamlload, patch( + "salt.utils.json.dumps" + ) as mock_dumps: + mock_dumps.return_value = "json rpc reply" + mock_yamlload.return_value = table_yamlload + ret = junos.get_table(table, file, path) + assert ret["out"] + + +def test_get_table_no_path_inventory(): + table = "ModuleTable" + file = "inventory.yml" + table_yamlload = { + "ModuleTable": { + "item": ( + ".//chassis-sub-module|.//chassis-module|.//chassis-sub-sub-module" + ), + "key": "name", + "rpc": "get-chassis-inventory", + "view": "ModuleTableView", + }, + "ModuleTableView": { + "fields": { + "jname": "name", + "pn": "part-number", + "sn": "serial-number", + "type": "description", + "ver": "version", + }, + }, + } + with patch("jnpr.junos.device.Device.execute") as mock_execute, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch("yaml.load") as mock_yamlload, patch( + "salt.utils.json.dumps" + ) as mock_dumps: + mock_dumps.return_value = "json rpc reply" + mock_yamlload.return_value = table_yamlload + ret = junos.get_table(table, file) + assert ret["out"] diff --git a/tests/unit/modules/test_junos.py b/tests/unit/modules/test_junos.py deleted file mode 100644 index 8f23cb95f93..00000000000 --- a/tests/unit/modules/test_junos.py +++ /dev/null @@ -1,2766 +0,0 @@ -""" - :codeauthor: Rajvi Dhimar -""" -import os - -import pytest -import yaml - -import salt.modules.junos as junos -from tests.support.mixins import LoaderModuleMockMixin, XMLEqualityMixin -from tests.support.mock import ANY, MagicMock, PropertyMock, call, mock_open, patch -from tests.support.unit import TestCase - -try: - from lxml import etree -except ImportError: - import xml.etree.ElementTree as etree - -try: - import jnpr.junos.op as tables_dir - import jxmlease # pylint: disable=unused-import - 
from jnpr.junos.device import Device - from jnpr.junos.exception import ConnectClosedError, LockError, UnlockError - from jnpr.junos.utils.config import Config - from jnpr.junos.utils.sw import SW - - HAS_JUNOS = True -except ImportError: - HAS_JUNOS = False - - -@pytest.mark.skipif( - not HAS_JUNOS, reason="The junos-eznc and jxmlease modules are required" -) -class Test_Junos_Module(TestCase, LoaderModuleMockMixin, XMLEqualityMixin): - def setup_loader_modules(self): - return { - junos: { - "__proxy__": { - "junos.conn": self.make_connect, - "junos.get_serialized_facts": self.get_facts, - "junos.reboot_active": MagicMock(return_value=True), - "junos.reboot_clear": MagicMock(return_value=True), - }, - "__salt__": { - "cp.get_template": self.mock_cp, - "cp.get_file": self.mock_cp, - "file.file_exists": MagicMock(return_value=True), - "slsutil.renderer": MagicMock( - return_value="set system host-name dummy" - ), - "event.fire_master": MagicMock(return_value=None), - }, - "_restart_connection": MagicMock(return_value=None), - }, - } - - def mock_cp(self, *args, **kwargs): - pass - - def make_connect(self): - with patch("ncclient.manager.connect") as mock_connect: - self.dev = Device( - host="1.1.1.1", - user="test", - password="test123", - fact_style="old", - gather_facts=False, - ) - self.dev.open() - self.dev.timeout = 30 - self.dev.bind(cu=Config) - self.dev.bind(sw=SW) - self.addCleanup(delattr, self, "dev") - return self.dev - - def raise_exception(self, *args, **kwargs): - raise Exception("Test exception") - - def get_facts(self): - facts = { - "2RE": True, - "HOME": "/var/home/regress", - "RE0": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - "up_time": "11 days, 23 hours, 16 minutes, 54 seconds", - }, - "RE1": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "backup", - "model": "RE-VMX", - "status": "OK", - "up_time": "11 days, 23 hours, 16 minutes, 41 
seconds", - }, - "RE_hw_mi": False, - "current_re": ["re0", "master", "node", "fwdd", "member", "pfem"], - "domain": "englab.juniper.net", - "fqdn": "R1_re0.englab.juniper.net", - "hostname": "R1_re0", - "hostname_info": {"re0": "R1_re0", "re1": "R1_re01"}, - "ifd_style": "CLASSIC", - "junos_info": { - "re0": { - "object": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "text": "16.1I20160413_0837_aamish", - }, - "re1": { - "object": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "text": "16.1I20160413_0837_aamish", - }, - }, - "master": "RE0", - "model": "MX240", - "model_info": {"re0": "MX240", "re1": "MX240"}, - "personality": "MX", - "re_info": { - "default": { - "0": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - }, - "1": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "backup", - "model": "RE-VMX", - "status": "OK", - }, - "default": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - }, - } - }, - "re_master": {"default": "0"}, - "serialnumber": "VMX4eaf", - "srx_cluster": None, - "switch_style": "BRIDGE_DOMAIN", - "vc_capable": False, - "vc_fabric": None, - "vc_master": None, - "vc_mode": None, - "version": "16.1I20160413_0837_aamish", - "version_RE0": "16.1I20160413_0837_aamish", - "version_RE1": "16.1I20160413_0837_aamish", - "version_info": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "virtual": True, - } - return facts - - def test__timeout_decorator(self): - with patch( - "jnpr.junos.Device.timeout", new_callable=PropertyMock - ) as mock_timeout: - mock_timeout.return_value = 30 - - def function(x): - return x - - decorator = junos._timeout_decorator(function) - decorator("Test Mock", dev_timeout=10) - calls = [call(), call(10), 
call(30)] - mock_timeout.assert_has_calls(calls) - - def test__timeout_cleankwargs_decorator(self): - with patch( - "jnpr.junos.Device.timeout", new_callable=PropertyMock - ) as mock_timeout: - mock_timeout.return_value = 30 - - def function(x): - return x - - decorator = junos._timeout_decorator_cleankwargs(function) - decorator("Test Mock", dev_timeout=10, __pub_args="abc") - calls = [call(), call(10), call(30)] - mock_timeout.assert_has_calls(calls) - - def test_facts_refresh(self): - with patch("salt.modules.saltutil.sync_grains") as mock_sync_grains: - ret = dict() - ret["facts"] = { - "2RE": True, - "HOME": "/var/home/regress", - "RE0": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - "up_time": "11 days, 23 hours, 16 minutes, 54 seconds", - }, - "RE1": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "backup", - "model": "RE-VMX", - "status": "OK", - "up_time": "11 days, 23 hours, 16 minutes, 41 seconds", - }, - "RE_hw_mi": False, - "current_re": ["re0", "master", "node", "fwdd", "member", "pfem"], - "domain": "englab.juniper.net", - "fqdn": "R1_re0.englab.juniper.net", - "hostname": "R1_re0", - "hostname_info": {"re0": "R1_re0", "re1": "R1_re01"}, - "ifd_style": "CLASSIC", - "junos_info": { - "re0": { - "object": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "text": "16.1I20160413_0837_aamish", - }, - "re1": { - "object": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "text": "16.1I20160413_0837_aamish", - }, - }, - "master": "RE0", - "model": "MX240", - "model_info": {"re0": "MX240", "re1": "MX240"}, - "personality": "MX", - "re_info": { - "default": { - "0": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - }, - "1": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": 
"backup", - "model": "RE-VMX", - "status": "OK", - }, - "default": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - }, - } - }, - "re_master": {"default": "0"}, - "serialnumber": "VMX4eaf", - "srx_cluster": None, - "switch_style": "BRIDGE_DOMAIN", - "vc_capable": False, - "vc_fabric": None, - "vc_master": None, - "vc_mode": None, - "version": "16.1I20160413_0837_aamish", - "version_RE0": "16.1I20160413_0837_aamish", - "version_RE1": "16.1I20160413_0837_aamish", - "version_info": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "virtual": True, - } - ret["out"] = True - self.assertEqual(junos.facts_refresh(), ret) - - def test_facts_refresh_exception(self): - with patch("jnpr.junos.device.Device.facts_refresh") as mock_facts_refresh: - mock_facts_refresh.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Execution failed due to "Test exception"' - ret["out"] = False - self.assertEqual(junos.facts_refresh(), ret) - - def test_facts(self): - ret = dict() - ret["facts"] = { - "2RE": True, - "HOME": "/var/home/regress", - "RE0": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - "up_time": "11 days, 23 hours, 16 minutes, 54 seconds", - }, - "RE1": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "backup", - "model": "RE-VMX", - "status": "OK", - "up_time": "11 days, 23 hours, 16 minutes, 41 seconds", - }, - "RE_hw_mi": False, - "current_re": ["re0", "master", "node", "fwdd", "member", "pfem"], - "domain": "englab.juniper.net", - "fqdn": "R1_re0.englab.juniper.net", - "hostname": "R1_re0", - "hostname_info": {"re0": "R1_re0", "re1": "R1_re01"}, - "ifd_style": "CLASSIC", - "junos_info": { - "re0": { - "object": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "text": 
"16.1I20160413_0837_aamish", - }, - "re1": { - "object": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "text": "16.1I20160413_0837_aamish", - }, - }, - "master": "RE0", - "model": "MX240", - "model_info": {"re0": "MX240", "re1": "MX240"}, - "personality": "MX", - "re_info": { - "default": { - "0": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - }, - "1": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "backup", - "model": "RE-VMX", - "status": "OK", - }, - "default": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - }, - } - }, - "re_master": {"default": "0"}, - "serialnumber": "VMX4eaf", - "srx_cluster": None, - "switch_style": "BRIDGE_DOMAIN", - "vc_capable": False, - "vc_fabric": None, - "vc_master": None, - "vc_mode": None, - "version": "16.1I20160413_0837_aamish", - "version_RE0": "16.1I20160413_0837_aamish", - "version_RE1": "16.1I20160413_0837_aamish", - "version_info": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "virtual": True, - } - ret["out"] = True - self.assertEqual(junos.facts(), ret) - - def test_facts_exception(self): - with patch.dict( - junos.__proxy__, {"junos.get_serialized_facts": self.raise_exception} - ): - ret = dict() - ret["message"] = 'Could not display facts due to "Test exception"' - ret["out"] = False - self.assertEqual(junos.facts(), ret) - - def test_set_hostname_without_args(self): - ret = dict() - ret["message"] = "Please provide the hostname." 
- ret["out"] = False - self.assertEqual(junos.set_hostname(), ret) - - def test_set_hostname_load_called_with_valid_name(self): - with patch("jnpr.junos.utils.config.Config.load") as mock_load: - junos.set_hostname("test-name") - mock_load.assert_called_with("set system host-name test-name", format="set") - - def test_set_hostname_raise_exception_for_load(self): - with patch("jnpr.junos.utils.config.Config.load") as mock_load: - mock_load.side_effect = self.raise_exception - ret = dict() - ret[ - "message" - ] = 'Could not load configuration due to error "Test exception"' - ret["out"] = False - self.assertEqual(junos.set_hostname("Test-name"), ret) - - def test_set_hostname_raise_exception_for_commit_check(self): - with patch("jnpr.junos.utils.config.Config.commit_check") as mock_commit_check: - mock_commit_check.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Could not commit check due to error "Test exception"' - ret["out"] = False - self.assertEqual(junos.set_hostname("test-name"), ret) - - def test_set_hostname_one_arg_parsed_correctly(self): - with patch("jnpr.junos.utils.config.Config.load") as mock_load, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit: - mock_commit_check.return_value = True - args = { - "comment": "Committed via salt", - "__pub_user": "root", - "__pub_arg": ["test-name", {"comment": "Committed via salt"}], - "__pub_fun": "junos.set_hostname", - "__pub_jid": "20170220210915624885", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - - junos.set_hostname("test-name", **args) - mock_commit.assert_called_with(comment="Committed via salt") - - def test_set_hostname_more_than_one_args_parsed_correctly(self): - with patch("jnpr.junos.utils.config.Config.load") as mock_load, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - 
"jnpr.junos.utils.config.Config.commit" - ) as mock_commit: - mock_commit_check.return_value = True - args = { - "comment": "Committed via salt", - "__pub_user": "root", - "__pub_arg": [ - "test-name", - {"comment": "Committed via salt", "confirm": 5}, - ], - "__pub_fun": "junos.set_hostname", - "__pub_jid": "20170220210915624885", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - - junos.set_hostname("test-name", **args) - mock_commit.assert_called_with(comment="Committed via salt", confirm=5) - - def test_set_hostname_successful_return_message(self): - with patch("jnpr.junos.utils.config.Config.load") as mock_load, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit: - mock_commit_check.return_value = True - args = { - "comment": "Committed via salt", - "__pub_user": "root", - "__pub_arg": ["test-name", {"comment": "Committed via salt"}], - "__pub_fun": "junos.set_hostname", - "__pub_jid": "20170220210915624885", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - ret = dict() - ret["message"] = "Successfully changed hostname." 
- ret["out"] = True - self.assertEqual(junos.set_hostname("test-name", **args), ret) - - def test_set_hostname_raise_exception_for_commit(self): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit: - mock_commit.side_effect = self.raise_exception - ret = dict() - ret[ - "message" - ] = 'Successfully loaded host-name but commit failed with "Test exception"' - ret["out"] = False - self.assertEqual(junos.set_hostname("test-name"), ret) - - def test_set_hostname_fail_commit_check(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch("salt.modules.junos.rollback") as mock_rollback: - mock_commit_check.return_value = False - ret = dict() - ret["out"] = False - ret[ - "message" - ] = "Successfully loaded host-name but pre-commit check failed." - self.assertEqual(junos.set_hostname("test"), ret) - - def test_commit_without_args(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit: - mock_commit.return_value = True - mock_commit_check.return_value = True - ret = dict() - ret["message"] = "Commit Successful." 
- ret["out"] = True - self.assertEqual(junos.commit(), ret) - - def test_commit_raise_commit_check_exception(self): - with patch("jnpr.junos.utils.config.Config.commit_check") as mock_commit_check: - mock_commit_check.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Could not perform commit check due to "Test exception"' - ret["out"] = False - self.assertEqual(junos.commit(), ret) - - def test_commit_raise_commit_exception(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit: - mock_commit_check.return_value = True - mock_commit.side_effect = self.raise_exception - ret = dict() - ret["out"] = False - ret[ - "message" - ] = 'Commit check succeeded but actual commit failed with "Test exception"' - self.assertEqual(junos.commit(), ret) - - def test_commit_with_single_argument(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit: - mock_commit_check.return_value = True - args = { - "__pub_user": "root", - "__pub_arg": [{"sync": True}], - "sync": True, - "__pub_fun": "junos.commit", - "__pub_jid": "20170221182531323467", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - junos.commit(**args) - mock_commit.assert_called_with(detail=False, sync=True) - - def test_commit_with_multiple_arguments(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit: - mock_commit_check.return_value = True - args = { - "comment": "comitted via salt", - "__pub_user": "root", - "__pub_arg": [ - {"comment": "comitted via salt", "confirm": 3, "detail": True} - ], - "confirm": 3, - "detail": True, - "__pub_fun": "junos.commit", - "__pub_jid": "20170221182856987820", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - 
"__pub_ret": "", - } - junos.commit(**args) - mock_commit.assert_called_with( - comment="comitted via salt", detail=True, confirm=3 - ) - - def test_commit_pyez_commit_returning_false(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit: - mock_commit.return_value = False - mock_commit_check.return_value = True - ret = dict() - ret["message"] = "Commit failed." - ret["out"] = False - self.assertEqual(junos.commit(), ret) - - def test_commit_pyez_commit_check_returns_false(self): - with patch("jnpr.junos.utils.config.Config.commit_check") as mock_commit_check: - mock_commit_check.return_value = False - ret = dict() - ret["out"] = False - ret["message"] = "Pre-commit check failed." - self.assertEqual(junos.commit(), ret) - - def test_rollback_exception(self): - with patch("jnpr.junos.utils.config.Config.rollback") as mock_rollback: - mock_rollback.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Rollback failed due to "Test exception"' - ret["out"] = False - self.assertEqual(junos.rollback(), ret) - - def test_rollback_without_args_success(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback: - mock_commit_check.return_value = True - mock_rollback.return_value = True - ret = dict() - ret["message"] = "Rollback successful" - ret["out"] = True - self.assertEqual(junos.rollback(), ret) - - def test_rollback_without_args_fail(self): - with patch("jnpr.junos.utils.config.Config.rollback") as mock_rollback: - mock_rollback.return_value = False - ret = dict() - ret["message"] = "Rollback failed" - ret["out"] = False - self.assertEqual(junos.rollback(), ret) - - def test_rollback_with_id(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as 
mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback: - mock_commit_check.return_value = True - junos.rollback(id=5) - mock_rollback.assert_called_with(5) - - def test_rollback_with_id_and_single_arg(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback: - mock_commit_check.return_value = True - args = { - "__pub_user": "root", - "__pub_arg": [2, {"confirm": 2}], - "confirm": 2, - "__pub_fun": "junos.rollback", - "__pub_jid": "20170221184518526067", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - junos.rollback(id=2, **args) - mock_rollback.assert_called_with(2) - mock_commit.assert_called_with(confirm=2) - - def test_rollback_with_id_and_multiple_args(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback: - mock_commit_check.return_value = True - args = { - "comment": "Comitted via salt", - "__pub_user": "root", - "__pub_arg": [ - 2, - {"comment": "Comitted via salt", "dev_timeout": 40, "confirm": 1}, - ], - "confirm": 1, - "__pub_fun": "junos.rollback", - "__pub_jid": "20170221192708251721", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - junos.rollback(id=2, **args) - mock_rollback.assert_called_with(2) - mock_commit.assert_called_with( - comment="Comitted via salt", confirm=1, dev_timeout=40 - ) - - def test_rollback_with_only_single_arg(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit, patch( - 
"jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback: - mock_commit_check.return_value = True - args = { - "__pub_user": "root", - "__pub_arg": [{"sync": True}], - "sync": True, - "__pub_fun": "junos.rollback", - "__pub_jid": "20170221193615696475", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - junos.rollback(**args) - mock_rollback.assert_called_once_with(0) - mock_commit.assert_called_once_with(sync=True) - - def test_rollback_with_only_multiple_args_no_id(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback: - mock_commit_check.return_value = True - args = { - "comment": "Comitted via salt", - "__pub_user": "root", - "__pub_arg": [ - {"comment": "Comitted via salt", "confirm": 3, "sync": True} - ], - "confirm": 3, - "sync": True, - "__pub_fun": "junos.rollback", - "__pub_jid": "20170221193945996362", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - junos.rollback(**args) - mock_rollback.assert_called_with(0) - mock_commit.assert_called_once_with( - sync=True, confirm=3, comment="Comitted via salt" - ) - - def test_rollback_with_diffs_file_option_when_diff_is_None(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback, patch( - "salt.utils.files.fopen" - ) as mock_fopen, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff: - mock_commit_check.return_value = True - mock_diff.return_value = "diff" - args = { - "__pub_user": "root", - "__pub_arg": [{"diffs_file": "/home/regress/diff", "confirm": 2}], - "confirm": 2, - "__pub_fun": "junos.rollback", - "__pub_jid": "20170221205153884009", - "__pub_tgt": "mac_min", - 
"__pub_tgt_type": "glob", - "__pub_ret": "", - "diffs_file": "/home/regress/diff", - } - junos.rollback(**args) - mock_fopen.assert_called_with("/home/regress/diff", "w") - - def test_rollback_with_diffs_file_option(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback, patch( - "salt.utils.files.fopen" - ) as mock_fopen, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff: - mock_commit_check.return_value = True - mock_diff.return_value = None - args = { - "__pub_user": "root", - "__pub_arg": [{"diffs_file": "/home/regress/diff", "confirm": 2}], - "confirm": 2, - "__pub_fun": "junos.rollback", - "__pub_jid": "20170221205153884009", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - "diffs_file": "/home/regress/diff", - } - junos.rollback(**args) - assert not mock_fopen.called - - def test_rollback_commit_check_exception(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback: - mock_commit_check.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Could not commit check due to "Test exception"' - ret["out"] = False - self.assertEqual(junos.rollback(), ret) - - def test_rollback_commit_exception(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback: - mock_commit_check.return_value = True - mock_commit.side_effect = self.raise_exception - ret = dict() - ret[ - "message" - ] = 'Rollback successful but commit failed with error "Test exception"' - ret["out"] = False - self.assertEqual(junos.rollback(), ret) - - def 
test_rollback_commit_check_fails(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback: - mock_commit_check.return_value = False - ret = dict() - ret["message"] = "Rollback successful but pre-commit check failed." - ret["out"] = False - self.assertEqual(junos.rollback(), ret) - - def test_diff_without_args(self): - with patch("jnpr.junos.utils.config.Config.diff") as mock_diff: - junos.diff() - mock_diff.assert_called_with(rb_id=0) - - def test_diff_with_arg(self): - with patch("jnpr.junos.utils.config.Config.diff") as mock_diff: - junos.diff(id=2) - mock_diff.assert_called_with(rb_id=2) - - def test_diff_exception(self): - with patch("jnpr.junos.utils.config.Config.diff") as mock_diff: - mock_diff.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Could not get diff with error "Test exception"' - ret["out"] = False - self.assertEqual(junos.diff(), ret) - - def test_ping_without_args(self): - ret = dict() - ret["message"] = "Please specify the destination ip to ping." 
- ret["out"] = False - self.assertEqual(junos.ping(), ret) - - def test_ping(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute: - junos.ping("1.1.1.1") - args = mock_execute.call_args - rpc = "51.1.1.1" - self.assertEqualXML(args[0][0], rpc) - - def test_ping_ttl(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute: - args = { - "__pub_user": "sudo_drajvi", - "__pub_arg": ["1.1.1.1", {"ttl": 3}], - "__pub_fun": "junos.ping", - "__pub_jid": "20170306165237683279", - "__pub_tgt": "mac_min", - "ttl": 3, - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - junos.ping("1.1.1.1", **args) - exec_args = mock_execute.call_args - rpc = "51.1.1.13" - self.assertEqualXML(exec_args[0][0], rpc) - - def test_ping_exception(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute: - mock_execute.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Execution failed due to "Test exception"' - ret["out"] = False - self.assertEqual(junos.ping("1.1.1.1"), ret) - - def test_cli_without_args(self): - ret = dict() - ret["message"] = "Please provide the CLI command to be executed." 
- ret["out"] = False - self.assertEqual(junos.cli(), ret) - - def test_cli_with_format_as_empty_string(self): - with patch("jnpr.junos.device.Device.cli") as mock_cli: - junos.cli("show version", format="") - mock_cli.assert_called_with("show version", "text", warning=False) - - def test_cli(self): - with patch("jnpr.junos.device.Device.cli") as mock_cli: - mock_cli.return_vale = "CLI result" - ret = dict() - ret["message"] = "CLI result" - ret["out"] = True - junos.cli("show version") - mock_cli.assert_called_with("show version", "text", warning=False) - - def test_cli_format_xml(self): - with patch("salt.modules.junos.jxmlease.parse") as mock_jxml, patch( - "salt.modules.junos.etree.tostring" - ) as mock_to_string, patch("jnpr.junos.device.Device.cli") as mock_cli: - mock_cli.return_value = "test" - mock_jxml.return_value = "test" - args = { - "__pub_user": "root", - "__pub_arg": [{"format": "xml"}], - "format": "xml", - "__pub_fun": "junos.cli", - "__pub_jid": "20170221182531323467", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - ret = dict() - ret["message"] = "test" - ret["out"] = True - self.assertEqual(junos.cli("show version", **args), ret) - mock_cli.assert_called_with("show version", "xml", warning=False) - mock_to_string.assert_called_once_with("test") - assert mock_jxml.called - - def test_cli_exception_in_cli(self): - with patch("jnpr.junos.device.Device.cli") as mock_cli: - mock_cli.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Execution failed due to "Test exception"' - ret["out"] = False - self.assertEqual(junos.cli("show version"), ret) - - def test_cli_output_save(self): - with patch("jnpr.junos.device.Device.cli") as mock_cli, patch( - "salt.utils.files.fopen" - ) as mock_fopen: - mock_cli.return_value = "Test return" - args = { - "__pub_user": "root", - "__pub_arg": [{"format": "text", "dest": "/path/to/file"}], - "format": "text", - "dest": "/path/to/file", - "__pub_fun": "junos.cli", - 
"__pub_jid": "20170221182531323467", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - ret = dict() - ret["message"] = "Test return" - ret["out"] = True - self.assertEqual(junos.cli("show version", **args), ret) - mock_fopen.assert_called_with("/path/to/file", "w") - mock_cli.assert_called_with("show version", "text", warning=False) - - def test_cli_output_save_ioexception(self): - with patch("jnpr.junos.device.Device.cli") as mock_cli, patch( - "salt.utils.files.fopen" - ) as mock_fopen: - mock_cli.return_value = "Test return" - mock_fopen.side_effect = IOError() - args = { - "__pub_user": "root", - "__pub_arg": [{"format": "text", "dest": "/path/to/file"}], - "format": "text", - "dest": "/path/to/file", - "__pub_fun": "junos.cli", - "__pub_jid": "20170221182531323467", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - ret = dict() - ret["message"] = 'Unable to open "/path/to/file" to write' - ret["out"] = False - self.assertEqual(junos.cli("show version", **args), ret) - - def test_shutdown_without_args(self): - ret = dict() - ret["message"] = "Provide either one of the arguments: shutdown or reboot." - ret["out"] = False - self.assertEqual(junos.shutdown(), ret) - - def test_shutdown_with_reboot_args(self): - with patch("salt.modules.junos.SW.reboot") as mock_reboot: - ret = dict() - ret["message"] = "Successfully powered off/rebooted." - ret["out"] = True - args = { - "__pub_user": "root", - "__pub_arg": [{"reboot": True}], - "reboot": True, - "__pub_fun": "junos.shutdown", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - self.assertEqual(junos.shutdown(**args), ret) - assert mock_reboot.called - - def test_shutdown_with_poweroff_args(self): - with patch("salt.modules.junos.SW.poweroff") as mock_poweroff: - ret = dict() - ret["message"] = "Successfully powered off/rebooted." 
- ret["out"] = True - args = { - "__pub_user": "root", - "__pub_arg": [{"shutdown": True}], - "reboot": True, - "__pub_fun": "junos.shutdown", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - self.assertEqual(junos.shutdown(**args), ret) - assert mock_poweroff.called - - def test_shutdown_with_shutdown_as_false(self): - ret = dict() - ret["message"] = "Nothing to be done." - ret["out"] = False - args = { - "__pub_user": "root", - "__pub_arg": [{"shutdown": False}], - "reboot": True, - "__pub_fun": "junos.shutdown", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - self.assertEqual(junos.shutdown(**args), ret) - - def test_shutdown_with_in_min_arg(self): - with patch("salt.modules.junos.SW.poweroff") as mock_poweroff: - args = { - "__pub_user": "root", - "in_min": 10, - "__pub_arg": [{"in_min": 10, "shutdown": True}], - "reboot": True, - "__pub_fun": "junos.shutdown", - "__pub_jid": "20170222231445709212", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - junos.shutdown(**args) - mock_poweroff.assert_called_with(in_min=10) - - def test_shutdown_with_at_arg(self): - with patch("salt.modules.junos.SW.reboot") as mock_reboot: - args = { - "__pub_user": "root", - "__pub_arg": [{"at": "12:00 pm", "reboot": True}], - "reboot": True, - "__pub_fun": "junos.shutdown", - "__pub_jid": "201702276857", - "at": "12:00 pm", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - junos.shutdown(**args) - mock_reboot.assert_called_with(at="12:00 pm") - - def test_shutdown_fail_with_exception(self): - with patch("salt.modules.junos.SW.poweroff") as mock_poweroff: - mock_poweroff.side_effect = self.raise_exception - args = { - "__pub_user": "root", - "__pub_arg": [{"shutdown": True}], - "shutdown": True, - "__pub_fun": "junos.shutdown", - "__pub_jid": "20170222213858582619", - "__pub_tgt": 
"mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - ret = dict() - ret["message"] = 'Could not poweroff/reboot because "Test exception"' - ret["out"] = False - self.assertEqual(junos.shutdown(**args), ret) - - def test_install_config_without_args(self): - ret = dict() - ret[ - "message" - ] = "Please provide the salt path where the configuration is present" - ret["out"] = False - self.assertEqual(junos.install_config(), ret) - - def test_install_config_cp_fails(self): - with patch.dict( - junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} - ): - ret = dict() - ret = dict() - ret["message"] = "Invalid file path." - ret["out"] = False - self.assertEqual(junos.install_config("path"), ret) - - def test_install_config_file_cp_fails(self): - with patch.dict( - junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} - ): - ret = dict() - ret = dict() - ret["message"] = "Invalid file path." - ret["out"] = False - self.assertEqual(junos.install_config("path"), ret) - - def test_install_config(self): - with patch.dict( - junos.__salt__, - { - "cp.is_cached": MagicMock(return_value="test/path/config"), - "cp.hash_file": MagicMock( - return_value={"hash_type": "sha256", "hsum": "a386e49c17"} - ), - "file.get_hash": MagicMock(return_value="a386e49c17"), - }, - ): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - 
mock_diff.return_value = "diff" - mock_commit_check.return_value = True - - ret = dict() - ret["message"] = "Successfully loaded and committed!" - ret["out"] = True - self.assertEqual( - junos.install_config("salt://actual/path/config.set"), ret - ) - mock_load.assert_called_with(path="test/path/config", format="set") - - def test_install_config_xml_file(self): - with patch.dict( - junos.__salt__, - { - "cp.is_cached": MagicMock(return_value="test/path/config"), - "cp.hash_file": MagicMock( - return_value={"hash_type": "sha256", "hsum": "a386e49c17"} - ), - "file.get_hash": MagicMock(return_value="a386e49c17"), - }, - ): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - - ret = dict() - ret["message"] = "Successfully loaded and committed!" 
- ret["out"] = True - self.assertEqual( - junos.install_config("salt://actual/path/config.xml"), ret - ) - mock_load.assert_called_with(path="test/path/config", format="xml") - - def test_install_config_text_file(self): - with patch.dict( - junos.__salt__, - { - "cp.is_cached": MagicMock(return_value="test/path/config"), - "cp.hash_file": MagicMock( - return_value={"hash_type": "sha256", "hsum": "a386e49c17"} - ), - "file.get_hash": MagicMock(return_value="a386e49c17"), - }, - ): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - - ret = dict() - ret["message"] = "Successfully loaded and committed!" 
- ret["out"] = True - self.assertEqual(junos.install_config("salt://actual/path/config"), ret) - mock_load.assert_called_with(path="test/path/config", format="text") - - def test_install_config_cache_not_exists(self): - with patch.dict( - junos.__salt__, - { - "cp.is_cached": MagicMock(return_value=None), - "file.rmdir": MagicMock(return_value="True"), - }, - ): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "tempfile.mkdtemp" - ) as mock_mkdtemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - mock_mkdtemp.return_value = "/tmp/argr5351afd" - - ret = dict() - ret["message"] = "Successfully loaded and committed!" 
- ret["out"] = True - self.assertEqual( - junos.install_config( - "salt://actual/path/config", template_vars=True - ), - ret, - ) - mock_mkstemp.assert_called_with() - - def test_install_config_replace(self): - with patch.dict( - junos.__salt__, - { - "cp.is_cached": MagicMock(return_value="test/path/config"), - "cp.hash_file": MagicMock( - return_value={"hash_type": "sha256", "hsum": "a386e49c17"} - ), - "file.get_hash": MagicMock(return_value="a386e49c17"), - }, - ): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - - args = { - "__pub_user": "root", - "__pub_arg": [{"replace": True}], - "replace": True, - "__pub_fun": "junos.install_config", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - - ret = dict() - ret["message"] = "Successfully loaded and committed!" 
- ret["out"] = True - self.assertEqual( - junos.install_config("salt://actual/path/config.set", **args), ret - ) - mock_load.assert_called_with( - path="test/path/config", format="set", merge=False - ) - - def test_install_config_overwrite(self): - with patch.dict( - junos.__salt__, - { - "cp.is_cached": MagicMock(return_value="test/path/config"), - "cp.hash_file": MagicMock( - return_value={"hash_type": "sha256", "hsum": "a386e49c17"} - ), - "file.get_hash": MagicMock(return_value="a386e49c17"), - }, - ): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - - args = { - "__pub_user": "root", - "__pub_arg": [{"overwrite": True}], - "overwrite": True, - "__pub_fun": "junos.install_config", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - - ret = dict() - ret["message"] = "Successfully loaded and committed!" 
- ret["out"] = True - self.assertEqual( - junos.install_config("salt://actual/path/config.xml", **args), ret - ) - mock_load.assert_called_with( - path="test/path/config", format="xml", overwrite=True - ) - - def test_install_config_overwrite_false(self): - with patch.dict( - junos.__salt__, - { - "cp.is_cached": MagicMock(return_value="test/path/config"), - "cp.hash_file": MagicMock( - return_value={"hash_type": "sha256", "hsum": "a386e49c17"} - ), - "file.get_hash": MagicMock(return_value="a386e49c17"), - }, - ): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - - args = { - "__pub_user": "root", - "__pub_arg": [{"overwrite": False}], - "overwrite": False, - "__pub_fun": "junos.install_config", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - - ret = dict() - ret["message"] = "Successfully loaded and committed!" 
- ret["out"] = True - self.assertEqual( - junos.install_config("salt://actual/path/config", **args), ret - ) - mock_load.assert_called_with( - path="test/path/config", format="text", merge=True - ) - - def test_install_config_load_causes_exception(self): - with patch("jnpr.junos.utils.config.Config.diff") as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch("salt.utils.files.safe_rm") as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_load.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Could not load configuration due to : "Test exception"' - ret["format"] = "set" - ret["out"] = False - self.assertEqual(junos.install_config(path="actual/path/config.set"), ret) - - def test_install_config_no_diff(self): - with patch("jnpr.junos.utils.config.Config.diff") as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch("salt.utils.files.safe_rm") as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = None - ret = dict() - ret["message"] = "Configuration already applied!" 
- ret["out"] = True - self.assertEqual(junos.install_config("actual/path/config"), ret) - - def test_install_config_write_diff(self): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen" - ) as mock_fopen, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - - args = { - "__pub_user": "root", - "__pub_arg": [{"diffs_file": "copy/config/here"}], - "diffs_file": "copy/config/here", - "__pub_fun": "junos.install_config", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - - ret = dict() - ret["message"] = "Successfully loaded and committed!" 
- ret["out"] = True - self.assertEqual(junos.install_config("actual/path/config", **args), ret) - mock_fopen.assert_called_with("copy/config/here", "w") - - def test_install_config_write_diff_exception(self): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as mock_fopen, patch( - "salt.utils.stringutils.to_str" - ) as mock_strgutils, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - mock_strgutils.side_effect = self.raise_exception - - args = { - "__pub_user": "root", - "__pub_arg": [{"diffs_file": "copy/config/here"}], - "diffs_file": "copy/config/here", - "__pub_fun": "junos.install_config", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - - ret = dict() - ret["message"] = "Could not write into diffs_file due to: 'Test exception'" - ret["out"] = False - self.assertEqual(junos.install_config("actual/path/config", **args), ret) - - def test_install_config_commit_params(self): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, 
patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - args = { - "comment": "comitted via salt", - "__pub_user": "root", - "__pub_arg": [{"comment": "comitted via salt", "confirm": 3}], - "confirm": 3, - "__pub_fun": "junos.commit", - "__pub_jid": "20170221182856987820", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - ret = dict() - ret["message"] = "Successfully loaded and committed!" - ret["out"] = True - self.assertEqual(junos.install_config("actual/path/config", **args), ret) - mock_commit.assert_called_with(comment="comitted via salt", confirm=3) - - def test_install_config_commit_check_fails(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = False - - ret = dict() - ret["message"] = ( - "Loaded configuration but commit check failed, hence rolling back" - " configuration." 
- ) - ret["out"] = False - self.assertEqual(junos.install_config("actual/path/config.xml"), ret) - - def test_install_config_commit_exception(self): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - mock_commit.side_effect = self.raise_exception - ret = dict() - ret[ - "message" - ] = 'Commit check successful but commit failed with "Test exception"' - ret["out"] = False - self.assertEqual(junos.install_config("actual/path/config"), ret) - - def test_install_config_test_mode(self): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - ret = dict() - ret["message"] = ( - "Commit check 
passed, but skipping commit for dry-run and rolling back" - " configuration." - ) - ret["out"] = True - self.assertEqual(junos.install_config("actual/path/config", test=True), ret) - mock_commit.assert_not_called() - - def test_install_config_write_diff_dynamic_mode(self): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - ret = dict() - ret[ - "message" - ] = "Write diff is not supported with dynamic/ephemeral configuration mode" - ret["out"] = False - self.assertEqual( - junos.install_config( - "actual/path/config", mode="dynamic", diffs_file="/path/to/dif" - ), - ret, - ) - mock_commit.assert_not_called() - - def test_install_config_unknown_mode(self): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 
- mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - ret = dict() - ret["message"] = "install_config failed due to: unsupported action: abcdef" - ret["out"] = False - self.assertEqual( - junos.install_config("actual/path/config", mode="abcdef"), ret - ) - mock_commit.assert_not_called() - - def test_zeroize(self): - with patch("jnpr.junos.device.Device.cli") as mock_cli: - result = junos.zeroize() - ret = dict() - ret["out"] = True - ret["message"] = "Completed zeroize and rebooted" - mock_cli.assert_called_once_with("request system zeroize") - self.assertEqual(result, ret) - - def test_zeroize_throw_exception(self): - with patch("jnpr.junos.device.Device.cli") as mock_cli: - mock_cli.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Could not zeroize due to : "Test exception"' - ret["out"] = False - self.assertEqual(junos.zeroize(), ret) - - def test_install_os_without_args(self): - ret = dict() - ret[ - "message" - ] = "Please provide the salt path where the junos image is present." - ret["out"] = False - self.assertEqual(junos.install_os(), ret) - - def test_install_os_cp_fails(self): - with patch.dict( - junos.__salt__, - { - "cp.is_cached": MagicMock(return_value="/pat/to/tmp/file"), - "cp.hash_file": MagicMock( - return_value={"hash_type": "sha256", "hsum": "a386e49c17"} - ), - "file.get_hash": MagicMock(return_value="xxxx"), - "file.rmdir": MagicMock(return_value="True"), - }, - ): - with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_getsize.return_value = 10 - mock_isfile.return_value = False - mock_install.return_value = ( - False, - "Invalid path. 
Please provide a valid image path", - ) - ret = dict() - ret["message"] = ( - "Installation failed. Reason: Invalid path. Please provide a valid" - " image path" - ) - ret["out"] = False - self.assertEqual(junos.install_os("salt://image/path/"), ret) - - def test_install_os_image_cp_fails(self): - with patch.dict( - junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} - ): - ret = dict() - ret["message"] = "Invalid path. Please provide a valid image path" - ret["out"] = False - self.assertEqual(junos.install_os("/image/path/"), ret) - - def test_install_os(self): - with patch.dict( - junos.__salt__, - { - "cp.is_cached": MagicMock(return_value="test/path/config"), - "cp.hash_file": MagicMock( - return_value={"hash_type": "sha256", "hsum": "a386e49c17"} - ), - "file.get_hash": MagicMock(return_value="a386e49c17"), - }, - ): - with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_getsize.return_value = 10 - mock_isfile.return_value = True - mock_install.return_value = True, "installed" - ret = dict() - ret["out"] = True - ret["message"] = "Installed the os." 
- self.assertEqual(junos.install_os("path"), ret) - - def test_install_os_failure(self): - with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_getsize.return_value = 10 - mock_isfile.return_value = True - mock_install.return_value = False, "because we are testing failure" - ret = dict() - ret["out"] = False - ret[ - "message" - ] = "Installation failed. Reason: because we are testing failure" - self.assertEqual(junos.install_os("path"), ret) - - def test_install_os_with_reboot_arg(self): - with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( - "jnpr.junos.utils.sw.SW.reboot" - ) as mock_reboot, patch("salt.utils.files.safe_rm") as mock_safe_rm, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_getsize.return_value = 10 - mock_isfile.return_value = True - mock_install.return_value = True, "installed" - args = { - "__pub_user": "root", - "__pub_arg": [{"reboot": True}], - "reboot": True, - "__pub_fun": "junos.install_os", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - ret = dict() - ret["message"] = "Successfully installed and rebooted!" 
- ret["out"] = True - self.assertEqual(junos.install_os("path", **args), ret) - - def test_install_os_pyez_install_throws_exception(self): - with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_getsize.return_value = 10 - mock_isfile.return_value = True - mock_install.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Installation failed due to: "Test exception"' - ret["out"] = False - self.assertEqual(junos.install_os("path"), ret) - - def test_install_os_with_reboot_raises_exception(self): - with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( - "jnpr.junos.utils.sw.SW.reboot" - ) as mock_reboot, patch("salt.utils.files.safe_rm") as mock_safe_rm, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_getsize.return_value = 10 - mock_isfile.return_value = True - mock_install.return_value = True, "installed" - mock_reboot.side_effect = self.raise_exception - args = { - "__pub_user": "root", - "__pub_arg": [{"reboot": True}], - "reboot": True, - "__pub_fun": "junos.install_os", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - ret = dict() - ret[ - "message" - ] = 'Installation successful but reboot failed due to : "Test exception"' - ret["out"] = False - self.assertEqual(junos.install_os("path", **args), ret) - - def test_install_os_no_copy(self): - with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - 
"salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_getsize.return_value = 10 - mock_isfile.return_value = True - mock_install.return_value = True, "installed" - ret = dict() - ret["out"] = True - ret["message"] = "Installed the os." - self.assertEqual(junos.install_os("path", no_copy=True), ret) - mock_install.assert_called_with( - "path", no_copy=True, progress=True, timeout=1800 - ) - mock_mkstemp.assert_not_called() - mock_safe_rm.assert_not_called() - - def test_install_os_issu(self): - with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_getsize.return_value = 10 - mock_isfile.return_value = True - mock_install.return_value = True, "installed" - ret = dict() - ret["out"] = True - ret["message"] = "Installed the os." - self.assertEqual(junos.install_os("path", issu=True), ret) - mock_install.assert_called_with(ANY, issu=True, progress=True, timeout=1800) - - def test_install_os_add_params(self): - with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_getsize.return_value = 10 - mock_isfile.return_value = True - mock_install.return_value = True, "installed" - ret = dict() - ret["out"] = True - ret["message"] = "Installed the os." 
- remote_path = "/path/to/file" - self.assertEqual( - junos.install_os( - "path", remote_path=remote_path, nssu=True, validate=True - ), - ret, - ) - mock_install.assert_called_with( - ANY, - nssu=True, - remote_path=remote_path, - progress=True, - validate=True, - timeout=1800, - ) - - def test_file_copy_without_args(self): - self.assertRaises(TypeError, junos.file_copy) - - @patch("paramiko.SSHClient") - @patch("scp.SCPClient.put") - @patch("scp.SCPClient.__init__") - def test_file_copy_invalid_src(self, mock_scpclient, mock_put, mock_ssh): - mock_scpclient.return_value = None - invalid_path = "invalid/file/path" - mock_put.side_effect = Exception(invalid_path) - with patch("os.path.isfile") as mock_isfile: - mock_isfile.return_value = False - ret = dict() - ret["message"] = 'Could not copy file : "invalid/file/path"' - ret["out"] = False - self.assertEqual(junos.file_copy(invalid_path, "file"), ret) - - def test_file_copy_without_dest(self): - self.assertRaises(TypeError, junos.file_copy, src="/home/user/config.set") - - def test_file_copy(self): - with patch("salt.modules.junos.SCP") as mock_scp, patch( - "os.path.isfile" - ) as mock_isfile: - mock_isfile.return_value = True - ret = dict() - ret["message"] = "Successfully copied file from test/src/file to file" - ret["out"] = True - self.assertEqual(junos.file_copy(dest="file", src="test/src/file"), ret) - - def test_file_copy_exception(self): - with patch("salt.modules.junos.SCP") as mock_scp, patch( - "os.path.isfile" - ) as mock_isfile: - mock_isfile.return_value = True - mock_scp.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Could not copy file : "Test exception"' - ret["out"] = False - self.assertEqual(junos.file_copy(dest="file", src="test/src/file"), ret) - - # These test cases test the __virtual__ function, used internally by salt - # to check if the given module is loadable. This function is not used by - # an external user. 
- - def test_virtual_proxy_unavailable(self): - with patch.dict(junos.__opts__, {}): - res = ( - False, - "The junos or dependent module could not be loaded: " - "junos-eznc or jxmlease or yamlordereddictloader or " - "proxy could not be loaded.", - ) - self.assertEqual(junos.__virtual__(), res) - - def test_virtual_all_true(self): - with patch.dict(junos.__opts__, {"proxy": "test"}): - self.assertEqual(junos.__virtual__(), "junos") - - def test_rpc_without_args(self): - ret = dict() - ret["message"] = "Please provide the rpc to execute." - ret["out"] = False - self.assertEqual(junos.rpc(), ret) - - def test_rpc_get_config_exception(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute: - mock_execute.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'RPC execution failed due to "Test exception"' - ret["out"] = False - self.assertEqual(junos.rpc("get_config"), ret) - - def test_rpc_get_config_filter(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute: - mock_execute.return_value = etree.XML("") - args = { - "__pub_user": "root", - "__pub_arg": [ - "get-config", - {"filter": ""}, - ], - "__pub_fun": "junos.rpc", - "__pub_jid": "20170314162715866528", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "filter": "", - "__pub_ret": "", - } - junos.rpc("get-config", **args) - exec_args = mock_execute.call_args - expected_rpc = ( - "' - ) - self.assertEqualXML(exec_args[0][0], expected_rpc) - - def test_rpc_get_interface_information(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute: - junos.rpc("get-interface-information", format="json") - args = mock_execute.call_args - expected_rpc = '' - self.assertEqualXML(args[0][0], expected_rpc) - - def test_rpc_get_interface_information_with_kwargs(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute: - args = { - "__pub_user": "root", - "__pub_arg": [ - "get-interface-information", - "", - "text", - {"terse": True, 
"interface_name": "lo0", "format": "text"}, - ], - "format": "text", - "terse": True, - "__pub_fun": "junos.rpc", - "__pub_jid": "20170314160943363563", - "__pub_tgt": "mac_min", - "interface_name": "lo0", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - junos.rpc("get-interface-information", **args) - args = mock_execute.call_args - expected_rpc = ( - '' - "lo0" - ) - self.assertEqualXML(etree.tostring(args[0][0]), expected_rpc) - - def test_rpc_get_chassis_inventory_filter_as_arg(self): - with patch("salt.modules.junos.jxmlease.parse") as mock_jxmlease, patch( - "salt.modules.junos.etree.tostring" - ) as mock_tostring, patch( - "salt.modules.junos.logging.Logger.warning" - ) as mock_warning, patch( - "jnpr.junos.device.Device.execute" - ) as mock_execute: - junos.rpc( - "get-chassis-inventory", - filter="", - ) - mock_warning.assert_called_with( - 'Filter ignored as it is only used with "get-config" rpc' - ) - - def test_rpc_get_interface_information_exception(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute: - mock_execute.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'RPC execution failed due to "Test exception"' - ret["out"] = False - self.assertEqual(junos.rpc("get_interface_information"), ret) - - def test_rpc_write_file_format_text(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute: - mock_execute.return_value = etree.XML( - "text rpc reply" - ) - with patch("salt.utils.files.fopen", mock_open(), create=True) as m_open: - junos.rpc("get-chassis-inventory", dest="/path/to/file", format="text") - writes = m_open.write_calls() - assert writes == ["text rpc reply"], writes - - def test_rpc_write_file_format_json(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute, patch( - "salt.utils.json.dumps" - ) as mock_dumps: - mock_dumps.return_value = "json rpc reply" - with patch("salt.utils.files.fopen", mock_open(), create=True) as m_open: - junos.rpc("get-chassis-inventory", 
dest="/path/to/file", format="json") - writes = m_open.write_calls() - assert writes == ["json rpc reply"], writes - - def test_rpc_write_file(self): - with patch("salt.modules.junos.jxmlease.parse") as mock_parse, patch( - "salt.modules.junos.etree.tostring" - ) as mock_tostring, patch("jnpr.junos.device.Device.execute") as mock_execute: - mock_tostring.return_value = "xml rpc reply" - with patch("salt.utils.files.fopen", mock_open(), create=True) as m_open: - junos.rpc("get-chassis-inventory", dest="/path/to/file") - writes = m_open.write_calls() - assert writes == ["xml rpc reply"], writes - - def test_lock_success(self): - ret_exp = {"out": True, "message": "Successfully locked the configuration."} - ret = junos.lock() - self.assertEqual(ret, ret_exp) - - def test_lock_error(self): - ret_exp = {"out": False, "message": 'Could not gain lock due to : "LockError"'} - with patch("jnpr.junos.utils.config.Config.lock") as mock_lock: - mock_lock.side_effect = LockError(None) - ret = junos.lock() - self.assertEqual(ret, ret_exp) - - def test_unlock_success(self): - ret_exp = {"out": True, "message": "Successfully unlocked the configuration."} - ret = junos.unlock() - self.assertEqual(ret, ret_exp) - - def test_unlock_error(self): - ret_exp = { - "out": False, - "message": 'Could not unlock configuration due to : "UnlockError"', - } - with patch("jnpr.junos.utils.config.Config.unlock") as mock_unlock: - mock_unlock.side_effect = UnlockError(None) - ret = junos.unlock() - self.assertEqual(ret, ret_exp) - - def test_load_none_path(self): - ret_exp = { - "out": False, - "message": ( - "Please provide the salt path where the configuration is present" - ), - } - ret = junos.load() - self.assertEqual(ret, ret_exp) - - def test_load_wrong_tmp_file(self): - ret_exp = { - "out": False, - "message": ( - 'Could not load configuration due to : "[Errno 2] No such file or' - " directory: '/pat/to/tmp/file'\"" - ), - "format": "text", - } - with patch.dict( - junos.__salt__, - { - 
"cp.is_cached": MagicMock(return_value="/pat/to/tmp/file"), - "cp.hash_file": MagicMock( - return_value={"hash_type": "sha256", "hsum": "a386e49c17"} - ), - "file.get_hash": MagicMock(return_value="a386e49c17"), - }, - ): - with patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch("os.path.getsize") as mock_getsize, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstmp: - mock_mkstmp.return_value = "/pat/to/tmp/file" - mock_getsize.return_value = 1000 - ret = junos.load("salt://path/to/file") - self.assertEqual(ret, ret_exp) - - def test_load_invalid_path(self): - with patch("salt.utils.files.mkstemp") as mock_mkstmp: - mock_mkstmp.return_value = "/path/to/file" - self.assertRaises(FileNotFoundError, junos.load, path="/path/to/file") - - def test_load_no_extension(self): - ret_exp = {"out": True, "message": "Successfully loaded the configuration."} - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstmp, patch( - "os.path.isfile" - ) as mock_isfile: - mock_getsize.return_value = 1000 - mock_mkstmp.return_value = "/path/to/file" - mock_isfile.return_value = True - ret = junos.load("/path/to/file") - mock_load.assert_called_with(format="text", path="/path/to/file") - self.assertEqual(ret, ret_exp) - - def test_load_xml_extension(self): - ret_exp = {"out": True, "message": "Successfully loaded the configuration."} - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch("os.path.isfile") as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstmp: - mock_getsize.return_value = 1000 - mock_mkstmp.return_value = "/path/to/file.xml" - mock_isfile.return_value = True - ret = junos.load("/path/to/file.xml") - 
mock_load.assert_called_with(format="xml", path="/path/to/file.xml") - self.assertEqual(ret, ret_exp) - - def test_load_xml_extension_with_kwargs(self): - ret_exp = {"out": True, "message": "Successfully loaded the configuration."} - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen" - ) as fopen, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstmp: - mock_mkstmp.return_value = "/path/to/file" - mock_isfile.return_value = True - ret = junos.load("/path/to/file.xml", template_vars=dict(hostname="test")) - mock_load.assert_called_with( - format="xml", path="/path/to/file", template_vars={"hostname": "test"} - ) - self.assertEqual(ret, ret_exp) - - def test_load_set_extension(self): - ret_exp = {"out": True, "message": "Successfully loaded the configuration."} - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.isfile" - ) as mock_isfile: - mock_getsize.return_value = 1000 - mock_mkstmp.return_value = "/path/to/file.set" - mock_isfile.return_value = True - ret = junos.load("/path/to/file.set") - mock_load.assert_called_with(format="set", path="/path/to/file.set") - self.assertEqual(ret, ret_exp) - - def test_load_replace_true(self): - ret_exp = {"out": True, "message": "Successfully loaded the configuration."} - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.isfile" - ) as mock_isfile: - mock_getsize.return_value = 1000 - mock_mkstmp.return_value = "/path/to/file" - 
mock_isfile.return_value = True - ret = junos.load("/path/to/file", replace=True) - mock_load.assert_called_with( - format="text", merge=False, path="/path/to/file" - ) - self.assertEqual(ret, ret_exp) - - def test_load_replace_false(self): - ret_exp = {"out": True, "message": "Successfully loaded the configuration."} - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.isfile" - ) as mock_isfile: - mock_getsize.return_value = 1000 - mock_mkstmp.return_value = "/path/to/file" - mock_isfile.return_value = True - ret = junos.load("/path/to/file", replace=False) - mock_load.assert_called_with( - format="text", replace=False, path="/path/to/file" - ) - self.assertEqual(ret, ret_exp) - - def test_load_overwrite_true(self): - ret_exp = {"out": True, "message": "Successfully loaded the configuration."} - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.isfile" - ) as mock_isfile: - mock_getsize.return_value = 1000 - mock_mkstmp.return_value = "/path/to/file" - mock_isfile.return_value = True - ret = junos.load("/path/to/file", overwrite=True) - mock_load.assert_called_with( - format="text", overwrite=True, path="/path/to/file" - ) - self.assertEqual(ret, ret_exp) - - def test_load_overwrite_false(self): - ret_exp = {"out": True, "message": "Successfully loaded the configuration."} - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstmp, patch( - "os.path.isfile" - ) as mock_isfile: - 
mock_getsize.return_value = 1000 - mock_mkstmp.return_value = "/path/to/file" - mock_isfile.return_value = True - ret = junos.load("/path/to/file", overwrite=False) - mock_load.assert_called_with( - format="text", merge=True, path="/path/to/file" - ) - self.assertEqual(ret, ret_exp) - - def test_load_error(self): - ret_exp = { - "out": False, - "format": "text", - "message": 'Could not load configuration due to : "Test Error"', - } - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.isfile" - ) as mock_isfile: - mock_getsize.return_value = 1000 - mock_mkstmp.return_value = "/path/to/file" - mock_isfile.return_value = True - mock_load.side_effect = Exception("Test Error") - ret = junos.load("/path/to/file") - self.assertEqual(ret, ret_exp) - - def test_load_template(self): - ret_exp = { - "out": True, - "message": "Successfully loaded the configuration.", - } - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load: - ret = junos.load("tests/unit/modules/templates/basic2.j2", test=True) - self.assertEqual(ret, ret_exp) - - def test_commit_check_success(self): - ret_exp = {"out": True, "message": "Commit check succeeded."} - ret = junos.commit_check() - self.assertEqual(ret, ret_exp) - - def test_commit_check_error(self): - ret_exp = {"out": False, "message": "Commit check failed with "} - with patch("jnpr.junos.utils.config.Config.commit_check") as mock_check: - mock_check.side_effect = Exception - ret = junos.commit_check() - self.assertEqual(ret, ret_exp) - - def test_get_table_wrong_path(self): - table = "ModuleTable" - file = "sample.yml" - path = "/path/to/file" - ret_exp = { - "out": False, - "hostname": "1.1.1.1", - "tablename": "ModuleTable", - "message": "Given table file {} cannot be 
located".format(file), - } - with patch.dict( - junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} - ): - with patch("jnpr.junos.factory.FactoryLoader.load") as mock_load, patch( - "salt.utils.files.fopen" - ) as mock_fopen, patch( - "jnpr.junos.factory.FactoryLoader.load" - ) as mock_load: - ret = junos.get_table(table, file, path) - self.assertEqual(ret, ret_exp) - mock_load.assert_not_called() - - def test_get_table_no_path_no_file(self): - table = "ModuleTable" - file = "inventory.yml" - ret_exp = { - "out": False, - "hostname": "1.1.1.1", - "tablename": "ModuleTable", - "message": "Given table file {} cannot be located".format(file), - } - with patch.dict( - junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} - ): - with patch("jnpr.junos.factory.FactoryLoader.load") as mock_load, patch( - "glob.glob" - ) as mock_fopen: - mock_fopen.return_value = [] - ret = junos.get_table(table, file) - self.assertEqual(ret, ret_exp) - mock_load.assert_not_called() - - def test_get_table_yaml_load_error(self): - table = "ModuleTable" - file = "inventory.yml" - path = "/path/to/file" - message = "File not located test" - ret_exp = { - "out": False, - "hostname": "1.1.1.1", - "tablename": "ModuleTable", - "message": "Uncaught exception during YAML Load - please report: {}".format( - message - ), - } - with patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as mock_file, patch("glob.glob") as mock_fopen, patch.object( - yaml, "load" - ) as mock_yamlload: - mock_fopen.return_value = ["/path/to/file"] - mock_yamlload.side_effect = OSError(message) - ret = junos.get_table(table, file, path) - self.assertEqual(ret, ret_exp) - - def test_get_table_api_error(self): - table = "sample" - file = "inventory.yml" - table_yamlload = { - "ModuleTable": { - "item": ( - ".//chassis-sub-module|.//chassis-module|.//chassis-sub-sub-module" - ), - "key": "name", - "rpc": "get-chassis-inventory", - "view": "ModuleTableView", - }, - "ModuleTableView": { 
- "fields": { - "jname": "name", - "pn": "part-number", - "sn": "serial-number", - "type": "description", - "ver": "version", - }, - }, - } - ret_exp = { - "out": False, - "hostname": "1.1.1.1", - "tablename": "sample", - "message": ( - "Uncaught exception during get API call - please report: '{}'".format( - str(table) - ) - ), - } - with patch("jnpr.junos.device.Device.execute") as mock_execute, patch( - "yaml.load" - ) as mock_yamlload, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open: - mock_yamlload.return_value = table_yamlload - ret = junos.get_table(table, file) - self.assertEqual(ret["out"], ret_exp["out"]) - self.assertEqual(ret["tablename"], ret_exp["tablename"]) - self.assertEqual(ret["message"], ret_exp["message"]) - - def test_get_table_connect_closed_error(self): - table = "ModuleTable" - file = "inventory.yml" - table_yamlload = { - "ModuleTable": { - "item": ( - ".//chassis-sub-module|.//chassis-module|.//chassis-sub-sub-module" - ), - "key": "name", - "rpc": "get-chassis-inventory", - "view": "ModuleTableView", - }, - "ModuleTableView": { - "fields": { - "jname": "name", - "pn": "part-number", - "sn": "serial-number", - "type": "description", - "ver": "version", - }, - }, - } - ret_exp = { - "out": False, - "hostname": "1.1.1.1", - "tablename": "ModuleTable", - "message": ( - "Got ConnectClosedError exception. 
Connection lost with Device(1.1.1.1)" - ), - } - with patch("jnpr.junos.factory.optable.OpTable.get") as mock_load, patch( - "yaml.load" - ) as mock_yamlload, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open: - dev = Device(host="1.1.1.1", user="rick") - mock_load.side_effect = ConnectClosedError(dev) - mock_yamlload.return_value = table_yamlload - ret = junos.get_table(table, file) - self.assertEqual(ret["out"], ret_exp["out"]) - self.assertEqual(ret["tablename"], ret_exp["tablename"]) - self.assertEqual(ret["message"], ret_exp["message"]) - - def test_get_table_inventory(self): - table = "ModuleTable" - file = "inventory.yml" - pyez_tables_path = os.path.dirname(os.path.abspath(tables_dir.__file__)) - path = pyez_tables_path - table_yamlload = { - "ModuleTable": { - "item": ( - ".//chassis-sub-module|.//chassis-module|.//chassis-sub-sub-module" - ), - "key": "name", - "rpc": "get-chassis-inventory", - "view": "ModuleTableView", - }, - "ModuleTableView": { - "fields": { - "jname": "name", - "pn": "part-number", - "sn": "serial-number", - "type": "description", - "ver": "version", - }, - }, - } - with patch("jnpr.junos.device.Device.execute") as mock_execute, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch("yaml.load") as mock_yamlload, patch( - "salt.utils.json.dumps" - ) as mock_dumps: - mock_dumps.return_value = "json rpc reply" - mock_yamlload.return_value = table_yamlload - ret = junos.get_table(table, file, path) - self.assertEqual(ret["out"], True) - - def test_get_table_no_path_inventory(self): - table = "ModuleTable" - file = "inventory.yml" - table_yamlload = { - "ModuleTable": { - "item": ( - ".//chassis-sub-module|.//chassis-module|.//chassis-sub-sub-module" - ), - "key": "name", - "rpc": "get-chassis-inventory", - "view": "ModuleTableView", - }, - "ModuleTableView": { - "fields": { - "jname": "name", - "pn": "part-number", - "sn": "serial-number", - "type": "description", - "ver": "version", - 
}, - }, - } - with patch("jnpr.junos.device.Device.execute") as mock_execute, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch("yaml.load") as mock_yamlload, patch( - "salt.utils.json.dumps" - ) as mock_dumps: - mock_dumps.return_value = "json rpc reply" - mock_yamlload.return_value = table_yamlload - ret = junos.get_table(table, file) - self.assertEqual(ret["out"], True) From be22292a55307fd0392ce7b7db373802aef80fb4 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Mon, 23 Oct 2023 14:40:55 -0600 Subject: [PATCH 002/196] Added pragma no cover for functions specific only to Juniper native minion --- salt/modules/junos.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/salt/modules/junos.py b/salt/modules/junos.py index 33f25080e1d..dd130bb1c11 100644 --- a/salt/modules/junos.py +++ b/salt/modules/junos.py @@ -2050,6 +2050,7 @@ def _make_source_list(dir): return dir_list +# pragma: no cover @_timeout_decorator def file_compare(file1, file2, **kwargs): """ @@ -2112,6 +2113,7 @@ def file_compare(file1, file2, **kwargs): return ret +# pragma: no cover @_timeout_decorator def fsentry_exists(dir, **kwargs): """ @@ -2257,6 +2259,7 @@ def routing_engine(**kwargs): return ret +# pragma: no cover @_timeout_decorator def dir_copy(source, dest, force=False, **kwargs): """ From a21c1bc8cf3dcf3b1d3362a96150b77649ed3c1e Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Wed, 25 Oct 2023 10:40:52 -0600 Subject: [PATCH 003/196] Added skip on Windows for junos test --- tests/pytests/unit/modules/test_junos.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/tests/pytests/unit/modules/test_junos.py b/tests/pytests/unit/modules/test_junos.py index fc6e0b92ec2..a10decadaab 100644 --- a/tests/pytests/unit/modules/test_junos.py +++ b/tests/pytests/unit/modules/test_junos.py @@ -26,6 +26,10 @@ try: except ImportError: HAS_JUNOS = False +pytestmark = [ + pytest.mark.skip_on_windows(reason="Not 
supported on Windows"), +] + @pytest.mark.skipif( not HAS_JUNOS, reason="The junos-eznc and jxmlease modules are required" From 0d26d07ff515674be75f2a77b862e1d4c18f9a90 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Fri, 27 Oct 2023 12:15:13 -0600 Subject: [PATCH 004/196] Updated test per reviewer comments --- tests/pytests/unit/modules/test_junos.py | 743 ++++++++++++----------- 1 file changed, 397 insertions(+), 346 deletions(-) diff --git a/tests/pytests/unit/modules/test_junos.py b/tests/pytests/unit/modules/test_junos.py index a10decadaab..eb25a0ec95c 100644 --- a/tests/pytests/unit/modules/test_junos.py +++ b/tests/pytests/unit/modules/test_junos.py @@ -28,12 +28,12 @@ except ImportError: pytestmark = [ pytest.mark.skip_on_windows(reason="Not supported on Windows"), + pytest.mark.skipif( + not HAS_JUNOS, reason="The junos-eznc and jxmlease modules are required" + ), ] -@pytest.mark.skipif( - not HAS_JUNOS, reason="The junos-eznc and jxmlease modules are required" -) @pytest.fixture def mock_cp(*args, **kwargs): pass @@ -206,8 +206,115 @@ def test__timeout_cleankwargs_decorator(): def test_facts_refresh(): with patch("salt.modules.saltutil.sync_grains") as mock_sync_grains: - ret = dict() - ret["facts"] = { + ret = { + "out": True, + "facts": { + "2RE": True, + "HOME": "/var/home/regress", + "RE0": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "master", + "model": "RE-VMX", + "status": "OK", + "up_time": "11 days, 23 hours, 16 minutes, 54 seconds", + }, + "RE1": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "backup", + "model": "RE-VMX", + "status": "OK", + "up_time": "11 days, 23 hours, 16 minutes, 41 seconds", + }, + "RE_hw_mi": False, + "current_re": ["re0", "master", "node", "fwdd", "member", "pfem"], + "domain": "englab.juniper.net", + "fqdn": "R1_re0.englab.juniper.net", + "hostname": "R1_re0", + "hostname_info": {"re0": "R1_re0", "re1": "R1_re01"}, + "ifd_style": 
"CLASSIC", + "junos_info": { + "re0": { + "object": { + "build": None, + "major": (16, 1), + "minor": "20160413_0837_aamish", + "type": "I", + }, + "text": "16.1I20160413_0837_aamish", + }, + "re1": { + "object": { + "build": None, + "major": (16, 1), + "minor": "20160413_0837_aamish", + "type": "I", + }, + "text": "16.1I20160413_0837_aamish", + }, + }, + "master": "RE0", + "model": "MX240", + "model_info": {"re0": "MX240", "re1": "MX240"}, + "personality": "MX", + "re_info": { + "default": { + "0": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "master", + "model": "RE-VMX", + "status": "OK", + }, + "1": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "backup", + "model": "RE-VMX", + "status": "OK", + }, + "default": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "master", + "model": "RE-VMX", + "status": "OK", + }, + } + }, + "re_master": {"default": "0"}, + "serialnumber": "VMX4eaf", + "srx_cluster": None, + "switch_style": "BRIDGE_DOMAIN", + "vc_capable": False, + "vc_fabric": None, + "vc_master": None, + "vc_mode": None, + "version": "16.1I20160413_0837_aamish", + "version_RE0": "16.1I20160413_0837_aamish", + "version_RE1": "16.1I20160413_0837_aamish", + "version_info": { + "build": None, + "major": (16, 1), + "minor": "20160413_0837_aamish", + "type": "I", + }, + "virtual": True, + }, + } + assert junos.facts_refresh() == ret + + +def test_facts_refresh_exception(): + with patch("jnpr.junos.device.Device.facts_refresh") as mock_facts_refresh: + mock_facts_refresh.side_effect = raise_exception + ret = { + "message": 'Execution failed due to "Test exception"', + "out": False, + } + assert junos.facts_refresh() == ret + + +def test_facts(): + ret = { + "out": True, + "facts": { "2RE": True, "HOME": "/var/home/regress", "RE0": { @@ -295,127 +402,25 @@ def test_facts_refresh(): "type": "I", }, "virtual": True, - } - ret["out"] = True - assert junos.facts_refresh() == ret - - -def 
test_facts_refresh_exception(): - with patch("jnpr.junos.device.Device.facts_refresh") as mock_facts_refresh: - mock_facts_refresh.side_effect = raise_exception - ret = dict() - ret["message"] = 'Execution failed due to "Test exception"' - ret["out"] = False - assert junos.facts_refresh() == ret - - -def test_facts(): - ret = dict() - ret["facts"] = { - "2RE": True, - "HOME": "/var/home/regress", - "RE0": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - "up_time": "11 days, 23 hours, 16 minutes, 54 seconds", }, - "RE1": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "backup", - "model": "RE-VMX", - "status": "OK", - "up_time": "11 days, 23 hours, 16 minutes, 41 seconds", - }, - "RE_hw_mi": False, - "current_re": ["re0", "master", "node", "fwdd", "member", "pfem"], - "domain": "englab.juniper.net", - "fqdn": "R1_re0.englab.juniper.net", - "hostname": "R1_re0", - "hostname_info": {"re0": "R1_re0", "re1": "R1_re01"}, - "ifd_style": "CLASSIC", - "junos_info": { - "re0": { - "object": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "text": "16.1I20160413_0837_aamish", - }, - "re1": { - "object": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "text": "16.1I20160413_0837_aamish", - }, - }, - "master": "RE0", - "model": "MX240", - "model_info": {"re0": "MX240", "re1": "MX240"}, - "personality": "MX", - "re_info": { - "default": { - "0": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - }, - "1": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "backup", - "model": "RE-VMX", - "status": "OK", - }, - "default": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - }, - } - }, - "re_master": {"default": "0"}, - 
"serialnumber": "VMX4eaf", - "srx_cluster": None, - "switch_style": "BRIDGE_DOMAIN", - "vc_capable": False, - "vc_fabric": None, - "vc_master": None, - "vc_mode": None, - "version": "16.1I20160413_0837_aamish", - "version_RE0": "16.1I20160413_0837_aamish", - "version_RE1": "16.1I20160413_0837_aamish", - "version_info": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "virtual": True, } - ret["out"] = True assert junos.facts() == ret def test_facts_exception(): with patch.dict(junos.__proxy__, {"junos.get_serialized_facts": raise_exception}): - ret = dict() - ret["message"] = 'Could not display facts due to "Test exception"' - ret["out"] = False + ret = { + "message": 'Could not display facts due to "Test exception"', + "out": False, + } assert junos.facts() == ret def test_set_hostname_without_args(): - ret = dict() - ret["message"] = "Please provide the hostname." - ret["out"] = False + ret = { + "message": "Please provide the hostname.", + "out": False, + } assert junos.set_hostname() == ret @@ -428,18 +433,20 @@ def test_set_hostname_load_called_with_valid_name(): def test_set_hostname_raise_exception_for_load(): with patch("jnpr.junos.utils.config.Config.load") as mock_load: mock_load.side_effect = raise_exception - ret = dict() - ret["message"] = 'Could not load configuration due to error "Test exception"' - ret["out"] = False + ret = { + "message": 'Could not load configuration due to error "Test exception"', + "out": False, + } assert junos.set_hostname("Test-name") == ret def test_set_hostname_raise_exception_for_commit_check(): with patch("jnpr.junos.utils.config.Config.commit_check") as mock_commit_check: mock_commit_check.side_effect = raise_exception - ret = dict() - ret["message"] = 'Could not commit check due to error "Test exception"' - ret["out"] = False + ret = { + "message": 'Could not commit check due to error "Test exception"', + "out": False, + } assert junos.set_hostname("test-name") == ret @@ 
-507,20 +514,20 @@ def test_set_hostname_successful_return_message(): "__pub_tgt_type": "glob", "__pub_ret": "", } - ret = dict() - ret["message"] = "Successfully changed hostname." - ret["out"] = True + ret = { + "message": "Successfully changed hostname.", + "out": True, + } assert junos.set_hostname("test-name", **args) == ret def test_set_hostname_raise_exception_for_commit(): with patch("jnpr.junos.utils.config.Config.commit") as mock_commit: mock_commit.side_effect = raise_exception - ret = dict() - ret[ - "message" - ] = 'Successfully loaded host-name but commit failed with "Test exception"' - ret["out"] = False + ret = { + "message": 'Successfully loaded host-name but commit failed with "Test exception"', + "out": False, + } assert junos.set_hostname("test-name") == ret @@ -529,9 +536,10 @@ def test_set_hostname_fail_commit_check(): "jnpr.junos.utils.config.Config.commit_check" ) as mock_commit_check, patch("salt.modules.junos.rollback") as mock_rollback: mock_commit_check.return_value = False - ret = dict() - ret["out"] = False - ret["message"] = "Successfully loaded host-name but pre-commit check failed." + ret = { + "message": "Successfully loaded host-name but pre-commit check failed.", + "out": False, + } assert junos.set_hostname("test") == ret @@ -543,18 +551,20 @@ def test_commit_without_args(): ) as mock_commit: mock_commit.return_value = True mock_commit_check.return_value = True - ret = dict() - ret["message"] = "Commit Successful." 
- ret["out"] = True + ret = { + "message": "Commit Successful.", + "out": True, + } assert junos.commit() == ret def test_commit_raise_commit_check_exception(): with patch("jnpr.junos.utils.config.Config.commit_check") as mock_commit_check: mock_commit_check.side_effect = raise_exception - ret = dict() - ret["message"] = 'Could not perform commit check due to "Test exception"' - ret["out"] = False + ret = { + "message": 'Could not perform commit check due to "Test exception"', + "out": False, + } assert junos.commit() == ret @@ -566,11 +576,10 @@ def test_commit_raise_commit_exception(): ) as mock_commit: mock_commit_check.return_value = True mock_commit.side_effect = raise_exception - ret = dict() - ret["out"] = False - ret[ - "message" - ] = 'Commit check succeeded but actual commit failed with "Test exception"' + ret = { + "message": 'Commit check succeeded but actual commit failed with "Test exception"', + "out": False, + } assert junos.commit() == ret @@ -630,27 +639,30 @@ def test_commit_pyez_commit_returning_false(): ) as mock_commit: mock_commit.return_value = False mock_commit_check.return_value = True - ret = dict() - ret["message"] = "Commit failed." - ret["out"] = False + ret = { + "message": "Commit failed.", + "out": False, + } assert junos.commit() == ret def test_commit_pyez_commit_check_returns_false(): with patch("jnpr.junos.utils.config.Config.commit_check") as mock_commit_check: mock_commit_check.return_value = False - ret = dict() - ret["out"] = False - ret["message"] = "Pre-commit check failed." 
+ ret = { + "message": "Pre-commit check failed.", + "out": False, + } assert junos.commit() == ret def test_rollback_exception(): with patch("jnpr.junos.utils.config.Config.rollback") as mock_rollback: mock_rollback.side_effect = raise_exception - ret = dict() - ret["message"] = 'Rollback failed due to "Test exception"' - ret["out"] = False + ret = { + "message": 'Rollback failed due to "Test exception"', + "out": False, + } assert junos.rollback() == ret @@ -664,18 +676,20 @@ def test_rollback_without_args_success(): ) as mock_rollback: mock_commit_check.return_value = True mock_rollback.return_value = True - ret = dict() - ret["message"] = "Rollback successful" - ret["out"] = True + ret = { + "message": "Rollback successful", + "out": True, + } assert junos.rollback() == ret def test_rollback_without_args_fail(): with patch("jnpr.junos.utils.config.Config.rollback") as mock_rollback: mock_rollback.return_value = False - ret = dict() - ret["message"] = "Rollback failed" - ret["out"] = False + ret = { + "message": "Rollback failed", + "out": False, + } assert junos.rollback() == ret @@ -863,9 +877,10 @@ def test_rollback_commit_check_exception(): "jnpr.junos.utils.config.Config.rollback" ) as mock_rollback: mock_commit_check.side_effect = raise_exception - ret = dict() - ret["message"] = 'Could not commit check due to "Test exception"' - ret["out"] = False + ret = { + "message": 'Could not commit check due to "Test exception"', + "out": False, + } assert junos.rollback() == ret @@ -879,11 +894,10 @@ def test_rollback_commit_exception(): ) as mock_rollback: mock_commit_check.return_value = True mock_commit.side_effect = raise_exception - ret = dict() - ret[ - "message" - ] = 'Rollback successful but commit failed with error "Test exception"' - ret["out"] = False + ret = { + "message": 'Rollback successful but commit failed with error "Test exception"', + "out": False, + } assert junos.rollback() == ret @@ -894,9 +908,10 @@ def test_rollback_commit_check_fails(): 
"jnpr.junos.utils.config.Config.rollback" ) as mock_rollback: mock_commit_check.return_value = False - ret = dict() - ret["message"] = "Rollback successful but pre-commit check failed." - ret["out"] = False + ret = { + "message": "Rollback successful but pre-commit check failed.", + "out": False, + } assert junos.rollback() == ret @@ -915,16 +930,18 @@ def test_diff_with_arg(): def test_diff_exception(): with patch("jnpr.junos.utils.config.Config.diff") as mock_diff: mock_diff.side_effect = raise_exception - ret = dict() - ret["message"] = 'Could not get diff with error "Test exception"' - ret["out"] = False + ret = { + "message": 'Could not get diff with error "Test exception"', + "out": False, + } assert junos.diff() == ret def test_ping_without_args(): - ret = dict() - ret["message"] = "Please specify the destination ip to ping." - ret["out"] = False + ret = { + "message": "Please specify the destination ip to ping.", + "out": False, + } assert junos.ping() == ret @@ -958,16 +975,18 @@ def test_ping_ttl(): def test_ping_exception(): with patch("jnpr.junos.device.Device.execute") as mock_execute: mock_execute.side_effect = raise_exception - ret = dict() - ret["message"] = 'Execution failed due to "Test exception"' - ret["out"] = False + ret = { + "message": 'Execution failed due to "Test exception"', + "out": False, + } assert junos.ping("1.1.1.1") == ret def test_cli_without_args(): - ret = dict() - ret["message"] = "Please provide the CLI command to be executed." 
- ret["out"] = False + ret = { + "message": "Please provide the CLI command to be executed.", + "out": False, + } assert junos.cli() == ret @@ -980,9 +999,10 @@ def test_cli_with_format_as_empty_string(): def test_cli(): with patch("jnpr.junos.device.Device.cli") as mock_cli: mock_cli.return_vale = "CLI result" - ret = dict() - ret["message"] = "CLI result" - ret["out"] = True + ret = { + "message": "CLI result", + "out": True, + } junos.cli("show version") mock_cli.assert_called_with("show version", "text", warning=False) @@ -1003,9 +1023,10 @@ def test_cli_format_xml(): "__pub_tgt_type": "glob", "__pub_ret": "", } - ret = dict() - ret["message"] = "test" - ret["out"] = True + ret = { + "message": "test", + "out": True, + } assert junos.cli("show version", **args) == ret mock_cli.assert_called_with("show version", "xml", warning=False) mock_to_string.assert_called_once_with("test") @@ -1015,9 +1036,10 @@ def test_cli_format_xml(): def test_cli_exception_in_cli(): with patch("jnpr.junos.device.Device.cli") as mock_cli: mock_cli.side_effect = raise_exception - ret = dict() - ret["message"] = 'Execution failed due to "Test exception"' - ret["out"] = False + ret = { + "message": 'Execution failed due to "Test exception"', + "out": False, + } assert junos.cli("show version") == ret @@ -1037,9 +1059,10 @@ def test_cli_output_save(): "__pub_tgt_type": "glob", "__pub_ret": "", } - ret = dict() - ret["message"] = "Test return" - ret["out"] = True + ret = { + "message": "Test return", + "out": True, + } assert junos.cli("show version", **args) == ret mock_fopen.assert_called_with("/path/to/file", "w") mock_cli.assert_called_with("show version", "text", warning=False) @@ -1062,24 +1085,27 @@ def test_cli_output_save_ioexception(): "__pub_tgt_type": "glob", "__pub_ret": "", } - ret = dict() - ret["message"] = 'Unable to open "/path/to/file" to write' - ret["out"] = False + ret = { + "message": 'Unable to open "/path/to/file" to write', + "out": False, + } assert 
junos.cli("show version", **args) == ret def test_shutdown_without_args(): - ret = dict() - ret["message"] = "Provide either one of the arguments: shutdown or reboot." - ret["out"] = False + ret = { + "message": "Provide either one of the arguments: shutdown or reboot.", + "out": False, + } assert junos.shutdown() == ret def test_shutdown_with_reboot_args(): with patch("salt.modules.junos.SW.reboot") as mock_reboot: - ret = dict() - ret["message"] = "Successfully powered off/rebooted." - ret["out"] = True + ret = { + "message": "Successfully powered off/rebooted.", + "out": True, + } args = { "__pub_user": "root", "__pub_arg": [{"reboot": True}], @@ -1096,9 +1122,10 @@ def test_shutdown_with_reboot_args(): def test_shutdown_with_poweroff_args(): with patch("salt.modules.junos.SW.poweroff") as mock_poweroff: - ret = dict() - ret["message"] = "Successfully powered off/rebooted." - ret["out"] = True + ret = { + "message": "Successfully powered off/rebooted.", + "out": True, + } args = { "__pub_user": "root", "__pub_arg": [{"shutdown": True}], @@ -1114,9 +1141,10 @@ def test_shutdown_with_poweroff_args(): def test_shutdown_with_shutdown_as_false(): - ret = dict() - ret["message"] = "Nothing to be done." 
- ret["out"] = False + ret = { + "message": "Nothing to be done.", + "out": False, + } args = { "__pub_user": "root", "__pub_arg": [{"shutdown": False}], @@ -1177,16 +1205,18 @@ def test_shutdown_fail_with_exception(): "__pub_tgt_type": "glob", "__pub_ret": "", } - ret = dict() - ret["message"] = 'Could not poweroff/reboot because "Test exception"' - ret["out"] = False + ret = { + "message": 'Could not poweroff/reboot because "Test exception"', + "out": False, + } assert junos.shutdown(**args) == ret def test_install_config_without_args(): - ret = dict() - ret["message"] = "Please provide the salt path where the configuration is present" - ret["out"] = False + ret = { + "message": "Please provide the salt path where the configuration is present", + "out": False, + } assert junos.install_config() == ret @@ -1194,10 +1224,10 @@ def test_install_config_cp_fails(): with patch.dict( junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} ): - ret = dict() - ret = dict() - ret["message"] = "Invalid file path." - ret["out"] = False + ret = { + "message": "Invalid file path.", + "out": False, + } assert junos.install_config("path") == ret @@ -1205,10 +1235,10 @@ def test_install_config_file_cp_fails(): with patch.dict( junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} ): - ret = dict() - ret = dict() - ret["message"] = "Invalid file path." - ret["out"] = False + ret = { + "message": "Invalid file path.", + "out": False, + } assert junos.install_config("path") == ret @@ -1246,9 +1276,10 @@ def test_install_config(): mock_diff.return_value = "diff" mock_commit_check.return_value = True - ret = dict() - ret["message"] = "Successfully loaded and committed!" 
- ret["out"] = True + ret = { + "message": "Successfully loaded and committed!", + "out": True, + } assert junos.install_config("salt://actual/path/config.set") == ret mock_load.assert_called_with(path="test/path/config", format="set") @@ -1287,9 +1318,10 @@ def test_install_config_xml_file(): mock_diff.return_value = "diff" mock_commit_check.return_value = True - ret = dict() - ret["message"] = "Successfully loaded and committed!" - ret["out"] = True + ret = { + "message": "Successfully loaded and committed!", + "out": True, + } assert junos.install_config("salt://actual/path/config.xml") == ret mock_load.assert_called_with(path="test/path/config", format="xml") @@ -1328,9 +1360,10 @@ def test_install_config_text_file(): mock_diff.return_value = "diff" mock_commit_check.return_value = True - ret = dict() - ret["message"] = "Successfully loaded and committed!" - ret["out"] = True + ret = { + "message": "Successfully loaded and committed!", + "out": True, + } assert junos.install_config("salt://actual/path/config") == ret mock_load.assert_called_with(path="test/path/config", format="text") @@ -1369,9 +1402,10 @@ def test_install_config_cache_not_exists(): mock_commit_check.return_value = True mock_mkdtemp.return_value = "/tmp/argr5351afd" - ret = dict() - ret["message"] = "Successfully loaded and committed!" - ret["out"] = True + ret = { + "message": "Successfully loaded and committed!", + "out": True, + } assert ( junos.install_config("salt://actual/path/config", template_vars=True) == ret @@ -1424,9 +1458,10 @@ def test_install_config_replace(): "__pub_ret": "", } - ret = dict() - ret["message"] = "Successfully loaded and committed!" 
- ret["out"] = True + ret = { + "message": "Successfully loaded and committed!", + "out": True, + } assert junos.install_config("salt://actual/path/config.set", **args) == ret mock_load.assert_called_with( path="test/path/config", format="set", merge=False @@ -1478,9 +1513,10 @@ def test_install_config_overwrite(): "__pub_ret": "", } - ret = dict() - ret["message"] = "Successfully loaded and committed!" - ret["out"] = True + ret = { + "message": "Successfully loaded and committed!", + "out": True, + } assert junos.install_config("salt://actual/path/config.xml", **args) == ret mock_load.assert_called_with( path="test/path/config", format="xml", overwrite=True @@ -1532,9 +1568,10 @@ def test_install_config_overwrite_false(): "__pub_ret": "", } - ret = dict() - ret["message"] = "Successfully loaded and committed!" - ret["out"] = True + ret = { + "message": "Successfully loaded and committed!", + "out": True, + } assert junos.install_config("salt://actual/path/config", **args) == ret mock_load.assert_called_with( path="test/path/config", format="text", merge=True @@ -1557,10 +1594,11 @@ def test_install_config_load_causes_exception(): mock_getsize.return_value = 10 mock_mkstemp.return_value = "test/path/config" mock_load.side_effect = raise_exception - ret = dict() - ret["message"] = 'Could not load configuration due to : "Test exception"' - ret["format"] = "set" - ret["out"] = False + ret = { + "message": 'Could not load configuration due to : "Test exception"', + "out": False, + "format": "set", + } assert junos.install_config(path="actual/path/config.set") == ret @@ -1580,9 +1618,10 @@ def test_install_config_no_diff(): mock_getsize.return_value = 10 mock_mkstemp.return_value = "test/path/config" mock_diff.return_value = None - ret = dict() - ret["message"] = "Configuration already applied!" 
- ret["out"] = True + ret = { + "message": "Configuration already applied!", + "out": True, + } assert junos.install_config("actual/path/config") == ret @@ -1621,9 +1660,10 @@ def test_install_config_write_diff(): "__pub_ret": "", } - ret = dict() - ret["message"] = "Successfully loaded and committed!" - ret["out"] = True + ret = { + "message": "Successfully loaded and committed!", + "out": True, + } assert junos.install_config("actual/path/config", **args) == ret mock_fopen.assert_called_with("copy/config/here", "w") @@ -1666,9 +1706,10 @@ def test_install_config_write_diff_exception(): "__pub_ret": "", } - ret = dict() - ret["message"] = "Could not write into diffs_file due to: 'Test exception'" - ret["out"] = False + ret = { + "message": "Could not write into diffs_file due to: 'Test exception'", + "out": False, + } assert junos.install_config("actual/path/config", **args) == ret @@ -1706,9 +1747,10 @@ def test_install_config_commit_params(): "__pub_tgt_type": "glob", "__pub_ret": "", } - ret = dict() - ret["message"] = "Successfully loaded and committed!" - ret["out"] = True + ret = { + "message": "Successfully loaded and committed!", + "out": True, + } assert junos.install_config("actual/path/config", **args) == ret mock_commit.assert_called_with(comment="comitted via salt", confirm=3) @@ -1737,12 +1779,10 @@ def test_install_config_commit_check_fails(): mock_diff.return_value = "diff" mock_commit_check.return_value = False - ret = dict() - ret["message"] = ( - "Loaded configuration but commit check failed, hence rolling back" - " configuration." 
- ) - ret["out"] = False + ret = { + "message": "Loaded configuration but commit check failed, hence rolling back configuration.", + "out": False, + } assert junos.install_config("actual/path/config.xml") == ret @@ -1770,11 +1810,10 @@ def test_install_config_commit_exception(): mock_diff.return_value = "diff" mock_commit_check.return_value = True mock_commit.side_effect = raise_exception - ret = dict() - ret[ - "message" - ] = 'Commit check successful but commit failed with "Test exception"' - ret["out"] = False + ret = { + "message": 'Commit check successful but commit failed with "Test exception"', + "out": False, + } assert junos.install_config("actual/path/config") == ret @@ -1801,12 +1840,10 @@ def test_install_config_test_mode(): mock_mkstemp.return_value = "test/path/config" mock_diff.return_value = "diff" mock_commit_check.return_value = True - ret = dict() - ret["message"] = ( - "Commit check passed, but skipping commit for dry-run and rolling back" - " configuration." - ) - ret["out"] = True + ret = { + "message": "Commit check passed, but skipping commit for dry-run and rolling back configuration.", + "out": True, + } assert junos.install_config("actual/path/config", test=True) == ret mock_commit.assert_not_called() @@ -1834,11 +1871,10 @@ def test_install_config_write_diff_dynamic_mode(): mock_mkstemp.return_value = "test/path/config" mock_diff.return_value = "diff" mock_commit_check.return_value = True - ret = dict() - ret[ - "message" - ] = "Write diff is not supported with dynamic/ephemeral configuration mode" - ret["out"] = False + ret = { + "message": "Write diff is not supported with dynamic/ephemeral configuration mode", + "out": False, + } assert ( junos.install_config( "actual/path/config", mode="dynamic", diffs_file="/path/to/dif" @@ -1871,9 +1907,10 @@ def test_install_config_unknown_mode(): mock_mkstemp.return_value = "test/path/config" mock_diff.return_value = "diff" mock_commit_check.return_value = True - ret = dict() - ret["message"] = 
"install_config failed due to: unsupported action: abcdef" - ret["out"] = False + ret = { + "message": "install_config failed due to: unsupported action: abcdef", + "out": False, + } assert junos.install_config("actual/path/config", mode="abcdef") == ret mock_commit.assert_not_called() @@ -1881,26 +1918,29 @@ def test_install_config_unknown_mode(): def test_zeroize(): with patch("jnpr.junos.device.Device.cli") as mock_cli: result = junos.zeroize() - ret = dict() - ret["out"] = True - ret["message"] = "Completed zeroize and rebooted" mock_cli.assert_called_once_with("request system zeroize") + ret = { + "message": "Completed zeroize and rebooted", + "out": True, + } assert result == ret def test_zeroize_throw_exception(): with patch("jnpr.junos.device.Device.cli") as mock_cli: mock_cli.side_effect = raise_exception - ret = dict() - ret["message"] = 'Could not zeroize due to : "Test exception"' - ret["out"] = False + ret = { + "message": 'Could not zeroize due to : "Test exception"', + "out": False, + } assert junos.zeroize() == ret def test_install_os_without_args(): - ret = dict() - ret["message"] = "Please provide the salt path where the junos image is present." - ret["out"] = False + ret = { + "message": "Please provide the salt path where the junos image is present.", + "out": False, + } assert junos.install_os() == ret @@ -1933,12 +1973,10 @@ def test_install_os_cp_fails(): False, "Invalid path. Please provide a valid image path", ) - ret = dict() - ret["message"] = ( - "Installation failed. Reason: Invalid path. Please provide a valid" - " image path" - ) - ret["out"] = False + ret = { + "message": "Installation failed. Reason: Invalid path. Please provide a valid image path", + "out": False, + } assert junos.install_os("salt://image/path/") == ret @@ -1946,9 +1984,10 @@ def test_install_os_image_cp_fails(): with patch.dict( junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} ): - ret = dict() - ret["message"] = "Invalid path. 
Please provide a valid image path" - ret["out"] = False + ret = { + "message": "Invalid path. Please provide a valid image path", + "out": False, + } assert junos.install_os("/image/path/") == ret @@ -1977,9 +2016,10 @@ def test_install_os(): mock_getsize.return_value = 10 mock_isfile.return_value = True mock_install.return_value = True, "installed" - ret = dict() - ret["out"] = True - ret["message"] = "Installed the os." + ret = { + "message": "Installed the os.", + "out": True, + } assert junos.install_os("path") == ret @@ -1998,9 +2038,10 @@ def test_install_os_failure(): mock_getsize.return_value = 10 mock_isfile.return_value = True mock_install.return_value = False, "because we are testing failure" - ret = dict() - ret["out"] = False - ret["message"] = "Installation failed. Reason: because we are testing failure" + ret = { + "message": "Installation failed. Reason: because we are testing failure", + "out": False, + } assert junos.install_os("path") == ret @@ -2029,9 +2070,10 @@ def test_install_os_with_reboot_arg(): "__pub_tgt_type": "glob", "__pub_ret": "", } - ret = dict() - ret["message"] = "Successfully installed and rebooted!" 
- ret["out"] = True + ret = { + "message": "Successfully installed and rebooted!", + "out": True, + } assert junos.install_os("path", **args) == ret @@ -2050,9 +2092,10 @@ def test_install_os_pyez_install_throws_exception(): mock_getsize.return_value = 10 mock_isfile.return_value = True mock_install.side_effect = raise_exception - ret = dict() - ret["message"] = 'Installation failed due to: "Test exception"' - ret["out"] = False + ret = { + "message": 'Installation failed due to: "Test exception"', + "out": False, + } assert junos.install_os("path") == ret @@ -2082,11 +2125,10 @@ def test_install_os_with_reboot_raises_exception(): "__pub_tgt_type": "glob", "__pub_ret": "", } - ret = dict() - ret[ - "message" - ] = 'Installation successful but reboot failed due to : "Test exception"' - ret["out"] = False + ret = { + "message": 'Installation successful but reboot failed due to : "Test exception"', + "out": False, + } assert junos.install_os("path", **args) == ret @@ -2105,9 +2147,10 @@ def test_install_os_no_copy(): mock_getsize.return_value = 10 mock_isfile.return_value = True mock_install.return_value = True, "installed" - ret = dict() - ret["out"] = True - ret["message"] = "Installed the os." + ret = { + "message": "Installed the os.", + "out": True, + } assert junos.install_os("path", no_copy=True) == ret mock_install.assert_called_with( "path", no_copy=True, progress=True, timeout=1800 @@ -2131,9 +2174,10 @@ def test_install_os_issu(): mock_getsize.return_value = 10 mock_isfile.return_value = True mock_install.return_value = True, "installed" - ret = dict() - ret["out"] = True - ret["message"] = "Installed the os." 
+ ret = { + "message": "Installed the os.", + "out": True, + } assert junos.install_os("path", issu=True) == ret mock_install.assert_called_with(ANY, issu=True, progress=True, timeout=1800) @@ -2153,9 +2197,10 @@ def test_install_os_add_params(): mock_getsize.return_value = 10 mock_isfile.return_value = True mock_install.return_value = True, "installed" - ret = dict() - ret["out"] = True - ret["message"] = "Installed the os." + ret = { + "message": "Installed the os.", + "out": True, + } remote_path = "/path/to/file" assert ( junos.install_os("path", remote_path=remote_path, nssu=True, validate=True) @@ -2184,9 +2229,10 @@ def test_file_copy_invalid_src(mock_scpclient, mock_put, mock_ssh): mock_put.side_effect = Exception(invalid_path) with patch("os.path.isfile") as mock_isfile: mock_isfile.return_value = False - ret = dict() - ret["message"] = 'Could not copy file : "invalid/file/path"' - ret["out"] = False + ret = { + "message": 'Could not copy file : "invalid/file/path"', + "out": False, + } assert junos.file_copy(invalid_path, "file") == ret @@ -2199,9 +2245,10 @@ def test_file_copy(): "os.path.isfile" ) as mock_isfile: mock_isfile.return_value = True - ret = dict() - ret["message"] = "Successfully copied file from test/src/file to file" - ret["out"] = True + ret = { + "message": "Successfully copied file from test/src/file to file", + "out": True, + } assert junos.file_copy(dest="file", src="test/src/file") == ret @@ -2211,9 +2258,10 @@ def test_file_copy_exception(): ) as mock_isfile: mock_isfile.return_value = True mock_scp.side_effect = raise_exception - ret = dict() - ret["message"] = 'Could not copy file : "Test exception"' - ret["out"] = False + ret = { + "message": 'Could not copy file : "Test exception"', + "out": False, + } assert junos.file_copy(dest="file", src="test/src/file") == ret @@ -2239,18 +2287,20 @@ def test_virtual_all_true(): def test_rpc_without_args(): - ret = dict() - ret["message"] = "Please provide the rpc to execute." 
- ret["out"] = False + ret = { + "message": "Please provide the rpc to execute.", + "out": False, + } assert junos.rpc() == ret def test_rpc_get_config_exception(): with patch("jnpr.junos.device.Device.execute") as mock_execute: mock_execute.side_effect = raise_exception - ret = dict() - ret["message"] = 'RPC execution failed due to "Test exception"' - ret["out"] = False + ret = { + "message": 'RPC execution failed due to "Test exception"', + "out": False, + } assert junos.rpc("get_config") == ret @@ -2329,9 +2379,10 @@ def test_rpc_get_chassis_inventory_filter_as_arg(): def test_rpc_get_interface_information_exception(): with patch("jnpr.junos.device.Device.execute") as mock_execute: mock_execute.side_effect = raise_exception - ret = dict() - ret["message"] = 'RPC execution failed due to "Test exception"' - ret["out"] = False + ret = { + "message": 'RPC execution failed due to "Test exception"', + "out": False, + } assert junos.rpc("get_interface_information") == ret From f649068cadd2dba56c858f01bdf0967b68168ca4 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Mon, 30 Oct 2023 16:05:52 -0600 Subject: [PATCH 005/196] Removed use of mock_cp as per reviewer's comments --- tests/pytests/unit/modules/test_junos.py | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/tests/pytests/unit/modules/test_junos.py b/tests/pytests/unit/modules/test_junos.py index eb25a0ec95c..616f15f1d8a 100644 --- a/tests/pytests/unit/modules/test_junos.py +++ b/tests/pytests/unit/modules/test_junos.py @@ -34,11 +34,6 @@ pytestmark = [ ] -@pytest.fixture -def mock_cp(*args, **kwargs): - pass - - @pytest.fixture def get_facts(): facts = { @@ -151,7 +146,7 @@ def make_connect(): @pytest.fixture -def configure_loader_modules(mock_cp, get_facts, make_connect): +def configure_loader_modules(get_facts, make_connect): return { junos: { "__proxy__": { @@ -161,8 +156,8 @@ def configure_loader_modules(mock_cp, get_facts, make_connect): 
"junos.reboot_clear": MagicMock(return_value=True), }, "__salt__": { - "cp.get_template": MagicMock(return_value=mock_cp), - "cp.get_file": MagicMock(return_value=mock_cp), + "cp.get_template": MagicMock(return_value=True), + "cp.get_file": MagicMock(return_value=True), "file.file_exists": MagicMock(return_value=True), "slsutil.renderer": MagicMock( return_value="set system host-name dummy" From 168c92f20c6209b4102cdd54976f8de16394adc3 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Mon, 30 Oct 2023 16:16:11 -0600 Subject: [PATCH 006/196] Moved pragma no cover statement to the function definition to be excluded --- salt/modules/junos.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/salt/modules/junos.py b/salt/modules/junos.py index dd130bb1c11..2f1f0c6ab4f 100644 --- a/salt/modules/junos.py +++ b/salt/modules/junos.py @@ -2050,9 +2050,8 @@ def _make_source_list(dir): return dir_list -# pragma: no cover @_timeout_decorator -def file_compare(file1, file2, **kwargs): +def file_compare(file1, file2, **kwargs): # pragma: no cover """ Compare two files and return a dictionary indicating if they are different. @@ -2113,9 +2112,8 @@ def file_compare(file1, file2, **kwargs): return ret -# pragma: no cover @_timeout_decorator -def fsentry_exists(dir, **kwargs): +def fsentry_exists(dir, **kwargs): # pragma: no cover """ Returns a dictionary indicating if `dir` refers to a file or a non-file (generally a directory) in the file system, @@ -2259,9 +2257,8 @@ def routing_engine(**kwargs): return ret -# pragma: no cover @_timeout_decorator -def dir_copy(source, dest, force=False, **kwargs): +def dir_copy(source, dest, force=False, **kwargs): # pragma: no cover """ Copy a directory and recursively its contents from source to dest. From 642a5bda69fce22a7904b96e026524254cab958a Mon Sep 17 00:00:00 2001 From: "Gareth J. Greenaway" Date: Tue, 17 Oct 2023 11:34:35 -0700 Subject: [PATCH 007/196] Additional tests for http state. 
--- tests/pytests/unit/states/test_http.py | 332 +++++++++++++++++++++++-- 1 file changed, 305 insertions(+), 27 deletions(-) diff --git a/tests/pytests/unit/states/test_http.py b/tests/pytests/unit/states/test_http.py index a672845e5c3..85150b4a2a8 100644 --- a/tests/pytests/unit/states/test_http.py +++ b/tests/pytests/unit/states/test_http.py @@ -42,26 +42,124 @@ def test_query(): with patch.dict(http.__salt__, {"http.query": mock}): assert http.query("salt", "Dude", "stack") == ret[1] + with patch.dict(http.__opts__, {"test": False}): + mock = MagicMock(return_value={"body": "http body", "status": 200}) + expected = { + "name": "http://example.com/", + "result": True, + "comment": "Status 200 was found.", + "changes": {}, + "data": {"body": "http body", "status": 200}, + } + + with patch.dict(http.__salt__, {"http.query": mock}): + assert ( + http.query(name="http://example.com/", status=200, decode=False) + == expected + ) + + with patch.dict(http.__opts__, {"test": False}): + mock = MagicMock(return_value={"body": "http body", "status": 200}) + expected = { + "name": "http://example.com/", + "result": True, + "comment": "Status 200 was found.", + "changes": {}, + "data": {"body": "http body", "status": 200}, + } + + with patch.dict(http.__salt__, {"http.wait_for_successful_query": mock}): + assert ( + http.query(name="http://example.com/", status=200, wait_for=300) + == expected + ) + + with patch.dict(http.__opts__, {"test": True}): + mock = MagicMock(return_value={"body": "http body", "status": 200}) + expected = { + "name": "http://example.com/", + "result": None, + "comment": "Status 200 was found. 
(TEST MODE, TEST URL WAS: http://status.example.com)", + "changes": {}, + "data": {"body": "http body", "status": 200}, + } + + with patch.dict(http.__salt__, {"http.query": mock}): + assert ( + http.query( + name="http://example.com/", + status=200, + test_url="http://status.example.com", + ) + == expected + ) + def test_query_pcre_statustype(): """ Test to perform an HTTP query with a regex used to match the status code and statefully return the result """ testurl = "salturl" - http_result = {"text": "This page returned a 201 status code", "status": "201"} - state_return = { - "changes": {}, - "comment": ( - 'Match text "This page returned" was found. Status pattern "200|201" was' - " found." - ), - "data": {"status": "201", "text": "This page returned a 201 status code"}, - "name": testurl, - "result": True, - } with patch.dict(http.__opts__, {"test": False}): + http_result = {"text": "This page returned a 201 status code", "status": "201"} mock = MagicMock(return_value=http_result) + + state_return = { + "changes": {}, + "comment": ( + 'Match text "This page returned" was found. Status pattern "200|201" was' + " found." 
+ ), + "data": {"status": "201", "text": "This page returned a 201 status code"}, + "name": testurl, + "result": True, + } + + with patch.dict(http.__salt__, {"http.query": mock}): + assert ( + http.query( + testurl, + match="This page returned", + status="200|201", + status_type="pcre", + ) + == state_return + ) + + with patch.dict(http.__opts__, {"test": False}): + http_result = {"text": "This page returned a 201 status code", "status": "201"} + mock = MagicMock(return_value=http_result) + + state_return = { + "changes": {}, + "comment": ('Status pattern "200|201" was found.'), + "data": {"status": "201", "text": "This page returned a 201 status code"}, + "name": testurl, + "result": True, + } + + with patch.dict(http.__salt__, {"http.query": mock}): + assert ( + http.query( + testurl, + status="200|201", + status_type="pcre", + ) + == state_return + ) + + http_result = {"text": "This page returned a 403 status code", "status": "403"} + mock = MagicMock(return_value=http_result) + + state_return = { + "name": "salturl", + "result": False, + "comment": 'Match text "This page returned" was found. 
Status pattern "200|201" was not found.', + "changes": {}, + "data": {"text": "This page returned a 403 status code", "status": "403"}, + } + with patch.dict(http.__salt__, {"http.query": mock}): assert ( http.query( @@ -74,23 +172,109 @@ def test_query_pcre_statustype(): ) +def test_query_pcre_matchtype(): + """ + Test to perform an HTTP query with a regex used to match the returned text and statefully return the result + """ + testurl = "salturl" + + with patch.dict(http.__opts__, {"test": False}): + http_result = {"text": "This page returned a 201 status code", "status": "201"} + mock = MagicMock(return_value=http_result) + + state_return = { + "changes": {}, + "comment": ('Match pattern "This page returned" was found.'), + "data": {"status": "201", "text": "This page returned a 201 status code"}, + "name": testurl, + "result": True, + } + + with patch.dict(http.__salt__, {"http.query": mock}): + assert ( + http.query( + testurl, + match="This page returned", + match_type="pcre", + ) + == state_return + ) + + http_result = { + "text": "This page did not return a 201 status code", + "status": "403", + } + mock = MagicMock(return_value=http_result) + + state_return = { + "changes": {}, + "comment": ('Match pattern "This page returned" was not found.'), + "data": { + "status": "403", + "text": "This page did not return a 201 status code", + }, + "name": testurl, + "result": False, + } + + with patch.dict(http.__salt__, {"http.query": mock}): + assert ( + http.query( + testurl, + match="This page returned", + match_type="pcre", + ) + == state_return + ) + + def test_query_stringstatustype(): """ Test to perform an HTTP query with a string status code and statefully return the result """ testurl = "salturl" - http_result = {"text": "This page returned a 201 status code", "status": "201"} - state_return = { - "changes": {}, - "comment": 'Match text "This page returned" was found. 
Status 201 was found.', - "data": {"status": "201", "text": "This page returned a 201 status code"}, - "name": testurl, - "result": True, - } with patch.dict(http.__opts__, {"test": False}): + http_result = {"text": "This page returned a 201 status code", "status": "201"} mock = MagicMock(return_value=http_result) + with patch.dict(http.__salt__, {"http.query": mock}): + state_return = { + "changes": {}, + "comment": 'Match text "This page returned" was found. Status 201 was found.', + "data": { + "status": "201", + "text": "This page returned a 201 status code", + }, + "name": testurl, + "result": True, + } + + assert ( + http.query( + testurl, + match="This page returned", + status="201", + status_type="string", + ) + == state_return + ) + + http_result = {"text": "This page returned a 403 status code", "status": "403"} + mock = MagicMock(return_value=http_result) + + with patch.dict(http.__salt__, {"http.query": mock}): + state_return = { + "name": "salturl", + "result": False, + "comment": 'Match text "This page returned" was found. 
Status 201 was not found.', + "changes": {}, + "data": { + "text": "This page returned a 403 status code", + "status": "403", + }, + } + assert ( http.query( testurl, @@ -102,21 +286,54 @@ def test_query_stringstatustype(): ) +def test_query_invalidstatustype(): + """ + Test to perform an HTTP query with a string status code and statefully return the result + """ + testurl = "salturl" + + with patch.dict(http.__opts__, {"test": False}): + http_result = {"text": "This page returned a 201 status code", "status": "201"} + mock = MagicMock(return_value=http_result) + + with patch.dict(http.__salt__, {"http.query": mock}): + state_return = { + "name": "salturl", + "result": None, + "comment": "", + "changes": {}, + "data": { + "text": "This page returned a 201 status code", + "status": "201", + }, + } + + assert ( + http.query( + testurl, + status="201", + status_type="invalid", + ) + == state_return + ) + + def test_query_liststatustype(): """ Test to perform an HTTP query with a list of status codes and statefully return the result """ testurl = "salturl" - http_result = {"text": "This page returned a 201 status code", "status": "201"} - state_return = { - "changes": {}, - "comment": 'Match text "This page returned" was found. Status 201 was found.', - "data": {"status": "201", "text": "This page returned a 201 status code"}, - "name": testurl, - "result": True, - } with patch.dict(http.__opts__, {"test": False}): + http_result = {"text": "This page returned a 201 status code", "status": "201"} + state_return = { + "changes": {}, + "comment": 'Match text "This page returned" was found. 
Status 201 was found.', + "data": {"status": "201", "text": "This page returned a 201 status code"}, + "name": testurl, + "result": True, + } + mock = MagicMock(return_value=http_result) with patch.dict(http.__salt__, {"http.query": mock}): assert ( @@ -129,6 +346,48 @@ def test_query_liststatustype(): == state_return ) + with patch.dict(http.__opts__, {"test": False}): + http_result = {"text": "This page returned a 201 status code", "status": "201"} + state_return = { + "changes": {}, + "comment": "Status 201 was found.", + "data": {"status": "201", "text": "This page returned a 201 status code"}, + "name": testurl, + "result": True, + } + + mock = MagicMock(return_value=http_result) + with patch.dict(http.__salt__, {"http.query": mock}): + assert ( + http.query( + testurl, + status=["200", "201"], + status_type="list", + ) + == state_return + ) + + http_result = {"text": "This page returned a 403 status code", "status": "403"} + state_return = { + "name": "salturl", + "result": False, + "comment": "Match text \"This page returned a 200\" was not found. 
Statuses ['200', '201'] were not found.", + "changes": {}, + "data": {"text": "This page returned a 403 status code", "status": "403"}, + } + + mock = MagicMock(return_value=http_result) + with patch.dict(http.__salt__, {"http.query": mock}): + assert ( + http.query( + testurl, + match="This page returned a 200", + status=["200", "201"], + status_type="list", + ) + == state_return + ) + def test_wait_for_with_interval(): """ @@ -156,3 +415,22 @@ def test_wait_for_without_interval(): with patch("time.sleep", MagicMock()) as sleep_mock: assert http.wait_for_successful_query("url", status=200) == {"result": True} sleep_mock.assert_not_called() + + query_mock = MagicMock(return_value={"result": False}) + + with patch.object(http, "query", query_mock): + with patch( + "time.time", MagicMock(side_effect=[1697564521.9640958, 1697564822.9640958]) + ): + assert http.wait_for_successful_query("url", status=200) == { + "result": False + } + + query_mock = MagicMock(side_effect=Exception()) + + with patch.object(http, "query", query_mock): + with patch( + "time.time", MagicMock(side_effect=[1697564521.9640958, 1697564822.9640958]) + ): + with pytest.raises(Exception): + http.wait_for_successful_query("url", status=200) From 652ea0e8714d89fbb0666f5a8139df7e0c1bc37e Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Mon, 30 Oct 2023 15:55:09 -0600 Subject: [PATCH 008/196] Added tests for ssh as part of code coverage increase --- salt/client/ssh/__init__.py | 1 + .../unit/client/ssh/test_ssh_classes.py | 82 +++++++++++++++++++ 2 files changed, 83 insertions(+) create mode 100644 tests/pytests/unit/client/ssh/test_ssh_classes.py diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py index 067d4575f9b..57019579487 100644 --- a/salt/client/ssh/__init__.py +++ b/salt/client/ssh/__init__.py @@ -1659,6 +1659,7 @@ ARGS = {arguments}\n'''.format( return +# pragma: no cover def lowstate_file_refs(chunks): """ Create a list of file ref objects to 
reconcile diff --git a/tests/pytests/unit/client/ssh/test_ssh_classes.py b/tests/pytests/unit/client/ssh/test_ssh_classes.py new file mode 100644 index 00000000000..cabd4ff1722 --- /dev/null +++ b/tests/pytests/unit/client/ssh/test_ssh_classes.py @@ -0,0 +1,82 @@ +import logging + +import pytest +from saltfactories.utils.tempfiles import temp_directory + +import salt.client.ssh.__init__ as dunder_ssh +from salt.exceptions import SaltClientError, SaltSystemExit +from tests.support.mock import MagicMock, patch + +pytestmark = [pytest.mark.skip_unless_on_linux(reason="Test ssh only run on Linux")] + + +log = logging.getLogger(__name__) + + +def test_salt_refs(): + data_strg_cats = "cats" + ret = dunder_ssh.salt_refs(data_strg_cats) + assert ret == [] + + data_strg_proto = "salt://test_salt_ref" + ret = dunder_ssh.salt_refs(data_strg_proto) + assert ret == [data_strg_proto] + + data_list_no_proto = ["cats"] + ret = dunder_ssh.salt_refs(data_list_no_proto) + assert ret == [] + + data_list_proto = ["salt://test_salt_ref1", "salt://test_salt_ref2", "cats"] + ret = dunder_ssh.salt_refs(data_list_proto) + assert ret == ["salt://test_salt_ref1", "salt://test_salt_ref2"] + + +def test_convert_args(): + test_args = [ + "arg1", + {"key1": "value1", "key2": "value2", "__kwarg__": "kwords"}, + "dog1", + ] + expected = ["arg1", "key1=value1", "key2=value2", "dog1"] + ret = dunder_ssh._convert_args(test_args) + assert ret == expected + + +def test_ssh_class(): + + with temp_directory() as temp_dir: + assert temp_dir.is_dir() + opts = { + "sock_dir": temp_dir, + "regen_thin": False, + "__master_opts__": {"pki_dir": "pki"}, + "selected_target_option": None, + "tgt": "*", + "tgt_type": "glob", + "fileserver_backend": ["roots"], + "cachedir": "/tmp", + "thin_extra_mods": "", + "ssh_ext_alternatives": None, + } + + with patch("salt.utils.path.which", return_value=""), pytest.raises( + SaltSystemExit + ) as err: + test_ssh = dunder_ssh.SSH(opts) + assert ( + "salt-ssh could not be run 
because it could not generate keys." + in str(err.value) + ) + + with patch("salt.utils.path.which", return_value="/usr/bin/ssh"), patch( + "os.path.isfile", return_value=False + ), patch( + "salt.client.ssh.shell.gen_key", MagicMock(side_effect=OSError()) + ), pytest.raises( + SaltClientError + ) as err: + test_ssh = dunder_ssh.SSH(opts) + assert ( + "salt-ssh could not be run because it could not generate keys." + in err.value + ) From 850f0b19650a3a23bd865edb7452f1ac4a09e7cd Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Mon, 30 Oct 2023 16:21:04 -0600 Subject: [PATCH 009/196] Moved pragma to line of function definition --- salt/client/ssh/__init__.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py index 57019579487..8601d8d1745 100644 --- a/salt/client/ssh/__init__.py +++ b/salt/client/ssh/__init__.py @@ -1659,8 +1659,7 @@ ARGS = {arguments}\n'''.format( return -# pragma: no cover -def lowstate_file_refs(chunks): +def lowstate_file_refs(chunks): # pragma: no cover """ Create a list of file ref objects to reconcile """ From 4fc11cd53ea49e973b3bce2e820a154291e14446 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Wed, 1 Nov 2023 15:25:36 -0700 Subject: [PATCH 010/196] Add regression test for issue 65400 --- tests/pytests/integration/cli/test_salt.py | 35 ++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/tests/pytests/integration/cli/test_salt.py b/tests/pytests/integration/cli/test_salt.py index 8e360682e84..7f026845843 100644 --- a/tests/pytests/integration/cli/test_salt.py +++ b/tests/pytests/integration/cli/test_salt.py @@ -24,6 +24,19 @@ pytestmark = [ ] +@pytest.fixture +def salt_minion_2(salt_master): + """ + A running salt-minion fixture + """ + factory = salt_master.salt_minion_daemon( + "minion-2", + extra_cli_arguments_after_first_start_failure=["--log-level=info"], + ) + with factory.started(start_timeout=120): + yield factory + + def test_context_retcode_salt(salt_cli, salt_minion): """ Test that a nonzero retcode set in the context dunder will cause the @@ -234,3 +247,25 @@ def test_interrupt_on_long_running_job(salt_cli, salt_master, salt_minion): assert "Exiting gracefully on Ctrl-c" in ret.stderr assert "Exception ignored in" not in ret.stderr assert "This job's jid is" in ret.stderr + + +def test_minion_65400(salt_cli, salt_minion, salt_minion_2, salt_master): + """ + Ensure correct exit status when salt CLI starts correctly. + + """ + state = f""" + custom_test_state: + test.configurable_test_state: + - name: example + - changes: True + - result: False + - comment: 65400 regression test + """ + with salt_master.state_tree.base.temp_file("test_65400.sls", state): + ret = salt_cli.run("state.sls", "test_65400", minion_tgt="*") + assert isinstance(ret.data, dict) + assert len(ret.data.keys()) == 2 + for minion_id in ret.data: + assert ret.data[minion_id] != "Error: test.configurable_test_state" + assert isinstance(ret.data[minion_id], dict) From 334c5bac2554ef17351003298967ec412ba7df64 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Fri, 13 Oct 2023 13:51:38 -0700 Subject: [PATCH 011/196] Only process events that are job returns --- salt/client/__init__.py | 62 ++++++++++++++++++++++------------------- 1 file changed, 34 insertions(+), 28 deletions(-) diff --git a/salt/client/__init__.py b/salt/client/__init__.py index 7ce8963b8f6..307ce8a0ad4 100644 --- a/salt/client/__init__.py +++ b/salt/client/__init__.py @@ -299,7 +299,7 @@ class LocalClient: tgt_type=tgt_type, timeout=timeout, listen=listen, - **kwargs + **kwargs, ) if "jid" in pub_data: @@ -365,7 +365,7 @@ class LocalClient: jid="", kwarg=None, listen=False, - **kwargs + **kwargs, ): """ Asynchronously send a command to connected minions @@ -393,7 +393,7 @@ class LocalClient: jid=jid, timeout=self._get_timeout(timeout), listen=listen, - **kwargs + **kwargs, ) except SaltClientError: # Re-raise error with specific message @@ -429,7 +429,7 @@ class LocalClient: kwarg=None, listen=True, io_loop=None, - **kwargs + **kwargs, ): """ Asynchronously send a command to connected minions @@ -458,7 +458,7 @@ class LocalClient: timeout=self._get_timeout(timeout), io_loop=io_loop, listen=listen, - **kwargs + **kwargs, ) except SaltClientError: # Re-raise error with specific message @@ -511,7 +511,7 @@ class LocalClient: cli=False, progress=False, full_return=False, - **kwargs + **kwargs, ): """ Execute a command on a random subset of the targeted systems @@ -553,7 +553,7 @@ class LocalClient: kwarg=kwarg, progress=progress, full_return=full_return, - **kwargs + **kwargs, ) def cmd_batch( @@ -565,7 +565,7 @@ class LocalClient: ret="", kwarg=None, batch="10%", - **kwargs + **kwargs, ): """ Iteratively execute a command on subsets of minions at a time @@ -641,7 +641,7 @@ class LocalClient: jid="", full_return=False, kwarg=None, - **kwargs + **kwargs, ): """ Synchronously execute a command on targeted minions @@ -759,7 +759,7 @@ class LocalClient: jid, kwarg=kwarg, listen=True, - **kwargs + **kwargs, ) if not pub_data: @@ -772,7 +772,7 @@ 
class LocalClient: self._get_timeout(timeout), tgt, tgt_type, - **kwargs + **kwargs, ): if fn_ret: @@ -797,7 +797,7 @@ class LocalClient: verbose=False, kwarg=None, progress=False, - **kwargs + **kwargs, ): """ Used by the :command:`salt` CLI. This method returns minion returns as @@ -821,7 +821,7 @@ class LocalClient: timeout, kwarg=kwarg, listen=True, - **kwargs + **kwargs, ) if not self.pub_data: yield self.pub_data @@ -835,7 +835,7 @@ class LocalClient: tgt_type, verbose, progress, - **kwargs + **kwargs, ): if not fn_ret: @@ -866,7 +866,7 @@ class LocalClient: tgt_type="glob", ret="", kwarg=None, - **kwargs + **kwargs, ): """ Yields the individual minion returns as they come in @@ -901,7 +901,7 @@ class LocalClient: timeout, kwarg=kwarg, listen=True, - **kwargs + **kwargs, ) if not pub_data: @@ -915,7 +915,7 @@ class LocalClient: timeout=self._get_timeout(timeout), tgt=tgt, tgt_type=tgt_type, - **kwargs + **kwargs, ): if not fn_ret: continue @@ -936,7 +936,7 @@ class LocalClient: kwarg=None, show_jid=False, verbose=False, - **kwargs + **kwargs, ): """ Yields the individual minion returns as they come in, or None @@ -972,7 +972,7 @@ class LocalClient: timeout, kwarg=kwarg, listen=True, - **kwargs + **kwargs, ) if not pub_data: @@ -985,7 +985,7 @@ class LocalClient: tgt=tgt, tgt_type=tgt_type, block=False, - **kwargs + **kwargs, ): if fn_ret and any([show_jid, verbose]): for minion in fn_ret: @@ -1007,7 +1007,7 @@ class LocalClient: ret="", verbose=False, kwarg=None, - **kwargs + **kwargs, ): """ Execute a salt command and return @@ -1024,7 +1024,7 @@ class LocalClient: timeout, kwarg=kwarg, listen=True, - **kwargs + **kwargs, ) if not pub_data: @@ -1046,7 +1046,7 @@ class LocalClient: tgt_type="glob", verbose=False, show_jid=False, - **kwargs + **kwargs, ): """ Starts a watcher looking at the return data for a specified JID @@ -1123,7 +1123,7 @@ class LocalClient: tgt_type="glob", expect_minions=False, block=True, - **kwargs + **kwargs, ): """ Watch the event 
system and return job data as it comes in @@ -1202,7 +1202,13 @@ class LocalClient: if "missing" in raw.get("data", {}): missing.update(raw["data"]["missing"]) continue + + # Anything below this point is expected to be a job return event. + if not raw["tag"].startswith(f"salt/job/{jid}/ret"): + log.debug("Skipping non return event: %s", raw["tag"]) + continue if "return" not in raw["data"]: + log.warning("Malformed event return: %s", raw["tag"]) continue if kwargs.get("raw", False): found.add(raw["data"]["id"]) @@ -1628,7 +1634,7 @@ class LocalClient: progress=False, show_timeout=False, show_jid=False, - **kwargs + **kwargs, ): """ Get the returns for the command line interface via the event system @@ -1658,7 +1664,7 @@ class LocalClient: expect_minions=( kwargs.pop("expect_minions", False) or verbose or show_timeout ), - **kwargs + **kwargs, ): log.debug("return event: %s", ret) return_count = return_count + 1 @@ -1851,7 +1857,7 @@ class LocalClient: jid="", timeout=5, listen=False, - **kwargs + **kwargs, ): """ Take the required arguments and publish the given command. @@ -1953,7 +1959,7 @@ class LocalClient: timeout=5, io_loop=None, listen=True, - **kwargs + **kwargs, ): """ Take the required arguments and publish the given command. From 5942fb296e961afc76b594d83a6be4663af4e0c5 Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Wed, 1 Nov 2023 15:28:10 -0700 Subject: [PATCH 012/196] Add changelog for 65400 fix --- changelog/65400.fixed.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog/65400.fixed.md diff --git a/changelog/65400.fixed.md b/changelog/65400.fixed.md new file mode 100644 index 00000000000..ae21abac9fe --- /dev/null +++ b/changelog/65400.fixed.md @@ -0,0 +1 @@ +Client only process events which tag conforms to an event return. 
From 1f13ff59b391b03d24c817d97d2ac5791068a570 Mon Sep 17 00:00:00 2001 From: twangboy Date: Wed, 20 Sep 2023 09:45:39 -0600 Subject: [PATCH 013/196] Increase coverage for win_lgpo --- salt/modules/win_lgpo.py | 13 +- .../win_lgpo/test_adv_audit_settings_state.py | 6 + .../modules/win_lgpo/test__policy_info.py | 435 ++++++++++++++++++ 3 files changed, 447 insertions(+), 7 deletions(-) create mode 100644 tests/pytests/unit/modules/win_lgpo/test__policy_info.py diff --git a/salt/modules/win_lgpo.py b/salt/modules/win_lgpo.py index e7533f62e0c..12819acec8e 100644 --- a/salt/modules/win_lgpo.py +++ b/salt/modules/win_lgpo.py @@ -59,6 +59,7 @@ import salt.utils.files import salt.utils.path import salt.utils.platform import salt.utils.stringutils +import salt.utils.win_lgpo_auditpol import salt.utils.win_lgpo_netsh from salt.exceptions import CommandExecutionError, SaltInvocationError from salt.serializers.configparser import deserialize @@ -4799,8 +4800,6 @@ class _policy_info: """ converts a list of pysid objects to string representations """ - if isinstance(val, str): - val = val.split(",") usernames = [] for _sid in val: try: @@ -4918,11 +4917,11 @@ class _policy_info: return None if value_lookup: if not isinstance(item, list): - return "Invalid Value" + return "Invalid Value: Not a list" ret_val = 0 else: if not isinstance(item, int): - return "Invalid Value" + return "Invalid Value: Not an int" ret_val = [] if "lookup" in kwargs: for k, v in kwargs["lookup"].items(): @@ -4937,7 +4936,7 @@ class _policy_info: if do_test and isinstance(k, int) and item & k == k: ret_val.append(v) else: - return "Invalid Value" + return "Invalid Value: No lookup passed" return ret_val @classmethod @@ -5392,7 +5391,7 @@ def _get_advaudit_defaults(option=None): # Get available setting names and GUIDs # This is used to get the fieldnames and GUIDs for individual policies log.debug("Loading auditpol defaults into __context__") - dump = __utils__["auditpol.get_auditpol_dump"]() + dump = 
salt.utils.win_lgpo_auditpol.get_auditpol_dump() reader = csv.DictReader(dump) audit_defaults = {"fieldnames": reader.fieldnames} for row in reader: @@ -5624,7 +5623,7 @@ def _set_advaudit_pol_data(option, value): "3": "Success and Failure", } defaults = _get_advaudit_defaults(option) - return __utils__["auditpol.set_setting"]( + return salt.utils.win_lgpo_auditpol.set_setting( name=defaults["Auditpol Name"], value=auditpol_values[value] ) diff --git a/tests/pytests/functional/states/win_lgpo/test_adv_audit_settings_state.py b/tests/pytests/functional/states/win_lgpo/test_adv_audit_settings_state.py index 5a0600bba57..63bb09eda41 100644 --- a/tests/pytests/functional/states/win_lgpo/test_adv_audit_settings_state.py +++ b/tests/pytests/functional/states/win_lgpo/test_adv_audit_settings_state.py @@ -3,6 +3,7 @@ import pytest import salt.loader import salt.modules.win_lgpo as win_lgpo_module import salt.states.win_lgpo as win_lgpo_state +import salt.utils.win_lgpo_auditpol as win_lgpo_auditpol pytestmark = [ pytest.mark.windows_whitelisted, @@ -20,11 +21,16 @@ def configure_loader_modules(minion_opts, modules): "__opts__": minion_opts, "__salt__": modules, "__utils__": utils, + "__context__": {}, }, win_lgpo_module: { "__opts__": minion_opts, "__salt__": modules, "__utils__": utils, + "__context__": {}, + }, + win_lgpo_auditpol: { + "__context__": {}, }, } diff --git a/tests/pytests/unit/modules/win_lgpo/test__policy_info.py b/tests/pytests/unit/modules/win_lgpo/test__policy_info.py new file mode 100644 index 00000000000..c06d3ad4b59 --- /dev/null +++ b/tests/pytests/unit/modules/win_lgpo/test__policy_info.py @@ -0,0 +1,435 @@ +import socket + +import pytest +import win32security +import win32security as ws + +import salt.modules.cmdmod +import salt.modules.win_file +import salt.modules.win_lgpo as win_lgpo +from salt.exceptions import CommandExecutionError +from tests.support.mock import patch + +pytestmark = [ + pytest.mark.windows_whitelisted, + 
pytest.mark.skip_unless_on_windows, + pytest.mark.slow_test, +] + + +@pytest.fixture +def configure_loader_modules(): + return { + win_lgpo: { + "__salt__": { + "cmd.run": salt.modules.cmdmod.run, + "file.file_exists": salt.modules.win_file.file_exists, + "file.remove": salt.modules.win_file.remove, + }, + }, + } + + +@pytest.fixture(scope="module") +def pol_info(): + return win_lgpo._policy_info() + + +@pytest.mark.parametrize( + "val, expected", + ( + (0, False), + (1, True), + ("", False), + ("text", True), + ([], False), + ([1, 2, 3], True), + ), +) +def test_notEmpty(pol_info, val, expected): + assert pol_info._notEmpty(val) is expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, "Not Defined"), + (0, 0), + (86400, 1), + ), +) +def test_seconds_to_days(pol_info, val, expected): + assert pol_info._seconds_to_days(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, "Not Defined"), + (0, 0), + (1, 86400), + ), +) +def test_days_to_seconds(pol_info, val, expected): + assert pol_info._days_to_seconds(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, "Not Defined"), + (0, 0), + (60, 1), + ), +) +def test_seconds_to_minutes(pol_info, val, expected): + assert pol_info._seconds_to_minutes(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, "Not Defined"), + (0, 0), + (1, 60), + ), +) +def test_minutes_to_seconds(pol_info, val, expected): + assert pol_info._minutes_to_seconds(val) == expected + + +def test_strip_quotes(pol_info): + assert pol_info._strip_quotes('"spongebob"') == "spongebob" + + +def test_add_quotes(pol_info): + assert pol_info._add_quotes("squarepants") == '"squarepants"' + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, "Not Defined"), + (chr(0), "Disabled"), + (chr(1), "Enabled"), + (chr(2), "Invalid Value: {!r}".format(chr(2))), + ("patrick", "Invalid Value"), + ), +) +def test_binary_enable_zero_disable_one_conversion(pol_info, val, 
expected): + assert pol_info._binary_enable_zero_disable_one_conversion(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, None), + ("Disabled", chr(0)), + ("Enabled", chr(1)), + ("Junk", None), + ), +) +def test_binary_enable_zero_disable_one_reverse_conversion(pol_info, val, expected): + assert pol_info._binary_enable_zero_disable_one_reverse_conversion(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, "Not Defined"), + ("0", "Administrators"), + (0, "Administrators"), + ("", "Administrators"), + ("1", "Administrators and Power Users"), + (1, "Administrators and Power Users"), + ("2", "Administrators and Interactive Users"), + (2, "Administrators and Interactive Users"), + (3, "Not Defined"), + ), +) +def test_dasd_conversion(pol_info, val, expected): + assert pol_info._dasd_conversion(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, "Not Defined"), + ("Administrators", "0"), + ("Administrators and Power Users", "1"), + ("Administrators and Interactive Users", "2"), + ("Not Defined", "9999"), + ("Plankton", "Invalid Value"), + ), +) +def test_dasd_reverse_conversion(pol_info, val, expected): + assert pol_info._dasd_reverse_conversion(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + ("Not Defined", True), + (None, False), + (1, True), + (3, False), + ("spongebob", False), + ), +) +def test_in_range_inclusive(pol_info, val, expected): + assert pol_info._in_range_inclusive(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, "Not Defined"), + ("3,1,2", "Not Defined"), + ("3,0", "Silently Succeed"), + ("3,1", "Warn but allow installation"), + ("3,2", "Do not allow installation"), + ("3,Not Defined", "Not Defined"), + ("3,spongebob", "Invalid Value"), + ), +) +def test_driver_signing_reg_conversion(pol_info, val, expected): + assert pol_info._driver_signing_reg_conversion(val) == expected + + +@pytest.mark.parametrize( + "val, 
expected", + ( + (None, "Not Defined"), + ("Silently Succeed", "3,0"), + ("Warn but allow installation", f"3,{chr(1)}"), + ("Do not allow installation", f"3,{chr(2)}"), + ("spongebob", "Invalid Value"), + ), +) +def test_driver_signing_reg_reverse_conversion(pol_info, val, expected): + assert pol_info._driver_signing_reg_reverse_conversion(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (ws.ConvertStringSidToSid("S-1-5-0"), ["S-1-5-0"]), + (ws.ConvertStringSidToSid("S-1-1-0"), ["Everyone"]), + ( + ws.LookupAccountName("", "Administrator")[0], + [f"{socket.gethostname()}\\Administrator"], + ), + ), +) +def test_sidConversion(pol_info, val, expected): + assert pol_info._sidConversion([val]) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, None), + ("", ""), + ), +) +def test_usernamesToSidObjects_empty_value(pol_info, val, expected): + assert pol_info._usernamesToSidObjects(val) == expected + + +def test_usernamesToSidObjects_string_list(pol_info): + val = "Administrator,Guest" + admin_sid = win32security.LookupAccountName("", "Administrator")[0] + guest_sid = win32security.LookupAccountName("", "Guest")[0] + expected = [admin_sid, guest_sid] + assert pol_info._usernamesToSidObjects(val) == expected + + +def test_usernamesToSidObjects_string_list_error(pol_info): + val = "spongebob,squarepants" + with pytest.raises(CommandExecutionError): + pol_info._usernamesToSidObjects(val) + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, "Not Configured"), + ("None", "Not Configured"), + ("true", "Run Windows PowerShell scripts first"), + ("false", "Run Windows PowerShell scripts last"), + ("spongebob", "Invalid Value"), + ), +) +def test_powershell_script_order_conversion(pol_info, val, expected): + assert pol_info._powershell_script_order_conversion(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + ("Not Configured", None), + ("Run Windows PowerShell scripts first", "true"), + ("Run Windows 
PowerShell scripts last", "false"), + ("spongebob", "Invalid Value"), + ), +) +def test_powershell_script_order_reverse_conversion(pol_info, val, expected): + assert pol_info._powershell_script_order_reverse_conversion(val) == expected + + +def test_dict_lookup(pol_info): + lookup = { + "spongebob": "squarepants", + "patrick": "squidward", + "plankton": "mr.crabs", + } + assert pol_info._dict_lookup("spongebob", lookup=lookup) == "squarepants" + assert ( + pol_info._dict_lookup("squarepants", lookup=lookup, value_lookup=True) + == "spongebob" + ) + assert pol_info._dict_lookup("homer", lookup=lookup) == "Invalid Value" + assert ( + pol_info._dict_lookup("homer", lookup=lookup, value_lookup=True) + == "Invalid Value" + ) + assert pol_info._dict_lookup("homer") == "Invalid Value" + + +def test_dict_lookup_bitwise_add(pol_info): + lookup = { + 0: "spongebob", + 1: "squarepants", + 2: "patrick", + } + assert pol_info._dict_lookup_bitwise_add("Not Defined") is None + assert ( + pol_info._dict_lookup_bitwise_add("not a list", value_lookup=True) + == "Invalid Value: Not a list" + ) + assert ( + pol_info._dict_lookup_bitwise_add([], value_lookup=True) + == "Invalid Value: No lookup passed" + ) + assert ( + pol_info._dict_lookup_bitwise_add("not an int") == "Invalid Value: Not an int" + ) + assert pol_info._dict_lookup_bitwise_add(0, lookup=lookup) == [] + assert ( + pol_info._dict_lookup_bitwise_add( + ["spongebob", "squarepants"], lookup=lookup, value_lookup=True + ) + == 1 + ) + assert pol_info._dict_lookup_bitwise_add(1, lookup=lookup) == ["squarepants"] + assert pol_info._dict_lookup_bitwise_add(0, lookup=lookup) == [] + assert pol_info._dict_lookup_bitwise_add(0, lookup=lookup, test_zero=True) == [ + "spongebob" + ] + + +@pytest.mark.parametrize( + "val, expected", + ( + (["list", "of", "items"], ["list", "of", "items"]), + ("Not Defined", None), + ("list,of,items", ["list", "of", "items"]), + (7, "Invalid Value"), + ), +) +def 
test_multi_string_put_transform(pol_info, val, expected): + assert pol_info._multi_string_put_transform(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (["list", "of", "items"], ["list", "of", "items"]), + (None, "Not Defined"), + ("list,of,items", "Invalid Value"), + (7, "Invalid Value"), + ), +) +def test_multi_string_get_transform(pol_info, val, expected): + assert pol_info._multi_string_get_transform(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + ("String Item", "String Item"), + ("Not Defined", None), + (7, None), + ), +) +def test_string_put_transform(pol_info, val, expected): + assert pol_info._string_put_transform(val) == expected + + +def test__virtual__(pol_info): + assert win_lgpo.__virtual__() == "lgpo" + with patch("salt.utils.platform.is_windows", return_value=False): + assert win_lgpo.__virtual__() == ( + False, + "win_lgpo: Not a Windows System", + ) + + with patch.object(win_lgpo, "HAS_WINDOWS_MODULES", False): + assert win_lgpo.__virtual__() == ( + False, + "win_lgpo: Required modules failed to load", + ) + + +def test_get_advaudit_defaults(): + with patch.dict(win_lgpo.__context__, {}): + assert "Machine Name" in win_lgpo._get_advaudit_defaults("fieldnames") + + audit_defaults = {"junk": "defaults"} + with patch.dict(win_lgpo.__context__, {"lgpo.audit_defaults": audit_defaults}): + assert win_lgpo._get_advaudit_defaults() == audit_defaults + + +def test_get_netsh_value(): + with patch.dict(win_lgpo.__context__, {}): + assert win_lgpo._get_netsh_value("domain", "State") == "ON" + + context = { + "lgpo.netsh_data": { + "domain": { + "State": "ONContext", + "Inbound": "NotConfigured", + "Outbound": "NotConfigured", + "LocalFirewallRules": "NotConfigured", + }, + }, + } + with patch.dict(win_lgpo.__context__, context): + assert win_lgpo._get_netsh_value("domain", "State") == "ONContext" + + +def test_get_secedit_data(tmp_path): + with patch.dict(win_lgpo.__opts__, {"cachedir": str(tmp_path)}): + 
assert "[System Access]\r\n" in win_lgpo._get_secedit_data() + + +def test_get_secedit_value(tmp_path): + with patch.dict(win_lgpo.__opts__, {"cachedir": str(tmp_path)}): + assert win_lgpo._get_secedit_value("Unicode") == "yes" + assert win_lgpo._get_secedit_value("JunkKey") == "Not Defined" + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, b"\x00\x00"), + ("spongebob", b"s\x00p\x00o\x00n\x00g\x00e\x00b\x00o\x00b\x00\x00\x00"), + ), +) +def test_encode_string(val, expected): + assert win_lgpo._encode_string(val) == expected + + +def test_encode_string_error(): + with pytest.raises(TypeError): + win_lgpo._encode_string(1) From 7cf72073d02dedf932571df2193ddb503eed3ec7 Mon Sep 17 00:00:00 2001 From: twangboy Date: Wed, 20 Sep 2023 17:19:28 -0600 Subject: [PATCH 014/196] Put __utils__ back in lgpo module --- salt/modules/win_lgpo.py | 4 ++-- .../states/win_lgpo/test_adv_audit_settings_state.py | 4 ---- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/salt/modules/win_lgpo.py b/salt/modules/win_lgpo.py index 12819acec8e..d29ab64ae53 100644 --- a/salt/modules/win_lgpo.py +++ b/salt/modules/win_lgpo.py @@ -5391,7 +5391,7 @@ def _get_advaudit_defaults(option=None): # Get available setting names and GUIDs # This is used to get the fieldnames and GUIDs for individual policies log.debug("Loading auditpol defaults into __context__") - dump = salt.utils.win_lgpo_auditpol.get_auditpol_dump() + dump = __utils__["auditpol.get_auditpol_dump"]() reader = csv.DictReader(dump) audit_defaults = {"fieldnames": reader.fieldnames} for row in reader: @@ -5623,7 +5623,7 @@ def _set_advaudit_pol_data(option, value): "3": "Success and Failure", } defaults = _get_advaudit_defaults(option) - return salt.utils.win_lgpo_auditpol.set_setting( + return __utils__["auditpol.set_setting"]( name=defaults["Auditpol Name"], value=auditpol_values[value] ) diff --git a/tests/pytests/functional/states/win_lgpo/test_adv_audit_settings_state.py 
b/tests/pytests/functional/states/win_lgpo/test_adv_audit_settings_state.py index 63bb09eda41..70b1638a849 100644 --- a/tests/pytests/functional/states/win_lgpo/test_adv_audit_settings_state.py +++ b/tests/pytests/functional/states/win_lgpo/test_adv_audit_settings_state.py @@ -3,7 +3,6 @@ import pytest import salt.loader import salt.modules.win_lgpo as win_lgpo_module import salt.states.win_lgpo as win_lgpo_state -import salt.utils.win_lgpo_auditpol as win_lgpo_auditpol pytestmark = [ pytest.mark.windows_whitelisted, @@ -29,9 +28,6 @@ def configure_loader_modules(minion_opts, modules): "__utils__": utils, "__context__": {}, }, - win_lgpo_auditpol: { - "__context__": {}, - }, } From 638c7744fd6a1eb03cafbd10c57a153bdcf1448d Mon Sep 17 00:00:00 2001 From: twangboy Date: Tue, 10 Oct 2023 17:15:42 -0600 Subject: [PATCH 015/196] Fix tests --- .../modules/win_lgpo/test__policy_info.py | 30 ++++++++++++++----- 1 file changed, 23 insertions(+), 7 deletions(-) diff --git a/tests/pytests/unit/modules/win_lgpo/test__policy_info.py b/tests/pytests/unit/modules/win_lgpo/test__policy_info.py index c06d3ad4b59..7fbc586456c 100644 --- a/tests/pytests/unit/modules/win_lgpo/test__policy_info.py +++ b/tests/pytests/unit/modules/win_lgpo/test__policy_info.py @@ -1,19 +1,26 @@ import socket import pytest -import win32security -import win32security as ws import salt.modules.cmdmod import salt.modules.win_file import salt.modules.win_lgpo as win_lgpo +import salt.utils.win_lgpo_auditpol as ap from salt.exceptions import CommandExecutionError from tests.support.mock import patch +try: + import win32security as ws + + HAS_WIN32 = True +except ImportError: + HAS_WIN32 = False + pytestmark = [ pytest.mark.windows_whitelisted, pytest.mark.skip_unless_on_windows, pytest.mark.slow_test, + pytest.mark.skipif(not HAS_WIN32, reason="Failed to import win32security"), ] @@ -238,8 +245,8 @@ def test_usernamesToSidObjects_empty_value(pol_info, val, expected): def 
test_usernamesToSidObjects_string_list(pol_info): val = "Administrator,Guest" - admin_sid = win32security.LookupAccountName("", "Administrator")[0] - guest_sid = win32security.LookupAccountName("", "Guest")[0] + admin_sid = ws.LookupAccountName("", "Administrator")[0] + guest_sid = ws.LookupAccountName("", "Guest")[0] expected = [admin_sid, guest_sid] assert pol_info._usernamesToSidObjects(val) == expected @@ -382,17 +389,26 @@ def test__virtual__(pol_info): def test_get_advaudit_defaults(): - with patch.dict(win_lgpo.__context__, {}): + patch_context = patch.dict(win_lgpo.__context__, {}) + patch_salt = patch.dict( + win_lgpo.__utils__, {"auditpol.get_auditpol_dump": ap.get_auditpol_dump} + ) + with patch_context, patch_salt: assert "Machine Name" in win_lgpo._get_advaudit_defaults("fieldnames") audit_defaults = {"junk": "defaults"} - with patch.dict(win_lgpo.__context__, {"lgpo.audit_defaults": audit_defaults}): + patch_context = patch.dict( + win_lgpo.__context__, {"lgpo.audit_defaults": audit_defaults} + ) + with patch_context, patch_salt: assert win_lgpo._get_advaudit_defaults() == audit_defaults def test_get_netsh_value(): + with patch.dict(win_lgpo.__context__, {"lgpo.netsh_data": {"domain": {}}}): + win_lgpo._set_netsh_value("domain", "state", "State", "NotConfigured") with patch.dict(win_lgpo.__context__, {}): - assert win_lgpo._get_netsh_value("domain", "State") == "ON" + assert win_lgpo._get_netsh_value("domain", "State") == "NotConfigured" context = { "lgpo.netsh_data": { From 5e8bb1f990b132f96f67cd1fa75f6bf3bac2769e Mon Sep 17 00:00:00 2001 From: twangboy Date: Wed, 20 Sep 2023 09:45:39 -0600 Subject: [PATCH 016/196] Increase coverage for win_lgpo --- salt/modules/win_lgpo.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/salt/modules/win_lgpo.py b/salt/modules/win_lgpo.py index d29ab64ae53..12819acec8e 100644 --- a/salt/modules/win_lgpo.py +++ b/salt/modules/win_lgpo.py @@ -5391,7 +5391,7 @@ def 
_get_advaudit_defaults(option=None): # Get available setting names and GUIDs # This is used to get the fieldnames and GUIDs for individual policies log.debug("Loading auditpol defaults into __context__") - dump = __utils__["auditpol.get_auditpol_dump"]() + dump = salt.utils.win_lgpo_auditpol.get_auditpol_dump() reader = csv.DictReader(dump) audit_defaults = {"fieldnames": reader.fieldnames} for row in reader: @@ -5623,7 +5623,7 @@ def _set_advaudit_pol_data(option, value): "3": "Success and Failure", } defaults = _get_advaudit_defaults(option) - return __utils__["auditpol.set_setting"]( + return salt.utils.win_lgpo_auditpol.set_setting( name=defaults["Auditpol Name"], value=auditpol_values[value] ) From 4a2aec777a799203a3dc1fc273accc4994c3cbae Mon Sep 17 00:00:00 2001 From: twangboy Date: Wed, 20 Sep 2023 17:19:28 -0600 Subject: [PATCH 017/196] Put __utils__ back in lgpo module --- salt/modules/win_lgpo.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/salt/modules/win_lgpo.py b/salt/modules/win_lgpo.py index 12819acec8e..324d49bcba3 100644 --- a/salt/modules/win_lgpo.py +++ b/salt/modules/win_lgpo.py @@ -59,7 +59,6 @@ import salt.utils.files import salt.utils.path import salt.utils.platform import salt.utils.stringutils -import salt.utils.win_lgpo_auditpol import salt.utils.win_lgpo_netsh from salt.exceptions import CommandExecutionError, SaltInvocationError from salt.serializers.configparser import deserialize @@ -5391,7 +5390,7 @@ def _get_advaudit_defaults(option=None): # Get available setting names and GUIDs # This is used to get the fieldnames and GUIDs for individual policies log.debug("Loading auditpol defaults into __context__") - dump = salt.utils.win_lgpo_auditpol.get_auditpol_dump() + dump = __utils__["auditpol.get_auditpol_dump"]() reader = csv.DictReader(dump) audit_defaults = {"fieldnames": reader.fieldnames} for row in reader: @@ -5623,7 +5622,7 @@ def _set_advaudit_pol_data(option, value): "3": "Success and Failure", } 
defaults = _get_advaudit_defaults(option) - return salt.utils.win_lgpo_auditpol.set_setting( + return __utils__["auditpol.set_setting"]( name=defaults["Auditpol Name"], value=auditpol_values[value] ) From b903999af486f42efe980c1283a90317bfec2412 Mon Sep 17 00:00:00 2001 From: Shane Lee Date: Wed, 1 Nov 2023 08:57:18 -0600 Subject: [PATCH 018/196] Don't use parametrize on tests that should be skipped --- .../modules/win_lgpo/test__policy_info.py | 31 ++++++++++++------- 1 file changed, 19 insertions(+), 12 deletions(-) diff --git a/tests/pytests/unit/modules/win_lgpo/test__policy_info.py b/tests/pytests/unit/modules/win_lgpo/test__policy_info.py index 7fbc586456c..5626d1d3f79 100644 --- a/tests/pytests/unit/modules/win_lgpo/test__policy_info.py +++ b/tests/pytests/unit/modules/win_lgpo/test__policy_info.py @@ -217,18 +217,25 @@ def test_driver_signing_reg_reverse_conversion(pol_info, val, expected): assert pol_info._driver_signing_reg_reverse_conversion(val) == expected -@pytest.mark.parametrize( - "val, expected", - ( - (ws.ConvertStringSidToSid("S-1-5-0"), ["S-1-5-0"]), - (ws.ConvertStringSidToSid("S-1-1-0"), ["Everyone"]), - ( - ws.LookupAccountName("", "Administrator")[0], - [f"{socket.gethostname()}\\Administrator"], - ), - ), -) -def test_sidConversion(pol_info, val, expected): +# For the next 3 tests we can't use the parametrized decorator because the +# decorator is evaluated before the imports happen, so the HAS_WIN32 is ignored +# and the decorator tries to evaluate the win32security library on systems +# without pyWin32 +def test_sidConversion_no_conversion(pol_info): + val = ws.ConvertStringSidToSid("S-1-5-0") + expected = ["S-1-5-0"] + assert pol_info._sidConversion([val]) == expected + + +def test_sidConversion_everyone(pol_info): + val = ws.ConvertStringSidToSid("S-1-1-0") + expected = ["Everyone"] + assert pol_info._sidConversion([val]) == expected + + +def test_sidConversion_administrator(pol_info): + val = ws.LookupAccountName("", 
"Administrator")[0] + expected = [f"{socket.gethostname()}\\Administrator"] assert pol_info._sidConversion([val]) == expected From b2525aa042f77e9d62178ed0d4916f41656e0d70 Mon Sep 17 00:00:00 2001 From: twangboy Date: Mon, 9 Oct 2023 16:18:51 -0600 Subject: [PATCH 019/196] Add tests for yumpkg --- salt/modules/yumpkg.py | 95 +- tests/pytests/unit/modules/test_yumpkg.py | 1117 ++++++++++++++++++++- 2 files changed, 1144 insertions(+), 68 deletions(-) diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py index 192ea61635a..8b874c5ee08 100644 --- a/salt/modules/yumpkg.py +++ b/salt/modules/yumpkg.py @@ -14,6 +14,7 @@ Support for YUM/DNF .. versionadded:: 3003 Support for ``tdnf`` on Photon OS. + """ @@ -43,13 +44,6 @@ import salt.utils.versions from salt.exceptions import CommandExecutionError, MinionError, SaltInvocationError from salt.utils.versions import LooseVersion -try: - import yum - - HAS_YUM = True -except ImportError: - HAS_YUM = False - log = logging.getLogger(__name__) __HOLD_PATTERN = r"[\w+]+(?:[.-][^-]+)*" @@ -353,67 +347,48 @@ def _get_yum_config(strict_parser=True): This is currently only used to get the reposdir settings, but could be used for other things if needed. - If the yum python library is available, use that, which will give us all of - the options, including all of the defaults not specified in the yum config. - Additionally, they will all be of the correct object type. - - If the yum library is not available, we try to read the yum.conf - directly ourselves with a minimal set of "defaults". + We try to read the yum.conf directly ourselves with a minimal set of + "defaults". 
""" # in case of any non-fatal failures, these defaults will be used conf = { "reposdir": ["/etc/yum/repos.d", "/etc/yum.repos.d"], } - if HAS_YUM: - try: - yb = yum.YumBase() - yb.preconf.init_plugins = False - for name, value in yb.conf.items(): - conf[name] = value - except (AttributeError, yum.Errors.ConfigError) as exc: - raise CommandExecutionError("Could not query yum config: {}".format(exc)) - except yum.Errors.YumBaseError as yum_base_error: - raise CommandExecutionError( - "Error accessing yum or rpmdb: {}".format(yum_base_error) - ) - else: - # fall back to parsing the config ourselves - # Look for the config the same order yum does - fn = None - paths = ( - "/etc/yum/yum.conf", - "/etc/yum.conf", - "/etc/dnf/dnf.conf", - "/etc/tdnf/tdnf.conf", + # fall back to parsing the config ourselves + # Look for the config the same order yum does + fn = None + paths = ( + "/etc/yum/yum.conf", + "/etc/yum.conf", + "/etc/dnf/dnf.conf", + "/etc/tdnf/tdnf.conf", + ) + for path in paths: + if os.path.exists(path): + fn = path + break + + if not fn: + raise CommandExecutionError( + "No suitable yum config file found in: {}".format(paths) ) - for path in paths: - if os.path.exists(path): - fn = path - break - if not fn: - raise CommandExecutionError( - "No suitable yum config file found in: {}".format(paths) - ) + cp = configparser.ConfigParser(strict=strict_parser) + try: + cp.read(fn) + except OSError as exc: + raise CommandExecutionError("Unable to read from {}: {}".format(fn, exc)) - cp = configparser.ConfigParser(strict=strict_parser) - try: - cp.read(fn) - except OSError as exc: - raise CommandExecutionError("Unable to read from {}: {}".format(fn, exc)) - - if cp.has_section("main"): - for opt in cp.options("main"): - if opt in ("reposdir", "commands", "excludes"): - # these options are expected to be lists - conf[opt] = [x.strip() for x in cp.get("main", opt).split(",")] - else: - conf[opt] = cp.get("main", opt) - else: - log.warning( - "Could not find [main] 
section in %s, using internal defaults", fn - ) + if cp.has_section("main"): + for opt in cp.options("main"): + if opt in ("reposdir", "commands", "excludes"): + # these options are expected to be lists + conf[opt] = [x.strip() for x in cp.get("main", opt).split(",")] + else: + conf[opt] = cp.get("main", opt) + else: + log.warning("Could not find [main] section in %s, using internal defaults", fn) return conf @@ -2861,7 +2836,7 @@ def group_install(name, skip=(), include=(), **kwargs): if not pkgs: return {} - return install(pkgs=pkgs, **kwargs) + return install(pkgs=list(set(pkgs)), **kwargs) groupinstall = salt.utils.functools.alias_function(group_install, "groupinstall") diff --git a/tests/pytests/unit/modules/test_yumpkg.py b/tests/pytests/unit/modules/test_yumpkg.py index d2f3a2869ec..31076f2cd02 100644 --- a/tests/pytests/unit/modules/test_yumpkg.py +++ b/tests/pytests/unit/modules/test_yumpkg.py @@ -1,18 +1,42 @@ +import configparser import logging import os +from functools import wraps import pytest import salt.modules.cmdmod as cmdmod import salt.modules.pkg_resource as pkg_resource import salt.modules.rpm_lowpkg as rpm -import salt.modules.yumpkg as yumpkg import salt.utils.platform -from salt.exceptions import CommandExecutionError, SaltInvocationError +from salt.exceptions import CommandExecutionError, MinionError, SaltInvocationError from tests.support.mock import MagicMock, Mock, call, patch log = logging.getLogger(__name__) +pytestmark = [ + pytest.mark.skip_unless_on_linux, +] + + +# https://dev.to/stack-labs/how-to-mock-a-decorator-in-python-55jc +def mock_decorator(*args, **kwargs): + """Decorate by doing nothing.""" + + def decorator(f): + @wraps(f) + def decorated_function(*args, **kwargs): + return f(*args, **kwargs) + + return decorated_function + + return decorator + + +patch("salt.utils.decorators.path.which", mock_decorator).start() + +import salt.modules.yumpkg as yumpkg + @pytest.fixture def configure_loader_modules(): @@ -28,7 +52,9 @@ 
def configure_loader_modules(): "os_family": "RedHat", "osmajorrelease": 7, }, - "__salt__": {"pkg_resource.add_pkg": _add_data}, + "__salt__": { + "pkg_resource.add_pkg": _add_data, + }, }, pkg_resource: {}, } @@ -36,7 +62,6 @@ def configure_loader_modules(): @pytest.fixture(scope="module") def list_repos_var(): - return { "base": { "file": "/etc/yum.repos.d/CentOS-Base.repo", @@ -93,6 +118,71 @@ def yum_and_dnf(request): yield request.param["cmd"] +def test__virtual_normal(): + assert yumpkg.__virtual__() == "pkg" + + +def test__virtual_yumpkg_api(): + with patch.dict(yumpkg.__opts__, {"yum_provider": "yumpkg_api"}): + assert yumpkg.__virtual__() == ( + False, + "Module yumpkg: yumpkg_api provider not available", + ) + + +def test__virtual_exception(): + with patch.dict(yumpkg.__grains__, {"os": 1}): + assert yumpkg.__virtual__() == ( + False, + "Module yumpkg: no yum based system detected", + ) + + +def test__virtual_no_yum(): + with patch.object(yumpkg, "_yum", MagicMock(return_value=None)): + assert yumpkg.__virtual__() == (False, "DNF nor YUM found") + + +def test__virtual_non_yum_system(): + with patch.dict(yumpkg.__grains__, {"os_family": "ubuntu"}): + assert yumpkg.__virtual__() == ( + False, + "Module yumpkg: no yum based system detected", + ) + + +def test_strip_headers(): + output = os.linesep.join(["spongebob", "squarepants", "squidward"]) + args = ("spongebob", "squarepants") + assert yumpkg._strip_headers(output, *args) == "squidward\n" + + +def test_get_copr_repo(): + result = yumpkg._get_copr_repo("copr:spongebob/squarepants") + assert result == "copr:copr.fedorainfracloud.org:spongebob:squarepants" + + +def test_get_hold(): + line = "vim-enhanced-2:7.4.827-1.fc22" + with patch.object(yumpkg, "_yum", MagicMock(return_value="dnf")): + assert yumpkg._get_hold(line) == "vim-enhanced-2:7.4.827-1.fc22" + + +def test_get_options(): + result = yumpkg._get_options( + repo="spongebob", + disableexcludes="squarepants", + __dunder_keyword="this is skipped", + 
stringvalue="string_value", + boolvalue=True, + get_extra_options=True, + ) + assert "--enablerepo=spongebob" in result + assert "--disableexcludes=squarepants" in result + assert "--stringvalue=string_value" in result + assert "--boolvalue" in result + + def test_list_pkgs(): """ Test packages listing. @@ -468,6 +558,16 @@ def test_list_patches(): assert _patch in patches["my-fake-patch-installed-1234"]["summary"] +def test_list_patches_refresh(): + expected = ["spongebob"] + mock_get_patches = MagicMock(return_value=expected) + patch_get_patches = patch.object(yumpkg, "_get_patches", mock_get_patches) + patch_refresh_db = patch.object(yumpkg, "refresh_db", MagicMock()) + with patch_refresh_db, patch_get_patches: + result = yumpkg.list_patches(refresh=True) + assert result == expected + + def test_latest_version_with_options(): with patch.object(yumpkg, "list_pkgs", MagicMock(return_value={})): @@ -559,6 +659,66 @@ def test_latest_version_with_options(): ) +def test_list_repo_pkgs_attribute_error(): + patch_get_options = patch.object(yumpkg, "_get_options", MagicMock()) + mock_run = MagicMock(return_value="3.4.5") + patch_run = patch.dict(yumpkg.__salt__, {"cmd.run": mock_run}) + mock_yum = MagicMock(return_value={"retcode": 0, "stdout": ""}) + patch_yum = patch.object(yumpkg, "_call_yum", mock_yum) + with patch_get_options, patch_run, patch_yum: + assert yumpkg.list_repo_pkgs(fromrepo=1, disablerepo=2, enablerepo=3) == {} + + +def test_list_repo_pkgs_byrepo(list_repos_var): + patch_get_options = patch.object(yumpkg, "_get_options", MagicMock()) + stdout_installed = """\ +Installed Packages +spongebob.x86_64 1.1.el9_1 @bikini-bottom-rpms +squarepants.x86_64 1.2.el9_1 @bikini-bottom-rpms +patrick.noarch 1.3.el9_1 @rock-bottom-rpms +squidward.x86_64 1.4.el9_1 @rock-bottom-rpms""" + stdout_available = """\ +Available Packages +plankton.noarch 2.1-1.el9_2 bikini-bottom-rpms +dennis.x86_64 2.2-2.el9 bikini-bottom-rpms +man-ray.x86_64 2.3-1.el9_2 bikini-bottom-rpms 
+doodlebob.x86_64 2.4-1.el9_2 bikini-bottom-rpms""" + run_all_side_effect = ( + {"retcode": 0, "stdout": stdout_installed}, + {"retcode": 0, "stdout": stdout_available}, + ) + patch_salt = patch.dict( + yumpkg.__salt__, + { + "cmd.run": MagicMock(return_value="3.4.5"), + "cmd.run_all": MagicMock(side_effect=run_all_side_effect), + "config.get": MagicMock(return_value=False), + }, + ) + patch_list_repos = patch.object( + yumpkg, + "list_repos", + MagicMock(return_value=list_repos_var), + ) + with patch_get_options, patch_salt, patch_list_repos: + expected = { + "bikini-bottom-rpms": { + "dennis": ["2.2-2.el9"], + "doodlebob": ["2.4-1.el9_2"], + "man-ray": ["2.3-1.el9_2"], + "plankton": ["2.1-1.el9_2"], + "spongebob": ["1.1.el9_1"], + "squarepants": ["1.2.el9_1"], + }, + "rock-bottom-rpms": { + "patrick": ["1.3.el9_1"], + "squidward": ["1.4.el9_1"], + }, + } + result = yumpkg.list_repo_pkgs(byrepo=True) + assert result == expected + + def test_list_repo_pkgs_with_options(list_repos_var): """ Test list_repo_pkgs with and without fromrepo @@ -762,6 +922,87 @@ def test_list_upgrades_dnf(): ) +def test_list_upgrades_refresh(): + mock_call_yum = MagicMock(return_value={"retcode": 0, "stdout": ""}) + with patch.object(yumpkg, "refresh_db", MagicMock()): + with patch.object(yumpkg, "_call_yum", mock_call_yum): + assert yumpkg.list_upgrades(refresh=True) == {} + + +def test_list_upgrades_error(): + mock_return = {"retcode": 1, "Error:": "Error"} + mock_call_yum = MagicMock(return_value=mock_return) + with patch.object(yumpkg, "_call_yum", mock_call_yum): + assert yumpkg.list_upgrades(refresh=False) == {} + + +def test_list_downloaded(): + mock_walk = MagicMock( + return_value=[ + ( + "/var/cache/yum", + [], + ["pkg1-3.1-16.1.x86_64.rpm", "pkg2-1.2-13.2.x86_64.rpm"], + ) + ] + ) + mock_pkginfo = MagicMock( + side_effect=[ + { + "name": "pkg1", + "version": "3.1", + }, + { + "name": "pkg2", + "version": "1.2", + }, + ] + ) + mock_getctime = 
MagicMock(return_value=1696536082.861206) + mock_getsize = MagicMock(return_value=75701688) + with patch.dict(yumpkg.__salt__, {"lowpkg.bin_pkg_info": mock_pkginfo}), patch( + "salt.utils.path.os_walk", mock_walk + ), patch("os.path.getctime", mock_getctime), patch("os.path.getsize", mock_getsize): + result = yumpkg.list_downloaded() + expected = { + "pkg1": { + "3.1": { + "creation_date_time": "2023-10-05T14:01:22", + "creation_date_time_t": 1696536082, + "path": "/var/cache/yum/pkg1-3.1-16.1.x86_64.rpm", + "size": 75701688, + }, + }, + "pkg2": { + "1.2": { + "creation_date_time": "2023-10-05T14:01:22", + "creation_date_time_t": 1696536082, + "path": "/var/cache/yum/pkg2-1.2-13.2.x86_64.rpm", + "size": 75701688, + }, + }, + } + assert ( + result["pkg1"]["3.1"]["creation_date_time_t"] + == expected["pkg1"]["3.1"]["creation_date_time_t"] + ) + assert result["pkg1"]["3.1"]["path"] == expected["pkg1"]["3.1"]["path"] + assert result["pkg1"]["3.1"]["size"] == expected["pkg1"]["3.1"]["size"] + assert ( + result["pkg2"]["1.2"]["creation_date_time_t"] + == expected["pkg2"]["1.2"]["creation_date_time_t"] + ) + assert result["pkg2"]["1.2"]["path"] == expected["pkg2"]["1.2"]["path"] + assert result["pkg2"]["1.2"]["size"] == expected["pkg2"]["1.2"]["size"] + + +def test_list_installed_patches(): + mock_get_patches = MagicMock(return_value="spongebob") + with patch.object(yumpkg, "_get_patches", mock_get_patches): + result = yumpkg.list_installed_patches() + assert result == "spongebob" + + def test_list_upgrades_yum(): """ The subcommand should be "updates" with yum @@ -815,6 +1056,202 @@ def test_list_upgrades_yum(): ) +def test_modified(): + mock = MagicMock() + with patch.dict(yumpkg.__salt__, {"lowpkg.modified": mock}): + yumpkg.modified("spongebob", "squarepants") + mock.assert_called_once_with("spongebob", "squarepants") + + +def test_clean_metadata_with_options(): + + with patch("salt.utils.pkg.clear_rtag", Mock()): + + # With check_update=True we will do a cmd.run to 
run the clean_cmd, and + # then a separate cmd.retcode to check for updates. + + # with fromrepo + yum_call = MagicMock() + with patch.dict( + yumpkg.__salt__, + {"cmd.run_all": yum_call, "config.get": MagicMock(return_value=False)}, + ): + yumpkg.clean_metadata(check_update=True, fromrepo="good", branch="foo") + + assert yum_call.call_count == 2 + yum_call.assert_any_call( + [ + "yum", + "--quiet", + "--assumeyes", + "clean", + "expire-cache", + "--disablerepo=*", + "--enablerepo=good", + "--branch=foo", + ], + env={}, + ignore_retcode=True, + output_loglevel="trace", + python_shell=False, + ) + yum_call.assert_any_call( + [ + "yum", + "--quiet", + "--assumeyes", + "check-update", + "--setopt=autocheck_running_kernel=false", + "--disablerepo=*", + "--enablerepo=good", + "--branch=foo", + ], + output_loglevel="trace", + env={}, + ignore_retcode=True, + python_shell=False, + ) + + +def test_del_repo_error(): + basedir = "/mr/krabs" + ret_dict = { + "spongebob": {"file": "/square/pants"}, + "patrick": {"file": "/squid/ward"}, + } + mock_list = MagicMock(return_value=ret_dict) + patch_list = patch.object(yumpkg, "list_repos", mock_list) + with patch_list: + result = yumpkg.del_repo("plankton", basedir=basedir) + expected = "Error: the plankton repo does not exist in ['/mr/krabs']" + assert result == expected + + result = yumpkg.del_repo("copr:plankton/karen", basedir=basedir) + expected = "Error: the copr:copr.fedorainfracloud.org:plankton:karen repo does not exist in ['/mr/krabs']" + assert result == expected + + +def test_del_repo_single_file(): + basedir = "/mr/krabs" + ret_dict = { + "spongebob": {"file": "/square/pants"}, + "patrick": {"file": "/squid/ward"}, + } + mock_list = MagicMock(return_value=ret_dict) + patch_list = patch.object(yumpkg, "list_repos", mock_list) + with patch_list, patch("os.remove"): + result = yumpkg.del_repo("spongebob", basedir=basedir) + expected = "File /square/pants containing repo spongebob has been removed" + assert result == 
expected + + +def test_download_error_no_packages(): + with pytest.raises(SaltInvocationError): + yumpkg.download() + + +def test_download(): + patch_exists = patch("os.path.exists", MagicMock(return_value=False)) + patch_makedirs = patch("os.makedirs") + mock_listdir = MagicMock(side_effect=([], ["spongebob-1.2.rpm"])) + patch_listdir = patch("os.listdir", mock_listdir) + mock_run = MagicMock() + dict_salt = { + "cmd.run": mock_run, + } + patch_salt = patch.dict(yumpkg.__salt__, dict_salt) + with patch_exists, patch_makedirs, patch_listdir, patch_salt: + result = yumpkg.download("spongebob") + cmd = ["yumdownloader", "-q", "--destdir=/var/cache/yum/packages", "spongebob"] + mock_run.assert_called_once_with( + cmd, output_loglevel="trace", python_shell=False + ) + expected = {"spongebob": "/var/cache/yum/packages/spongebob-1.2.rpm"} + assert result == expected + + +def test_download_failed(): + patch_exists = patch("os.path.exists", MagicMock(return_value=True)) + mock_listdir = MagicMock(return_value=["spongebob-1.2.rpm", "junk.txt"]) + patch_listdir = patch("os.listdir", mock_listdir) + patch_unlink = patch("os.unlink") + mock_run = MagicMock() + dict_salt = { + "cmd.run": mock_run, + } + patch_salt = patch.dict(yumpkg.__salt__, dict_salt) + with patch_exists, patch_listdir, patch_unlink, patch_salt: + result = yumpkg.download("spongebob", "patrick") + cmd = [ + "yumdownloader", + "-q", + "--destdir=/var/cache/yum/packages", + "spongebob", + "patrick", + ] + mock_run.assert_called_once_with( + cmd, output_loglevel="trace", python_shell=False + ) + expected = { + "_error": "The following package(s) failed to download: patrick", + "spongebob": "/var/cache/yum/packages/spongebob-1.2.rpm", + } + assert result == expected + + +def test_download_to_purge(): + patch_exists = patch("os.path.exists", MagicMock(return_value=True)) + mock_listdir = MagicMock(return_value=["spongebob-1.2.rpm", "junk.txt"]) + patch_listdir = patch("os.listdir", mock_listdir) + patch_unlink = 
patch("os.unlink") + mock_run = MagicMock() + dict_salt = { + "cmd.run": mock_run, + } + patch_salt = patch.dict(yumpkg.__salt__, dict_salt) + with patch_exists, patch_listdir, patch_unlink, patch_salt: + result = yumpkg.download("spongebob") + cmd = ["yumdownloader", "-q", "--destdir=/var/cache/yum/packages", "spongebob"] + mock_run.assert_called_once_with( + cmd, output_loglevel="trace", python_shell=False + ) + expected = {"spongebob": "/var/cache/yum/packages/spongebob-1.2.rpm"} + assert result == expected + + +def test_download_unlink_error(): + patch_exists = patch("os.path.exists", MagicMock(return_value=True)) + se_listdir = ( + ["spongebob-1.2.rpm", "junk.txt"], + ["spongebob1.2.rpm", "junk.txt"], + ) + mock_listdir = MagicMock(side_effect=se_listdir) + patch_listdir = patch("os.listdir", mock_listdir) + patch_unlink = patch("os.unlink", MagicMock(side_effect=OSError)) + mock_run = MagicMock() + dict_salt = { + "cmd.run": mock_run, + } + patch_salt = patch.dict(yumpkg.__salt__, dict_salt) + with patch_exists, patch_listdir, patch_unlink, patch_salt: + with pytest.raises(CommandExecutionError): + yumpkg.download("spongebob") + + +def test_file_dict(): + mock = MagicMock() + with patch.dict(yumpkg.__salt__, {"lowpkg.file_dict": mock}): + yumpkg.file_dict("spongebob", "squarepants") + mock.assert_called_once_with("spongebob", "squarepants") + + +def test_file_list(): + mock = MagicMock() + with patch.dict(yumpkg.__salt__, {"lowpkg.file_list": mock}): + yumpkg.file_list("spongebob", "squarepants") + mock.assert_called_once_with("spongebob", "squarepants") + + def test_refresh_db_with_options(): with patch("salt.utils.pkg.clear_rtag", Mock()): @@ -1040,6 +1477,36 @@ def test_install_with_options(): ) +def test_remove_retcode_error(): + """ + Tests that we throw an error if retcode isn't 0 + """ + name = "foo" + installed = "8:3.8.12-4.n.el7" + list_pkgs_mock = MagicMock( + side_effect=lambda **kwargs: { + name: [installed] if kwargs.get("versions_as_list", 
False) else installed + } + ) + cmd_mock = MagicMock( + return_value={"pid": 12345, "retcode": 1, "stdout": "", "stderr": "error"} + ) + salt_mock = { + "cmd.run_all": cmd_mock, + "lowpkg.version_cmp": rpm.version_cmp, + "pkg_resource.parse_targets": MagicMock( + return_value=({name: installed}, "repository") + ), + } + with patch.object(yumpkg, "list_pkgs", list_pkgs_mock), patch( + "salt.utils.systemd.has_scope", MagicMock(return_value=False) + ), patch.dict(yumpkg.__salt__, salt_mock), patch.dict( + yumpkg.__grains__, {"os": "CentOS", "osrelease": 7} + ): + with pytest.raises(CommandExecutionError): + yumpkg.remove("spongebob") + + def test_remove_with_epoch(): """ Tests that we properly identify a version containing an epoch for @@ -1228,6 +1695,54 @@ def test_install_with_epoch(): assert call == expected, call +def test_install_minion_error(): + patch_get_options = patch.object(yumpkg, "_get_options", MagicMock()) + patch_salt = patch.dict( + yumpkg.__salt__, + { + "pkg_resource.parse_targets": MagicMock(side_effect=MinionError), + }, + ) + with patch_get_options, patch_salt: + with pytest.raises(CommandExecutionError): + yumpkg.install("spongebob") + + +def test_install_no_pkg_params(): + patch_get_options = patch.object(yumpkg, "_get_options", MagicMock()) + parse_return = ("", "junk") + patch_salt = patch.dict( + yumpkg.__salt__, + { + "pkg_resource.parse_targets": MagicMock(return_value=parse_return), + }, + ) + with patch_get_options, patch_salt: + assert yumpkg.install("spongebob") == {} + + +# My dufus attempt... 
but I gave up +# def test_install_repo_fancy_versions(): +# patch_get_options = patch.object(yumpkg, "_get_options", MagicMock()) +# packages = { +# "spongbob": "1*", +# "squarepants": ">1.2", +# } +# parse_return = (packages, "repository") +# patch_salt = patch.dict( +# yumpkg.__salt__, +# { +# "pkg_resource.parse_targets": MagicMock(return_value=parse_return), +# }, +# ) +# list_pkgs = {"vim": "1.1,1.2", "git": "2.1,2.2"} +# list_pkgs_list = {"vim": ["1.1", "1.2"], "git": ["2.1", "2.2"]} +# mock_list_pkgs = MagicMock(side_effect=(list_pkgs, list_pkgs_list)) +# patch_list_pkgs = patch.object(yumpkg, "list_pkgs", mock_list_pkgs) +# with patch_get_options, patch_salt, patch_list_pkgs: +# assert yumpkg.install("spongebob") == {} + + @pytest.mark.skipif(not salt.utils.platform.is_linux(), reason="Only run on Linux") def test_install_error_reporting(): """ @@ -1266,6 +1781,13 @@ def test_install_error_reporting(): assert exc_info.value.info == expected, exc_info.value.info +def test_remove_error(): + mock_salt = {"pkg_resource.parse_targets": MagicMock(side_effect=MinionError)} + with patch.dict(yumpkg.__salt__, mock_salt): + with pytest.raises(CommandExecutionError): + yumpkg.remove("spongebob") + + def test_remove_not_installed(): """ Tests that no exception raised on removing not installed package @@ -1303,6 +1825,17 @@ def test_remove_not_installed(): cmd_mock.assert_not_called() +def test_upgrade_error(): + patch_yum = patch.object(yumpkg, "_yum", return_value="yum") + patch_get_options = patch.object(yumpkg, "_get_options") + patch_list_pkgs = patch.object(yumpkg, "list_pkgs") + salt_dict = {"pkg_resource.parse_targets": MagicMock(side_effect=MinionError)} + patch_salt = patch.dict(yumpkg.__salt__, salt_dict) + with patch_yum, patch_get_options, patch_list_pkgs, patch_salt: + with pytest.raises(CommandExecutionError): + yumpkg.upgrade("spongebob", refresh=False) + + def test_upgrade_with_options(): with patch.object(yumpkg, "list_pkgs", 
MagicMock(return_value={})), patch( "salt.utils.systemd.has_scope", MagicMock(return_value=False) @@ -1317,6 +1850,7 @@ def test_upgrade_with_options(): exclude="kernel*", branch="foo", setopt="obsoletes=0,plugins=0", + skip_verify=True, ) cmd.assert_called_once_with( [ @@ -1331,6 +1865,7 @@ def test_upgrade_with_options(): "--setopt", "plugins=0", "--exclude=kernel*", + "--nogpgcheck", "upgrade", ], env={}, @@ -1338,6 +1873,19 @@ def test_upgrade_with_options(): python_shell=False, ) + # with fromrepo + cmd = MagicMock(return_value={"retcode": 1}) + with patch.dict(yumpkg.__salt__, {"cmd.run_all": cmd}): + with pytest.raises(CommandExecutionError): + yumpkg.upgrade( + refresh=False, + fromrepo="good", + exclude="kernel*", + branch="foo", + setopt="obsoletes=0,plugins=0", + skip_verify=True, + ) + # without fromrepo cmd = MagicMock(return_value={"retcode": 0}) with patch.dict(yumpkg.__salt__, {"cmd.run_all": cmd}): @@ -1370,6 +1918,64 @@ def test_upgrade_with_options(): ) +def test_upgrade_available(): + mock_return = MagicMock(return_value="non-empty value") + patch_latest_version = patch.object(yumpkg, "latest_version", mock_return) + with patch_latest_version: + assert yumpkg.upgrade_available("foo") is True + + +def test_verify_args(): + mock_verify = MagicMock() + with patch.dict(yumpkg.__salt__, {"lowpkg.verify": mock_verify}): + yumpkg.verify("spongebob") + mock_verify.assert_called_once_with("spongebob") + + +def test_verify_kwargs(): + mock_verify = MagicMock() + with patch.dict(yumpkg.__salt__, {"lowpkg.verify": mock_verify}): + yumpkg.verify(spongebob="squarepants") + mock_verify.assert_called_once_with(spongebob="squarepants") + + +def test_purge_not_installed(): + """ + Tests that no exception raised on purging not installed package + """ + name = "foo" + list_pkgs_mock = MagicMock(return_value={}) + cmd_mock = MagicMock( + return_value={"pid": 12345, "retcode": 0, "stdout": "", "stderr": ""} + ) + salt_mock = { + "cmd.run_all": cmd_mock, + 
"lowpkg.version_cmp": rpm.version_cmp, + "pkg_resource.parse_targets": MagicMock( + return_value=({name: None}, "repository") + ), + } + with patch.object(yumpkg, "list_pkgs", list_pkgs_mock), patch( + "salt.utils.systemd.has_scope", MagicMock(return_value=False) + ), patch.dict(yumpkg.__salt__, salt_mock): + + # Test yum + with patch.dict(yumpkg.__context__, {"yum_bin": "yum"}), patch.dict( + yumpkg.__grains__, {"os": "CentOS", "osrelease": 7} + ): + yumpkg.purge(name) + cmd_mock.assert_not_called() + + # Test dnf + yumpkg.__context__.pop("yum_bin") + cmd_mock.reset_mock() + with patch.dict(yumpkg.__context__, {"yum_bin": "dnf"}), patch.dict( + yumpkg.__grains__, {"os": "Fedora", "osrelease": 27} + ): + yumpkg.purge(name) + cmd_mock.assert_not_called() + + def test_info_installed_with_all_versions(): """ Test the return information of all versions for the named package(s), installed on the system. @@ -1525,6 +2131,260 @@ def test_pkg_hold_tdnf(): yumpkg.hold("foo") +def test_hold_empty(): + """ + Tests that we raise a SaltInvocationError if nothing is passed + """ + with patch.object(yumpkg, "_check_versionlock", MagicMock()): + with pytest.raises(SaltInvocationError): + yumpkg.hold() + + +def test_hold_pkgs_and_sources_error(): + """ + Tests that we raise a SaltInvocationError if both pkgs and sources is passed + """ + with patch.object(yumpkg, "_check_versionlock", MagicMock()): + with pytest.raises(SaltInvocationError): + yumpkg.hold(pkgs=["foo", "bar"], sources=["src1", "src2"]) + + +def test_hold_pkgs_sources(): + patch_versionlock = patch.object(yumpkg, "_check_versionlock", MagicMock()) + patch_list_holds = patch.object(yumpkg, "list_holds", MagicMock()) + mock_call_yum = MagicMock(return_value={"retcode": 0}) + patch_call_yum = patch.object(yumpkg, "_call_yum", mock_call_yum) + patch_opts = patch.dict(yumpkg.__opts__, {"test": False}) + expected = { + "foo": { + "name": "foo", + "changes": { + "new": "hold", + "old": "", + }, + "result": True, + "comment": 
"Package foo is now being held.", + }, + "bar": { + "name": "bar", + "changes": { + "new": "hold", + "old": "", + }, + "result": True, + "comment": "Package bar is now being held.", + }, + } + sources = [{"foo": "salt://foo.rpm"}, {"bar": "salt://bar.rpm"}] + pkgs = ["foo", "bar"] + with patch_versionlock, patch_list_holds, patch_call_yum, patch_opts: + result = yumpkg.hold(sources=sources) + assert result == expected + with patch_versionlock, patch_list_holds, patch_call_yum, patch_opts: + result = yumpkg.hold(pkgs=pkgs) + assert result == expected + + +def test_hold_test_true(): + patch_versionlock = patch.object(yumpkg, "_check_versionlock", MagicMock()) + patch_list_holds = patch.object(yumpkg, "list_holds", MagicMock()) + mock_call_yum = MagicMock(return_value={"retcode": 0}) + patch_call_yum = patch.object(yumpkg, "_call_yum", mock_call_yum) + patch_opts = patch.dict(yumpkg.__opts__, {"test": True}) + with patch_versionlock, patch_list_holds, patch_call_yum, patch_opts: + result = yumpkg.hold(name="foo") + expected = { + "foo": { + "name": "foo", + "changes": {}, + "result": None, + "comment": "Package foo is set to be held.", + }, + } + assert result == expected + + +def test_hold_fails(): + patch_versionlock = patch.object(yumpkg, "_check_versionlock", MagicMock()) + patch_list_holds = patch.object(yumpkg, "list_holds", MagicMock()) + mock_call_yum = MagicMock(return_value={"retcode": 1}) + patch_call_yum = patch.object(yumpkg, "_call_yum", mock_call_yum) + patch_opts = patch.dict(yumpkg.__opts__, {"test": False}) + with patch_versionlock, patch_list_holds, patch_call_yum, patch_opts: + result = yumpkg.hold(name="foo") + expected = { + "foo": { + "name": "foo", + "changes": {}, + "result": False, + "comment": "Package foo was unable to be held.", + }, + } + assert result == expected + + +def test_hold_already_held(): + patch_versionlock = patch.object(yumpkg, "_check_versionlock", MagicMock()) + mock_list_holds = MagicMock(return_value=["foo"]) + 
patch_list_holds = patch.object(yumpkg, "list_holds", mock_list_holds) + with patch_versionlock, patch_list_holds: + result = yumpkg.hold(name="foo") + expected = { + "foo": { + "name": "foo", + "changes": {}, + "result": True, + "comment": "Package foo is already set to be held.", + }, + } + assert result == expected + + +def test_unhold_empty(): + """ + Tests that we raise a SaltInvocationError if nothing is passed + """ + with patch.object(yumpkg, "_check_versionlock", MagicMock()): + with pytest.raises(SaltInvocationError): + yumpkg.unhold() + + +def test_unhold_pkgs_and_sources_error(): + """ + Tests that we raise a SaltInvocationError if both pkgs and sources is passed + """ + with patch.object(yumpkg, "_check_versionlock", MagicMock()): + with pytest.raises(SaltInvocationError): + yumpkg.unhold(pkgs=["foo", "bar"], sources=["src1", "src2"]) + + +def test_unhold_pkgs_sources(): + patch_versionlock = patch.object(yumpkg, "_check_versionlock", MagicMock()) + mock_list_holds = MagicMock(return_value=["foo", "bar"]) + patch_list_holds = patch.object(yumpkg, "list_holds", mock_list_holds) + mock_call_yum = MagicMock(return_value={"retcode": 0}) + patch_call_yum = patch.object(yumpkg, "_call_yum", mock_call_yum) + patch_opts = patch.dict(yumpkg.__opts__, {"test": False}) + patch_yum = patch.object(yumpkg, "_yum", MagicMock(return_value="dnf")) + expected = { + "foo": { + "name": "foo", + "changes": { + "new": "", + "old": "hold", + }, + "result": True, + "comment": "Package foo is no longer held.", + }, + "bar": { + "name": "bar", + "changes": { + "new": "", + "old": "hold", + }, + "result": True, + "comment": "Package bar is no longer held.", + }, + } + sources = [{"foo": "salt://foo.rpm"}, {"bar": "salt://bar.rpm"}] + pkgs = ["foo", "bar"] + with patch_versionlock, patch_list_holds, patch_call_yum, patch_opts, patch_yum: + result = yumpkg.unhold(sources=sources) + assert result == expected + + with patch_versionlock, patch_list_holds, patch_call_yum, patch_opts, 
patch_yum: + result = yumpkg.unhold(pkgs=pkgs) + assert result == expected + + +def test_unhold_test_true(): + patch_versionlock = patch.object(yumpkg, "_check_versionlock", MagicMock()) + mock_list_holds = MagicMock(return_value=["foo"]) + patch_list_holds = patch.object(yumpkg, "list_holds", mock_list_holds) + patch_opts = patch.dict(yumpkg.__opts__, {"test": True}) + patch_yum = patch.object(yumpkg, "_yum", MagicMock(return_value="dnf")) + with patch_versionlock, patch_list_holds, patch_opts, patch_yum: + result = yumpkg.unhold(name="foo") + expected = { + "foo": { + "name": "foo", + "changes": {}, + "result": None, + "comment": "Package foo is set to be unheld.", + }, + } + assert result == expected + + +def test_unhold_fails(): + patch_versionlock = patch.object(yumpkg, "_check_versionlock", MagicMock()) + mock_list_holds = MagicMock(return_value=["foo"]) + patch_list_holds = patch.object(yumpkg, "list_holds", mock_list_holds) + mock_call_yum = MagicMock(return_value={"retcode": 1}) + patch_call_yum = patch.object(yumpkg, "_call_yum", mock_call_yum) + patch_opts = patch.dict(yumpkg.__opts__, {"test": False}) + patch_yum = patch.object(yumpkg, "_yum", MagicMock(return_value="dnf")) + with patch_versionlock, patch_list_holds, patch_call_yum, patch_opts, patch_yum: + result = yumpkg.unhold(name="foo") + expected = { + "foo": { + "name": "foo", + "changes": {}, + "result": False, + "comment": "Package foo was unable to be unheld.", + }, + } + assert result == expected + + +def test_unhold_already_unheld(): + patch_versionlock = patch.object(yumpkg, "_check_versionlock", MagicMock()) + mock_list_holds = MagicMock(return_value=[]) + patch_list_holds = patch.object(yumpkg, "list_holds", mock_list_holds) + with patch_versionlock, patch_list_holds: + result = yumpkg.unhold(name="foo") + expected = { + "foo": { + "name": "foo", + "changes": {}, + "result": True, + "comment": "Package foo is not being held.", + }, + } + assert result == expected + + +def 
test_owner_empty(): + assert yumpkg.owner() == "" + + +def test_owner_not_owned(): + mock_stdout = MagicMock(return_value="not owned") + expected = { + "/fake/path1": "", + "/fake/path2": "", + } + with patch.dict(yumpkg.__salt__, {"cmd.run_stdout": mock_stdout}): + result = yumpkg.owner(*expected.keys()) + assert result == expected + + +def test_owner_not_owned_single(): + mock_stdout = MagicMock(return_value="not owned") + with patch.dict(yumpkg.__salt__, {"cmd.run_stdout": mock_stdout}): + result = yumpkg.owner("/fake/path") + assert result == "" + + +def test_parse_repo_file_error(): + mock_read = MagicMock( + side_effect=configparser.MissingSectionHeaderError("spongebob", 101, "test2") + ) + with patch.object(configparser.ConfigParser, "read", mock_read): + result = yumpkg._parse_repo_file("spongebob") + assert result == ("", {}) + + def test_pkg_hold_dnf(): """ Tests that we properly identify versionlock plugin when using dnf @@ -1606,14 +2466,72 @@ def test_pkg_hold_dnf(): ) -@pytest.mark.skipif(not yumpkg.HAS_YUM, reason="Could not import yum") -def test_yum_base_error(): - with patch("yum.YumBase") as mock_yum_yumbase: - mock_yum_yumbase.side_effect = CommandExecutionError +def test_get_yum_config_no_config(): + with patch("os.path.exists", MagicMock(return_value=False)): with pytest.raises(CommandExecutionError): yumpkg._get_yum_config() +def test_get_yum_config(grains): + os_family = grains["os_family"] + if os_family in ("Arch", "Debian", "Suse"): + pytest.skip(f"{os_family} does not have yum.conf") + setting = "cache_dir" + if os_family == "RedHat": + setting = "skip_if_unavailable" + result = yumpkg._get_yum_config() + assert setting in result + + +def test_get_yum_config_value_none(grains): + os_family = grains["os_family"] + if os_family in ("Arch", "Debian", "Suse"): + pytest.skip(f"{os_family} does not have yum.conf") + result = yumpkg._get_yum_config_value("spongebob") + assert result is None + + +def test_get_yum_config_unreadable(): + with 
patch.object( + configparser.ConfigParser, "read", MagicMock(side_effect=OSError) + ): + with pytest.raises(CommandExecutionError): + yumpkg._get_yum_config() + + +def test_normalize_basedir_str(): + basedir = "/etc/yum/yum.conf,/etc/yum.conf" + result = yumpkg._normalize_basedir(basedir) + assert result == ["/etc/yum/yum.conf", "/etc/yum.conf"] + + +def test_normalize_basedir_error(): + basedir = 1 + with pytest.raises(SaltInvocationError): + yumpkg._normalize_basedir(basedir) + + +def test_normalize_name_noarch(): + assert yumpkg.normalize_name("zsh.noarch") == "zsh" + + +def test_latest_version_no_names(): + assert yumpkg.latest_version() == "" + + +def test_latest_version_nonzero_retcode(): + yum_ret = {"retcode": 1, "stderr": "some error"} + mock_call_yum = MagicMock(return_value=yum_ret) + patch_call_yum = patch.object(yumpkg, "_call_yum", mock_call_yum) + list_pkgs_ret = {"foo": "1.1", "bar": "2.2"} + mock_list_pkgs = MagicMock(return_value=list_pkgs_ret) + patch_list_pkgs = patch.object(yumpkg, "list_pkgs", mock_list_pkgs) + patch_get_options = patch.object(yumpkg, "_get_options", MagicMock()) + patch_refresh_db = patch.object(yumpkg, "refresh_db", MagicMock()) + with patch_list_pkgs, patch_call_yum, patch_get_options, patch_refresh_db: + assert yumpkg.latest_version("foo", "bar") == {"foo": "", "bar": ""} + + def test_group_info(): """ Test yumpkg.group_info parsing @@ -1855,6 +2773,180 @@ def test_group_info(): assert info == expected +def test_group_install(): + group_info = ( + { + "default": ["spongebob", "gary", "patrick"], + "mandatory": ["spongebob", "gary"], + }, + { + "default": ["mr_krabs", "pearl_krabs", "plankton"], + "mandatory": ["mr_krabs", "pearl_krabs"], + }, + ) + mock_info = MagicMock(side_effect=group_info) + patch_info = patch.object(yumpkg, "group_info", mock_info) + mock_list_pkgs = MagicMock(return_value=[]) + patch_list_pkgs = patch.object(yumpkg, "list_pkgs", mock_list_pkgs) + patch_install = patch.object(yumpkg, "install", 
MagicMock()) + expected = [ + "mr_krabs", + "gary", + "pearl_krabs", + "plankton", + "spongebob", + "patrick", + ] + with patch_info, patch_list_pkgs, patch_install: + yumpkg.group_install("spongebob,mr_krabs") + _, kwargs = yumpkg.install.call_args + assert kwargs["pkgs"].sort() == expected.sort() + + +def test_group_install_include(): + group_info = ( + { + "default": ["spongebob", "gary", "patrick"], + "mandatory": ["spongebob", "gary"], + }, + { + "default": ["mr_krabs", "pearl_krabs", "plankton"], + "mandatory": ["mr_krabs", "pearl_krabs"], + }, + ) + mock_info = MagicMock(side_effect=group_info) + patch_info = patch.object(yumpkg, "group_info", mock_info) + mock_list_pkgs = MagicMock(return_value=[]) + patch_list_pkgs = patch.object(yumpkg, "list_pkgs", mock_list_pkgs) + patch_install = patch.object(yumpkg, "install", MagicMock()) + expected = [ + "mr_krabs", + "gary", + "pearl_krabs", + "plankton", + "spongebob", + "patrick", + ] + with patch_info, patch_list_pkgs, patch_install: + yumpkg.group_install("spongebob,mr_krabs", include="napoleon") + _, kwargs = yumpkg.install.call_args + expected.append("napoleon") + assert kwargs["pkgs"].sort() == expected.sort() + + +def test_group_install_skip(): + group_info = ( + { + "default": ["spongebob", "gary", "patrick"], + "mandatory": ["spongebob", "gary"], + }, + { + "default": ["mr_krabs", "pearl_krabs", "plankton"], + "mandatory": ["mr_krabs", "pearl_krabs"], + }, + ) + mock_info = MagicMock(side_effect=group_info) + patch_info = patch.object(yumpkg, "group_info", mock_info) + mock_list_pkgs = MagicMock(return_value=[]) + patch_list_pkgs = patch.object(yumpkg, "list_pkgs", mock_list_pkgs) + patch_install = patch.object(yumpkg, "install", MagicMock()) + expected = [ + "mr_krabs", + "gary", + "pearl_krabs", + "spongebob", + "patrick", + ] + with patch_info, patch_list_pkgs, patch_install: + yumpkg.group_install("spongebob,mr_krabs", skip="plankton") + _, kwargs = yumpkg.install.call_args + assert 
kwargs["pkgs"].sort() == expected.sort() + + +def test_group_install_already_present(): + group_info = ( + { + "default": ["spongebob", "gary", "patrick"], + "mandatory": ["spongebob", "gary"], + }, + { + "default": ["mr_krabs", "pearl_krabs", "plankton"], + "mandatory": ["mr_krabs", "pearl_krabs"], + }, + ) + mock_info = MagicMock(side_effect=group_info) + patch_info = patch.object(yumpkg, "group_info", mock_info) + patch_install = patch.object(yumpkg, "install", MagicMock()) + expected = [ + "mr_krabs", + "gary", + "pearl_krabs", + "plankton", + "spongebob", + "patrick", + ] + mock_list_pkgs = MagicMock(return_value=expected) + patch_list_pkgs = patch.object(yumpkg, "list_pkgs", mock_list_pkgs) + with patch_info, patch_list_pkgs, patch_install: + assert yumpkg.group_install("spongebob,mr_krabs") == {} + + +def test_group_install_no_groups(): + with pytest.raises(SaltInvocationError): + yumpkg.group_install(None) + + +def test_group_install_non_list_groups(): + with pytest.raises(SaltInvocationError): + yumpkg.group_install(1) + + +def test_group_install_non_list_skip(): + with pytest.raises(SaltInvocationError): + yumpkg.group_install(name="string", skip=1) + + +def test_group_install_non_list_include(): + with pytest.raises(SaltInvocationError): + yumpkg.group_install(name="string", include=1) + + +def test_group_list(): + mock_out = MagicMock( + return_value="""\ +Available Environment Groups: + Spongebob + Squarepants +Installed Environment Groups: + Patrick +Installed Groups: + Squidward + Sandy +Available Groups: + Mr. Krabs + Plankton +Available Language Groups: + Gary the Snail [sb]\ + """ + ) + patch_grplist = patch.dict(yumpkg.__salt__, {"cmd.run_stdout": mock_out}) + with patch_grplist: + result = yumpkg.group_list() + expected = { + "installed": ["Squidward", "Sandy"], + "available": ["Mr. 
Krabs", "Plankton"], + "installed environments": ["Patrick"], + "available environments": ["Spongebob", "Squarepants"], + "available languages": { + "Gary the Snail [sb]": { + "language": "sb", + "name": "Gary the Snail", + }, + }, + } + assert result == expected + + def test_get_repo_with_existent_repo(list_repos_var): """ Test get_repo with an existent repository @@ -2063,6 +3155,15 @@ def test_services_need_restart_requires_dnf(): pytest.raises(CommandExecutionError, yumpkg.services_need_restart) +def test_services_need_restart_no_dnf_output(): + patch_yum = patch("salt.modules.yumpkg._yum", Mock(return_value="dnf")) + patch_booted = patch("salt.utils.systemd.booted", Mock(return_value=True)) + mock_run_stdout = MagicMock(return_value="") + patch_run_stdout = patch.dict(yumpkg.__salt__, {"cmd.run_stdout": mock_run_stdout}) + with patch_yum, patch_booted, patch_run_stdout: + assert yumpkg.services_need_restart() == [] + + def test_61003_pkg_should_not_fail_when_target_not_in_old_pkgs(): patch_list_pkgs = patch( "salt.modules.yumpkg.list_pkgs", return_value={}, autospec=True From bd469106f7d0185b849ccf212965dc7063ced376 Mon Sep 17 00:00:00 2001 From: twangboy Date: Tue, 24 Oct 2023 09:16:36 -0600 Subject: [PATCH 020/196] Remove decorator for yumpkg.download --- salt/modules/yumpkg.py | 5 +-- tests/pytests/unit/modules/test_yumpkg.py | 41 ++++++++++------------- 2 files changed, 20 insertions(+), 26 deletions(-) diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py index 8b874c5ee08..f794389c861 100644 --- a/salt/modules/yumpkg.py +++ b/salt/modules/yumpkg.py @@ -30,7 +30,6 @@ import string import salt.utils.args import salt.utils.data -import salt.utils.decorators.path import salt.utils.environment import salt.utils.files import salt.utils.functools @@ -3333,7 +3332,6 @@ def modified(*packages, **flags): return __salt__["lowpkg.modified"](*packages, **flags) -@salt.utils.decorators.path.which("yumdownloader") def download(*packages, **kwargs): """ .. 
versionadded:: 2015.5.0 @@ -3353,6 +3351,9 @@ def download(*packages, **kwargs): salt '*' pkg.download httpd salt '*' pkg.download httpd postfix """ + if not salt.utils.path.which("yumdownloader"): + raise CommandExecutionError("'yumdownloader' command not available") + if not packages: raise SaltInvocationError("No packages were specified") diff --git a/tests/pytests/unit/modules/test_yumpkg.py b/tests/pytests/unit/modules/test_yumpkg.py index 31076f2cd02..b3f32d8a9ce 100644 --- a/tests/pytests/unit/modules/test_yumpkg.py +++ b/tests/pytests/unit/modules/test_yumpkg.py @@ -8,6 +8,7 @@ import pytest import salt.modules.cmdmod as cmdmod import salt.modules.pkg_resource as pkg_resource import salt.modules.rpm_lowpkg as rpm +import salt.modules.yumpkg as yumpkg import salt.utils.platform from salt.exceptions import CommandExecutionError, MinionError, SaltInvocationError from tests.support.mock import MagicMock, Mock, call, patch @@ -19,25 +20,6 @@ pytestmark = [ ] -# https://dev.to/stack-labs/how-to-mock-a-decorator-in-python-55jc -def mock_decorator(*args, **kwargs): - """Decorate by doing nothing.""" - - def decorator(f): - @wraps(f) - def decorated_function(*args, **kwargs): - return f(*args, **kwargs) - - return decorated_function - - return decorator - - -patch("salt.utils.decorators.path.which", mock_decorator).start() - -import salt.modules.yumpkg as yumpkg - - @pytest.fixture def configure_loader_modules(): def _add_data(data, key, value): @@ -1146,11 +1128,13 @@ def test_del_repo_single_file(): def test_download_error_no_packages(): - with pytest.raises(SaltInvocationError): + patch_which = patch("salt.utils.path.which", MagicMock(return_value="path.exe")) + with patch_which, pytest.raises(SaltInvocationError): yumpkg.download() def test_download(): + patch_which = patch("salt.utils.path.which", MagicMock(return_value="path.exe")) patch_exists = patch("os.path.exists", MagicMock(return_value=False)) patch_makedirs = patch("os.makedirs") mock_listdir = 
MagicMock(side_effect=([], ["spongebob-1.2.rpm"])) @@ -1160,7 +1144,7 @@ def test_download(): "cmd.run": mock_run, } patch_salt = patch.dict(yumpkg.__salt__, dict_salt) - with patch_exists, patch_makedirs, patch_listdir, patch_salt: + with patch_which, patch_exists, patch_makedirs, patch_listdir, patch_salt: result = yumpkg.download("spongebob") cmd = ["yumdownloader", "-q", "--destdir=/var/cache/yum/packages", "spongebob"] mock_run.assert_called_once_with( @@ -1171,6 +1155,7 @@ def test_download(): def test_download_failed(): + patch_which = patch("salt.utils.path.which", MagicMock(return_value="path.exe")) patch_exists = patch("os.path.exists", MagicMock(return_value=True)) mock_listdir = MagicMock(return_value=["spongebob-1.2.rpm", "junk.txt"]) patch_listdir = patch("os.listdir", mock_listdir) @@ -1180,7 +1165,7 @@ def test_download_failed(): "cmd.run": mock_run, } patch_salt = patch.dict(yumpkg.__salt__, dict_salt) - with patch_exists, patch_listdir, patch_unlink, patch_salt: + with patch_which, patch_exists, patch_listdir, patch_unlink, patch_salt: result = yumpkg.download("spongebob", "patrick") cmd = [ "yumdownloader", @@ -1199,7 +1184,14 @@ def test_download_failed(): assert result == expected +def test_download_missing_yumdownloader(): + patch_which = patch("salt.utils.path.which", MagicMock(return_value=None)) + with patch_which, pytest.raises(CommandExecutionError): + yumpkg.download("spongebob") + + def test_download_to_purge(): + patch_which = patch("salt.utils.path.which", MagicMock(return_value="path.exe")) patch_exists = patch("os.path.exists", MagicMock(return_value=True)) mock_listdir = MagicMock(return_value=["spongebob-1.2.rpm", "junk.txt"]) patch_listdir = patch("os.listdir", mock_listdir) @@ -1209,7 +1201,7 @@ def test_download_to_purge(): "cmd.run": mock_run, } patch_salt = patch.dict(yumpkg.__salt__, dict_salt) - with patch_exists, patch_listdir, patch_unlink, patch_salt: + with patch_which, patch_exists, patch_listdir, patch_unlink, 
patch_salt: result = yumpkg.download("spongebob") cmd = ["yumdownloader", "-q", "--destdir=/var/cache/yum/packages", "spongebob"] mock_run.assert_called_once_with( @@ -1220,6 +1212,7 @@ def test_download_to_purge(): def test_download_unlink_error(): + patch_which = patch("salt.utils.path.which", MagicMock(return_value="path.exe")) patch_exists = patch("os.path.exists", MagicMock(return_value=True)) se_listdir = ( ["spongebob-1.2.rpm", "junk.txt"], @@ -1233,7 +1226,7 @@ def test_download_unlink_error(): "cmd.run": mock_run, } patch_salt = patch.dict(yumpkg.__salt__, dict_salt) - with patch_exists, patch_listdir, patch_unlink, patch_salt: + with patch_which, patch_exists, patch_listdir, patch_unlink, patch_salt: with pytest.raises(CommandExecutionError): yumpkg.download("spongebob") From 5e50ccf31c24f939050b59bed531a59ebf7d2bcc Mon Sep 17 00:00:00 2001 From: twangboy Date: Wed, 25 Oct 2023 16:59:28 -0600 Subject: [PATCH 021/196] Fix rsync on Windows using cwRsync --- tools/vm.py | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/tools/vm.py b/tools/vm.py index d4aefd9837b..11dc9acbad9 100644 --- a/tools/vm.py +++ b/tools/vm.py @@ -1326,11 +1326,13 @@ class VM: # Remote repo path remote_path = self.upload_path.as_posix() rsync_remote_path = remote_path - if self.is_windows: + if sys.platform == "win32": for drive in ("c:", "C:"): source = source.replace(drive, "/cygdrive/c") - rsync_remote_path = rsync_remote_path.replace(drive, "/cygdrive/c") source = source.replace("\\", "/") + if self.is_windows: + for drive in ("c:", "C:"): + rsync_remote_path = rsync_remote_path.replace(drive, "/cygdrive/c") destination = f"{self.name}:{rsync_remote_path}" description = "Rsync local checkout to VM..." 
self.rsync(source, destination, description, rsync_flags) @@ -1520,16 +1522,17 @@ class VM: self.ctx.exit(1, "Could find the 'rsync' binary") if TYPE_CHECKING: assert rsync + ssh_cmd = " ".join( + self.ssh_command_args( + include_vm_target=False, log_command_level=logging.NOTSET + ) + ) cmd: list[str] = [ - rsync, + f'"{rsync}"', "-az", "--info=none,progress2", "-e", - " ".join( - self.ssh_command_args( - include_vm_target=False, log_command_level=logging.NOTSET - ) - ), + fr'"{ssh_cmd}"', ] if rsync_flags: cmd.extend(rsync_flags) @@ -1542,6 +1545,8 @@ class VM: log.info(f"Running {' '.join(cmd)!r}") # type: ignore[arg-type] progress = create_progress_bar(transient=True) task = progress.add_task(description, total=100) + if sys.platform == "win32": + cmd = " ".join(cmd) with progress: proc = subprocess.Popen(cmd, bufsize=1, stdout=subprocess.PIPE, text=True) completed = 0 @@ -1584,7 +1589,7 @@ class VM: if TYPE_CHECKING: assert ssh _ssh_command_args = [ - ssh, + f"'{ssh}'", "-F", str(self.ssh_config_file.relative_to(tools.utils.REPO_ROOT)), ] From 07bb453204ad52a975b64bc2d66af52da0b290eb Mon Sep 17 00:00:00 2001 From: twangboy Date: Wed, 25 Oct 2023 17:08:48 -0600 Subject: [PATCH 022/196] Fix test_get_yum_config --- tests/pytests/unit/modules/test_yumpkg.py | 6 ++++-- tools/vm.py | 8 ++++---- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/tests/pytests/unit/modules/test_yumpkg.py b/tests/pytests/unit/modules/test_yumpkg.py index b3f32d8a9ce..5ebf69adf58 100644 --- a/tests/pytests/unit/modules/test_yumpkg.py +++ b/tests/pytests/unit/modules/test_yumpkg.py @@ -1,7 +1,6 @@ import configparser import logging import os -from functools import wraps import pytest @@ -2471,7 +2470,10 @@ def test_get_yum_config(grains): pytest.skip(f"{os_family} does not have yum.conf") setting = "cache_dir" if os_family == "RedHat": - setting = "skip_if_unavailable" + # This one seems to be in all of them... 
+ # If this ever breaks in the future, we'll need to get more specific + # than os_family + setting = "installonly_limit" result = yumpkg._get_yum_config() assert setting in result diff --git a/tools/vm.py b/tools/vm.py index 11dc9acbad9..97cd52f9f37 100644 --- a/tools/vm.py +++ b/tools/vm.py @@ -1528,11 +1528,11 @@ class VM: ) ) cmd: list[str] = [ - f'"{rsync}"', + f'"{rsync}"' if sys.platform == "win32" else rsync, "-az", "--info=none,progress2", "-e", - fr'"{ssh_cmd}"', + f'"{ssh_cmd}"' if sys.platform == "win32" else ssh_cmd, ] if rsync_flags: cmd.extend(rsync_flags) @@ -1546,7 +1546,7 @@ class VM: progress = create_progress_bar(transient=True) task = progress.add_task(description, total=100) if sys.platform == "win32": - cmd = " ".join(cmd) + cmd = [" ".join(cmd)] with progress: proc = subprocess.Popen(cmd, bufsize=1, stdout=subprocess.PIPE, text=True) completed = 0 @@ -1589,7 +1589,7 @@ class VM: if TYPE_CHECKING: assert ssh _ssh_command_args = [ - f"'{ssh}'", + ssh, "-F", str(self.ssh_config_file.relative_to(tools.utils.REPO_ROOT)), ] From 8e9368263f276344d966169c939dbb30b76e728f Mon Sep 17 00:00:00 2001 From: Shane Lee Date: Mon, 30 Oct 2023 14:17:57 -0600 Subject: [PATCH 023/196] Add test when config is missing main --- tests/pytests/unit/modules/test_yumpkg.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/pytests/unit/modules/test_yumpkg.py b/tests/pytests/unit/modules/test_yumpkg.py index 5ebf69adf58..188ed58ec7e 100644 --- a/tests/pytests/unit/modules/test_yumpkg.py +++ b/tests/pytests/unit/modules/test_yumpkg.py @@ -2494,6 +2494,15 @@ def test_get_yum_config_unreadable(): yumpkg._get_yum_config() +def test_get_yum_config_no_main(caplog): + mock_false = MagicMock(return_value=False) + with patch.object(configparser.ConfigParser, "read"), patch.object( + configparser.ConfigParser, "has_section", mock_false + ), patch("os.path.exists", MagicMock(return_value=True)): + yumpkg._get_yum_config() + assert "Could not find [main] section" in 
caplog.text + + def test_normalize_basedir_str(): basedir = "/etc/yum/yum.conf,/etc/yum.conf" result = yumpkg._normalize_basedir(basedir) From dd9a2bdfa91e2a320c8f25a038e899fde60e62f5 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Mon, 25 Sep 2023 13:14:00 -0600 Subject: [PATCH 024/196] Adjust ownership on log rotation --- pkg/common/logrotate/salt-common | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/pkg/common/logrotate/salt-common b/pkg/common/logrotate/salt-common index 1bc063ebfdb..875c17e0cc6 100644 --- a/pkg/common/logrotate/salt-common +++ b/pkg/common/logrotate/salt-common @@ -4,7 +4,7 @@ rotate 7 compress notifempty - create 0640 salt salt + create 0640 } /var/log/salt/minion { @@ -13,6 +13,7 @@ rotate 7 compress notifempty + create 0640 } /var/log/salt/key { @@ -21,7 +22,7 @@ rotate 7 compress notifempty - create 0640 salt salt + create 0640 } /var/log/salt/api { @@ -30,7 +31,7 @@ rotate 7 compress notifempty - create 0640 salt salt + create 0640 } /var/log/salt/syndic { @@ -39,6 +40,7 @@ rotate 7 compress notifempty + create 0640 } /var/log/salt/proxy { @@ -47,4 +49,5 @@ rotate 7 compress notifempty + create 0640 } From 024eb3f10bf0c4685b094a476fc4ca05308dd118 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Mon, 25 Sep 2023 13:20:20 -0600 Subject: [PATCH 025/196] Added changelog for issue --- changelog/65288.fixed.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog/65288.fixed.md diff --git a/changelog/65288.fixed.md b/changelog/65288.fixed.md new file mode 100644 index 00000000000..88581243382 --- /dev/null +++ b/changelog/65288.fixed.md @@ -0,0 +1 @@ +Preserve ownership on log rotation From de84120e589e9a592cc925db55386e1e4036ec44 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Fri, 29 Sep 2023 16:41:57 -0600 Subject: [PATCH 026/196] Initial log rotation test for ownership user and group --- 
pkg/tests/integration/test_salt_user.py | 112 ++++++++++++++++++++++++ 1 file changed, 112 insertions(+) diff --git a/pkg/tests/integration/test_salt_user.py b/pkg/tests/integration/test_salt_user.py index 9d2634962be..b07c6961fd4 100644 --- a/pkg/tests/integration/test_salt_user.py +++ b/pkg/tests/integration/test_salt_user.py @@ -170,3 +170,115 @@ def test_pkg_paths( else: assert file_path.owner() == "root" assert file_path.group() == "root" + + +def test_paths_log_rotation( + salt_master, salt_minion, salt_call_cli, install_salt, test_account +): + """ + Test the correct ownership is assigned when log rotation occurs + Change the user in the Salt Master, chage ownership, force logrotation + Check ownership and premissions. + Assumes test_pkg_paths successful + """ + if packaging.version.parse(install_salt.version) <= packaging.version.parse( + "3006.2" + ): + pytest.skip("Package path ownership was changed in salt 3006.3") + + # check that the salt_master is running + assert salt_master.is_running() + match = False + for proc in psutil.Process(salt_master.pid).children(): + assert proc.username() == "salt" + match = True + + assert match + + # Paths created by package installs with adjustment for current conf_dir /etc/salt + log_pkg_paths = [ + install_salt.conf_dir, + "/var/cache/salt", + "/var/log/salt", + "/var/run/salt", + "/opt/saltstack/salt", + ] + + # stop the salt_master, so can change user + with salt_master.stopped(): + assert salt_master.is_running() is False + + # change the user in the master's config file. 
+ ret = salt_call_cli.run( + "--local", + "file.replace", + f"{install_salt.conf_dir}/master", + "user: salt", + f"user: {test_account.username}", + "flags=['IGNORECASE']", + "append_if_not_found=True", + ) + assert ret.returncode == 0 + + # change ownership of appropriate paths to user + for _path in log_pkg_paths: + chg_ownership_cmd = ( + f"chown -R {test_account.username}:{test_account.username} {_path}" + ) + ret = salt_call_cli.run("--local", "cmd.run", chg_ownership_cmd) + assert ret.returncode == 0 + + # restart the salt_master + with salt_master.started(): + assert salt_master.is_running() is True + + # ensure some data in files + log_files_list = [ + "/var/log/salt/api", + "/var/log/salt/key", + "/var/log/salt/master", + ] + for _path in log_files_list: + log_path = pathlib.Path(_path) + assert log_path.exists() + with log_path.open("a") as f: + f.write("This is a log rotation test\n") + + # force log rotation + logr_conf_file = "/etc/logrotate.d/salt" + logr_conf_path = pathlib.Path(logr_conf_file) + # assert logr_conf_path.exists() + if not logr_conf_path.exists(): + logr_conf_file = "/etc/logrotate.conf" + logr_conf_path = pathlib.Path(logr_conf_file) + assert logr_conf_path.exists() + + for _path in log_files_list: + log_path = pathlib.Path(_path) + assert log_path.exists() + assert log_path.owner() == f"{test_account.username}" + assert log_path.group() == f"{test_account.username}" + assert log_path.stat().st_mode & 0o7777 == 0o640 + + # cleanup + # stop the salt_master + with salt_master.stopped(): + assert salt_master.is_running() is False + + # change the user in the master's config file. 
+ ret = salt_call_cli.run( + "--local", + "file.replace", + f"{install_salt.conf_dir}/master", + f"user: {test_account.username}", + "user: salt", + "flags=['IGNORECASE']", + "append_if_not_found=True", + ) + assert ret.returncode == 0 + + # change ownership of appropriate paths to user + for _path in log_pkg_paths: + chg_ownership_cmd = f"chown -R salt:salt {_path}" + ret = salt_call_cli.run("--local", "cmd.run", chg_ownership_cmd) + assert ret.returncode == 0 From af8e35ebce07737d55c35ff8427ce87823e2719d Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Mon, 2 Oct 2023 17:29:53 -0600 Subject: [PATCH 027/196] Package test for log rotation and ownership / permissions --- pkg/tests/integration/test_salt_user.py | 224 ++++++++++++++++-------- 1 file changed, 150 insertions(+), 74 deletions(-) diff --git a/pkg/tests/integration/test_salt_user.py b/pkg/tests/integration/test_salt_user.py index b07c6961fd4..d1a8858f745 100644 --- a/pkg/tests/integration/test_salt_user.py +++ b/pkg/tests/integration/test_salt_user.py @@ -197,88 +197,164 @@ def test_paths_log_rotation( # Paths created by package installs with adjustment for current conf_dir /etc/salt log_pkg_paths = [ - install_salt.conf_dir, - "/var/cache/salt", - "/var/log/salt", - "/var/run/salt", - "/opt/saltstack/salt", + install_salt.conf_dir, # "bkup0" + "/var/cache/salt", # "bkup1" + "/var/log/salt", # "bkup2" + "/var/run/salt", # "bkup3" + "/opt/saltstack/salt", # "bkup4" ] - # stop the salt_master, so can change user - with salt_master.stopped(): - assert salt_master.is_running() is False + # backup those about to change + bkup_count = 0 + bkup_count_max = 5 + with temp_directory("bkup0") as temp_dir_path_0: + with temp_directory("bkup1") as temp_dir_path_1: + with temp_directory("bkup2") as temp_dir_path_2: + with temp_directory("bkup3") as temp_dir_path_3: + with temp_directory("bkup4") as temp_dir_path_4: - # change the user in the master's config file. 
- ret = salt_call_cli.run( - "--local", - "file.replace", - f"{install_salt.conf_dir}/master", - "user: salt", - f"user: {test_account.username}", - "flags=['IGNORECASE']", - "append_if_not_found=True", - ) - assert ret.returncode == 0 + assert temp_dir_path_0.is_dir() + assert temp_dir_path_1.is_dir() + assert temp_dir_path_2.is_dir() + assert temp_dir_path_3.is_dir() + assert temp_dir_path_4.is_dir() - # change ownership of appropriate paths to user - for _path in log_pkg_paths: - chg_ownership_cmd = ( - f"chown -R {test_account.username}:{test_account.username} {_path}" - ) - ret = salt_call_cli.run("--local", "cmd.run", chg_ownership_cmd) - assert ret.returncode == 0 + # stop the salt_master, so can change user + with salt_master.stopped(): + assert salt_master.is_running() is False - # restart the salt_master - with salt_master.started(): - assert salt_master.is_running() is True + for _path in log_pkg_paths: + if bkup_count == 0: + cmd_to_run = ( + f"cp -a {_path}/* {str(temp_dir_path_0)}/" + ) + elif bkup_count == 1: + cmd_to_run = ( + f"cp -a {_path}/* {str(temp_dir_path_1)}/" + ) + elif bkup_count == 2: + cmd_to_run = ( + f"cp -a {_path}/* {str(temp_dir_path_2)}/" + ) + elif bkup_count == 3: + cmd_to_run = ( + f"cp -a {_path}/* {str(temp_dir_path_3)}/" + ) + elif bkup_count == 4: + cmd_to_run = ( + f"cp -a {_path}/* {str(temp_dir_path_4)}/" + ) + elif bkup_count > 5: + assert bkupcount < bkup_count_max # force assertion - # ensure some data in files - log_files_list = [ - "/var/log/salt/api", - "/var/log/salt/key", - "/var/log/salt/master", - ] - for _path in log_files_list: - log_path = pathlib.Path(_path) - assert log_path.exists() - with log_path.open("a") as f: - f.write("This is a log rotation test\n") + ret = salt_call_cli.run( + "--local", "cmd.run", cmd_to_run + ) + bkup_count += 1 + assert ret.returncode == 0 - # force log rotation - logr_conf_file = "/etc/logrotate.d/salt" - logr_conf_path = pathlib.Path(logr_conf_file) - # assert 
logr_conf_path.exists() - if not logr_conf_path.exists(): - logr_conf_file = "/etc/logrotate.conf" - logr_conf_path = pathlib.Path(logr_conf_file) - assert logr_conf_path.exists() + # change the user in the master's config file. + ret = salt_call_cli.run( + "--local", + "file.replace", + f"{install_salt.conf_dir}/master", + "user: salt", + f"user: {test_account.username}", + "flags=['IGNORECASE']", + "append_if_not_found=True", + ) + assert ret.returncode == 0 - for _path in log_files_list: - log_path = pathlib.Path(_path) - assert log_path.exists() - assert log_path.owner() == f"{test_account.username}" - assert log_path.group() == f"{test_account.username}" - assert log_path.stat().st_mode & 0o7777 == 0o640 + # change ownership of appropriate paths to user + for _path in log_pkg_paths: + chg_ownership_cmd = f"chown -R {test_account.username}:{test_account.username} {_path}" + ret = salt_call_cli.run( + "--local", "cmd.run", chg_ownership_cmd + ) + assert ret.returncode == 0 - # cleanup - # stop the salt_master - with salt_master.stopped(): - assert salt_master.is_running() is False + # restart the salt_master + with salt_master.started(): + assert salt_master.is_running() is True - # change the user in the master's config file. 
- ret = salt_call_cli.run( - "--local", - "file.replace", - f"{install_salt.conf_dir}/master", - f"user: {test_account.username}", - "user: salt", - "flags=['IGNORECASE']", - "append_if_not_found=True", - ) - assert ret.returncode == 0 + # ensure some data in files + log_files_list = [ + "/var/log/salt/api", + "/var/log/salt/key", + "/var/log/salt/master", + ] + for _path in log_files_list: + log_path = pathlib.Path(_path) + assert log_path.exists() + with log_path.open("a") as f: + f.write("This is a log rotation test\n") - # change ownership of appropriate paths to user - for _path in log_pkg_paths: - chg_ownership_cmd = f"chown -R salt:salt {_path}" - ret = salt_call_cli.run("--local", "cmd.run", chg_ownership_cmd) - assert ret.returncode == 0 + # force log rotation + logr_conf_file = "/etc/logrotate.d/salt" + logr_conf_path = pathlib.Path(logr_conf_file) + # assert logr_conf_path.exists() + if not logr_conf_path.exists(): + logr_conf_file = "/etc/logrotate.conf" + logr_conf_path = pathlib.Path(logr_conf_file) + assert logr_conf_path.exists() + + # force log rotation + log_rotate_cmd = f"logrotate -f {str(logr_conf_file)}" + ret = salt_call_cli.run( + "--local", "cmd.run", log_rotate_cmd + ) + assert ret.returncode == 0 + + for _path in log_files_list: + log_path = pathlib.Path(_path) + str_log_path = str(log_path) + ret = salt_call_cli.run( + "--local", "cmd.run", f"ls -alh {str_log_path}" + ) + assert log_path.exists() + assert ( + log_path.owner() == f"{test_account.username}" + ) + assert ( + log_path.group() == f"{test_account.username}" + ) + assert log_path.stat().st_mode & 0o7777 == 0o640 + + # cleanup + assert salt_master.is_running() is False + + # change the user in the master's config file. 
+ ret = salt_call_cli.run( + "--local", + "file.replace", + f"{install_salt.conf_dir}/master", + f"user: {test_account.username}", + "user: salt", + "flags=['IGNORECASE']", + "append_if_not_found=True", + ) + assert ret.returncode == 0 + + # restore from backed up + bkup_count = 0 + for _path in log_pkg_paths: + if bkup_count == 0: + cmd_to_run = f"cp -a --force {str(temp_dir_path_0)}/* {_path}/" + elif bkup_count == 1: + cmd_to_run = f"cp -a --force {str(temp_dir_path_1)}/* {_path}/" + elif bkup_count == 2: + cmd_to_run = f"cp -a --force {str(temp_dir_path_2)}/* {_path}/" + elif bkup_count == 3: + cmd_to_run = f"cp -a --force {str(temp_dir_path_3)}/* {_path}/" + elif bkup_count == 4: + # use --update since /opt/saltstack/salt and would get SIGSEGV since mucking with running code + cmd_to_run = f"cp -a --update --force {str(temp_dir_path_4)}/* {_path}/" + elif bkup_count > 5: + assert bkupcount < bkup_count_max # force assertion + + ret = salt_call_cli.run( + "--local", "cmd.run", cmd_to_run + ) + + bkup_count += 1 + assert ret.returncode == 0 From 9b6e493db2574bf84ee38198e06b8e9e3d2d3e26 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Tue, 3 Oct 2023 09:37:27 -0600 Subject: [PATCH 028/196] Added missing include for temp_directory --- pkg/tests/integration/test_salt_user.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pkg/tests/integration/test_salt_user.py b/pkg/tests/integration/test_salt_user.py index d1a8858f745..3e8cd88effb 100644 --- a/pkg/tests/integration/test_salt_user.py +++ b/pkg/tests/integration/test_salt_user.py @@ -6,6 +6,7 @@ import sys import packaging.version import psutil import pytest +from saltfactories.utils.tempfiles import temp_directory pytestmark = [ pytest.mark.skip_on_windows, From 7c4f68b7744cc5d8b319b95c3c10b202d061bc45 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Tue, 3 Oct 2023 13:58:47 -0600 Subject: [PATCH 029/196] Updated test to only run on RedHat family, Ubuntu/Debian 
has issue 65231 --- pkg/tests/integration/test_salt_user.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pkg/tests/integration/test_salt_user.py b/pkg/tests/integration/test_salt_user.py index 3e8cd88effb..f5c89ffacf0 100644 --- a/pkg/tests/integration/test_salt_user.py +++ b/pkg/tests/integration/test_salt_user.py @@ -187,6 +187,11 @@ def test_paths_log_rotation( ): pytest.skip("Package path ownership was changed in salt 3006.3") + if install_salt.distro_id not in ("centos", "redhat", "amzn", "fedora", "photon"): + pytest.skip( + "Only tests RedHat family packages till logrotation paths are resolved on Ubuntu/Debian, see issue 65231" + ) + # check that the salt_master is running assert salt_master.is_running() match = False From 6d7c2f0dd13665580411522b59dbf3b2156936ae Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Thu, 12 Oct 2023 11:32:28 -0600 Subject: [PATCH 030/196] Updated version check in test to prevent running on downgrade --- pkg/tests/integration/test_salt_user.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/tests/integration/test_salt_user.py b/pkg/tests/integration/test_salt_user.py index f5c89ffacf0..880fcefdfd2 100644 --- a/pkg/tests/integration/test_salt_user.py +++ b/pkg/tests/integration/test_salt_user.py @@ -183,7 +183,7 @@ def test_paths_log_rotation( Assumes test_pkg_paths successful """ if packaging.version.parse(install_salt.version) <= packaging.version.parse( - "3006.2" + "3006.3" ): pytest.skip("Package path ownership was changed in salt 3006.3") From cd236c42d95879b6cb00d0714237ab14cc64404f Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Thu, 12 Oct 2023 18:27:59 -0600 Subject: [PATCH 031/196] Limited test ownership to id and no longer change group on files --- pkg/tests/integration/test_salt_user.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pkg/tests/integration/test_salt_user.py b/pkg/tests/integration/test_salt_user.py 
index 880fcefdfd2..4d9b31a0a44 100644 --- a/pkg/tests/integration/test_salt_user.py +++ b/pkg/tests/integration/test_salt_user.py @@ -273,7 +273,9 @@ def test_paths_log_rotation( # change ownership of appropriate paths to user for _path in log_pkg_paths: - chg_ownership_cmd = f"chown -R {test_account.username}:{test_account.username} {_path}" + chg_ownership_cmd = ( + f"chown -R {test_account.username} {_path}" + ) ret = salt_call_cli.run( "--local", "cmd.run", chg_ownership_cmd ) From 0ff32b01de3d627fa32b74de30e127cf5a8c9ad7 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Mon, 16 Oct 2023 09:05:26 -0600 Subject: [PATCH 032/196] Removed group membership check from the test --- pkg/tests/integration/test_salt_user.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/pkg/tests/integration/test_salt_user.py b/pkg/tests/integration/test_salt_user.py index 4d9b31a0a44..c19b4c23e53 100644 --- a/pkg/tests/integration/test_salt_user.py +++ b/pkg/tests/integration/test_salt_user.py @@ -323,9 +323,6 @@ def test_paths_log_rotation( assert ( log_path.owner() == f"{test_account.username}" ) - assert ( - log_path.group() == f"{test_account.username}" - ) assert log_path.stat().st_mode & 0o7777 == 0o640 # cleanup From 3cee13afdaf7c6b7d881eaab929b8c5a1ea58f34 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Tue, 24 Oct 2023 17:36:25 -0600 Subject: [PATCH 033/196] Remove Photon OS from test --- pkg/tests/integration/test_salt_user.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/tests/integration/test_salt_user.py b/pkg/tests/integration/test_salt_user.py index c19b4c23e53..acca2690437 100644 --- a/pkg/tests/integration/test_salt_user.py +++ b/pkg/tests/integration/test_salt_user.py @@ -187,7 +187,7 @@ def test_paths_log_rotation( ): pytest.skip("Package path ownership was changed in salt 3006.3") - if install_salt.distro_id not in ("centos", "redhat", "amzn", "fedora", "photon"): + if 
install_salt.distro_id not in ("centos", "redhat", "amzn", "fedora"): pytest.skip( "Only tests RedHat family packages till logrotation paths are resolved on Ubuntu/Debian, see issue 65231" ) From 57b02f49737f278a501db9db7034d9caacd53373 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Wed, 1 Nov 2023 10:37:30 -0600 Subject: [PATCH 034/196] Updated tests per reviewer comments --- pkg/tests/integration/test_salt_user.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/pkg/tests/integration/test_salt_user.py b/pkg/tests/integration/test_salt_user.py index acca2690437..02e59590279 100644 --- a/pkg/tests/integration/test_salt_user.py +++ b/pkg/tests/integration/test_salt_user.py @@ -173,6 +173,7 @@ def test_pkg_paths( assert file_path.group() == "root" +@pytest.mark.skip_if_binaries_missing("logrotate") def test_paths_log_rotation( salt_master, salt_minion, salt_call_cli, install_salt, test_account ): @@ -300,7 +301,6 @@ def test_paths_log_rotation( # force log rotation logr_conf_file = "/etc/logrotate.d/salt" logr_conf_path = pathlib.Path(logr_conf_file) - # assert logr_conf_path.exists() if not logr_conf_path.exists(): logr_conf_file = "/etc/logrotate.conf" logr_conf_path = pathlib.Path(logr_conf_file) @@ -315,10 +315,6 @@ def test_paths_log_rotation( for _path in log_files_list: log_path = pathlib.Path(_path) - str_log_path = str(log_path) - ret = salt_call_cli.run( - "--local", "cmd.run", f"ls -alh {str_log_path}" - ) assert log_path.exists() assert ( log_path.owner() == f"{test_account.username}" From a75f0a9f41dbad8b86b25996663bba11516ea708 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Wed, 1 Nov 2023 12:17:13 -0600 Subject: [PATCH 035/196] Updated test per reviewer comments --- pkg/tests/integration/test_salt_user.py | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/pkg/tests/integration/test_salt_user.py b/pkg/tests/integration/test_salt_user.py index 
02e59590279..2a3ed957041 100644 --- a/pkg/tests/integration/test_salt_user.py +++ b/pkg/tests/integration/test_salt_user.py @@ -307,7 +307,7 @@ def test_paths_log_rotation( assert logr_conf_path.exists() # force log rotation - log_rotate_cmd = f"logrotate -f {str(logr_conf_file)}" + log_rotate_cmd = f"logrotate -f {logr_conf_file}" ret = salt_call_cli.run( "--local", "cmd.run", log_rotate_cmd ) @@ -316,9 +316,7 @@ def test_paths_log_rotation( for _path in log_files_list: log_path = pathlib.Path(_path) assert log_path.exists() - assert ( - log_path.owner() == f"{test_account.username}" - ) + assert log_path.owner() == test_account.username assert log_path.stat().st_mode & 0o7777 == 0o640 # cleanup From 9f8fc27cbaf3a7c9a14aa9c96cdd577d2ba17376 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Mon, 6 Nov 2023 15:35:37 -0700 Subject: [PATCH 036/196] Updated check version of Salt for the test --- pkg/tests/integration/test_salt_user.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pkg/tests/integration/test_salt_user.py b/pkg/tests/integration/test_salt_user.py index 2a3ed957041..f785c6854d2 100644 --- a/pkg/tests/integration/test_salt_user.py +++ b/pkg/tests/integration/test_salt_user.py @@ -136,9 +136,9 @@ def test_pkg_paths( Test package paths ownership """ if packaging.version.parse(install_salt.version) <= packaging.version.parse( - "3006.2" + "3006.4" ): - pytest.skip("Package path ownership was changed in salt 3006.3") + pytest.skip("Package path ownership was changed in salt 3006.4") salt_user_subdirs = [] for _path in pkg_paths: pkg_path = pathlib.Path(_path) @@ -184,9 +184,9 @@ def test_paths_log_rotation( Assumes test_pkg_paths successful """ if packaging.version.parse(install_salt.version) <= packaging.version.parse( - "3006.3" + "3006.4" ): - pytest.skip("Package path ownership was changed in salt 3006.3") + pytest.skip("Package path ownership was changed in salt 3006.4") if install_salt.distro_id not in 
("centos", "redhat", "amzn", "fedora"): pytest.skip( From 3b5dc02e700a22ba202fb39d29a0458973833a3d Mon Sep 17 00:00:00 2001 From: twangboy Date: Tue, 26 Sep 2023 14:34:34 -0600 Subject: [PATCH 037/196] Add some tests for client/__init__.py --- tests/pytests/unit/client/test_init.py | 271 +++++++++++++++++++++++++ 1 file changed, 271 insertions(+) create mode 100644 tests/pytests/unit/client/test_init.py diff --git a/tests/pytests/unit/client/test_init.py b/tests/pytests/unit/client/test_init.py new file mode 100644 index 00000000000..6a10bfef7e1 --- /dev/null +++ b/tests/pytests/unit/client/test_init.py @@ -0,0 +1,271 @@ +import pytest + +import salt.client +from salt.exceptions import SaltInvocationError + + +@pytest.fixture +def local_client(): + return salt.client.get_local_client() + + +def test_get_local_client(local_client): + assert isinstance(local_client, salt.client.LocalClient) + + +def test_get_local_client_mopts(master_opts): + master_opts["rest_cherrypy"] = {"port": 8000} + local_client = salt.client.get_local_client(mopts=master_opts) + assert isinstance(local_client, salt.client.LocalClient) + + +@pytest.mark.parametrize( + "val, expected", + ((None, 5), (7, 7), ("9", 9), ("eleven", 5), (["13"], 5)), +) +def test_local_client_get_timeout(local_client, val, expected): + assert local_client._get_timeout(timeout=val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + ("group1", ["L@spongebob,patrick"]), + ("group2", ["G@os:squidward"]), + ("group3", ["(", "G@os:plankton", "and", "(", "L@spongebob,patrick", ")", ")"]), + ), +) +def test_resolve_nodegroup(master_opts, val, expected): + master_opts["nodegroups"] = { + "group1": "L@spongebob,patrick", + "group2": "G@os:squidward", + "group3": "G@os:plankton and N@group1", + } + local_client = salt.client.get_local_client(mopts=master_opts) + assert local_client._resolve_nodegroup(val) == expected + + +def test_resolve_nodegroup_error(master_opts): + master_opts["nodegroups"] = { + 
"group1": "L@spongebob,patrick", + "group2": "G@os:squidward", + "group3": "G@os:plankton and N@group1", + } + local_client = salt.client.get_local_client(mopts=master_opts) + with pytest.raises(SaltInvocationError): + local_client._resolve_nodegroup("missing") + + +def test_prep_pub(local_client): + result = local_client._prep_pub( + tgt="*", + fun="test.ping", + arg="", + tgt_type="glob", + ret="", + jid="123", + timeout=7, + ) + expected = { + "arg": "", + "cmd": "publish", + "fun": "test.ping", + "jid": "123", + "key": "", + "ret": "", + "tgt": "*", + "tgt_type": "glob", + "user": local_client.salt_user, + } + assert result == expected + + +def test_prep_pub_kwargs(local_client): + result = local_client._prep_pub( + tgt="*", + fun="test.ping", + arg="", + tgt_type="glob", + ret="", + jid="123", + timeout=7, + some_kwarg="spongebob", + ) + expected = { + "arg": "", + "cmd": "publish", + "fun": "test.ping", + "jid": "123", + "key": "", + "ret": "", + "tgt": "*", + "tgt_type": "glob", + "user": local_client.salt_user, + "kwargs": { + "some_kwarg": "spongebob", + }, + } + assert result == expected + + +def test_prep_pub_order_masters(master_opts): + master_opts["order_masters"] = True + local_client = salt.client.get_local_client(mopts=master_opts) + result = local_client._prep_pub( + tgt="*", + fun="test.ping", + arg="", + tgt_type="glob", + ret="", + jid="123", + timeout=7, + ) + expected = { + "arg": "", + "cmd": "publish", + "fun": "test.ping", + "jid": "123", + "key": "", + "ret": "", + "tgt": "*", + "tgt_type": "glob", + "to": 7, + "user": local_client.salt_user, + } + assert result == expected + + +def test_prep_pub_nodegroup(master_opts): + master_opts["nodegroups"] = { + "group1": "L@spongebob,patrick", + "group2": "G@os:squidward", + "group3": "G@os:plankton and N@group1", + } + local_client = salt.client.get_local_client(mopts=master_opts) + result = local_client._prep_pub( + tgt="group1", + fun="test.ping", + arg="", + tgt_type="nodegroup", + ret="", + 
jid="123", + timeout=7, + ) + expected = { + "arg": "", + "cmd": "publish", + "fun": "test.ping", + "jid": "123", + "key": "", + "ret": "", + "tgt": "L@spongebob,patrick", + "tgt_type": "compound", + "user": local_client.salt_user, + } + assert result == expected + + +def test_prep_pub_compound(local_client): + result = local_client._prep_pub( + tgt="spongebob,patrick", + fun="test.ping", + arg="", + tgt_type="compound", + ret="", + jid="123", + timeout=7, + ) + expected = { + "arg": "", + "cmd": "publish", + "fun": "test.ping", + "jid": "123", + "key": "", + "ret": "", + "tgt": "spongebob,patrick", + "tgt_type": "compound", + "user": local_client.salt_user, + } + assert result == expected + + +def test_prep_pub_compound_nodegroup(master_opts): + master_opts["nodegroups"] = { + "group1": "L@spongebob,patrick", + "group2": "G@os:squidward", + "group3": "G@os:plankton and N@group1", + } + local_client = salt.client.get_local_client(mopts=master_opts) + result = local_client._prep_pub( + tgt="N@group1", + fun="test.ping", + arg="", + tgt_type="compound", + ret="", + jid="123", + timeout=7, + ) + expected = { + "arg": "", + "cmd": "publish", + "fun": "test.ping", + "jid": "123", + "key": "", + "ret": "", + "tgt": "L@spongebob,patrick", + "tgt_type": "compound", + "user": local_client.salt_user, + } + assert result == expected + + +def test_prep_pub_ext_job_cache(master_opts): + master_opts["ext_job_cache"] = "mysql" + local_client = salt.client.get_local_client(mopts=master_opts) + result = local_client._prep_pub( + tgt="spongebob,patrick", + fun="test.ping", + arg="", + tgt_type="glob", + ret="", + jid="123", + timeout=7, + ) + expected = { + "arg": "", + "cmd": "publish", + "fun": "test.ping", + "jid": "123", + "key": "", + "ret": "mysql", + "tgt": "spongebob,patrick", + "tgt_type": "glob", + "user": local_client.salt_user, + } + assert result == expected + + +def test_prep_pub_ext_job_cache_existing(master_opts): + master_opts["ext_job_cache"] = "mysql" + 
local_client = salt.client.get_local_client(mopts=master_opts) + result = local_client._prep_pub( + tgt="spongebob,patrick", + fun="test.ping", + arg="", + tgt_type="glob", + ret="postgres", + jid="123", + timeout=7, + ) + expected = { + "arg": "", + "cmd": "publish", + "fun": "test.ping", + "jid": "123", + "key": "", + "ret": "postgres,mysql", + "tgt": "spongebob,patrick", + "tgt_type": "glob", + "user": local_client.salt_user, + } + assert result == expected From c8e5547fa72309db00ea688707996ee2e7c629fd Mon Sep 17 00:00:00 2001 From: twangboy Date: Wed, 11 Oct 2023 11:36:49 -0600 Subject: [PATCH 038/196] Add additional client assertion --- tests/pytests/unit/client/test_init.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/pytests/unit/client/test_init.py b/tests/pytests/unit/client/test_init.py index 6a10bfef7e1..099fa1ebb9e 100644 --- a/tests/pytests/unit/client/test_init.py +++ b/tests/pytests/unit/client/test_init.py @@ -17,6 +17,7 @@ def test_get_local_client_mopts(master_opts): master_opts["rest_cherrypy"] = {"port": 8000} local_client = salt.client.get_local_client(mopts=master_opts) assert isinstance(local_client, salt.client.LocalClient) + assert local_client.opts == master_opts @pytest.mark.parametrize( From 9b59adc16f65e948bafb0646489827185da552df Mon Sep 17 00:00:00 2001 From: twangboy Date: Thu, 12 Oct 2023 09:49:16 -0600 Subject: [PATCH 039/196] Add comment to test --- tests/pytests/unit/client/test_init.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/pytests/unit/client/test_init.py b/tests/pytests/unit/client/test_init.py index 099fa1ebb9e..90fb91b0070 100644 --- a/tests/pytests/unit/client/test_init.py +++ b/tests/pytests/unit/client/test_init.py @@ -10,6 +10,9 @@ def local_client(): def test_get_local_client(local_client): + """ + Test that a local client is created + """ assert isinstance(local_client, salt.client.LocalClient) From 46f3e393328766bb5fa917629059fee983d41af3 Mon Sep 17 00:00:00 2001 From: ScriptAutomate 
Date: Tue, 10 Oct 2023 13:01:18 -0500 Subject: [PATCH 040/196] Add Amazon Linux 2023; update Amazon Linux 2 AMI --- .github/workflows/ci.yml | 173 +++++++++++++++++ .github/workflows/nightly.yml | 181 ++++++++++++++++++ .github/workflows/release.yml | 36 ++++ .github/workflows/scheduled.yml | 173 +++++++++++++++++ .github/workflows/staging.yml | 178 +++++++++++++++++ .../templates/build-rpm-repo.yml.jinja | 2 + .../test-package-downloads-action.yml | 6 + changelog/64455.added.md | 1 + tools/ci.py | 4 + tools/pre_commit.py | 8 + 10 files changed, 762 insertions(+) create mode 100644 changelog/64455.added.md diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 1c2ff7aa2da..bf6eba7efd8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -797,6 +797,40 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + amazonlinux-2023-ci-deps: + name: Amazon Linux 2023 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + amazonlinux-2023-arm64-ci-deps: + name: Amazon Linux 2023 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + 
nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + archlinux-lts-ci-deps: name: Arch Linux LTS Deps if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -1278,6 +1312,72 @@ jobs: skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + amazonlinux-2-arm64-pkg-tests: + name: Amazon Linux 2 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2023-pkg-tests: + name: Amazon Linux 2023 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2023-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + 
nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2023-arm64-pkg-tests: + name: Amazon Linux 2023 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2023-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + centos-7-pkg-tests: name: CentOS 7 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -1982,6 +2082,66 @@ jobs: workflow-slug: ci default-timeout: 180 + amazonlinux-2-arm64: + name: Amazon Linux 2 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + testrun: 
${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + + amazonlinux-2023: + name: Amazon Linux 2023 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2023-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + + amazonlinux-2023-arm64: + name: Amazon Linux 2023 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2023-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + 
archlinux-lts: name: Arch Linux LTS Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -2482,6 +2642,8 @@ jobs: - almalinux-9-arm64-ci-deps - amazonlinux-2-ci-deps - amazonlinux-2-arm64-ci-deps + - amazonlinux-2023-ci-deps + - amazonlinux-2023-arm64-ci-deps - archlinux-lts-ci-deps - centos-7-ci-deps - centos-7-arm64-ci-deps @@ -2516,6 +2678,9 @@ jobs: - almalinux-8 - almalinux-9 - amazonlinux-2 + - amazonlinux-2-arm64 + - amazonlinux-2023 + - amazonlinux-2023-arm64 - archlinux-lts - centos-7 - centosstream-8 @@ -2673,6 +2838,8 @@ jobs: - almalinux-9-arm64-ci-deps - amazonlinux-2-ci-deps - amazonlinux-2-arm64-ci-deps + - amazonlinux-2023-ci-deps + - amazonlinux-2023-arm64-ci-deps - archlinux-lts-ci-deps - centos-7-ci-deps - centos-7-arm64-ci-deps @@ -2707,6 +2874,9 @@ jobs: - almalinux-8 - almalinux-9 - amazonlinux-2 + - amazonlinux-2-arm64 + - amazonlinux-2023 + - amazonlinux-2023-arm64 - archlinux-lts - centos-7 - centosstream-8 @@ -2730,6 +2900,9 @@ jobs: - ubuntu-2204 - ubuntu-2204-arm64 - amazonlinux-2-pkg-tests + - amazonlinux-2-arm64-pkg-tests + - amazonlinux-2023-pkg-tests + - amazonlinux-2023-arm64-pkg-tests - centos-7-pkg-tests - centosstream-8-pkg-tests - centosstream-9-pkg-tests diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 41108ef0e10..0921cf186a3 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -858,6 +858,40 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + amazonlinux-2023-ci-deps: + name: Amazon Linux 2023 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: 
amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + amazonlinux-2023-arm64-ci-deps: + name: Amazon Linux 2023 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + archlinux-lts-ci-deps: name: Arch Linux LTS Deps if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -1339,6 +1373,72 @@ jobs: skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + amazonlinux-2-arm64-pkg-tests: + name: Amazon Linux 2 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ 
needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2023-pkg-tests: + name: Amazon Linux 2023 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2023-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2023-arm64-pkg-tests: + name: Amazon Linux 2023 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2023-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + centos-7-pkg-tests: name: CentOS 7 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -2043,6 +2143,66 @@ jobs: workflow-slug: nightly default-timeout: 360 + amazonlinux-2-arm64: + name: Amazon Linux 2 Arm64 Test + if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + + amazonlinux-2023: + name: Amazon Linux 2023 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2023-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + + amazonlinux-2023-arm64: + name: Amazon Linux 2023 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2023-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed 
}}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + archlinux-lts: name: Arch Linux LTS Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -2543,6 +2703,8 @@ jobs: - almalinux-9-arm64-ci-deps - amazonlinux-2-ci-deps - amazonlinux-2-arm64-ci-deps + - amazonlinux-2023-ci-deps + - amazonlinux-2023-arm64-ci-deps - archlinux-lts-ci-deps - centos-7-ci-deps - centos-7-arm64-ci-deps @@ -2577,6 +2739,9 @@ jobs: - almalinux-8 - almalinux-9 - amazonlinux-2 + - amazonlinux-2-arm64 + - amazonlinux-2023 + - amazonlinux-2023-arm64 - archlinux-lts - centos-7 - centosstream-8 @@ -2949,6 +3114,14 @@ jobs: distro: amazon version: "2" arch: aarch64 + - pkg-type: rpm + distro: amazon + version: "2023" + arch: x86_64 + - pkg-type: rpm + distro: amazon + version: "2023" + arch: aarch64 - pkg-type: rpm distro: redhat version: "7" @@ -3425,6 +3598,8 @@ jobs: - almalinux-9-arm64-ci-deps - amazonlinux-2-ci-deps - amazonlinux-2-arm64-ci-deps + - amazonlinux-2023-ci-deps + - amazonlinux-2023-arm64-ci-deps - archlinux-lts-ci-deps - centos-7-ci-deps - centos-7-arm64-ci-deps @@ -3459,6 +3634,9 @@ jobs: - almalinux-8 - almalinux-9 - amazonlinux-2 + - amazonlinux-2-arm64 + - amazonlinux-2023 + - amazonlinux-2023-arm64 - archlinux-lts - centos-7 - centosstream-8 @@ -3543,6 +3721,9 @@ jobs: - combine-all-code-coverage - publish-repositories - amazonlinux-2-pkg-tests + - amazonlinux-2-arm64-pkg-tests + - amazonlinux-2023-pkg-tests + - amazonlinux-2023-arm64-pkg-tests - centos-7-pkg-tests - centosstream-8-pkg-tests - centosstream-9-pkg-tests diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 0a31e7601e2..6b7b0037a8d 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -321,6 +321,38 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + 
amazonlinux-2023-ci-deps: + name: Amazon Linux 2023 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + amazonlinux-2023-arm64-ci-deps: + name: Amazon Linux 2023 Arm64 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + archlinux-lts-ci-deps: name: Arch Linux LTS Deps needs: @@ -826,6 +858,8 @@ jobs: - almalinux-9-ci-deps - amazonlinux-2-arm64-ci-deps - amazonlinux-2-ci-deps + - amazonlinux-2023-arm64-ci-deps + - amazonlinux-2023-ci-deps - centos-7-arm64-ci-deps - centos-7-ci-deps - centosstream-8-arm64-ci-deps @@ -1050,6 +1084,8 @@ jobs: - almalinux-9-arm64-ci-deps - amazonlinux-2-ci-deps - amazonlinux-2-arm64-ci-deps + - amazonlinux-2023-ci-deps + - amazonlinux-2023-arm64-ci-deps - archlinux-lts-ci-deps - centos-7-ci-deps - centos-7-arm64-ci-deps diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index 8feb2ec7870..9892a6a92a0 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -831,6 +831,40 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + amazonlinux-2023-ci-deps: + name: Amazon Linux 2023 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + amazonlinux-2023-arm64-ci-deps: + name: Amazon Linux 2023 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + archlinux-lts-ci-deps: name: Arch Linux LTS Deps if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -1312,6 +1346,72 @@ jobs: skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + amazonlinux-2-arm64-pkg-tests: + name: Amazon Linux 2 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + 
python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2023-pkg-tests: + name: Amazon Linux 2023 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2023-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2023-arm64-pkg-tests: + name: Amazon Linux 2023 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2023-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + centos-7-pkg-tests: name: CentOS 7 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -2016,6 +2116,66 @@ jobs: workflow-slug: scheduled default-timeout: 360 + amazonlinux-2-arm64: + name: Amazon Linux 2 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + + amazonlinux-2023: + name: Amazon Linux 2023 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2023-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + + amazonlinux-2023-arm64: + name: Amazon Linux 2023 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2023-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + 
python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + archlinux-lts: name: Arch Linux LTS Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -2516,6 +2676,8 @@ jobs: - almalinux-9-arm64-ci-deps - amazonlinux-2-ci-deps - amazonlinux-2-arm64-ci-deps + - amazonlinux-2023-ci-deps + - amazonlinux-2023-arm64-ci-deps - archlinux-lts-ci-deps - centos-7-ci-deps - centos-7-arm64-ci-deps @@ -2550,6 +2712,9 @@ jobs: - almalinux-8 - almalinux-9 - amazonlinux-2 + - amazonlinux-2-arm64 + - amazonlinux-2023 + - amazonlinux-2023-arm64 - archlinux-lts - centos-7 - centosstream-8 @@ -2709,6 +2874,8 @@ jobs: - almalinux-9-arm64-ci-deps - amazonlinux-2-ci-deps - amazonlinux-2-arm64-ci-deps + - amazonlinux-2023-ci-deps + - amazonlinux-2023-arm64-ci-deps - archlinux-lts-ci-deps - centos-7-ci-deps - centos-7-arm64-ci-deps @@ -2743,6 +2910,9 @@ jobs: - almalinux-8 - almalinux-9 - amazonlinux-2 + - amazonlinux-2-arm64 + - amazonlinux-2023 + - amazonlinux-2023-arm64 - archlinux-lts - centos-7 - centosstream-8 @@ -2766,6 +2936,9 @@ jobs: - ubuntu-2204 - ubuntu-2204-arm64 - amazonlinux-2-pkg-tests + - amazonlinux-2-arm64-pkg-tests + - amazonlinux-2023-pkg-tests + - amazonlinux-2023-arm64-pkg-tests - centos-7-pkg-tests - centosstream-8-pkg-tests - centosstream-9-pkg-tests diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index fdad325bee2..3d8fb0bfe5f 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -853,6 +853,40 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + amazonlinux-2023-ci-deps: + name: Amazon Linux 2023 Deps + if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + amazonlinux-2023-arm64-ci-deps: + name: Amazon Linux 2023 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + archlinux-lts-ci-deps: name: Arch Linux LTS Deps if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -1334,6 +1368,72 @@ jobs: skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + amazonlinux-2-arm64-pkg-tests: + name: Amazon Linux 2 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2023-pkg-tests: + name: Amazon Linux 2023 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2023-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2023-arm64-pkg-tests: + name: Amazon Linux 2023 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2023-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + centos-7-pkg-tests: name: CentOS 7 Package Test if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -2038,6 +2138,66 @@ jobs: workflow-slug: staging default-timeout: 180 + amazonlinux-2-arm64: + name: Amazon Linux 2 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + + amazonlinux-2023: + name: Amazon Linux 2023 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2023-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + + amazonlinux-2023-arm64: + name: Amazon Linux 2023 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2023-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: 
linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + archlinux-lts: name: Arch Linux LTS Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -2769,6 +2929,14 @@ jobs: distro: amazon version: "2" arch: aarch64 + - pkg-type: rpm + distro: amazon + version: "2023" + arch: x86_64 + - pkg-type: rpm + distro: amazon + version: "2023" + arch: aarch64 - pkg-type: rpm distro: redhat version: "7" @@ -3349,6 +3517,8 @@ jobs: - almalinux-9-ci-deps - amazonlinux-2-arm64-ci-deps - amazonlinux-2-ci-deps + - amazonlinux-2023-arm64-ci-deps + - amazonlinux-2023-ci-deps - centos-7-arm64-ci-deps - centos-7-ci-deps - centosstream-8-arm64-ci-deps @@ -3407,6 +3577,8 @@ jobs: - almalinux-9-arm64-ci-deps - amazonlinux-2-ci-deps - amazonlinux-2-arm64-ci-deps + - amazonlinux-2023-ci-deps + - amazonlinux-2023-arm64-ci-deps - archlinux-lts-ci-deps - centos-7-ci-deps - centos-7-arm64-ci-deps @@ -3441,6 +3613,9 @@ jobs: - almalinux-8 - almalinux-9 - amazonlinux-2 + - amazonlinux-2-arm64 + - amazonlinux-2023 + - amazonlinux-2023-arm64 - archlinux-lts - centos-7 - centosstream-8 @@ -3464,6 +3639,9 @@ jobs: - ubuntu-2204 - ubuntu-2204-arm64 - amazonlinux-2-pkg-tests + - amazonlinux-2-arm64-pkg-tests + - amazonlinux-2023-pkg-tests + - amazonlinux-2023-arm64-pkg-tests - centos-7-pkg-tests - centosstream-8-pkg-tests - centosstream-9-pkg-tests diff --git a/.github/workflows/templates/build-rpm-repo.yml.jinja b/.github/workflows/templates/build-rpm-repo.yml.jinja index 405461a99b3..208f2096301 100644 --- a/.github/workflows/templates/build-rpm-repo.yml.jinja +++ b/.github/workflows/templates/build-rpm-repo.yml.jinja @@ -6,6 +6,8 @@ <%- for distro, 
version, arch in ( ("amazon", "2", "x86_64"), ("amazon", "2", "aarch64"), + ("amazon", "2023", "x86_64"), + ("amazon", "2023", "aarch64"), ("redhat", "7", "x86_64"), ("redhat", "7", "aarch64"), ("redhat", "8", "x86_64"), diff --git a/.github/workflows/test-package-downloads-action.yml b/.github/workflows/test-package-downloads-action.yml index 7ca255f79ba..b90e17f2d57 100644 --- a/.github/workflows/test-package-downloads-action.yml +++ b/.github/workflows/test-package-downloads-action.yml @@ -83,6 +83,12 @@ jobs: - distro-slug: amazonlinux-2-arm64 arch: aarch64 pkg-type: package + - distro-slug: amazonlinux-2023 + arch: x86_64 + pkg-type: package + - distro-slug: amazonlinux-2023-arm64 + arch: aarch64 + pkg-type: package - distro-slug: centos-7 arch: x86_64 pkg-type: package diff --git a/changelog/64455.added.md b/changelog/64455.added.md new file mode 100644 index 00000000000..8885a93e59f --- /dev/null +++ b/changelog/64455.added.md @@ -0,0 +1 @@ +Added Salt support for Amazon Linux 2023 diff --git a/tools/ci.py b/tools/ci.py index e376105ea63..916ea0b4e2d 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -724,6 +724,8 @@ def pkg_matrix( if ( distro_slug not in [ + "amazon-2023", + "amazon-2023-arm64", "debian-11-arm64", # TODO: remove debian 12 once debian 12 pkgs are released "debian-12-arm64", @@ -765,6 +767,8 @@ def pkg_matrix( if ( distro_slug not in [ + "amazon-2023", + "amazon-2023-arm64", "centosstream-9", "debian-11-arm64", "debian-12-arm64", diff --git a/tools/pre_commit.py b/tools/pre_commit.py index fb6f70303a1..7e86b69fdb4 100644 --- a/tools/pre_commit.py +++ b/tools/pre_commit.py @@ -89,6 +89,9 @@ def generate_workflows(ctx: Context): ("almalinux-8", "Alma Linux 8", "x86_64"), ("almalinux-9", "Alma Linux 9", "x86_64"), ("amazonlinux-2", "Amazon Linux 2", "x86_64"), + ("amazonlinux-2-arm64", "Amazon Linux 2 Arm64", "aarch64"), + ("amazonlinux-2023", "Amazon Linux 2023", "x86_64"), + ("amazonlinux-2023-arm64", "Amazon Linux 2023 Arm64", "aarch64"), 
("archlinux-lts", "Arch Linux LTS", "x86_64"), ("centos-7", "CentOS 7", "x86_64"), ("centosstream-8", "CentOS Stream 8", "x86_64"), @@ -122,6 +125,9 @@ def generate_workflows(ctx: Context): test_salt_pkg_listing = { "linux": ( ("amazonlinux-2", "Amazon Linux 2", "x86_64", "rpm"), + ("amazonlinux-2-arm64", "Amazon Linux 2 Arm64", "aarch64", "rpm"), + ("amazonlinux-2023", "Amazon Linux 2023", "x86_64", "rpm"), + ("amazonlinux-2023-arm64", "Amazon Linux 2023 Arm64", "aarch64", "rpm"), ("centos-7", "CentOS 7", "x86_64", "rpm"), ("centosstream-8", "CentOS Stream 8", "x86_64", "rpm"), ("centosstream-9", "CentOS Stream 9", "x86_64", "rpm"), @@ -156,6 +162,8 @@ def generate_workflows(ctx: Context): ("almalinux-9-arm64", "Alma Linux 9 Arm64", "aarch64"), ("amazonlinux-2", "Amazon Linux 2", "x86_64"), ("amazonlinux-2-arm64", "Amazon Linux 2 Arm64", "aarch64"), + ("amazonlinux-2023", "Amazon Linux 2023", "x86_64"), + ("amazonlinux-2023-arm64", "Amazon Linux 2023 Arm64", "aarch64"), ("archlinux-lts", "Arch Linux LTS", "x86_64"), ("centos-7", "CentOS 7", "x86_64"), ("centos-7-arm64", "CentOS 7 Arm64", "aarch64"), From 324a1519c1220616c83571665312686e7401c1b1 Mon Sep 17 00:00:00 2001 From: Megan Wilhite Date: Tue, 17 Oct 2023 12:59:51 -0600 Subject: [PATCH 041/196] Fix amazon linux 2023 64 bit tests and only run install package tests --- pkg/tests/integration/test_pkg.py | 2 ++ pkg/tests/integration/test_systemd_config.py | 1 + tools/ci.py | 5 +++++ 3 files changed, 8 insertions(+) diff --git a/pkg/tests/integration/test_pkg.py b/pkg/tests/integration/test_pkg.py index 5aedefa6ef1..4dcc1c997ed 100644 --- a/pkg/tests/integration/test_pkg.py +++ b/pkg/tests/integration/test_pkg.py @@ -14,6 +14,8 @@ def pkg_name(salt_call_cli, grains): elif grains["os_family"] == "RedHat": if grains["os"] == "VMware Photon OS": return "snoopy" + elif grains["os"] == "Amazon": + return "dnf-utils" return "units" elif grains["os_family"] == "Debian": return "ifenslave" diff --git 
a/pkg/tests/integration/test_systemd_config.py b/pkg/tests/integration/test_systemd_config.py index 05a4c852cb6..0ac1379b94b 100644 --- a/pkg/tests/integration/test_systemd_config.py +++ b/pkg/tests/integration/test_systemd_config.py @@ -25,6 +25,7 @@ def test_system_config(salt_cli, salt_minion): "VMware Photon OS-3", "VMware Photon OS-4", "VMware Photon OS-5", + "Amazon Linux-2023", ): ret = subprocess.call( "systemctl show -p ${config} salt-minion.service", shell=True diff --git a/tools/ci.py b/tools/ci.py index 916ea0b4e2d..9802eb3405e 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -730,6 +730,9 @@ def pkg_matrix( # TODO: remove debian 12 once debian 12 pkgs are released "debian-12-arm64", "debian-12", + # TODO: remove amazon 2023 once amazon 2023 pkgs are released + "amazonlinux-2023", + "amazonlinux-2023-arm64", "ubuntu-20.04-arm64", "ubuntu-22.04-arm64", "photonos-3", @@ -773,6 +776,8 @@ def pkg_matrix( "debian-11-arm64", "debian-12-arm64", "debian-12", + "amazonlinux-2023", + "amazonlinux-2023-arm64", "ubuntu-22.04", "ubuntu-22.04-arm64", "photonos-3", From 1c312ca9c086b34c662fcaf66bdf75e573dbada7 Mon Sep 17 00:00:00 2001 From: Megan Wilhite Date: Wed, 18 Oct 2023 12:49:59 -0600 Subject: [PATCH 042/196] Fix amazon linux 2023 tests --- pkg/tests/integration/test_pkg.py | 2 +- pkg/tests/integration/test_version.py | 3 ++- .../pytests/functional/states/pkgrepo/test_centos.py | 11 ++++++----- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/pkg/tests/integration/test_pkg.py b/pkg/tests/integration/test_pkg.py index 4dcc1c997ed..6e90e0a9349 100644 --- a/pkg/tests/integration/test_pkg.py +++ b/pkg/tests/integration/test_pkg.py @@ -14,7 +14,7 @@ def pkg_name(salt_call_cli, grains): elif grains["os_family"] == "RedHat": if grains["os"] == "VMware Photon OS": return "snoopy" - elif grains["os"] == "Amazon": + elif grains["osfinger"] == "Amazon Linux-2023": return "dnf-utils" return "units" elif grains["os_family"] == "Debian": diff --git 
a/pkg/tests/integration/test_version.py b/pkg/tests/integration/test_version.py index 2c3c539ca40..d559b060665 100644 --- a/pkg/tests/integration/test_version.py +++ b/pkg/tests/integration/test_version.py @@ -1,5 +1,6 @@ import os.path import pathlib +import re import subprocess import pytest @@ -117,7 +118,7 @@ def test_compare_pkg_versions_redhat_rc(version, install_salt): if not pkg: pytest.skip("Not testing rpm packages") pkg = pkg[0].split("/")[-1] - if "rc" not in pkg: + if not re.search(r"rc[0-9]", pkg): pytest.skip("Not testing an RC package") assert "~" in pkg comp_pkg = pkg.split("~")[0] diff --git a/tests/pytests/functional/states/pkgrepo/test_centos.py b/tests/pytests/functional/states/pkgrepo/test_centos.py index 6a84f96ac98..67327d8c6d9 100644 --- a/tests/pytests/functional/states/pkgrepo/test_centos.py +++ b/tests/pytests/functional/states/pkgrepo/test_centos.py @@ -237,12 +237,13 @@ def test_pkgrepo_with_comments(pkgrepo, pkgrepo_with_comments_name, subtests): @pytest.fixture def copr_pkgrepo_with_comments_name(pkgrepo, grains): - if ( - grains["osfinger"] in ("CentOS Linux-7", "Amazon Linux-2") - or grains["os"] == "VMware Photon OS" - ): + if grains["osfinger"] in ("CentOS Linux-7") or grains["os"] == "VMware Photon OS": pytest.skip("copr plugin not installed on {} CI".format(grains["osfinger"])) - if grains["os"] in ("CentOS Stream", "AlmaLinux") and grains["osmajorrelease"] == 9: + if ( + grains["os"] in ("CentOS Stream", "AlmaLinux") + and grains["osmajorrelease"] == 9 + or grains["osfinger"] == "Amazon Linux-2023" + ): pytest.skip("No repo for {} in test COPR yet".format(grains["osfinger"])) pkgrepo_name = "hello-copr" try: From 66bdc36a31f1ed92f718478ab48e9a0fc3b99742 Mon Sep 17 00:00:00 2001 From: Megan Wilhite Date: Thu, 19 Oct 2023 13:57:58 -0600 Subject: [PATCH 043/196] Fix amazon linux 2 tests --- pkg/tests/support/helpers.py | 2 +- tests/pytests/functional/modules/test_pkg.py | 2 ++ 
tests/pytests/functional/states/pkgrepo/test_centos.py | 6 +++++- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/pkg/tests/support/helpers.py b/pkg/tests/support/helpers.py index 7cc96fee175..45d0f91ce1a 100644 --- a/pkg/tests/support/helpers.py +++ b/pkg/tests/support/helpers.py @@ -516,7 +516,7 @@ class SaltPkgInstall: gpg_key = "SALT-PROJECT-GPG-PUBKEY-2023.pub" if platform.is_aarch64(): - arch = "aarch64" + arch = "arm64" else: arch = "x86_64" ret = self.proc.run( diff --git a/tests/pytests/functional/modules/test_pkg.py b/tests/pytests/functional/modules/test_pkg.py index 44769d84c97..8e16fd3fdc4 100644 --- a/tests/pytests/functional/modules/test_pkg.py +++ b/tests/pytests/functional/modules/test_pkg.py @@ -64,6 +64,8 @@ def test_pkg(grains): elif grains["os_family"] == "RedHat": if grains["os"] == "VMware Photon OS": _pkg = "snoopy" + elif grains["osfinger"] == "Amazon Linux-2023": + return "dnf-utils" else: _pkg = "units" elif grains["os_family"] == "Debian": diff --git a/tests/pytests/functional/states/pkgrepo/test_centos.py b/tests/pytests/functional/states/pkgrepo/test_centos.py index 67327d8c6d9..81500b8bd55 100644 --- a/tests/pytests/functional/states/pkgrepo/test_centos.py +++ b/tests/pytests/functional/states/pkgrepo/test_centos.py @@ -237,7 +237,11 @@ def test_pkgrepo_with_comments(pkgrepo, pkgrepo_with_comments_name, subtests): @pytest.fixture def copr_pkgrepo_with_comments_name(pkgrepo, grains): - if grains["osfinger"] in ("CentOS Linux-7") or grains["os"] == "VMware Photon OS": + if ( + grains["osfinger"] in ("CentOS Linux-7") + or grains["os"] == "VMware Photon OS" + or grains["osfinger"] == "Amazon Linux-2" + ): pytest.skip("copr plugin not installed on {} CI".format(grains["osfinger"])) if ( grains["os"] in ("CentOS Stream", "AlmaLinux") From 1195971ffdb8761508ae3dbfffcb924240b0bead Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Fri, 20 Oct 2023 13:38:09 +0100 Subject: [PATCH 044/196] Simplify logic Signed-off-by: Pedro 
Algarvio --- tests/pytests/functional/states/pkgrepo/test_centos.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/pytests/functional/states/pkgrepo/test_centos.py b/tests/pytests/functional/states/pkgrepo/test_centos.py index 81500b8bd55..c02da519d2f 100644 --- a/tests/pytests/functional/states/pkgrepo/test_centos.py +++ b/tests/pytests/functional/states/pkgrepo/test_centos.py @@ -238,9 +238,8 @@ def test_pkgrepo_with_comments(pkgrepo, pkgrepo_with_comments_name, subtests): @pytest.fixture def copr_pkgrepo_with_comments_name(pkgrepo, grains): if ( - grains["osfinger"] in ("CentOS Linux-7") + grains["osfinger"] in ("CentOS Linux-7", "Amazon Linux-2") or grains["os"] == "VMware Photon OS" - or grains["osfinger"] == "Amazon Linux-2" ): pytest.skip("copr plugin not installed on {} CI".format(grains["osfinger"])) if ( From a199b5b4543c5be38dd562e4ca9c468607236fb6 Mon Sep 17 00:00:00 2001 From: Megan Wilhite Date: Fri, 20 Oct 2023 09:19:10 -0600 Subject: [PATCH 045/196] Only run amazon linux2 package tests for >=3006 --- tools/ci.py | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/tools/ci.py b/tools/ci.py index 9802eb3405e..81d059466ba 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -721,6 +721,12 @@ def pkg_matrix( sessions = [ "install", ] + # OSs that where never included in 3005 + # We cannot test an upgrade for this OS on this version + not_3005 = ["amazonlinux-2-arm64", "photonos-5", "photonos-5-arm64"] + # OSs that where never included in 3006 + # We cannot test an upgrade for this OS on this version + not_3006 = ["photonos-5", "photonos-5-arm64"] if ( distro_slug not in [ @@ -806,10 +812,17 @@ def pkg_matrix( for version in versions: if ( version - and distro_slug.startswith("photonos-5") + and distro_slug in not_3005 + and version < tools.utils.Version("3006.0") + ): + # We never build packages for these OSs in 3005 + continue + elif ( + version + and distro_slug in not_3006 and version < 
tools.utils.Version("3007.0") ): - # We never build packages for Photon OS 5 prior to 3007.0 + # We never build packages for these OSs in 3006 continue _matrix.append( { From fe368f416696fca2f7e9430aa81d135a8cc31296 Mon Sep 17 00:00:00 2001 From: Megan Wilhite Date: Fri, 3 Nov 2023 10:20:06 -0600 Subject: [PATCH 046/196] update package name in tests for amazon linux 2023 --- tests/pytests/functional/states/test_pkg.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tests/pytests/functional/states/test_pkg.py b/tests/pytests/functional/states/test_pkg.py index 30a0e8e1c36..fd7e1c57d80 100644 --- a/tests/pytests/functional/states/test_pkg.py +++ b/tests/pytests/functional/states/test_pkg.py @@ -43,7 +43,10 @@ def PKG_TARGETS(grains): if grains["os"] == "Windows": _PKG_TARGETS = ["vlc", "putty"] elif grains["os"] == "Amazon": - _PKG_TARGETS = ["lynx", "gnuplot"] + if grains["osfinger"] == "Amazon Linux-2023": + _PKG_TARGETS = ["lynx", "gnuplot-minimal"] + else: + _PKG_TARGETS = ["lynx", "gnuplot"] elif grains["os_family"] == "RedHat": if grains["os"] == "VMware Photon OS": if grains["osmajorrelease"] >= 5: From ecd29f5a22a61f165922d814c1ea802dd89bce58 Mon Sep 17 00:00:00 2001 From: Megan Wilhite Date: Fri, 3 Nov 2023 10:41:55 -0600 Subject: [PATCH 047/196] Fix pre-commit --- .github/workflows/ci.yml | 6 ++++++ .github/workflows/nightly.yml | 6 ++++++ .github/workflows/scheduled.yml | 6 ++++++ .github/workflows/staging.yml | 6 ++++++ 4 files changed, 24 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index bf6eba7efd8..b91e9f780cb 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -2101,6 +2101,8 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 amazonlinux-2023: 
name: Amazon Linux 2023 Test @@ -2121,6 +2123,8 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 amazonlinux-2023-arm64: name: Amazon Linux 2023 Arm64 Test @@ -2141,6 +2145,8 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 archlinux-lts: name: Arch Linux LTS Test diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 0921cf186a3..12405289210 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -2162,6 +2162,8 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 amazonlinux-2023: name: Amazon Linux 2023 Test @@ -2182,6 +2184,8 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 amazonlinux-2023-arm64: name: Amazon Linux 2023 Arm64 Test @@ -2202,6 +2206,8 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 archlinux-lts: name: Arch Linux LTS Test diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index 9892a6a92a0..cf7d7af20df 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -2135,6 +2135,8 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false 
skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 amazonlinux-2023: name: Amazon Linux 2023 Test @@ -2155,6 +2157,8 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 amazonlinux-2023-arm64: name: Amazon Linux 2023 Arm64 Test @@ -2175,6 +2179,8 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 archlinux-lts: name: Arch Linux LTS Test diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 3d8fb0bfe5f..a06ed67a46f 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -2157,6 +2157,8 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 amazonlinux-2023: name: Amazon Linux 2023 Test @@ -2177,6 +2179,8 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 amazonlinux-2023-arm64: name: Amazon Linux 2023 Arm64 Test @@ -2197,6 +2201,8 @@ jobs: cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 archlinux-lts: name: Arch Linux LTS Test From 404a659a39d8327b4555f787c962f249920848b5 Mon Sep 17 00:00:00 2001 From: Megan Wilhite Date: Mon, 6 Nov 2023 10:42:29 -0700 Subject: [PATCH 048/196] Add download flag to tool's rsync cmd --- tools/vm.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/tools/vm.py b/tools/vm.py index 97cd52f9f37..40f5d7f6bce 100644 --- a/tools/vm.py +++ b/tools/vm.py @@ -222,14 +222,18 @@ def ssh(ctx: Context, name: str, 
command: list[str], sudo: bool = False): "help": "The VM Name", "metavar": "VM_NAME", }, + "download": { + "help": "Rsync from the remote target to local salt checkout", + "action": "store_true", + }, } ) -def rsync(ctx: Context, name: str): +def rsync(ctx: Context, name: str, download: bool = False): """ Sync local checkout to VM. """ vm = VM(ctx=ctx, name=name, region_name=ctx.parser.options.region) - vm.upload_checkout() + vm.upload_checkout(download=download) @vm.command( @@ -1293,7 +1297,7 @@ class VM: shutil.rmtree(self.state_dir, ignore_errors=True) self.instance = None - def upload_checkout(self, verbose=True): + def upload_checkout(self, verbose=True, download=False): rsync_flags = [ "--delete", "--no-group", @@ -1335,7 +1339,10 @@ class VM: rsync_remote_path = rsync_remote_path.replace(drive, "/cygdrive/c") destination = f"{self.name}:{rsync_remote_path}" description = "Rsync local checkout to VM..." - self.rsync(source, destination, description, rsync_flags) + if download: + self.rsync(destination + "/*", source, description, rsync_flags) + else: + self.rsync(source, destination, description, rsync_flags) if self.is_windows: # rsync sets very strict file permissions and disables inheritance # we only need to reset permissions so they inherit from the parent From 2b142dc81b48cee8a46fdf8eb390cbba8cb7f429 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 9 Nov 2023 17:24:49 +0000 Subject: [PATCH 049/196] Update tools/vm.py --- tools/vm.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/vm.py b/tools/vm.py index 40f5d7f6bce..33a230b7de3 100644 --- a/tools/vm.py +++ b/tools/vm.py @@ -1340,7 +1340,7 @@ class VM: destination = f"{self.name}:{rsync_remote_path}" description = "Rsync local checkout to VM..." 
if download: - self.rsync(destination + "/*", source, description, rsync_flags) + self.rsync(f"{destination}/*", source, description, rsync_flags) else: self.rsync(source, destination, description, rsync_flags) if self.is_windows: From 945137bd10aa0d985d225c711b1ac6989ca98986 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Wed, 4 Oct 2023 17:13:51 -0600 Subject: [PATCH 050/196] Updated tests for saltutil for code-coverage --- .../integration/runners/test_saltutil.py | 30 ++++++++ tests/pytests/unit/modules/test_saltutil.py | 69 ++++++++++++++++++- 2 files changed, 98 insertions(+), 1 deletion(-) diff --git a/tests/pytests/integration/runners/test_saltutil.py b/tests/pytests/integration/runners/test_saltutil.py index 22ae12285ac..edc81f24f1b 100644 --- a/tests/pytests/integration/runners/test_saltutil.py +++ b/tests/pytests/integration/runners/test_saltutil.py @@ -98,6 +98,36 @@ def world(): assert "{}.hello".format(module_type) in ret.stdout +def test_sync_refresh_false( + module_type, module_sync_functions, salt_run_cli, salt_minion, salt_master +): + """ + Ensure modules are synced when various sync functions are called + """ + module_name = "hello_sync_{}".format(module_type) + module_contents = """ +def __virtual__(): + return "hello" + +def world(): + return "world" +""" + + test_moduledir = salt_master.state_tree.base.write_path / "_{}".format(module_type) + test_moduledir.mkdir(parents=True, exist_ok=True) + module_tempfile = salt_master.state_tree.base.temp_file( + "_{}/{}.py".format(module_type, module_name), module_contents + ) + + with module_tempfile: + salt_cmd = "saltutil.sync_{}".format(module_sync_functions[module_type]) + ret = salt_run_cli.run(salt_cmd, saltenv=None, refresh=False) + assert ret.returncode == 0 + assert ( + "saltutil.sync_{}".format(module_sync_functions[module_type]) in ret.stdout + ) + + def _write_module_dir_and_file(module_type, salt_minion, salt_master): """ Write out dummy module to appropriate 
module location diff --git a/tests/pytests/unit/modules/test_saltutil.py b/tests/pytests/unit/modules/test_saltutil.py index 97527d3dc24..a25877b6d24 100644 --- a/tests/pytests/unit/modules/test_saltutil.py +++ b/tests/pytests/unit/modules/test_saltutil.py @@ -8,7 +8,19 @@ from tests.support.mock import sentinel as s @pytest.fixture def configure_loader_modules(): - return {saltutil: {"__opts__": {"file_client": "local"}}} + return { + saltutil: { + "__opts__": { + "file_client": "local", + "cachedir": "/tmp", + "pki_dir": "/tmp/pki_dir", + "id": "minion", + "master_uri": "tcp://127.0.0.1:4505", + "__role": "minion", + "keysize": 2048, + } + } + } def test_exec_kwargs(): @@ -90,12 +102,24 @@ def test_refresh_grains_default_clean_pillar_cache(): refresh_pillar.assert_called_with(clean_cache=False) +def test_refresh_grains_default_clean_pillar_cache_with_refresh_false(): + with patch("salt.modules.saltutil.refresh_modules") as refresh_modules: + saltutil.refresh_grains(refresh_pillar=False) + refresh_modules.assert_called() + + def test_refresh_grains_clean_pillar_cache(): with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: saltutil.refresh_grains(clean_pillar_cache=True) refresh_pillar.assert_called_with(clean_cache=True) +def test_refresh_grains_clean_pillar_cache_with_refresh_false(): + with patch("salt.modules.saltutil.refresh_modules") as refresh_modules: + saltutil.refresh_grains(clean_pillar_cache=True, refresh_pillar=False) + refresh_modules.assert_called() + + def test_sync_grains_default_clean_pillar_cache(): with patch("salt.modules.saltutil._sync"): with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: @@ -136,3 +160,46 @@ def test_sync_all_clean_pillar_cache(): with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: saltutil.sync_all(clean_pillar_cache=True) refresh_pillar.assert_called_with(clean_cache=True) + + +@pytest.mark.skip_on_windows(reason="making use of /tmp directory") +def 
test_list_extmods(salt_call_cli): + ret = salt_call_cli.run("--local", "cmd.run", "mkdir -p /tmp/extmods/dummydir") + assert ret.returncode == 0 + + ret = saltutil.list_extmods() + assert "dummydir" in ret + assert ret["dummydir"] == [] + + +def test_refresh_beacons(): + ret = saltutil.refresh_beacons() + assert ret is False + + +def test_refresh_matchers(): + ret = saltutil.refresh_matchers() + assert ret is False + + +def test_refresh_modules_async_false(): + ## ret = saltutil.refresh_modules( kwargs({"async": False}) ) + kwargs = {"async": False} + ret = saltutil.refresh_modules(**kwargs) + assert ret is False + + +def test_clear_job_cache(salt_call_cli): + ret = salt_call_cli.run("--local", "cmd.run", "mkdir -p /tmp/minion_jobs/dummydir") + assert ret.returncode == 0 + + ret = saltutil.clear_job_cache(hours=1) + assert ret is True + + +@pytest.mark.destructive_test +def test_regen_keys(salt_call_cli): + ret = salt_call_cli.run("--local", "cmd.run", "mkdir -p /tmp/pki_dir/dummydir") + assert ret.returncode == 0 + + saltutil.regen_keys() From be3b6cd64b0876afac3c519a888fd3801b5333fa Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Mon, 9 Oct 2023 12:54:31 -0600 Subject: [PATCH 051/196] Skip some tests on Windows --- tests/pytests/unit/modules/test_saltutil.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/pytests/unit/modules/test_saltutil.py b/tests/pytests/unit/modules/test_saltutil.py index a25877b6d24..e0bee7e8de3 100644 --- a/tests/pytests/unit/modules/test_saltutil.py +++ b/tests/pytests/unit/modules/test_saltutil.py @@ -189,6 +189,7 @@ def test_refresh_modules_async_false(): assert ret is False +@pytest.mark.skip_on_windows(reason="making use of /tmp directory") def test_clear_job_cache(salt_call_cli): ret = salt_call_cli.run("--local", "cmd.run", "mkdir -p /tmp/minion_jobs/dummydir") assert ret.returncode == 0 @@ -197,6 +198,7 @@ def test_clear_job_cache(salt_call_cli): assert ret is True 
+@pytest.mark.skip_on_windows(reason="making use of /tmp directory") @pytest.mark.destructive_test def test_regen_keys(salt_call_cli): ret = salt_call_cli.run("--local", "cmd.run", "mkdir -p /tmp/pki_dir/dummydir") From b92b13cf0779953101e616aee1ab22ecb5970c3d Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Thu, 19 Oct 2023 13:56:55 -0600 Subject: [PATCH 052/196] Update to f-strings as per reviewers comments --- .../integration/runners/test_saltutil.py | 28 +++++++++---------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/tests/pytests/integration/runners/test_saltutil.py b/tests/pytests/integration/runners/test_saltutil.py index edc81f24f1b..cc194030adc 100644 --- a/tests/pytests/integration/runners/test_saltutil.py +++ b/tests/pytests/integration/runners/test_saltutil.py @@ -76,7 +76,7 @@ def test_sync( """ Ensure modules are synced when various sync functions are called """ - module_name = "hello_sync_{}".format(module_type) + module_name = f"hello_sync_{module_type}" module_contents = """ def __virtual__(): return "hello" @@ -85,17 +85,17 @@ def world(): return "world" """ - test_moduledir = salt_master.state_tree.base.write_path / "_{}".format(module_type) + test_moduledir = salt_master.state_tree.base.write_path / f"_{module_type}" test_moduledir.mkdir(parents=True, exist_ok=True) module_tempfile = salt_master.state_tree.base.temp_file( - "_{}/{}.py".format(module_type, module_name), module_contents + f"_{module_type}/{module_name}.py", module_contents ) with module_tempfile: - salt_cmd = "saltutil.sync_{}".format(module_sync_functions[module_type]) + salt_cmd = f"saltutil.sync_{module_sync_functions[module_type]}" ret = salt_run_cli.run(salt_cmd) assert ret.returncode == 0 - assert "{}.hello".format(module_type) in ret.stdout + assert f"{module_type}.hello" in ret.stdout def test_sync_refresh_false( @@ -104,7 +104,7 @@ def test_sync_refresh_false( """ Ensure modules are synced when various sync functions are called 
""" - module_name = "hello_sync_{}".format(module_type) + module_name = f"hello_sync_{module_type}" module_contents = """ def __virtual__(): return "hello" @@ -113,19 +113,17 @@ def world(): return "world" """ - test_moduledir = salt_master.state_tree.base.write_path / "_{}".format(module_type) + test_moduledir = salt_master.state_tree.base.write_path / f"_{module_type}" test_moduledir.mkdir(parents=True, exist_ok=True) module_tempfile = salt_master.state_tree.base.temp_file( - "_{}/{}.py".format(module_type, module_name), module_contents + f"_{module_type}/{module_name}.py", module_contents ) with module_tempfile: - salt_cmd = "saltutil.sync_{}".format(module_sync_functions[module_type]) + salt_cmd = f"saltutil.sync_{module_sync_functions[module_type]}" ret = salt_run_cli.run(salt_cmd, saltenv=None, refresh=False) assert ret.returncode == 0 - assert ( - "saltutil.sync_{}".format(module_sync_functions[module_type]) in ret.stdout - ) + assert f"saltutil.sync_{module_sync_functions[module_type]}" in ret.stdout def _write_module_dir_and_file(module_type, salt_minion, salt_master): @@ -141,11 +139,11 @@ def world(): return "world" """ - test_moduledir = salt_master.state_tree.base.paths[0] / "_{}".format(module_type) + test_moduledir = salt_master.state_tree.base.paths[0] / f"_{module_type}" test_moduledir.mkdir(parents=True, exist_ok=True) module_tempfile = salt_master.state_tree.base.temp_file( - "_{}/{}.py".format(module_type, module_name), module_contents + f"_{module_type}/{module_name}.py", module_contents ) return module_tempfile @@ -169,4 +167,4 @@ def test_sync_all(salt_run_cli, salt_minion, salt_master): assert ret.returncode == 0 for module_type in get_module_types(): - assert "{}.hello".format(module_type) in ret.stdout + assert f"{module_type}.hello" in ret.stdout From 3a9d28634cea197c456067b243ebf4abb10025d8 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Fri, 27 Oct 2023 11:21:39 -0600 Subject: [PATCH 053/196] Updated test as 
per reviewer comments --- tests/pytests/unit/modules/test_saltutil.py | 41 ++++++++------------- 1 file changed, 15 insertions(+), 26 deletions(-) diff --git a/tests/pytests/unit/modules/test_saltutil.py b/tests/pytests/unit/modules/test_saltutil.py index e0bee7e8de3..a736f1998e3 100644 --- a/tests/pytests/unit/modules/test_saltutil.py +++ b/tests/pytests/unit/modules/test_saltutil.py @@ -1,3 +1,5 @@ +import pathlib + import pytest import salt.modules.saltutil as saltutil @@ -7,18 +9,11 @@ from tests.support.mock import sentinel as s @pytest.fixture -def configure_loader_modules(): +def configure_loader_modules(minion_opts): + minion_opts["file_client"] = "local" return { saltutil: { - "__opts__": { - "file_client": "local", - "cachedir": "/tmp", - "pki_dir": "/tmp/pki_dir", - "id": "minion", - "master_uri": "tcp://127.0.0.1:4505", - "__role": "minion", - "keysize": 2048, - } + "__opts__": minion_opts, } } @@ -162,11 +157,10 @@ def test_sync_all_clean_pillar_cache(): refresh_pillar.assert_called_with(clean_cache=True) -@pytest.mark.skip_on_windows(reason="making use of /tmp directory") -def test_list_extmods(salt_call_cli): - ret = salt_call_cli.run("--local", "cmd.run", "mkdir -p /tmp/extmods/dummydir") - assert ret.returncode == 0 - +def test_list_extmods(salt_call_cli, minion_opts): + pathlib.Path(minion_opts["cachedir"], "extmods", "dummydir").mkdir( + parents=True, exist_ok=True + ) ret = saltutil.list_extmods() assert "dummydir" in ret assert ret["dummydir"] == [] @@ -183,25 +177,20 @@ def test_refresh_matchers(): def test_refresh_modules_async_false(): - ## ret = saltutil.refresh_modules( kwargs({"async": False}) ) kwargs = {"async": False} ret = saltutil.refresh_modules(**kwargs) assert ret is False -@pytest.mark.skip_on_windows(reason="making use of /tmp directory") -def test_clear_job_cache(salt_call_cli): - ret = salt_call_cli.run("--local", "cmd.run", "mkdir -p /tmp/minion_jobs/dummydir") - assert ret.returncode == 0 - +def 
test_clear_job_cache(salt_call_cli, minion_opts): + pathlib.Path(minion_opts["cachedir"], "minion_jobs", "dummydir").mkdir( + parents=True, exist_ok=True + ) ret = saltutil.clear_job_cache(hours=1) assert ret is True -@pytest.mark.skip_on_windows(reason="making use of /tmp directory") @pytest.mark.destructive_test -def test_regen_keys(salt_call_cli): - ret = salt_call_cli.run("--local", "cmd.run", "mkdir -p /tmp/pki_dir/dummydir") - assert ret.returncode == 0 - +def test_regen_keys(salt_call_cli, minion_opts): + pathlib.Path(minion_opts["pki_dir"], "dummydir").mkdir(parents=True, exist_ok=True) saltutil.regen_keys() From e7b5a4e0d8683fbd71a5976881608af910b9b50d Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Mon, 30 Oct 2023 09:49:59 -0600 Subject: [PATCH 054/196] Updated test per reviewer's comments --- tests/pytests/unit/modules/test_saltutil.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/pytests/unit/modules/test_saltutil.py b/tests/pytests/unit/modules/test_saltutil.py index a736f1998e3..42986c464e1 100644 --- a/tests/pytests/unit/modules/test_saltutil.py +++ b/tests/pytests/unit/modules/test_saltutil.py @@ -11,6 +11,7 @@ from tests.support.mock import sentinel as s @pytest.fixture def configure_loader_modules(minion_opts): minion_opts["file_client"] = "local" + minion_opts["master_uri"] = "tcp://127.0.0.1:4505" return { saltutil: { "__opts__": minion_opts, From d94312b5468374dd70d2f1dd21dff509c7f2314b Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Fri, 10 Nov 2023 14:56:59 +0000 Subject: [PATCH 055/196] Add support for AmazonLinux 2023 when building repos Signed-off-by: Pedro Algarvio --- tools/pkg/repo/create.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/pkg/repo/create.py b/tools/pkg/repo/create.py index a131c5fa32b..b1cc0471f9e 100644 --- a/tools/pkg/repo/create.py +++ b/tools/pkg/repo/create.py @@ -320,7 +320,7 @@ def debian( _rpm_distro_info = { - "amazon": ["2"], + "amazon": ["2", 
"2023"], "redhat": ["7", "8", "9"], "fedora": ["36", "37", "38"], "photon": ["3", "4", "5"], From bb4d8e25edde69af9378a18f9fb63a9a07afa74f Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Fri, 10 Nov 2023 15:10:21 +0000 Subject: [PATCH 056/196] Fix package test matrix for amazon-linux-2023 Signed-off-by: Pedro Algarvio --- tools/ci.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/tools/ci.py b/tools/ci.py index 81d059466ba..4e81f3e8411 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -747,6 +747,9 @@ def pkg_matrix( "photonos-4-arm64", "photonos-5", "photonos-5-arm64", + "amazonlinux-2-arm64", + "amazonlinux-2023", + "amazonlinux-2023-arm64", ] and pkg_type != "MSI" ): @@ -824,6 +827,13 @@ def pkg_matrix( ): # We never build packages for these OSs in 3006 continue + if ( + version + and distro_slug.startswith("amazonlinux-2023") + and version < tools.utils.Version("3006.6") + ): + # We never build packages for AmazonLinux 2023 prior to 3006.5 + continue _matrix.append( { "test-chunk": session, From 11dd2ec8a9cc82dfe36866ed26d6ee8a5e9c566c Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Fri, 10 Nov 2023 16:28:01 +0000 Subject: [PATCH 057/196] Use the `grains` fixture Signed-off-by: Pedro Algarvio --- pkg/tests/integration/test_systemd_config.py | 44 ++++++++------------ 1 file changed, 18 insertions(+), 26 deletions(-) diff --git a/pkg/tests/integration/test_systemd_config.py b/pkg/tests/integration/test_systemd_config.py index 0ac1379b94b..6c530b51db2 100644 --- a/pkg/tests/integration/test_systemd_config.py +++ b/pkg/tests/integration/test_systemd_config.py @@ -7,17 +7,13 @@ pytestmark = [ ] -def test_system_config(salt_cli, salt_minion): +@pytest.mark.usefixtures("salt_minion") +def test_system_config(grains): """ Test system config """ - get_family = salt_cli.run("grains.get", "os_family", minion_tgt=salt_minion.id) - assert get_family.returncode == 0 - get_finger = salt_cli.run("grains.get", "osfinger", minion_tgt=salt_minion.id) - assert 
get_finger.returncode == 0 - - if get_family.data == "RedHat": - if get_finger.data in ( + if grains["os_family"] == "RedHat": + if grains["osfinger"] in ( "CentOS Stream-8", "CentOS Linux-8", "CentOS Stream-9", @@ -27,24 +23,20 @@ def test_system_config(salt_cli, salt_minion): "VMware Photon OS-5", "Amazon Linux-2023", ): - ret = subprocess.call( - "systemctl show -p ${config} salt-minion.service", shell=True - ) - assert ret == 0 + expected_retcode = 0 else: - ret = subprocess.call( - "systemctl show -p ${config} salt-minion.service", shell=True - ) - assert ret == 1 + expected_retcode = 1 + ret = subprocess.call( + "systemctl show -p ${config} salt-minion.service", shell=True + ) + assert ret == expected_retcode - elif "Debian" in get_family.stdout: - if "Debian-9" in get_finger.stdout: - ret = subprocess.call( - "systemctl show -p ${config} salt-minion.service", shell=True - ) - assert ret == 1 + elif grains["os_family"] == "Debian": + if grains["osfinger"] == "Debian-9": + expected_retcode = 1 else: - ret = subprocess.call( - "systemctl show -p ${config} salt-minion.service", shell=True - ) - assert ret == 0 + expected_retcode = 0 + ret = subprocess.call( + "systemctl show -p ${config} salt-minion.service", shell=True + ) + assert ret == expected_retcode From 471ca4654fef88d843cdd36d103b0913fa7b185b Mon Sep 17 00:00:00 2001 From: MKLeb Date: Thu, 5 Oct 2023 18:08:36 -0400 Subject: [PATCH 058/196] Migrate `tests/unit/utils/parsers/test_log_parsers.py` to pytest --- .../unit/utils/parsers/test_daemon_mixin.py | 79 + .../unit/utils/parsers/test_log_parsers.py | 775 ++++++++++ tests/unit/utils/test_parsers.py | 1283 ----------------- 3 files changed, 854 insertions(+), 1283 deletions(-) create mode 100644 tests/pytests/unit/utils/parsers/test_daemon_mixin.py create mode 100644 tests/pytests/unit/utils/parsers/test_log_parsers.py delete mode 100644 tests/unit/utils/test_parsers.py diff --git a/tests/pytests/unit/utils/parsers/test_daemon_mixin.py 
b/tests/pytests/unit/utils/parsers/test_daemon_mixin.py new file mode 100644 index 00000000000..0ecddd9280d --- /dev/null +++ b/tests/pytests/unit/utils/parsers/test_daemon_mixin.py @@ -0,0 +1,79 @@ +""" +Tests the PIDfile deletion in the DaemonMixIn. +""" + +import logging + +import pytest + +import salt.utils.parsers +from tests.support.mock import ANY, MagicMock, patch + + +@pytest.fixture +def daemon_mixin(): + mixin = salt.utils.parsers.DaemonMixIn() + mixin.config = {} + mixin.config["pidfile"] = "/some/fake.pid" + return mixin + + +def test_pid_file_deletion(daemon_mixin): + """ + PIDfile deletion without exception. + """ + with patch("os.unlink", MagicMock()) as unlink_mock: + with patch("os.path.isfile", MagicMock(return_value=True)): + with patch("salt.utils.parsers.log", MagicMock()) as log_mock: + daemon_mixin._mixin_before_exit() + assert unlink_mock.call_count == 1 + log_mock.info.assert_not_called() + log_mock.debug.assert_not_called() + + +def test_pid_deleted_oserror_as_root(daemon_mixin): + """ + PIDfile deletion with exception, running as root. + """ + with patch("os.unlink", MagicMock(side_effect=OSError())) as unlink_mock: + with patch("os.path.isfile", MagicMock(return_value=True)): + with patch("salt.utils.parsers.log", MagicMock()) as log_mock: + if salt.utils.platform.is_windows(): + patch_args = ( + "salt.utils.win_functions.is_admin", + MagicMock(return_value=True), + ) + else: + patch_args = ("os.getuid", MagicMock(return_value=0)) + + with patch(*patch_args): + daemon_mixin._mixin_before_exit() + assert unlink_mock.call_count == 1 + log_mock.info.assert_called_with( + "PIDfile(%s) could not be deleted: %s", + format(daemon_mixin.config["pidfile"], ""), + ANY, + exc_info_on_loglevel=logging.DEBUG, + ) + + +def test_pid_deleted_oserror_as_non_root(daemon_mixin): + """ + PIDfile deletion with exception, running as non-root. 
+ """ + with patch("os.unlink", MagicMock(side_effect=OSError())) as unlink_mock: + with patch("os.path.isfile", MagicMock(return_value=True)): + with patch("salt.utils.parsers.log", MagicMock()) as log_mock: + if salt.utils.platform.is_windows(): + patch_args = ( + "salt.utils.win_functions.is_admin", + MagicMock(return_value=False), + ) + else: + patch_args = ("os.getuid", MagicMock(return_value=1000)) + + with patch(*patch_args): + daemon_mixin._mixin_before_exit() + assert unlink_mock.call_count == 1 + log_mock.info.assert_not_called() + log_mock.debug.assert_not_called() diff --git a/tests/pytests/unit/utils/parsers/test_log_parsers.py b/tests/pytests/unit/utils/parsers/test_log_parsers.py new file mode 100644 index 00000000000..52a0958b10c --- /dev/null +++ b/tests/pytests/unit/utils/parsers/test_log_parsers.py @@ -0,0 +1,775 @@ +""" + :codeauthor: Denys Havrysh +""" + +import logging +import os +import pprint + +import pytest + +import salt._logging +import salt.config +import salt.syspaths +import salt.utils.jid +import salt.utils.parsers +import salt.utils.platform +from tests.support.helpers import TstSuiteLoggingHandler +from tests.support.mock import MagicMock, patch + +log = logging.getLogger(__name__) + + +class LogImplMock: + """ + Logger setup + """ + + def __init__(self): + """ + init + """ + self.log_level_console = None + self.log_file = None + self.log_level_logfile = None + self.config = self.original_config = None + logging_options = salt._logging.get_logging_options_dict() + if logging_options: + self.config = logging_options.copy() + self.original_config = self.config.copy() + self.temp_log_level = None + self._console_handler_configured = False + self._extended_logging_configured = False + self._logfile_handler_configured = False + self._real_set_logging_options_dict = salt._logging.set_logging_options_dict + self._real_get_logging_options_dict = salt._logging.get_logging_options_dict + self._real_setup_logfile_handler = 
salt._logging.setup_logfile_handler + + def _destroy(self): + salt._logging.set_logging_options_dict.__options_dict__ = self.original_config + salt._logging.shutdown_logfile_handler() + + def setup_temp_handler(self, log_level=None): + """ + Set temp handler loglevel + """ + log.debug("Setting temp handler log level to: %s", log_level) + self.temp_log_level = log_level + + def is_console_handler_configured(self): + log.debug("Calling is_console_handler_configured") + return self._console_handler_configured + + def setup_console_handler( + self, log_level="error", **kwargs + ): # pylint: disable=unused-argument + """ + Set console loglevel + """ + log.debug("Setting console handler log level to: %s", log_level) + self.log_level_console = log_level + self._console_handler_configured = True + + def shutdown_console_handler(self): + log.debug("Calling shutdown_console_handler") + self._console_handler_configured = False + + def is_extended_logging_configured(self): + log.debug("Calling is_extended_logging_configured") + return self._extended_logging_configured + + def setup_extended_logging(self, opts): + """ + Set opts + """ + log.debug("Calling setup_extended_logging") + self._extended_logging_configured = True + + def shutdown_extended_logging(self): + log.debug("Calling shutdown_extended_logging") + self._extended_logging_configured = False + + def is_logfile_handler_configured(self): + log.debug("Calling is_logfile_handler_configured") + return self._logfile_handler_configured + + def setup_logfile_handler( + self, log_path, log_level=None, **kwargs + ): # pylint: disable=unused-argument + """ + Set logfile and loglevel + """ + log.debug("Setting log file handler path to: %s", log_path) + log.debug("Setting log file handler log level to: %s", log_level) + self.log_file = log_path + self.log_level_logfile = log_level + self._real_setup_logfile_handler(log_path, log_level=log_level, **kwargs) + self._logfile_handler_configured = True + + def 
shutdown_logfile_handler(self): + log.debug("Calling shutdown_logfile_handler") + self._logfile_handler_configured = False + + def get_logging_options_dict(self): + log.debug("Calling get_logging_options_dict") + return self.config + + def set_logging_options_dict(self, opts): + log.debug("Calling set_logging_options_dict") + self._real_set_logging_options_dict(opts) + self.config = self._real_get_logging_options_dict() + log.debug("Logging options dict:\n%s", pprint.pformat(self.config)) + + def setup_log_granular_levels(self, opts): + log.debug("Calling setup_log_granular_levels") + + def setup_logging(self): + log.debug("Mocked setup_logging called") + # Wether daemonizing or not, either on the main process or on a separate process + # The log file is going to be configured. + # The console is the only handler not configured if daemonizing + + # These routines are what happens on salt._logging.setup_logging + opts = self.get_logging_options_dict() + + if ( + opts.get("configure_console_logger", True) + and not self.is_console_handler_configured() + ): + self.setup_console_handler( + log_level=opts["log_level"], + log_format=opts["log_fmt_console"], + date_format=opts["log_datefmt"], + ) + if ( + opts.get("configure_file_logger", True) + and not self.is_logfile_handler_configured() + ): + log_file_level = opts["log_level_logfile"] or opts["log_level"] + if log_file_level != "quiet": + self.setup_logfile_handler( + log_path=opts[opts["log_file_key"]], + log_level=log_file_level, + log_format=opts["log_fmt_logfile"], + date_format=opts["log_datefmt_logfile"], + max_bytes=opts["log_rotate_max_bytes"], + backup_count=opts["log_rotate_backup_count"], + user=opts["user"], + ) + if not self.is_extended_logging_configured(): + self.setup_extended_logging(opts) + self.setup_log_granular_levels(opts["log_granular_levels"]) + + +# <----------- START TESTS -----------> + + +@pytest.fixture +def root_dir(tmp_path): + yield tmp_path / "parsers_tests_root_dir" + + 
+@pytest.fixture( + params=[ + "master", + "minion", + "proxyminion", + "syndic", + "saltcmd", + "saltcp", + "saltkey", + "saltcall", + "saltrun", + "saltssh", + "saltcloud", + "spm", + "saltapi", + ] +) +def log_cli_parser(request): + return request.param + + +@pytest.fixture +def default_config(log_cli_parser): + param_map = { + "master": salt.config.DEFAULT_MASTER_OPTS.copy(), + "minion": salt.config.DEFAULT_MINION_OPTS.copy(), + "proxyminion": { + **salt.config.DEFAULT_MINION_OPTS.copy(), + **salt.config.DEFAULT_PROXY_MINION_OPTS, + }, + "syndic": salt.config.DEFAULT_MASTER_OPTS.copy(), + "saltcmd": salt.config.DEFAULT_MASTER_OPTS.copy(), + "saltcp": salt.config.DEFAULT_MASTER_OPTS.copy(), + "saltkey": salt.config.DEFAULT_MASTER_OPTS.copy(), + "saltcall": salt.config.DEFAULT_MINION_OPTS.copy(), + "saltrun": salt.config.DEFAULT_MASTER_OPTS.copy(), + "saltssh": salt.config.DEFAULT_MASTER_OPTS.copy(), + "saltcloud": { + **salt.config.DEFAULT_MASTER_OPTS.copy(), + **salt.config.DEFAULT_CLOUD_OPTS, + }, + "spm": { + **salt.config.DEFAULT_MASTER_OPTS.copy(), + **salt.config.DEFAULT_SPM_OPTS, + }, + "saltapi": { + **salt.config.DEFAULT_MASTER_OPTS.copy(), + **salt.config.DEFAULT_API_OPTS, + }, + } + return param_map[log_cli_parser] + + +@pytest.fixture +def parser(log_cli_parser): + param_map = { + "master": salt.utils.parsers.MasterOptionParser, + "minion": salt.utils.parsers.MinionOptionParser, + "proxyminion": salt.utils.parsers.ProxyMinionOptionParser, + "syndic": salt.utils.parsers.SyndicOptionParser, + "saltcmd": salt.utils.parsers.SaltCMDOptionParser, + "saltcp": salt.utils.parsers.SaltCPOptionParser, + "saltkey": salt.utils.parsers.SaltKeyOptionParser, + "saltcall": salt.utils.parsers.SaltCallOptionParser, + "saltrun": salt.utils.parsers.SaltRunOptionParser, + "saltssh": salt.utils.parsers.SaltSSHOptionParser, + "saltcloud": salt.utils.parsers.SaltCloudParser, + "spm": salt.utils.parsers.SPMParser, + "saltapi": salt.utils.parsers.SaltAPIParser, + } + return 
param_map[log_cli_parser] + + +@pytest.fixture +def config_func(log_cli_parser): + param_map = { + "master": "salt.config.master_config", + "minion": "salt.config.minion_config", + "proxyminion": "salt.config.proxy_config", + "syndic": "salt.config.syndic_config", + "saltcmd": "salt.config.client_config", + "saltcp": "salt.config.master_config", + "saltkey": "salt.config.client_config", + "saltcall": "salt.config.minion_config", + "saltrun": "salt.config.master_config", + "saltssh": "salt.config.master_config", + "saltcloud": "salt.config.cloud_config", + "spm": "salt.config.spm_config", + "saltapi": "salt.config.api_config", + } + return param_map[log_cli_parser] + + +@pytest.fixture +def log_file(tmp_path, logfile_config_setting_name): + return str(tmp_path / logfile_config_setting_name) + + +@pytest.fixture +def args(log_cli_parser): + if log_cli_parser in ("saltcmd", "saltssh"): + return ["foo", "bar.baz"] + elif log_cli_parser == "saltcp": + return ["foo", "bar", "baz"] + elif log_cli_parser in ("saltcall", "saltrun"): + return ["foo.bar"] + elif log_cli_parser == "saltcloud": + return ["-p", "foo", "bar"] + elif log_cli_parser == "spm": + return ["foo", "bar"] + return [] + + +@pytest.fixture +def loglevel_config_setting_name(): + return "log_level" + + +@pytest.fixture +def logfile_config_setting_name(log_cli_parser): + if log_cli_parser == "syndic": + return "syndic_log_file" + elif log_cli_parser == "saltkey": + return "key_logfile" + elif log_cli_parser == "saltssh": + return "ssh_log_file" + elif log_cli_parser == "spm": + return "spm_logfile" + elif log_cli_parser == "saltapi": + return "api_logfile" + return "log_file" + + +@pytest.fixture +def logfile_loglevel_config_setting_name(): + return "log_level_logfile" + + +@pytest.fixture +def testing_config(default_config, root_dir, logfile_config_setting_name, log_file): + _testing_config = default_config.copy() + _testing_config["root_dir"] = root_dir + for name in ("pki_dir", "cachedir"): + 
_testing_config[name] = name + _testing_config[logfile_config_setting_name] = log_file + return _testing_config + + +@pytest.fixture(autouse=True) +def log_impl(): + """ + Mock logger functions + """ + _log_impl = LogImplMock() + mocked_functions = {} + for name in dir(_log_impl): + if name.startswith("_"): + continue + func = getattr(_log_impl, name) + if not callable(func): + continue + mocked_functions[name] = func + + patcher = patch.multiple(salt._logging, **mocked_functions) + with patcher: + yield _log_impl + _log_impl._destroy() + + +def test_get_log_level_cli( + testing_config, loglevel_config_setting_name, args, parser, config_func, log_impl +): + """ + Tests that log level match command-line specified value + """ + # Set defaults + default_log_level = testing_config[loglevel_config_setting_name] + + # Set log level in CLI + log_level = "critical" + args = ["--log-level", log_level] + args + + parser = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + parser.parse_args(args) + + console_log_level = getattr(parser.options, loglevel_config_setting_name) + + # Check console log level setting + assert console_log_level == log_level + # Check console logger log level + assert log_impl.log_level_console == log_level + assert log_impl.config[loglevel_config_setting_name] == log_level + assert log_impl.temp_log_level == log_level + # Check log file logger log level + assert log_impl.log_level_logfile == default_log_level + + +def test_get_log_level_config( + testing_config, loglevel_config_setting_name, args, parser, config_func, log_impl +): + """ + Tests that log level match the configured value + """ + # Set log level in config + log_level = "info" + opts = testing_config.copy() + opts.update({loglevel_config_setting_name: log_level}) + + parser = parser() + with patch(config_func, MagicMock(return_value=opts)): + parser.parse_args(args) + + console_log_level = getattr(parser.options, loglevel_config_setting_name) + + # Check 
console log level setting + assert console_log_level == log_level + # Check console logger log level + assert log_impl.log_level_console == log_level + assert log_impl.config[loglevel_config_setting_name] == log_level + assert log_impl.temp_log_level == "error" + # Check log file logger log level + assert log_impl.log_level_logfile == log_level + + +def test_get_log_level_default( + testing_config, loglevel_config_setting_name, args, parser, config_func, log_impl +): + """ + Tests that log level match the default value + """ + # Set defaults + log_level = default_log_level = testing_config[loglevel_config_setting_name] + + parser = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + parser.parse_args(args) + + console_log_level = getattr(parser.options, loglevel_config_setting_name) + + # Check log level setting + assert console_log_level == log_level + # Check console logger log level + assert log_impl.log_level_console == log_level + # Check extended logger + assert log_impl.config[loglevel_config_setting_name] == log_level + assert log_impl.temp_log_level == "error" + # Check log file logger + assert log_impl.log_level_logfile == default_log_level + # Check help message + assert ( + "Default: '{}'.".format(default_log_level) + in parser.get_option("--log-level").help + ) + + +# log file configuration tests + + +def test_get_log_file_cli( + testing_config, + loglevel_config_setting_name, + args, + parser, + config_func, + log_impl, + log_file, + logfile_config_setting_name, +): + """ + Tests that log file match command-line specified value + """ + # Set defaults + log_level = testing_config[loglevel_config_setting_name] + + # Set log file in CLI + log_file = "{}_cli.log".format(log_file) + args = ["--log-file", log_file] + args + + parser = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + parser.parse_args(args) + + log_file_option = getattr(parser.options, logfile_config_setting_name) + + # Check console 
logger + assert log_impl.log_level_console == log_level + # Check extended logger + assert log_impl.config[loglevel_config_setting_name] == log_level + assert log_impl.config[logfile_config_setting_name] == log_file + # Check temp logger + assert log_impl.temp_log_level == "error" + # Check log file setting + assert log_file_option == log_file + # Check log file logger + assert log_impl.log_file == log_file + + +def test_get_log_file_config( + testing_config, + loglevel_config_setting_name, + args, + parser, + config_func, + log_impl, + logfile_config_setting_name, + log_file, +): + """ + Tests that log file match the configured value + """ + # Set defaults + log_level = testing_config[loglevel_config_setting_name] + + # Set log file in config + log_file = "{}_config.log".format(log_file) + opts = testing_config.copy() + opts.update({logfile_config_setting_name: log_file}) + + parser = parser() + with patch(config_func, MagicMock(return_value=opts)): + parser.parse_args(args) + + log_file_option = getattr(parser.options, logfile_config_setting_name) + + # Check console logger + assert log_impl.log_level_console == log_level + # Check extended logger + assert log_impl.config[loglevel_config_setting_name] == log_level + assert log_impl.config[logfile_config_setting_name] == log_file + # Check temp logger + assert log_impl.temp_log_level == "error" + # Check log file setting + assert log_file_option == log_file + # Check log file logger + assert log_impl.log_file == log_file + + +def test_get_log_file_default( + testing_config, + loglevel_config_setting_name, + args, + parser, + config_func, + log_impl, + logfile_config_setting_name, + default_config, +): + """ + Tests that log file match the default value + """ + # Set defaults + log_level = testing_config[loglevel_config_setting_name] + log_file = testing_config[logfile_config_setting_name] + default_log_file = default_config[logfile_config_setting_name] + + parser = parser() + with patch(config_func, 
MagicMock(return_value=testing_config)): + parser.parse_args(args) + + log_file_option = getattr(parser.options, logfile_config_setting_name) + + # Check console logger + assert log_impl.log_level_console == log_level + # Check extended logger + assert log_impl.config[loglevel_config_setting_name] == log_level + assert log_impl.config[logfile_config_setting_name] == log_file + # Check temp logger + assert log_impl.temp_log_level == "error" + # Check log file setting + assert log_file_option == log_file + # Check log file logger + assert log_impl.log_file == log_file + # Check help message + assert ( + "Default: '{}'.".format(default_log_file) + in parser.get_option("--log-file").help + ) + + +# log file log level configuration tests + + +def test_get_log_file_level_cli( + testing_config, + loglevel_config_setting_name, + args, + parser, + config_func, + log_impl, + logfile_loglevel_config_setting_name, +): + """ + Tests that file log level match command-line specified value + """ + # Set defaults + default_log_level = testing_config[loglevel_config_setting_name] + + # Set log file level in CLI + log_level_logfile = "error" + args = ["--log-file-level", log_level_logfile] + args + + parser = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + parser.parse_args(args) + + log_level_logfile_option = getattr( + parser.options, logfile_loglevel_config_setting_name + ) + + # Check console logger + assert log_impl.log_level_console == default_log_level + # Check extended logger + assert log_impl.config[loglevel_config_setting_name] == default_log_level + assert log_impl.config[logfile_loglevel_config_setting_name] == log_level_logfile + # Check temp logger + assert log_impl.temp_log_level == "error" + # Check log file level setting + assert log_level_logfile_option == log_level_logfile + # Check log file logger + assert log_impl.log_level_logfile == log_level_logfile + + +def test_get_log_file_level_config( + testing_config, + 
loglevel_config_setting_name, + args, + parser, + config_func, + log_impl, + logfile_loglevel_config_setting_name, +): + """ + Tests that log file level match the configured value + """ + # Set defaults + log_level = testing_config[loglevel_config_setting_name] + + # Set log file level in config + log_level_logfile = "info" + opts = testing_config.copy() + opts.update({logfile_loglevel_config_setting_name: log_level_logfile}) + + parser = parser() + with patch(config_func, MagicMock(return_value=opts)): + parser.parse_args(args) + + log_level_logfile_option = getattr( + parser.options, logfile_loglevel_config_setting_name + ) + + # Check console logger + assert log_impl.log_level_console == log_level + # Check extended logger + assert log_impl.config[loglevel_config_setting_name] == log_level + assert log_impl.config[logfile_loglevel_config_setting_name] == log_level_logfile + # Check temp logger + assert log_impl.temp_log_level == "error" + # Check log file level setting + assert log_level_logfile_option == log_level_logfile + # Check log file logger + assert log_impl.log_level_logfile == log_level_logfile + + +def test_get_log_file_level_default( + testing_config, + loglevel_config_setting_name, + args, + parser, + config_func, + log_impl, + logfile_loglevel_config_setting_name, +): + """ + Tests that log file level match the default value + """ + # Set defaults + default_log_level = testing_config[loglevel_config_setting_name] + + log_level = default_log_level + log_level_logfile = default_log_level + + parser = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + parser.parse_args(args) + + log_level_logfile_option = getattr( + parser.options, logfile_loglevel_config_setting_name + ) + + # Check console logger + assert log_impl.log_level_console == log_level + # Check extended logger + assert log_impl.config[loglevel_config_setting_name] == log_level + assert log_impl.config[logfile_loglevel_config_setting_name] == log_level_logfile + # 
Check temp logger + assert log_impl.temp_log_level == "error" + # Check log file level setting + assert log_level_logfile_option == log_level_logfile + # Check log file logger + assert log_impl.log_level_logfile == log_level_logfile + # Check help message + assert ( + "Default: '{}'.".format(default_log_level) + in parser.get_option("--log-file-level").help + ) + + +def test_get_console_log_level_with_file_log_level( + testing_config, + loglevel_config_setting_name, + args, + parser, + config_func, + log_impl, + logfile_loglevel_config_setting_name, +): # pylint: disable=invalid-name + """ + Tests that both console log level and log file level setting are working together + """ + log_level = "critical" + log_level_logfile = "debug" + + args = ["--log-file-level", log_level_logfile] + args + + opts = testing_config.copy() + opts.update({loglevel_config_setting_name: log_level}) + + parser = parser() + with patch(config_func, MagicMock(return_value=opts)): + parser.parse_args(args) + + log_level_logfile_option = getattr( + parser.options, logfile_loglevel_config_setting_name + ) + + # Check console logger + assert log_impl.log_level_console == log_level + # Check extended logger + assert log_impl.config[loglevel_config_setting_name] == log_level + assert log_impl.config[logfile_loglevel_config_setting_name] == log_level_logfile + # Check temp logger + assert log_impl.temp_log_level == "error" + # Check log file level setting + assert log_level_logfile_option == log_level_logfile + # Check log file logger + assert log_impl.log_level_logfile == log_level_logfile + + +def test_log_created( + testing_config, args, parser, config_func, logfile_config_setting_name, log_file +): + """ + Tests that log file is created + """ + opts = testing_config.copy() + opts.update({"log_file": str(log_file)}) + log_file_name = str(log_file) + if log_file_name.split(os.sep)[-1] != "log_file": + opts.update({log_file_name: str(log_file)}) + + parser = parser() + with patch(config_func, 
MagicMock(return_value=opts)): + parser.parse_args(args) + + assert os.path.exists(str(log_file_name)) + + +def test_callbacks_uniqueness(parser): + """ + Test that the callbacks are only added once, no matter + how many instances of the parser we create + """ + mixin_container_names = ( + "_mixin_setup_funcs", + "_mixin_process_funcs", + "_mixin_after_parsed_funcs", + "_mixin_before_exit_funcs", + ) + _parser = parser() + nums_1 = {} + for cb_container in mixin_container_names: + obj = getattr(_parser, cb_container) + nums_1[cb_container] = len(obj) + + # The next time we instantiate the parser, the counts should be equal + _parser = parser() + nums_2 = {} + for cb_container in mixin_container_names: + obj = getattr(_parser, cb_container) + nums_2[cb_container] = len(obj) + assert nums_1 == nums_2 + + +def test_verify_log_warning_logged(args, config_func, testing_config, parser): + args = ["--log-level", "debug"] + args + with TstSuiteLoggingHandler(level=logging.DEBUG) as handler: + parser = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + parser.parse_args(args) + assert ( + "WARNING:Insecure logging configuration detected! Sensitive data may be logged." 
+ in handler.messages + ) diff --git a/tests/unit/utils/test_parsers.py b/tests/unit/utils/test_parsers.py deleted file mode 100644 index 06e75d5d7a7..00000000000 --- a/tests/unit/utils/test_parsers.py +++ /dev/null @@ -1,1283 +0,0 @@ -""" - :codeauthor: Denys Havrysh -""" - -import logging -import os -import pprint -import shutil -import tempfile - -import salt._logging -import salt.config -import salt.syspaths -import salt.utils.jid -import salt.utils.parsers -import salt.utils.platform -from tests.support.helpers import TstSuiteLoggingHandler -from tests.support.mock import ANY, MagicMock, patch -from tests.support.runtests import RUNTIME_VARS -from tests.support.unit import TestCase - -log = logging.getLogger(__name__) - - -class ErrorMock: # pylint: disable=too-few-public-methods - """ - Error handling - """ - - def __init__(self): - """ - init - """ - self.msg = None - - def error(self, msg): - """ - Capture error message - """ - self.msg = msg - - -class LogImplMock: - """ - Logger setup - """ - - def __init__(self): - """ - init - """ - self.log_level_console = None - self.log_file = None - self.log_level_logfile = None - self.config = self.original_config = None - logging_options = salt._logging.get_logging_options_dict() - if logging_options: - self.config = logging_options.copy() - self.original_config = self.config.copy() - self.temp_log_level = None - self._console_handler_configured = False - self._extended_logging_configured = False - self._logfile_handler_configured = False - self._real_set_logging_options_dict = salt._logging.set_logging_options_dict - self._real_get_logging_options_dict = salt._logging.get_logging_options_dict - self._real_setup_logfile_handler = salt._logging.setup_logfile_handler - - def _destroy(self): - salt._logging.set_logging_options_dict.__options_dict__ = self.original_config - salt._logging.shutdown_logfile_handler() - - def setup_temp_handler(self, log_level=None): - """ - Set temp handler loglevel - """ - 
log.debug("Setting temp handler log level to: %s", log_level) - self.temp_log_level = log_level - - def is_console_handler_configured(self): - log.debug("Calling is_console_handler_configured") - return self._console_handler_configured - - def setup_console_handler( - self, log_level="error", **kwargs - ): # pylint: disable=unused-argument - """ - Set console loglevel - """ - log.debug("Setting console handler log level to: %s", log_level) - self.log_level_console = log_level - self._console_handler_configured = True - - def shutdown_console_handler(self): - log.debug("Calling shutdown_console_handler") - self._console_handler_configured = False - - def is_extended_logging_configured(self): - log.debug("Calling is_extended_logging_configured") - return self._extended_logging_configured - - def setup_extended_logging(self, opts): - """ - Set opts - """ - log.debug("Calling setup_extended_logging") - self._extended_logging_configured = True - - def shutdown_extended_logging(self): - log.debug("Calling shutdown_extended_logging") - self._extended_logging_configured = False - - def is_logfile_handler_configured(self): - log.debug("Calling is_logfile_handler_configured") - return self._logfile_handler_configured - - def setup_logfile_handler( - self, log_path, log_level=None, **kwargs - ): # pylint: disable=unused-argument - """ - Set logfile and loglevel - """ - log.debug("Setting log file handler path to: %s", log_path) - log.debug("Setting log file handler log level to: %s", log_level) - self.log_file = log_path - self.log_level_logfile = log_level - self._real_setup_logfile_handler(log_path, log_level=log_level, **kwargs) - self._logfile_handler_configured = True - - def shutdown_logfile_handler(self): - log.debug("Calling shutdown_logfile_handler") - self._logfile_handler_configured = False - - def get_logging_options_dict(self): - log.debug("Calling get_logging_options_dict") - return self.config - - def set_logging_options_dict(self, opts): - log.debug("Calling 
set_logging_options_dict") - self._real_set_logging_options_dict(opts) - self.config = self._real_get_logging_options_dict() - log.debug("Logging options dict:\n%s", pprint.pformat(self.config)) - - def setup_log_granular_levels(self, opts): - log.debug("Calling setup_log_granular_levels") - - def setup_logging(self): - log.debug("Mocked setup_logging called") - # Wether daemonizing or not, either on the main process or on a separate process - # The log file is going to be configured. - # The console is the only handler not configured if daemonizing - - # These routines are what happens on salt._logging.setup_logging - opts = self.get_logging_options_dict() - - if ( - opts.get("configure_console_logger", True) - and not self.is_console_handler_configured() - ): - self.setup_console_handler( - log_level=opts["log_level"], - log_format=opts["log_fmt_console"], - date_format=opts["log_datefmt"], - ) - if ( - opts.get("configure_file_logger", True) - and not self.is_logfile_handler_configured() - ): - log_file_level = opts["log_level_logfile"] or opts["log_level"] - if log_file_level != "quiet": - self.setup_logfile_handler( - log_path=opts[opts["log_file_key"]], - log_level=log_file_level, - log_format=opts["log_fmt_logfile"], - date_format=opts["log_datefmt_logfile"], - max_bytes=opts["log_rotate_max_bytes"], - backup_count=opts["log_rotate_backup_count"], - user=opts["user"], - ) - if not self.is_extended_logging_configured(): - self.setup_extended_logging(opts) - self.setup_log_granular_levels(opts["log_granular_levels"]) - - -class ObjectView: # pylint: disable=too-few-public-methods - """ - Dict object view - """ - - def __init__(self, d): - self.__dict__ = d - - -class ParserBase: - """ - Unit Tests for Log Level Mixin with Salt parsers - """ - - args = [] - - log_impl = None - - # Set config option names - loglevel_config_setting_name = "log_level" - logfile_config_setting_name = "log_file" - logfile_loglevel_config_setting_name = ( - "log_level_logfile" # 
pylint: disable=invalid-name - ) - - @classmethod - def setUpClass(cls): - cls.root_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP) - - @classmethod - def tearDownClass(cls): - shutil.rmtree(cls.root_dir, ignore_errors=True) - - def setup_log(self): - """ - Mock logger functions - """ - testing_config = self.default_config.copy() - testing_config["root_dir"] = self.root_dir - for name in ("pki_dir", "cachedir"): - testing_config[name] = name - testing_config[self.logfile_config_setting_name] = getattr( - self, self.logfile_config_setting_name, self.log_file - ) - self.testing_config = testing_config - self.addCleanup(setattr, self, "testing_config", None) - - self.log_impl = LogImplMock() - self.addCleanup(self.log_impl._destroy) - self.addCleanup(setattr, self, "log_impl", None) - - mocked_functions = {} - for name in dir(self.log_impl): - if name.startswith("_"): - continue - func = getattr(self.log_impl, name) - if not callable(func): - continue - mocked_functions[name] = func - patcher = patch.multiple(salt._logging, **mocked_functions) - patcher.start() - self.addCleanup(patcher.stop) - - # log level configuration tests - - def test_get_log_level_cli(self): - """ - Tests that log level match command-line specified value - """ - # Set defaults - default_log_level = self.testing_config[self.loglevel_config_setting_name] - - # Set log level in CLI - log_level = "critical" - args = ["--log-level", log_level] + self.args - - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=self.testing_config)): - parser.parse_args(args) - - console_log_level = getattr(parser.options, self.loglevel_config_setting_name) - - # Check console log level setting - self.assertEqual(console_log_level, log_level) - # Check console logger log level - self.assertEqual(self.log_impl.log_level_console, log_level) - self.assertEqual( - self.log_impl.config[self.loglevel_config_setting_name], log_level - ) - self.assertEqual(self.log_impl.temp_log_level, log_level) - # 
Check log file logger log level - self.assertEqual(self.log_impl.log_level_logfile, default_log_level) - - def test_get_log_level_config(self): - """ - Tests that log level match the configured value - """ - args = self.args - - # Set log level in config - log_level = "info" - opts = self.testing_config.copy() - opts.update({self.loglevel_config_setting_name: log_level}) - - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=opts)): - parser.parse_args(args) - - console_log_level = getattr(parser.options, self.loglevel_config_setting_name) - - # Check console log level setting - self.assertEqual(console_log_level, log_level) - # Check console logger log level - self.assertEqual(self.log_impl.log_level_console, log_level) - self.assertEqual( - self.log_impl.config[self.loglevel_config_setting_name], log_level - ) - self.assertEqual(self.log_impl.temp_log_level, "error") - # Check log file logger log level - self.assertEqual(self.log_impl.log_level_logfile, log_level) - - def test_get_log_level_default(self): - """ - Tests that log level match the default value - """ - # Set defaults - log_level = default_log_level = self.testing_config[ - self.loglevel_config_setting_name - ] - - args = self.args - - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=self.testing_config)): - parser.parse_args(args) - - console_log_level = getattr(parser.options, self.loglevel_config_setting_name) - - # Check log level setting - self.assertEqual(console_log_level, log_level) - # Check console logger log level - self.assertEqual(self.log_impl.log_level_console, log_level) - # Check extended logger - self.assertEqual( - self.log_impl.config[self.loglevel_config_setting_name], log_level - ) - self.assertEqual(self.log_impl.temp_log_level, "error") - # Check log file logger - self.assertEqual(self.log_impl.log_level_logfile, default_log_level) - # Check help message - self.assertIn( - "Default: '{}'.".format(default_log_level), - 
parser.get_option("--log-level").help, - ) - - # log file configuration tests - - def test_get_log_file_cli(self): - """ - Tests that log file match command-line specified value - """ - # Set defaults - log_level = self.testing_config[self.loglevel_config_setting_name] - - # Set log file in CLI - log_file = "{}_cli.log".format(self.log_file) - args = ["--log-file", log_file] + self.args - - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=self.testing_config)): - parser.parse_args(args) - - log_file_option = getattr(parser.options, self.logfile_config_setting_name) - - # Check console logger - self.assertEqual(self.log_impl.log_level_console, log_level) - # Check extended logger - self.assertEqual( - self.log_impl.config[self.loglevel_config_setting_name], log_level - ) - self.assertEqual( - self.log_impl.config[self.logfile_config_setting_name], log_file - ) - # Check temp logger - self.assertEqual(self.log_impl.temp_log_level, "error") - # Check log file setting - self.assertEqual(log_file_option, log_file) - # Check log file logger - self.assertEqual(self.log_impl.log_file, log_file) - - def test_get_log_file_config(self): - """ - Tests that log file match the configured value - """ - # Set defaults - log_level = self.testing_config[self.loglevel_config_setting_name] - - args = self.args - - # Set log file in config - log_file = "{}_config.log".format(self.log_file) - opts = self.testing_config.copy() - opts.update({self.logfile_config_setting_name: log_file}) - - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=opts)): - parser.parse_args(args) - - log_file_option = getattr(parser.options, self.logfile_config_setting_name) - - # Check console logger - self.assertEqual(self.log_impl.log_level_console, log_level) - # Check extended logger - self.assertEqual( - self.log_impl.config[self.loglevel_config_setting_name], log_level - ) - self.assertEqual( - self.log_impl.config[self.logfile_config_setting_name], 
log_file - ) - # Check temp logger - self.assertEqual(self.log_impl.temp_log_level, "error") - # Check log file setting - self.assertEqual(log_file_option, log_file) - # Check log file logger - self.assertEqual(self.log_impl.log_file, log_file) - - def test_get_log_file_default(self): - """ - Tests that log file match the default value - """ - # Set defaults - log_level = self.testing_config[self.loglevel_config_setting_name] - log_file = self.testing_config[self.logfile_config_setting_name] - default_log_file = self.default_config[self.logfile_config_setting_name] - - args = self.args - - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=self.testing_config)): - parser.parse_args(args) - - log_file_option = getattr(parser.options, self.logfile_config_setting_name) - - # Check console logger - self.assertEqual(self.log_impl.log_level_console, log_level) - # Check extended logger - self.assertEqual( - self.log_impl.config[self.loglevel_config_setting_name], log_level - ) - self.assertEqual( - self.log_impl.config[self.logfile_config_setting_name], log_file - ) - # Check temp logger - self.assertEqual(self.log_impl.temp_log_level, "error") - # Check log file setting - self.assertEqual(log_file_option, log_file) - # Check log file logger - self.assertEqual(self.log_impl.log_file, log_file) - # Check help message - self.assertIn( - "Default: '{}'.".format(default_log_file), - parser.get_option("--log-file").help, - ) - - # log file log level configuration tests - - def test_get_log_file_level_cli(self): - """ - Tests that file log level match command-line specified value - """ - # Set defaults - default_log_level = self.testing_config[self.loglevel_config_setting_name] - - # Set log file level in CLI - log_level_logfile = "error" - args = ["--log-file-level", log_level_logfile] + self.args - - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=self.testing_config)): - parser.parse_args(args) - - 
log_level_logfile_option = getattr( - parser.options, self.logfile_loglevel_config_setting_name - ) - - # Check console logger - self.assertEqual(self.log_impl.log_level_console, default_log_level) - # Check extended logger - self.assertEqual( - self.log_impl.config[self.loglevel_config_setting_name], - default_log_level, - ) - self.assertEqual( - self.log_impl.config[self.logfile_loglevel_config_setting_name], - log_level_logfile, - ) - # Check temp logger - self.assertEqual(self.log_impl.temp_log_level, "error") - # Check log file level setting - self.assertEqual(log_level_logfile_option, log_level_logfile) - # Check log file logger - self.assertEqual(self.log_impl.log_level_logfile, log_level_logfile) - - def test_get_log_file_level_config(self): - """ - Tests that log file level match the configured value - """ - # Set defaults - log_level = self.testing_config[self.loglevel_config_setting_name] - - args = self.args - - # Set log file level in config - log_level_logfile = "info" - opts = self.testing_config.copy() - opts.update({self.logfile_loglevel_config_setting_name: log_level_logfile}) - - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=opts)): - parser.parse_args(args) - - log_level_logfile_option = getattr( - parser.options, self.logfile_loglevel_config_setting_name - ) - - # Check console logger - self.assertEqual(self.log_impl.log_level_console, log_level) - # Check extended logger - self.assertEqual( - self.log_impl.config[self.loglevel_config_setting_name], log_level - ) - self.assertEqual( - self.log_impl.config[self.logfile_loglevel_config_setting_name], - log_level_logfile, - ) - # Check temp logger - self.assertEqual(self.log_impl.temp_log_level, "error") - # Check log file level setting - self.assertEqual(log_level_logfile_option, log_level_logfile) - # Check log file logger - self.assertEqual(self.log_impl.log_level_logfile, log_level_logfile) - - def test_get_log_file_level_default(self): - """ - Tests that log 
file level match the default value - """ - # Set defaults - default_log_level = self.testing_config[self.loglevel_config_setting_name] - - log_level = default_log_level - log_level_logfile = default_log_level - - args = self.args - - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=self.testing_config)): - parser.parse_args(args) - - log_level_logfile_option = getattr( - parser.options, self.logfile_loglevel_config_setting_name - ) - - # Check console logger - self.assertEqual(self.log_impl.log_level_console, log_level) - # Check extended logger - self.assertEqual( - self.log_impl.config[self.loglevel_config_setting_name], log_level - ) - self.assertEqual( - self.log_impl.config[self.logfile_loglevel_config_setting_name], - log_level_logfile, - ) - # Check temp logger - self.assertEqual(self.log_impl.temp_log_level, "error") - # Check log file level setting - self.assertEqual(log_level_logfile_option, log_level_logfile) - # Check log file logger - self.assertEqual(self.log_impl.log_level_logfile, log_level_logfile) - # Check help message - self.assertIn( - "Default: '{}'.".format(default_log_level), - parser.get_option("--log-file-level").help, - ) - - def test_get_console_log_level_with_file_log_level( - self, - ): # pylint: disable=invalid-name - """ - Tests that both console log level and log file level setting are working together - """ - log_level = "critical" - log_level_logfile = "debug" - - args = ["--log-file-level", log_level_logfile] + self.args - - opts = self.testing_config.copy() - opts.update({self.loglevel_config_setting_name: log_level}) - - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=opts)): - parser.parse_args(args) - - log_level_logfile_option = getattr( - parser.options, self.logfile_loglevel_config_setting_name - ) - - # Check console logger - self.assertEqual(self.log_impl.log_level_console, log_level) - # Check extended logger - self.assertEqual( - 
self.log_impl.config[self.loglevel_config_setting_name], log_level - ) - self.assertEqual( - self.log_impl.config[self.logfile_loglevel_config_setting_name], - log_level_logfile, - ) - # Check temp logger - self.assertEqual(self.log_impl.temp_log_level, "error") - # Check log file level setting - self.assertEqual(log_level_logfile_option, log_level_logfile) - # Check log file logger - self.assertEqual(self.log_impl.log_level_logfile, log_level_logfile) - - def test_log_created(self): - """ - Tests that log file is created - """ - args = self.args - log_file = self.log_file - log_file_name = self.logfile_config_setting_name - opts = self.testing_config.copy() - opts.update({"log_file": log_file}) - if log_file_name != "log_file": - opts.update({log_file_name: getattr(self, log_file_name)}) - - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=opts)): - parser.parse_args(args) - - if log_file_name == "log_file": - self.assertGreaterEqual(os.path.getsize(log_file), 0) - else: - self.assertGreaterEqual(os.path.getsize(getattr(self, log_file_name)), 0) - - def test_callbacks_uniqueness(self): - """ - Test that the callbacks are only added once, no matter - how many instances of the parser we create - """ - mixin_container_names = ( - "_mixin_setup_funcs", - "_mixin_process_funcs", - "_mixin_after_parsed_funcs", - "_mixin_before_exit_funcs", - ) - parser = self.parser() - nums_1 = {} - for cb_container in mixin_container_names: - obj = getattr(parser, cb_container) - nums_1[cb_container] = len(obj) - - # The next time we instantiate the parser, the counts should be equal - parser = self.parser() - nums_2 = {} - for cb_container in mixin_container_names: - obj = getattr(parser, cb_container) - nums_2[cb_container] = len(obj) - self.assertDictEqual(nums_1, nums_2) - - def test_verify_log_warning_logged(self): - args = ["--log-level", "debug"] + self.args - with TstSuiteLoggingHandler(level=logging.DEBUG) as handler: - parser = self.parser() - 
with patch(self.config_func, MagicMock(return_value=self.testing_config)): - parser.parse_args(args) - self.assertIn( - "WARNING:Insecure logging configuration detected! Sensitive data may be logged.", - handler.messages, - ) - - -class MasterOptionParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Master options - """ - - def setUp(self): - """ - Setting up - """ - # Set defaults - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_master_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - # Function to patch - self.config_func = "salt.config.master_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.MasterOptionParser - self.addCleanup(delattr, self, "parser") - - -class MinionOptionParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Minion options - """ - - def setUp(self): - """ - Setting up - """ - # Set defaults - self.default_config = salt.config.DEFAULT_MINION_OPTS.copy() - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_minion_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - # Function to patch - self.config_func = "salt.config.minion_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.MinionOptionParser - self.addCleanup(delattr, self, "parser") - - -class 
ProxyMinionOptionParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Proxy Minion options - """ - - def setUp(self): - """ - Setting up - """ - # Set defaults - self.default_config = salt.config.DEFAULT_MINION_OPTS.copy() - self.default_config.update(salt.config.DEFAULT_PROXY_MINION_OPTS) - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_proxy_minion_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - # Function to patch - self.config_func = "salt.config.proxy_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.ProxyMinionOptionParser - self.addCleanup(delattr, self, "parser") - - -class SyndicOptionParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Syndic options - """ - - def setUp(self): - """ - Setting up - """ - # Set config option names - self.logfile_config_setting_name = "syndic_log_file" - - # Set defaults - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_syndic_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - syndic_log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_syndic_log", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.syndic_log_file = syndic_log_file.name - syndic_log_file.close() - # Function to patch - self.config_func = "salt.config.syndic_config" - - # Mock log setup - self.setup_log() 
- - # Assign parser - self.parser = salt.utils.parsers.SyndicOptionParser - self.addCleanup(delattr, self, "parser") - - -class SaltCMDOptionParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt CLI options - """ - - def setUp(self): - """ - Setting up - """ - # Set mandatory CLI options - self.args = ["foo", "bar.baz"] - - # Set defaults - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_cmd_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - # Function to patch - self.config_func = "salt.config.client_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.SaltCMDOptionParser - self.addCleanup(delattr, self, "parser") - - -class SaltCPOptionParserTestCase(ParserBase, TestCase): - """ - Tests parsing salt-cp options - """ - - def setUp(self): - """ - Setting up - """ - # Set mandatory CLI options - self.args = ["foo", "bar", "baz"] - - # Set defaults - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_cp_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - # Function to patch - self.config_func = "salt.config.master_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.SaltCPOptionParser - self.addCleanup(delattr, self, "parser") - - -class 
SaltKeyOptionParserTestCase(ParserBase, TestCase): - """ - Tests parsing salt-key options - """ - - def setUp(self): - """ - Setting up - """ - # Set config option names - self.logfile_config_setting_name = "key_logfile" - - # Set defaults - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_key_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - key_logfile = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_key_logfile", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.key_logfile = key_logfile.name - key_logfile.close() - # Function to patch - self.config_func = "salt.config.client_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.SaltKeyOptionParser - self.addCleanup(delattr, self, "parser") - - -class SaltCallOptionParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Minion options - """ - - def setUp(self): - """ - Setting up - """ - # Set mandatory CLI options - self.args = ["foo.bar"] - - # Set defaults - self.default_config = salt.config.DEFAULT_MINION_OPTS.copy() - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_call_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - # Function to patch - self.config_func = "salt.config.minion_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = 
salt.utils.parsers.SaltCallOptionParser - self.addCleanup(delattr, self, "parser") - - -class SaltRunOptionParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Master options - """ - - def setUp(self): - """ - Setting up - """ - # Set mandatory CLI options - self.args = ["foo.bar"] - - # Set defaults - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_run_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - # Function to patch - self.config_func = "salt.config.master_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.SaltRunOptionParser - self.addCleanup(delattr, self, "parser") - - -class SaltSSHOptionParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Master options - """ - - def setUp(self): - """ - Setting up - """ - # Set mandatory CLI options - self.args = ["foo", "bar.baz"] - - # Set config option names - self.logfile_config_setting_name = "ssh_log_file" - - # Set defaults - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_ssh_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - ssh_log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_ssh_logfile", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.ssh_log_file = ssh_log_file.name - ssh_log_file.close() - # 
Function to patch - self.config_func = "salt.config.master_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.SaltSSHOptionParser - self.addCleanup(delattr, self, "parser") - - -class SaltCloudParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Cloud options - """ - - def setUp(self): - """ - Setting up - """ - # Set mandatory CLI options - self.args = ["-p", "foo", "bar"] - - # Set default configs - # Cloud configs are merged with master configs in - # config/__init__.py, so we'll do that here as well - # As we need the 'user' key later on. - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.default_config.update(salt.config.DEFAULT_CLOUD_OPTS) - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_cloud_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - # Function to patch - self.config_func = "salt.config.cloud_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.SaltCloudParser - self.addCleanup(delattr, self, "parser") - - -class SPMParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Cloud options - """ - - def setUp(self): - """ - Setting up - """ - # Set mandatory CLI options - self.args = ["foo", "bar"] - - # Set config option names - self.logfile_config_setting_name = "spm_logfile" - - # Set defaults - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.default_config.update(salt.config.DEFAULT_SPM_OPTS) - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - 
log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_spm_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - spm_logfile = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_spm_logfile", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.spm_logfile = spm_logfile.name - spm_logfile.close() - # Function to patch - self.config_func = "salt.config.spm_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.SPMParser - self.addCleanup(delattr, self, "parser") - - -class SaltAPIParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Cloud options - """ - - def setUp(self): - """ - Setting up - """ - # Set mandatory CLI options - self.args = [] - - # Set config option names - self.logfile_config_setting_name = "api_logfile" - - # Set defaults - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.default_config.update(salt.config.DEFAULT_API_OPTS) - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_api_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - api_logfile = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_api_logfile", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.api_logfile = api_logfile.name - api_logfile.close() - # Function to patch - self.config_func = "salt.config.api_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.SaltAPIParser - self.addCleanup(delattr, self, "parser") - - -class DaemonMixInTestCase(TestCase): - """ - Tests the PIDfile deletion in the DaemonMixIn. 
- """ - - def setUp(self): - """ - Setting up - """ - # Setup mixin - self.daemon_mixin = salt.utils.parsers.DaemonMixIn() - self.daemon_mixin.config = {} - self.daemon_mixin.config["pidfile"] = "/some/fake.pid" - - def tearDown(self): - """ - Tear down test - :return: - """ - del self.daemon_mixin - - @patch("os.unlink", MagicMock()) - @patch("os.path.isfile", MagicMock(return_value=True)) - @patch("salt.utils.parsers.log", MagicMock()) - def test_pid_file_deletion(self): - """ - PIDfile deletion without exception. - """ - self.daemon_mixin._mixin_before_exit() - assert salt.utils.parsers.os.unlink.call_count == 1 - salt.utils.parsers.log.info.assert_not_called() - salt.utils.parsers.log.debug.assert_not_called() - - @patch("os.unlink", MagicMock(side_effect=OSError())) - @patch("os.path.isfile", MagicMock(return_value=True)) - @patch("salt.utils.parsers.log", MagicMock()) - def test_pid_deleted_oserror_as_root(self): - """ - PIDfile deletion with exception, running as root. - """ - if salt.utils.platform.is_windows(): - patch_args = ( - "salt.utils.win_functions.is_admin", - MagicMock(return_value=True), - ) - else: - patch_args = ("os.getuid", MagicMock(return_value=0)) - - with patch(*patch_args): - self.daemon_mixin._mixin_before_exit() - assert salt.utils.parsers.os.unlink.call_count == 1 - salt.utils.parsers.log.info.assert_called_with( - "PIDfile(%s) could not be deleted: %s", - format(self.daemon_mixin.config["pidfile"], ""), - ANY, - exc_info_on_loglevel=logging.DEBUG, - ) - - @patch("os.unlink", MagicMock(side_effect=OSError())) - @patch("os.path.isfile", MagicMock(return_value=True)) - @patch("salt.utils.parsers.log", MagicMock()) - def test_pid_deleted_oserror_as_non_root(self): - """ - PIDfile deletion with exception, running as non-root. 
- """ - if salt.utils.platform.is_windows(): - patch_args = ( - "salt.utils.win_functions.is_admin", - MagicMock(return_value=False), - ) - else: - patch_args = ("os.getuid", MagicMock(return_value=1000)) - - with patch(*patch_args): - self.daemon_mixin._mixin_before_exit() - assert salt.utils.parsers.os.unlink.call_count == 1 - salt.utils.parsers.log.info.assert_not_called() - salt.utils.parsers.log.debug.assert_not_called() From 7ab967a71b2443d33acd396f3fb7f6353cb7b5a9 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Wed, 11 Oct 2023 17:07:54 -0400 Subject: [PATCH 059/196] Add tests for the different ways to get the saltfile option --- .../unit/utils/parsers/test_saltfile_mixin.py | 125 ++++++++++++++++++ 1 file changed, 125 insertions(+) create mode 100644 tests/pytests/unit/utils/parsers/test_saltfile_mixin.py diff --git a/tests/pytests/unit/utils/parsers/test_saltfile_mixin.py b/tests/pytests/unit/utils/parsers/test_saltfile_mixin.py new file mode 100644 index 00000000000..5ea20aad5ed --- /dev/null +++ b/tests/pytests/unit/utils/parsers/test_saltfile_mixin.py @@ -0,0 +1,125 @@ +""" +Tests the SaltfileMixIn. +""" + +import optparse +import shutil + +import pytest + +import salt.utils.parsers +from tests.support.helpers import patched_environ +from tests.support.mock import patch + + +class MockSaltfileParser( + salt.utils.parsers.OptionParser, + salt.utils.parsers.SaltfileMixIn, + metaclass=salt.utils.parsers.OptionParserMeta, +): + def __init__(self, *args, **kwargs): + salt.utils.parsers.OptionParser.__init__(self, *args, **kwargs) + self.config = {} + + def _mixin_setup(self): + self.add_option( + "-l", + "--log-level", + dest="log_level", + default="warning", + help="The log level for salt.", + ) + group = self.output_options_group = optparse.OptionGroup( + self, "Output Options", "Configure your preferred output format." 
+ ) + self.add_option_group(group) + + group.add_option( + "--out", + "--output", + dest="output", + help=( + "Print the output from the '{}' command using the " + "specified outputter.".format( + self.get_prog_name(), + ) + ), + ) + group.add_option( + "--out-file", + "--output-file", + dest="output_file", + default=None, + help="Write the output to the specified file.", + ) + + +@pytest.fixture +def parser(): + return MockSaltfileParser() + + +# @pytest.fixture +# def parser(): +# # Mock this because we don't need it and it causes an error +# # if there is more than one test being run in this file +# with patch.object(salt.utils.parsers.LogLevelMixIn, "_LogLevelMixIn__setup_logging_routines"): +# yield salt.utils.parsers.SaltCallOptionParser() + + +@pytest.fixture +def saltfile(tmp_path): + fp = tmp_path / "Saltfile" + fp.touch() + return fp + + +@pytest.fixture +def base_opts(): + # return ["--local", "test.ping"] + return [] + + +def test_saltfile_in_environment(parser, saltfile, base_opts): + """ + Test setting the SALT_SALTFILE environment variable + """ + with patched_environ(SALT_SALTFILE=str(saltfile)): + parser.parse_args(base_opts) + assert parser.options.saltfile == str(saltfile) + + +def test_saltfile_option(parser, saltfile, base_opts): + """ + Test setting the SALT_SALTFILE environment variable + """ + parser.parse_args(base_opts + ["--saltfile", str(saltfile)]) + assert parser.options.saltfile == str(saltfile) + + +def test_saltfile_cwd(parser, saltfile, base_opts, tmp_path): + """ + Test setting the SALT_SALTFILE environment variable + """ + with patch("os.getcwd", return_value=str(tmp_path)) as cwd_mock: + parser.parse_args(base_opts) + assert parser.options.saltfile == str(saltfile) + cwd_mock.assert_called_once() + + +def test_saltfile_user_home(parser, saltfile, base_opts, tmp_path): + """ + Test setting the SALT_SALTFILE environment variable + """ + fake_dir = tmp_path / "fake_dir" + fake_dir.mkdir() + with patch("os.getcwd", 
return_value=str(fake_dir)) as cwd_mock: + with patch("os.path.expanduser", return_value=str(tmp_path)) as eu_mock: + salt_subdir = tmp_path / ".salt" + salt_subdir.mkdir() + dest = str(salt_subdir / "Saltfile") + shutil.copy(str(saltfile), dest) + parser.parse_args(base_opts) + assert parser.options.saltfile == dest + cwd_mock.assert_called_once() + eu_mock.assert_called_with("~") From e0e65465860defd0eec7b58b987e59fb59d74f64 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Thu, 12 Oct 2023 19:52:31 -0400 Subject: [PATCH 060/196] Add more coverage for the saltfile mixin and ensure passed CLI options take priority --- changelog/65358.fixed.md | 1 + salt/utils/parsers.py | 2 + .../unit/utils/parsers/test_saltfile_mixin.py | 141 ++++++++++++++---- 3 files changed, 119 insertions(+), 25 deletions(-) create mode 100644 changelog/65358.fixed.md diff --git a/changelog/65358.fixed.md b/changelog/65358.fixed.md new file mode 100644 index 00000000000..9a9acc31b4d --- /dev/null +++ b/changelog/65358.fixed.md @@ -0,0 +1 @@ +Ensure CLI options take priority over Saltfile options diff --git a/salt/utils/parsers.py b/salt/utils/parsers.py index 06858c6122f..f3ba1948d89 100644 --- a/salt/utils/parsers.py +++ b/salt/utils/parsers.py @@ -454,6 +454,7 @@ class SaltfileMixIn(metaclass=MixInMeta): if value != default: # The user passed an argument, we won't override it with the # one from Saltfile, if any + cli_config.pop(option.dest) continue # We reached this far! 
Set the Saltfile value on the option @@ -477,6 +478,7 @@ class SaltfileMixIn(metaclass=MixInMeta): if value != default: # The user passed an argument, we won't override it with # the one from Saltfile, if any + cli_config.pop(option.dest) continue setattr(self.options, option.dest, cli_config[option.dest]) diff --git a/tests/pytests/unit/utils/parsers/test_saltfile_mixin.py b/tests/pytests/unit/utils/parsers/test_saltfile_mixin.py index 5ea20aad5ed..fa99f26c081 100644 --- a/tests/pytests/unit/utils/parsers/test_saltfile_mixin.py +++ b/tests/pytests/unit/utils/parsers/test_saltfile_mixin.py @@ -7,6 +7,7 @@ import shutil import pytest +import salt.exceptions import salt.utils.parsers from tests.support.helpers import patched_environ from tests.support.mock import patch @@ -52,6 +53,11 @@ class MockSaltfileParser( default=None, help="Write the output to the specified file.", ) + group.add_option( + "--version-arg", + action="version", + help="Option to test no dest", + ) @pytest.fixture @@ -59,14 +65,6 @@ def parser(): return MockSaltfileParser() -# @pytest.fixture -# def parser(): -# # Mock this because we don't need it and it causes an error -# # if there is more than one test being run in this file -# with patch.object(salt.utils.parsers.LogLevelMixIn, "_LogLevelMixIn__setup_logging_routines"): -# yield salt.utils.parsers.SaltCallOptionParser() - - @pytest.fixture def saltfile(tmp_path): fp = tmp_path / "Saltfile" @@ -74,42 +72,53 @@ def saltfile(tmp_path): return fp -@pytest.fixture -def base_opts(): - # return ["--local", "test.ping"] - return [] - - -def test_saltfile_in_environment(parser, saltfile, base_opts): +def test_saltfile_in_environment(parser, saltfile): """ Test setting the SALT_SALTFILE environment variable """ with patched_environ(SALT_SALTFILE=str(saltfile)): - parser.parse_args(base_opts) + parser.parse_args([]) assert parser.options.saltfile == str(saltfile) -def test_saltfile_option(parser, saltfile, base_opts): +def test_saltfile_option(parser, 
saltfile): """ - Test setting the SALT_SALTFILE environment variable + Test setting the saltfile via the CLI """ - parser.parse_args(base_opts + ["--saltfile", str(saltfile)]) + parser.parse_args(["--saltfile", str(saltfile)]) assert parser.options.saltfile == str(saltfile) -def test_saltfile_cwd(parser, saltfile, base_opts, tmp_path): +def test_bad_saltfile_option(parser, saltfile, tmp_path): """ - Test setting the SALT_SALTFILE environment variable + Test setting a bad saltfile via the CLI + """ + with pytest.raises(SystemExit): + parser.parse_args(["--saltfile", str(tmp_path / "fake_dir")]) + + +def test_saltfile_cwd(parser, saltfile, tmp_path): + """ + Test using a saltfile in the cwd """ with patch("os.getcwd", return_value=str(tmp_path)) as cwd_mock: - parser.parse_args(base_opts) + parser.parse_args([]) assert parser.options.saltfile == str(saltfile) cwd_mock.assert_called_once() -def test_saltfile_user_home(parser, saltfile, base_opts, tmp_path): +def test_saltfile_cwd_doesnt_exist(parser, saltfile, tmp_path): """ - Test setting the SALT_SALTFILE environment variable + Test using a saltfile in the cwd that doesn't exist + """ + with patch("os.getcwd", return_value=str(tmp_path / "fake_dir")) as cwd_mock: + parser.parse_args([]) + assert parser.options.saltfile is None + + +def test_saltfile_user_home(parser, saltfile, tmp_path): + """ + Test using a saltfile in ~/.salt/ """ fake_dir = tmp_path / "fake_dir" fake_dir.mkdir() @@ -119,7 +128,89 @@ def test_saltfile_user_home(parser, saltfile, base_opts, tmp_path): salt_subdir.mkdir() dest = str(salt_subdir / "Saltfile") shutil.copy(str(saltfile), dest) - parser.parse_args(base_opts) + parser.parse_args([]) assert parser.options.saltfile == dest cwd_mock.assert_called_once() eu_mock.assert_called_with("~") + + +def test_bad_saltfile(parser, saltfile): + """ + Test a saltfile with bad configuration + """ + contents = """ + bad "yaml": + - this is: bad yaml + - bad yaml=data: + - {"bad": yaml, "data": "yaml"} + 
""" + saltfile.write_text(contents) + # It raises two errors, let's catch them both + with pytest.raises(SystemExit): + with pytest.raises(salt.exceptions.SaltConfigurationError): + parser.parse_args(["--saltfile", str(saltfile)]) + + +def test_saltfile_without_prog_name(parser, saltfile): + """ + Test a saltfile with valid yaml but without the program name in it + """ + contents = "good: yaml" + saltfile.write_text(contents) + # This should just run cleanly + parser.parse_args(["--saltfile", str(saltfile)]) + + +def test_saltfile(parser, saltfile): + """ + Test a valid saltfile + """ + contents = """ + __main__.py: + log_level: debug + output: json + """ + saltfile.write_text(contents) + parser.parse_args(["--saltfile", str(saltfile)]) + print(parser.option_list) + assert parser.options.log_level == "debug" + assert parser.options.output == "json" + + +def test_saltfile_unusual_option(parser, saltfile): + """ + Test a valid saltfile + """ + contents = """ + __main__.py: + go: birds + """ + saltfile.write_text(contents) + parser.parse_args(["--saltfile", str(saltfile)]) + assert parser.options.go == "birds" + + +def test_saltfile_cli_override(parser, saltfile): + """ + Test a valid saltfile + """ + contents = """ + __main__.py: + log_level: debug + output: json + output_file: /fake/file + """ + saltfile.write_text(contents) + parser.parse_args( + [ + "--saltfile", + str(saltfile), + "--log-level", + "info", + "--out-file", + "/still/fake/file", + ] + ) + assert parser.options.log_level == "info" + assert parser.options.output == "json" + assert parser.options.output_file == "/still/fake/file" From eda790d4957319cb745fdcba235b5dec55904be4 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Thu, 19 Oct 2023 15:26:40 -0400 Subject: [PATCH 061/196] Implement review feedback --- .../unit/utils/parsers/test_daemon_mixin.py | 5 +- .../unit/utils/parsers/test_log_parsers.py | 207 +++++++++--------- 2 files changed, 110 insertions(+), 102 deletions(-) diff --git 
a/tests/pytests/unit/utils/parsers/test_daemon_mixin.py b/tests/pytests/unit/utils/parsers/test_daemon_mixin.py index 0ecddd9280d..ea835d90e4a 100644 --- a/tests/pytests/unit/utils/parsers/test_daemon_mixin.py +++ b/tests/pytests/unit/utils/parsers/test_daemon_mixin.py @@ -13,8 +13,7 @@ from tests.support.mock import ANY, MagicMock, patch @pytest.fixture def daemon_mixin(): mixin = salt.utils.parsers.DaemonMixIn() - mixin.config = {} - mixin.config["pidfile"] = "/some/fake.pid" + mixin.config = {"pidfile": "/some/fake.pid"} return mixin @@ -26,7 +25,7 @@ def test_pid_file_deletion(daemon_mixin): with patch("os.path.isfile", MagicMock(return_value=True)): with patch("salt.utils.parsers.log", MagicMock()) as log_mock: daemon_mixin._mixin_before_exit() - assert unlink_mock.call_count == 1 + unlink_mock.assert_called_once() log_mock.info.assert_not_called() log_mock.debug.assert_not_called() diff --git a/tests/pytests/unit/utils/parsers/test_log_parsers.py b/tests/pytests/unit/utils/parsers/test_log_parsers.py index 52a0958b10c..2b56ccc0da4 100644 --- a/tests/pytests/unit/utils/parsers/test_log_parsers.py +++ b/tests/pytests/unit/utils/parsers/test_log_parsers.py @@ -14,7 +14,6 @@ import salt.syspaths import salt.utils.jid import salt.utils.parsers import salt.utils.platform -from tests.support.helpers import TstSuiteLoggingHandler from tests.support.mock import MagicMock, patch log = logging.getLogger(__name__) @@ -160,6 +159,12 @@ class LogImplMock: self.setup_extended_logging(opts) self.setup_log_granular_levels(opts["log_granular_levels"]) + def __enter__(self): + return self + + def __exit__(self, *_): + self._destroy() + # <----------- START TESTS -----------> @@ -192,34 +197,44 @@ def log_cli_parser(request): @pytest.fixture def default_config(log_cli_parser): - param_map = { - "master": salt.config.DEFAULT_MASTER_OPTS.copy(), - "minion": salt.config.DEFAULT_MINION_OPTS.copy(), - "proxyminion": { + if log_cli_parser == "master": + return 
salt.config.DEFAULT_MASTER_OPTS.copy() + elif log_cli_parser == "minion": + return salt.config.DEFAULT_MINION_OPTS.copy() + elif log_cli_parser == "proxyminion": + return { **salt.config.DEFAULT_MINION_OPTS.copy(), - **salt.config.DEFAULT_PROXY_MINION_OPTS, - }, - "syndic": salt.config.DEFAULT_MASTER_OPTS.copy(), - "saltcmd": salt.config.DEFAULT_MASTER_OPTS.copy(), - "saltcp": salt.config.DEFAULT_MASTER_OPTS.copy(), - "saltkey": salt.config.DEFAULT_MASTER_OPTS.copy(), - "saltcall": salt.config.DEFAULT_MINION_OPTS.copy(), - "saltrun": salt.config.DEFAULT_MASTER_OPTS.copy(), - "saltssh": salt.config.DEFAULT_MASTER_OPTS.copy(), - "saltcloud": { + **salt.config.DEFAULT_PROXY_MINION_OPTS.copy(), + } + elif log_cli_parser == "syndic": + return salt.config.DEFAULT_MASTER_OPTS.copy() + elif log_cli_parser == "saltcmd": + return salt.config.DEFAULT_MASTER_OPTS.copy() + elif log_cli_parser == "saltcp": + return salt.config.DEFAULT_MASTER_OPTS.copy() + elif log_cli_parser == "saltkey": + return salt.config.DEFAULT_MASTER_OPTS.copy() + elif log_cli_parser == "saltcall": + return salt.config.DEFAULT_MINION_OPTS.copy() + elif log_cli_parser == "saltrun": + return salt.config.DEFAULT_MASTER_OPTS.copy() + elif log_cli_parser == "saltssh": + return salt.config.DEFAULT_MASTER_OPTS.copy() + elif log_cli_parser == "saltcloud": + return { **salt.config.DEFAULT_MASTER_OPTS.copy(), - **salt.config.DEFAULT_CLOUD_OPTS, - }, - "spm": { + **salt.config.DEFAULT_CLOUD_OPTS.copy(), + } + elif log_cli_parser == "spm": + return { **salt.config.DEFAULT_MASTER_OPTS.copy(), - **salt.config.DEFAULT_SPM_OPTS, - }, - "saltapi": { + **salt.config.DEFAULT_SPM_OPTS.copy(), + } + elif log_cli_parser == "saltapi": + return { **salt.config.DEFAULT_MASTER_OPTS.copy(), - **salt.config.DEFAULT_API_OPTS, - }, - } - return param_map[log_cli_parser] + **salt.config.DEFAULT_API_OPTS.copy(), + } @pytest.fixture @@ -322,20 +337,19 @@ def log_impl(): """ Mock logger functions """ - _log_impl = LogImplMock() - 
mocked_functions = {} - for name in dir(_log_impl): - if name.startswith("_"): - continue - func = getattr(_log_impl, name) - if not callable(func): - continue - mocked_functions[name] = func + with LogImplMock() as _log_impl: + mocked_functions = {} + for name in dir(_log_impl): + if name.startswith("_"): + continue + func = getattr(_log_impl, name) + if not callable(func): + continue + mocked_functions[name] = func - patcher = patch.multiple(salt._logging, **mocked_functions) - with patcher: - yield _log_impl - _log_impl._destroy() + patcher = patch.multiple(salt._logging, **mocked_functions) + with patcher: + yield _log_impl def test_get_log_level_cli( @@ -351,11 +365,11 @@ def test_get_log_level_cli( log_level = "critical" args = ["--log-level", log_level] + args - parser = parser() + instance = parser() with patch(config_func, MagicMock(return_value=testing_config)): - parser.parse_args(args) + instance.parse_args(args) - console_log_level = getattr(parser.options, loglevel_config_setting_name) + console_log_level = getattr(instance.options, loglevel_config_setting_name) # Check console log level setting assert console_log_level == log_level @@ -375,14 +389,13 @@ def test_get_log_level_config( """ # Set log level in config log_level = "info" - opts = testing_config.copy() - opts.update({loglevel_config_setting_name: log_level}) + testing_config.update({loglevel_config_setting_name: log_level}) - parser = parser() - with patch(config_func, MagicMock(return_value=opts)): - parser.parse_args(args) + instance = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + instance.parse_args(args) - console_log_level = getattr(parser.options, loglevel_config_setting_name) + console_log_level = getattr(instance.options, loglevel_config_setting_name) # Check console log level setting assert console_log_level == log_level @@ -403,11 +416,11 @@ def test_get_log_level_default( # Set defaults log_level = default_log_level = 
testing_config[loglevel_config_setting_name] - parser = parser() + instance = parser() with patch(config_func, MagicMock(return_value=testing_config)): - parser.parse_args(args) + instance.parse_args(args) - console_log_level = getattr(parser.options, loglevel_config_setting_name) + console_log_level = getattr(instance.options, loglevel_config_setting_name) # Check log level setting assert console_log_level == log_level @@ -421,7 +434,7 @@ def test_get_log_level_default( # Check help message assert ( "Default: '{}'.".format(default_log_level) - in parser.get_option("--log-level").help + in instance.get_option("--log-level").help ) @@ -448,11 +461,11 @@ def test_get_log_file_cli( log_file = "{}_cli.log".format(log_file) args = ["--log-file", log_file] + args - parser = parser() + instance = parser() with patch(config_func, MagicMock(return_value=testing_config)): - parser.parse_args(args) + instance.parse_args(args) - log_file_option = getattr(parser.options, logfile_config_setting_name) + log_file_option = getattr(instance.options, logfile_config_setting_name) # Check console logger assert log_impl.log_level_console == log_level @@ -485,14 +498,13 @@ def test_get_log_file_config( # Set log file in config log_file = "{}_config.log".format(log_file) - opts = testing_config.copy() - opts.update({logfile_config_setting_name: log_file}) + testing_config.update({logfile_config_setting_name: log_file}) - parser = parser() - with patch(config_func, MagicMock(return_value=opts)): - parser.parse_args(args) + instance = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + instance.parse_args(args) - log_file_option = getattr(parser.options, logfile_config_setting_name) + log_file_option = getattr(instance.options, logfile_config_setting_name) # Check console logger assert log_impl.log_level_console == log_level @@ -525,11 +537,11 @@ def test_get_log_file_default( log_file = testing_config[logfile_config_setting_name] default_log_file = 
default_config[logfile_config_setting_name] - parser = parser() + instance = parser() with patch(config_func, MagicMock(return_value=testing_config)): - parser.parse_args(args) + instance.parse_args(args) - log_file_option = getattr(parser.options, logfile_config_setting_name) + log_file_option = getattr(instance.options, logfile_config_setting_name) # Check console logger assert log_impl.log_level_console == log_level @@ -545,7 +557,7 @@ def test_get_log_file_default( # Check help message assert ( "Default: '{}'.".format(default_log_file) - in parser.get_option("--log-file").help + in instance.get_option("--log-file").help ) @@ -571,12 +583,12 @@ def test_get_log_file_level_cli( log_level_logfile = "error" args = ["--log-file-level", log_level_logfile] + args - parser = parser() + instance = parser() with patch(config_func, MagicMock(return_value=testing_config)): - parser.parse_args(args) + instance.parse_args(args) log_level_logfile_option = getattr( - parser.options, logfile_loglevel_config_setting_name + instance.options, logfile_loglevel_config_setting_name ) # Check console logger @@ -609,15 +621,14 @@ def test_get_log_file_level_config( # Set log file level in config log_level_logfile = "info" - opts = testing_config.copy() - opts.update({logfile_loglevel_config_setting_name: log_level_logfile}) + testing_config.update({logfile_loglevel_config_setting_name: log_level_logfile}) - parser = parser() - with patch(config_func, MagicMock(return_value=opts)): - parser.parse_args(args) + instance = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + instance.parse_args(args) log_level_logfile_option = getattr( - parser.options, logfile_loglevel_config_setting_name + instance.options, logfile_loglevel_config_setting_name ) # Check console logger @@ -651,12 +662,12 @@ def test_get_log_file_level_default( log_level = default_log_level log_level_logfile = default_log_level - parser = parser() + instance = parser() with patch(config_func, 
MagicMock(return_value=testing_config)): - parser.parse_args(args) + instance.parse_args(args) log_level_logfile_option = getattr( - parser.options, logfile_loglevel_config_setting_name + instance.options, logfile_loglevel_config_setting_name ) # Check console logger @@ -673,7 +684,7 @@ def test_get_log_file_level_default( # Check help message assert ( "Default: '{}'.".format(default_log_level) - in parser.get_option("--log-file-level").help + in instance.get_option("--log-file-level").help ) @@ -694,15 +705,14 @@ def test_get_console_log_level_with_file_log_level( args = ["--log-file-level", log_level_logfile] + args - opts = testing_config.copy() - opts.update({loglevel_config_setting_name: log_level}) + testing_config.update({loglevel_config_setting_name: log_level}) - parser = parser() - with patch(config_func, MagicMock(return_value=opts)): - parser.parse_args(args) + instance = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + instance.parse_args(args) log_level_logfile_option = getattr( - parser.options, logfile_loglevel_config_setting_name + instance.options, logfile_loglevel_config_setting_name ) # Check console logger @@ -724,15 +734,14 @@ def test_log_created( """ Tests that log file is created """ - opts = testing_config.copy() - opts.update({"log_file": str(log_file)}) + testing_config.update({"log_file": str(log_file)}) log_file_name = str(log_file) if log_file_name.split(os.sep)[-1] != "log_file": - opts.update({log_file_name: str(log_file)}) + testing_config.update({log_file_name: str(log_file)}) - parser = parser() - with patch(config_func, MagicMock(return_value=opts)): - parser.parse_args(args) + instance = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + instance.parse_args(args) assert os.path.exists(str(log_file_name)) @@ -748,28 +757,28 @@ def test_callbacks_uniqueness(parser): "_mixin_after_parsed_funcs", "_mixin_before_exit_funcs", ) - _parser = parser() + instance = parser() nums_1 = 
{} for cb_container in mixin_container_names: - obj = getattr(_parser, cb_container) + obj = getattr(instance, cb_container) nums_1[cb_container] = len(obj) # The next time we instantiate the parser, the counts should be equal - _parser = parser() + instance = parser() nums_2 = {} for cb_container in mixin_container_names: - obj = getattr(_parser, cb_container) + obj = getattr(instance, cb_container) nums_2[cb_container] = len(obj) assert nums_1 == nums_2 -def test_verify_log_warning_logged(args, config_func, testing_config, parser): +def test_verify_log_warning_logged(args, config_func, testing_config, parser, caplog): args = ["--log-level", "debug"] + args - with TstSuiteLoggingHandler(level=logging.DEBUG) as handler: - parser = parser() + with caplog.at_level(logging.DEBUG): + instance = parser() with patch(config_func, MagicMock(return_value=testing_config)): - parser.parse_args(args) + instance.parse_args(args) assert ( - "WARNING:Insecure logging configuration detected! Sensitive data may be logged." - in handler.messages + "Insecure logging configuration detected! Sensitive data may be logged." 
+ in caplog.messages ) From b744a4a33447578d5a9291605c765dc980bda091 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Thu, 5 Oct 2023 11:10:11 -0600 Subject: [PATCH 062/196] Initial port from unittest to pytest --- tests/pytests/unit/utils/test_network.py | 1341 +++++++++++++++++++++- tests/unit/utils/test_network.py | 1313 --------------------- 2 files changed, 1337 insertions(+), 1317 deletions(-) delete mode 100644 tests/unit/utils/test_network.py diff --git a/tests/pytests/unit/utils/test_network.py b/tests/pytests/unit/utils/test_network.py index c5f976f6749..42078bd571a 100644 --- a/tests/pytests/unit/utils/test_network.py +++ b/tests/pytests/unit/utils/test_network.py @@ -1,8 +1,1341 @@ +import logging +import socket +import textwrap +import time + +import pytest + +import salt.exceptions import salt.utils.network +import salt.utils.network as network +from salt._compat import ipaddress +from tests.support.mock import MagicMock, create_autospec, mock_open, patch + +log = logging.getLogger(__name__) + +LINUX = """\ +eth0 Link encap:Ethernet HWaddr e0:3f:49:85:6a:af + inet addr:10.10.10.56 Bcast:10.10.10.255 Mask:255.255.252.0 + inet6 addr: fe80::e23f:49ff:fe85:6aaf/64 Scope:Link + UP BROADCAST RUNNING MULTICAST MTU:1500 Metric:1 + RX packets:643363 errors:0 dropped:0 overruns:0 frame:0 + TX packets:196539 errors:0 dropped:0 overruns:0 carrier:0 + collisions:0 txqueuelen:1000 + RX bytes:386388355 (368.4 MiB) TX bytes:25600939 (24.4 MiB) + +lo Link encap:Local Loopback + inet addr:127.0.0.1 Mask:255.0.0.0 + inet6 addr: ::1/128 Scope:Host + UP LOOPBACK RUNNING MTU:65536 Metric:1 + RX packets:548901 errors:0 dropped:0 overruns:0 frame:0 + TX packets:548901 errors:0 dropped:0 overruns:0 carrier:0 + collisions:0 txqueuelen:0 + RX bytes:613479895 (585.0 MiB) TX bytes:613479895 (585.0 MiB) +""" + +FREEBSD = """ +em0: flags=8843 metric 0 mtu 1500 + options=4219b + ether 00:30:48:ff:ff:ff + inet 10.10.10.250 netmask 0xffffffe0 broadcast 
10.10.10.255 + inet 10.10.10.56 netmask 0xffffffc0 broadcast 10.10.10.63 + media: Ethernet autoselect (1000baseT ) + status: active +em1: flags=8c02 metric 0 mtu 1500 + options=4219b + ether 00:30:48:aa:aa:aa + media: Ethernet autoselect + status: no carrier +plip0: flags=8810 metric 0 mtu 1500 +lo0: flags=8049 metric 0 mtu 16384 + options=3 + inet6 fe80::1%lo0 prefixlen 64 scopeid 0x8 + inet6 ::1 prefixlen 128 + inet 127.0.0.1 netmask 0xff000000 + nd6 options=3 +tun0: flags=8051 metric 0 mtu 1500 + options=80000 + inet 10.12.0.1 --> 10.12.0.2 netmask 0xffffffff + Opened by PID 1964 +""" + +SOLARIS = """\ +lo0: flags=2001000849 mtu 8232 index 1 + inet 127.0.0.1 netmask ff000000 +net0: flags=100001100943 mtu 1500 index 2 + inet 10.10.10.38 netmask ffffffe0 broadcast 10.10.10.63 +ilbint0: flags=110001100843 mtu 1500 index 3 + inet 10.6.0.11 netmask ffffff00 broadcast 10.6.0.255 +ilbext0: flags=110001100843 mtu 1500 index 4 + inet 10.10.11.11 netmask ffffffe0 broadcast 10.10.11.31 +ilbext0:1: flags=110001100843 mtu 1500 index 4 + inet 10.10.11.12 netmask ffffffe0 broadcast 10.10.11.31 +vpn0: flags=1000011008d1 mtu 1480 index 5 + inet tunnel src 10.10.11.12 tunnel dst 10.10.5.5 + tunnel hop limit 64 + inet 10.6.0.14 --> 10.6.0.15 netmask ff000000 +lo0: flags=2002000849 mtu 8252 index 1 + inet6 ::1/128 +net0: flags=120002004941 mtu 1500 index 2 + inet6 fe80::221:9bff:fefd:2a22/10 +ilbint0: flags=120002000840 mtu 1500 index 3 + inet6 ::/0 +ilbext0: flags=120002000840 mtu 1500 index 4 + inet6 ::/0 +vpn0: flags=120002200850 mtu 1480 index 5 + inet tunnel src 10.10.11.12 tunnel dst 10.10.5.5 + tunnel hop limit 64 + inet6 ::/0 --> fe80::b2d6:7c10 +""" + +NETBSD = """\ +vioif0: flags=0x8943 mtu 1500 + ec_capabilities=1 + ec_enabled=0 + address: 00:a0:98:e6:83:18 + inet 192.168.1.80/24 broadcast 192.168.1.255 flags 0x0 + inet6 fe80::2a0:98ff:fee6:8318%vioif0/64 flags 0x0 scopeid 0x1 +lo0: flags=0x8049 mtu 33624 + inet 127.0.0.1/8 flags 0x0 + inet6 ::1/128 flags 0x20 + inet6 
fe80::1%lo0/64 flags 0x0 scopeid 0x2 +""" + +FREEBSD_SOCKSTAT = """\ +USER COMMAND PID FD PROTO LOCAL ADDRESS FOREIGN ADDRESS +root python2.7 1294 41 tcp4 127.0.0.1:61115 127.0.0.1:4506 +""" + +FREEBSD_SOCKSTAT_WITH_FAT_PID = """\ +USER COMMAND PID FD PROTO LOCAL ADDRESS FOREIGN ADDRESS +salt-master python2.781106 35 tcp4 127.0.0.1:61115 127.0.0.1:4506 +""" + +OPENBSD_NETSTAT = """\ +Active Internet connections +Proto Recv-Q Send-Q Local Address Foreign Address (state) +tcp 0 0 127.0.0.1.61115 127.0.0.1.4506 ESTABLISHED +""" + +LINUX_NETLINK_SS_OUTPUT = """\ +State Recv-Q Send-Q Local Address:Port Peer Address:Port +TIME-WAIT 0 0 [::1]:8009 [::1]:40368 +LISTEN 0 128 127.0.0.1:5903 0.0.0.0:* +ESTAB 0 0 [::ffff:127.0.0.1]:4506 [::ffff:127.0.0.1]:32315 +ESTAB 0 0 192.168.122.1:4506 192.168.122.177:24545 +ESTAB 0 0 127.0.0.1:56726 127.0.0.1:4505 +ESTAB 0 0 ::ffff:1.2.3.4:5678 ::ffff:1.2.3.4:4505 +""" + +IPV4_SUBNETS = { + True: ("10.10.0.0/24",), + False: ("10.10.0.0", "10.10.0.0/33", "FOO", 9, "0.9.800.1000/24"), +} +IPV6_SUBNETS = { + True: ("::1/128",), + False: ("::1", "::1/129", "FOO", 9, "aj01::feac/64"), +} -def test_junos_ifconfig_output_parsing(): - ret = salt.utils.network._junos_interfaces_ifconfig( - "inet mtu 0 local=" + " " * 3456 +def test_sanitize_host_ip(): + ret = network.sanitize_host("10.1./2.$3") + assert ret == "10.1.2.3" + + +def test_sanitize_host_name(): + """ + Should not remove the underscore + """ + ret = network.sanitize_host("foo_bar") + assert ret == "foo_bar" + + +def test_host_to_ips(): + """ + NOTE: When this test fails it's usually because the IP address has + changed. In these cases, we just need to update the IP address in the + assertion. 
+ """ + + _side_effect_ipv4 = { + "github.com": [ + (2, 1, 6, "", ("192.30.255.112", 0)), + (2, 1, 6, "", ("192.30.255.113", 0)), + ], + } + + _side_effect_ipv6 = { + "ipv6host.foo": [ + (socket.AF_INET6, 1, 6, "", ("2001:a71::1", 0, 0, 0)), + ], + } + ## getaddrinfo_mock = MagicMock(side_effect=_side_effect) + ## with patch.object(socket, "getaddrinfo", getaddrinfo_mock): + with patch.object(socket, "getaddrinfo", MagicMock(side_effect=_side_effect_ipv4)): + # Test host that can be resolved, ipv4 + ret = network.host_to_ips("github.com") + assert ret == ["192.30.255.112", "192.30.255.113"] + + with patch.object(socket, "getaddrinfo", MagicMock(side_effect=_side_effect_ipv6)): + # Test ipv6 + ret = network.host_to_ips("ipv6host.foo") + assert ret == ["2001:a71::1"] + # Test host that can't be resolved + ret = network.host_to_ips("someothersite.com") + assert ret is None + + +def test_generate_minion_id(): + assert network.generate_minion_id() + + +def test__generate_minion_id_with_unicode_in_etc_hosts(): + """ + Test that unicode in /etc/hosts doesn't raise an error when + _generate_minion_id() helper is called to gather the hosts. 
+ """ + content = textwrap.dedent( + """\ + # 以下为主机名解析 + ## ccc + 127.0.0.1 localhost thisismyhostname # 本机 + """ ) - assert ret == {"inet": {"up": False}} + fopen_mock = mock_open(read_data={"/etc/hosts": content}) + with patch("salt.utils.files.fopen", fopen_mock): + assert "thisismyhostname" in network._generate_minion_id() + + +def test_is_ip(): + assert network.is_ip("10.10.0.3") + assert not network.is_ip("0.9.800.1000") + # Check 16-char-long unicode string + # https://github.com/saltstack/salt/issues/51258 + assert not network.is_ipv6("sixteen-char-str") + + +def test_is_ipv4(): + assert network.is_ipv4("10.10.0.3") + assert not network.is_ipv4("10.100.1") + assert not network.is_ipv4("2001:db8:0:1:1:1:1:1") + # Check 16-char-long unicode string + # https://github.com/saltstack/salt/issues/51258 + assert not network.is_ipv4("sixteen-char-str") + + +def test_is_ipv6(): + assert network.is_ipv6("2001:db8:0:1:1:1:1:1") + assert network.is_ipv6("0:0:0:0:0:0:0:1") + assert network.is_ipv6("::1") + assert network.is_ipv6("::") + assert network.is_ipv6("2001:0db8:85a3:0000:0000:8a2e:0370:7334") + assert network.is_ipv6("2001:0db8:85a3::8a2e:0370:7334") + assert not network.is_ipv6("2001:0db8:0370:7334") + assert not network.is_ipv6("2001:0db8:::0370:7334") + assert not network.is_ipv6("10.0.1.2") + assert not network.is_ipv6("2001.0db8.85a3.0000.0000.8a2e.0370.7334") + # Check 16-char-long unicode string + # https://github.com/saltstack/salt/issues/51258 + assert not network.is_ipv6("sixteen-char-str") + + +def test_ipv6(): + assert network.ipv6("2001:db8:0:1:1:1:1:1") + assert network.ipv6("0:0:0:0:0:0:0:1") + assert network.ipv6("::1") + assert network.ipv6("::") + assert network.ipv6("2001:0db8:85a3:0000:0000:8a2e:0370:7334") + assert network.ipv6("2001:0db8:85a3::8a2e:0370:7334") + assert network.ipv6("2001:67c:2e8::/48") + + +def test_is_loopback(): + assert network.is_loopback("127.0.1.1") + assert network.is_loopback("::1") + assert not 
network.is_loopback("10.0.1.2") + assert not network.is_loopback("2001:db8:0:1:1:1:1:1") + # Check 16-char-long unicode string + # https://github.com/saltstack/salt/issues/51258 + assert not network.is_ipv6("sixteen-char-str") + + +def test_parse_host_port(): + _ip = ipaddress.ip_address + good_host_ports = { + "10.10.0.3": (_ip("10.10.0.3").compressed, None), + "10.10.0.3:1234": (_ip("10.10.0.3").compressed, 1234), + "2001:0db8:85a3::8a2e:0370:7334": ( + _ip("2001:0db8:85a3::8a2e:0370:7334").compressed, + None, + ), + "[2001:0db8:85a3::8a2e:0370:7334]:1234": ( + _ip("2001:0db8:85a3::8a2e:0370:7334").compressed, + 1234, + ), + "2001:0db8:85a3::7334": (_ip("2001:0db8:85a3::7334").compressed, None), + "[2001:0db8:85a3::7334]:1234": ( + _ip("2001:0db8:85a3::7334").compressed, + 1234, + ), + } + bad_host_ports = [ + "10.10.0.3/24", + "10.10.0.3::1234", + "2001:0db8:0370:7334", + "2001:0db8:0370::7334]:1234", + "2001:0db8:0370:0:a:b:c:d:1234", + "host name", + "host name:1234", + "10.10.0.3:abcd", + ] + for host_port, assertion_value in good_host_ports.items(): + host = port = None + host, port = network.parse_host_port(host_port) + assert (host, port) == assertion_value + + for host_port in bad_host_ports: + try: + pytest.raises(ValueError, network.parse_host_port, host_port) + except AssertionError as _e_: + log.error( + 'bad host_port value: "%s" failed to trigger ValueError exception', + host_port, + ) + raise _e_ + + +def test_dns_check(): + hosts = [ + { + "host": "10.10.0.3", + "port": "", + "mocked": [(2, 1, 6, "", ("10.10.0.3", 0))], + "ret": "10.10.0.3", + }, + { + "host": "10.10.0.3", + "port": "1234", + "mocked": [(2, 1, 6, "", ("10.10.0.3", 0))], + "ret": "10.10.0.3", + }, + { + "host": "2001:0db8:85a3::8a2e:0370:7334", + "port": "", + "mocked": [(10, 1, 6, "", ("2001:db8:85a3::8a2e:370:7334", 0, 0, 0))], + "ret": "[2001:db8:85a3::8a2e:370:7334]", + }, + { + "host": "2001:0db8:85a3::8a2e:370:7334", + "port": "1234", + "mocked": [(10, 1, 6, "", 
("2001:db8:85a3::8a2e:370:7334", 0, 0, 0))], + "ret": "[2001:db8:85a3::8a2e:370:7334]", + }, + { + "host": "salt-master", + "port": "1234", + "mocked": [(2, 1, 6, "", ("127.0.0.1", 0))], + "ret": "127.0.0.1", + }, + ] + for host in hosts: + with patch.object( + socket, + "getaddrinfo", + create_autospec(socket.getaddrinfo, return_value=host["mocked"]), + ): + with patch("socket.socket", create_autospec(socket.socket)): + ret = network.dns_check(host["host"], host["port"]) + assert ret == host["ret"] + + +def test_dns_check_ipv6_filter(): + # raise exception to skip everything after the getaddrinfo call + with patch.object( + socket, + "getaddrinfo", + create_autospec(socket.getaddrinfo, side_effect=Exception), + ) as getaddrinfo: + for ipv6, param in [ + (None, socket.AF_UNSPEC), + (True, socket.AF_INET6), + (False, socket.AF_INET), + ]: + with pytest.raises(Exception): + network.dns_check("foo", "1", ipv6=ipv6) + getaddrinfo.assert_called_with("foo", "1", param, socket.SOCK_STREAM) + + +def test_dns_check_errors(): + with patch.object( + socket, "getaddrinfo", create_autospec(socket.getaddrinfo, return_value=[]) + ): + with pytest.raises( + salt.exceptions.SaltSystemExit, + match="DNS lookup or connection check of 'foo' failed.", + ) as exc_info: + network.dns_check("foo", "1") + + with patch.object( + socket, + "getaddrinfo", + create_autospec(socket.getaddrinfo, side_effect=TypeError), + ): + with pytest.raises( + salt.exceptions.SaltSystemExit, match="Invalid or unresolveable address" + ) as exc_info2: + network.dns_check("foo", "1") + + +def test_test_addrs(): + # subset of real data from getaddrinfo against saltstack.com + addrinfo = [ + (30, 2, 17, "", ("2600:9000:21eb:a800:8:1031:abc0:93a1", 0, 0, 0)), + (30, 1, 6, "", ("2600:9000:21eb:a800:8:1031:abc0:93a1", 0, 0, 0)), + (30, 2, 17, "", ("2600:9000:21eb:b400:8:1031:abc0:93a1", 0, 0, 0)), + (30, 1, 6, "", ("2600:9000:21eb:b400:8:1031:abc0:93a1", 0, 0, 0)), + (2, 1, 6, "", ("13.35.99.52", 0)), + (2, 2, 17, 
"", ("13.35.99.85", 0)), + (2, 1, 6, "", ("13.35.99.85", 0)), + (2, 2, 17, "", ("13.35.99.122", 0)), + ] + with patch("socket.socket", create_autospec(socket.socket)) as s: + # we connect to the first address + addrs = network._test_addrs(addrinfo, 80) + assert len(addrs) == 1 + assert addrs[0] == addrinfo[0][4][0] + + # the first lookup fails, succeeds on next check + s.side_effect = [socket.error, MagicMock()] + addrs = network._test_addrs(addrinfo, 80) + assert len(addrs) == 1 + assert addrs[0] == addrinfo[2][4][0] + + # attempt to connect to resolved address with default timeout + s.side_effect = socket.error + addrs = network._test_addrs(addrinfo, 80) + time.sleep(2) + assert not len(addrs) == 0 + + # nothing can connect, but we've eliminated duplicates + s.side_effect = socket.error + addrs = network._test_addrs(addrinfo, 80) + assert len(addrs) == 5 + + +def test_is_subnet(): + for subnet_data in (IPV4_SUBNETS, IPV6_SUBNETS): + for item in subnet_data[True]: + log.debug("Testing that %s is a valid subnet", item) + assert network.is_subnet(item) + for item in subnet_data[False]: + log.debug("Testing that %s is not a valid subnet", item) + assert not network.is_subnet(item) + + +def test_is_ipv4_subnet(): + for item in IPV4_SUBNETS[True]: + log.debug("Testing that %s is a valid subnet", item) + assert network.is_ipv4_subnet(item) + for item in IPV4_SUBNETS[False]: + log.debug("Testing that %s is not a valid subnet", item) + assert not network.is_ipv4_subnet(item) + + +def test_is_ipv6_subnet(): + for item in IPV6_SUBNETS[True]: + log.debug("Testing that %s is a valid subnet", item) + assert network.is_ipv6_subnet(item) + for item in IPV6_SUBNETS[False]: + log.debug("Testing that %s is not a valid subnet", item) + assert not network.is_ipv6_subnet(item) + + +def test_cidr_to_ipv4_netmask(): + assert network.cidr_to_ipv4_netmask(24) == "255.255.255.0" + assert network.cidr_to_ipv4_netmask(21) == "255.255.248.0" + assert network.cidr_to_ipv4_netmask(17) == 
"255.255.128.0" + assert network.cidr_to_ipv4_netmask(9) == "255.128.0.0" + assert network.cidr_to_ipv4_netmask(36) == "" + assert network.cidr_to_ipv4_netmask("lol") == "" + + +def test_number_of_set_bits_to_ipv4_netmask(): + set_bits_to_netmask = network._number_of_set_bits_to_ipv4_netmask(0xFFFFFF00) + assert set_bits_to_netmask == "255.255.255.0" + set_bits_to_netmask = network._number_of_set_bits_to_ipv4_netmask(0xFFFF6400) + + +def test_hex2ip(): + assert network.hex2ip("0x4A7D2B63") == "74.125.43.99" + assert network.hex2ip("0x4A7D2B63", invert=True) == "99.43.125.74" + assert network.hex2ip("00000000000000000000FFFF7F000001") == "127.0.0.1" + assert ( + network.hex2ip("0000000000000000FFFF00000100007F", invert=True) == "127.0.0.1" + ) + assert network.hex2ip("20010DB8000000000000000000000000") == "2001:db8::" + assert ( + network.hex2ip("B80D0120000000000000000000000000", invert=True) == "2001:db8::" + ) + + +def test_interfaces_ifconfig_linux(): + interfaces = network._interfaces_ifconfig(LINUX) + assert interfaces == { + "eth0": { + "hwaddr": "e0:3f:49:85:6a:af", + "inet": [ + { + "address": "10.10.10.56", + "broadcast": "10.10.10.255", + "netmask": "255.255.252.0", + } + ], + "inet6": [ + { + "address": "fe80::e23f:49ff:fe85:6aaf", + "prefixlen": "64", + "scope": "link", + } + ], + "up": True, + }, + "lo": { + "inet": [{"address": "127.0.0.1", "netmask": "255.0.0.0"}], + "inet6": [{"address": "::1", "prefixlen": "128", "scope": "host"}], + "up": True, + }, + } + + +def test_interfaces_ifconfig_freebsd(): + interfaces = network._interfaces_ifconfig(FREEBSD) + assert interfaces == { + "": {"up": False}, + "em0": { + "hwaddr": "00:30:48:ff:ff:ff", + "inet": [ + { + "address": "10.10.10.250", + "broadcast": "10.10.10.255", + "netmask": "255.255.255.224", + }, + { + "address": "10.10.10.56", + "broadcast": "10.10.10.63", + "netmask": "255.255.255.192", + }, + ], + "up": True, + }, + "em1": {"hwaddr": "00:30:48:aa:aa:aa", "up": False}, + "lo0": { + "inet": 
[{"address": "127.0.0.1", "netmask": "255.0.0.0"}], + "inet6": [ + {"address": "fe80::1", "prefixlen": "64", "scope": "0x8"}, + {"address": "::1", "prefixlen": "128", "scope": None}, + ], + "up": True, + }, + "plip0": {"up": False}, + "tun0": { + "inet": [{"address": "10.12.0.1", "netmask": "255.255.255.255"}], + "up": True, + }, + } + + +def test_interfaces_ifconfig_solaris(): + with patch("salt.utils.platform.is_sunos", lambda: True): + interfaces = network._interfaces_ifconfig(SOLARIS) + expected_interfaces = { + "ilbint0": { + "inet6": [], + "inet": [ + { + "broadcast": "10.6.0.255", + "netmask": "255.255.255.0", + "address": "10.6.0.11", + } + ], + "up": True, + }, + "lo0": { + "inet6": [{"prefixlen": "128", "address": "::1"}], + "inet": [{"netmask": "255.0.0.0", "address": "127.0.0.1"}], + "up": True, + }, + "ilbext0": { + "inet6": [], + "inet": [ + { + "broadcast": "10.10.11.31", + "netmask": "255.255.255.224", + "address": "10.10.11.11", + }, + { + "broadcast": "10.10.11.31", + "netmask": "255.255.255.224", + "address": "10.10.11.12", + }, + ], + "up": True, + }, + "vpn0": { + "inet6": [], + "inet": [{"netmask": "255.0.0.0", "address": "10.6.0.14"}], + "up": True, + }, + "net0": { + "inet6": [{"prefixlen": "10", "address": "fe80::221:9bff:fefd:2a22"}], + "inet": [ + { + "broadcast": "10.10.10.63", + "netmask": "255.255.255.224", + "address": "10.10.10.38", + } + ], + "up": True, + }, + } + assert interfaces == expected_interfaces + + +def test_interfaces_ifconfig_netbsd(): + interfaces = network._netbsd_interfaces_ifconfig(NETBSD) + assert interfaces == { + "lo0": { + "inet": [{"address": "127.0.0.1", "netmask": "255.0.0.0"}], + "inet6": [{"address": "fe80::1", "prefixlen": "64", "scope": "lo0"}], + "up": True, + }, + "vioif0": { + "hwaddr": "00:a0:98:e6:83:18", + "inet": [ + { + "address": "192.168.1.80", + "broadcast": "192.168.1.255", + "netmask": "255.255.255.0", + } + ], + "inet6": [ + { + "address": "fe80::2a0:98ff:fee6:8318", + "prefixlen": "64", + 
"scope": "vioif0", + } + ], + "up": True, + }, + } + + +def test_freebsd_remotes_on(): + with patch("salt.utils.platform.is_sunos", lambda: False): + with patch("salt.utils.platform.is_freebsd", lambda: True): + with patch("subprocess.check_output", return_value=FREEBSD_SOCKSTAT): + remotes = network._freebsd_remotes_on("4506", "remote") + assert remotes == {"127.0.0.1"} + + +def test_freebsd_remotes_on_with_fat_pid(): + with patch("salt.utils.platform.is_sunos", lambda: False): + with patch("salt.utils.platform.is_freebsd", lambda: True): + with patch( + "subprocess.check_output", + return_value=FREEBSD_SOCKSTAT_WITH_FAT_PID, + ): + remotes = network._freebsd_remotes_on("4506", "remote") + assert remotes == {"127.0.0.1"} + + +def test_netlink_tool_remote_on_a(): + with patch("salt.utils.platform.is_sunos", lambda: False): + with patch("salt.utils.platform.is_linux", lambda: True): + with patch("subprocess.check_output", return_value=LINUX_NETLINK_SS_OUTPUT): + remotes = network._netlink_tool_remote_on("4506", "local_port") + assert remotes == {"192.168.122.177", "::ffff:127.0.0.1"} + + +def test_netlink_tool_remote_on_b(): + with patch("subprocess.check_output", return_value=LINUX_NETLINK_SS_OUTPUT): + remotes = network._netlink_tool_remote_on("4505", "remote_port") + assert remotes == {"127.0.0.1", "::ffff:1.2.3.4"} + + +def test_openbsd_remotes_on(): + with patch("subprocess.check_output", return_value=OPENBSD_NETSTAT): + remotes = network._openbsd_remotes_on("4506", "remote") + assert remotes == {"127.0.0.1"} + + +def test_openbsd_remotes_on_issue_61966(): + """ + Test that the command output is correctly converted to string before + treating it as such + """ + with patch("subprocess.check_output", return_value=OPENBSD_NETSTAT.encode()): + remotes = network._openbsd_remotes_on("4506", "remote") + assert remotes == {"127.0.0.1"} + + +def test_generate_minion_id_distinct(): + """ + Test if minion IDs are distinct in the pool. 
+ + :return: + """ + with patch("platform.node", MagicMock(return_value="nodename")), patch( + "socket.gethostname", MagicMock(return_value="hostname") + ), patch( + "socket.getfqdn", MagicMock(return_value="hostname.domainname.blank") + ), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "attrname", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["1.2.3.4", "5.6.7.8"]), + ): + assert network._generate_minion_id() == [ + "hostname.domainname.blank", + "nodename", + "hostname", + "1.2.3.4", + "5.6.7.8", + ] + + +def test_generate_minion_id_127_name(): + """ + Test if minion IDs can be named 127.foo + + :return: + """ + with patch("platform.node", MagicMock(return_value="127")), patch( + "socket.gethostname", MagicMock(return_value="127") + ), patch("socket.getfqdn", MagicMock(return_value="127.domainname.blank")), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "attrname", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["1.2.3.4", "5.6.7.8"]), + ): + assert network._generate_minion_id() == [ + "127.domainname.blank", + "127", + "1.2.3.4", + "5.6.7.8", + ] + + +def test_generate_minion_id_127_name_startswith(): + """ + Test if minion IDs can be named starting from "127" + + :return: + """ + with patch("platform.node", MagicMock(return_value="127890")), patch( + "socket.gethostname", MagicMock(return_value="127890") + ), patch( + "socket.getfqdn", MagicMock(return_value="127890.domainname.blank") + ), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "attrname", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["1.2.3.4", "5.6.7.8"]), + ): + assert network._generate_minion_id() == [ + "127890.domainname.blank", + "127890", + 
"1.2.3.4", + "5.6.7.8", + ] + + +def test_generate_minion_id_duplicate(): + """ + Test if IP addresses in the minion IDs are distinct in the pool + + :return: + """ + with patch("platform.node", MagicMock(return_value="hostname")), patch( + "socket.gethostname", MagicMock(return_value="hostname") + ), patch("socket.getfqdn", MagicMock(return_value="hostname")), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["1.2.3.4", "1.2.3.4", "1.2.3.4"]), + ): + assert network._generate_minion_id() == ["hostname", "1.2.3.4"] + + +def test_generate_minion_id_platform_used(): + """ + Test if platform.node is used for the first occurrence. + The platform.node is most common hostname resolver before anything else. + + :return: + """ + with patch( + "platform.node", MagicMock(return_value="very.long.and.complex.domain.name") + ), patch("socket.gethostname", MagicMock(return_value="hostname")), patch( + "socket.getfqdn", MagicMock(return_value="") + ), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["1.2.3.4", "1.2.3.4", "1.2.3.4"]), + ): + assert network.generate_minion_id() == "very.long.and.complex.domain.name" + + +def test_generate_minion_id_platform_localhost_filtered(): + """ + Test if localhost is filtered from the first occurrence. 
+ + :return: + """ + with patch("platform.node", MagicMock(return_value="localhost")), patch( + "socket.gethostname", MagicMock(return_value="pick.me") + ), patch( + "socket.getfqdn", MagicMock(return_value="hostname.domainname.blank") + ), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["1.2.3.4", "1.2.3.4", "1.2.3.4"]), + ): + assert network.generate_minion_id() == "hostname.domainname.blank" + + +## def test_generate_minion_id_platform_localhost_filtered_all(): +## """ +## Test if any of the localhost is filtered from everywhere. +## +## :return: +## """ +## with patch("platform.node", MagicMock(return_value="localhost")), patch( +## "socket.gethostname", MagicMock(return_value="ip6-loopback") +## ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( +## "socket.getaddrinfo", +## MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]), +## ), patch( +## "salt.utils.files.fopen", mock_open() +## ), patch( +## "salt.utils.network.ip_addrs", +## MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1", "1.2.3.4"]), +## ): +## assert network.generate_minion_id() == "1.2.3.4" +## +## +## def test_generate_minion_id_platform_localhost_only(): +## """ +## Test if there is no other choice but localhost. 
+## +## :return: +## """ +## with patch("platform.node", MagicMock(return_value="localhost")), patch( +## "socket.gethostname", MagicMock(return_value="ip6-loopback") +## ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( +## "socket.getaddrinfo", +## MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]), +## ), patch( +## "salt.utils.files.fopen", mock_open() +## ), patch( +## "salt.utils.network.ip_addrs", +## MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1"]), +## ): +## assert network.generate_minion_id() == "localhost" +## +## +## def test_generate_minion_id_platform_fqdn(): +## """ +## Test if fqdn is picked up. +## +## :return: +## """ +## with patch("platform.node", MagicMock(return_value="localhost")), patch( +## "socket.gethostname", MagicMock(return_value="ip6-loopback") +## ), patch("socket.getfqdn", MagicMock(return_value="pick.me")), patch( +## "socket.getaddrinfo", +## MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]), +## ), patch( +## "salt.utils.files.fopen", mock_open() +## ), patch( +## "salt.utils.network.ip_addrs", +## MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1"]), +## ): +## assert network.generate_minion_id() == "pick.me" +## +## +## def test_generate_minion_id_platform_localhost_addrinfo(): +## """ +## Test if addinfo is picked up. 
+## +## :return: +## """ +## with patch("platform.node", MagicMock(return_value="localhost")), patch( +## "socket.gethostname", MagicMock(return_value="ip6-loopback") +## ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( +## "socket.getaddrinfo", +## MagicMock(return_value=[(2, 3, 0, "pick.me", ("127.0.1.1", 0))]), +## ), patch( +## "salt.utils.files.fopen", mock_open() +## ), patch( +## "salt.utils.network.ip_addrs", +## MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1"]), +## ): +## assert network.generate_minion_id() == "pick.me" +## +## +## def test_generate_minion_id_platform_ip_addr_only(): +## """ +## Test if IP address is the only what is used as a Minion ID in case no DNS name. +## +## :return: +## """ +## with patch("platform.node", MagicMock(return_value="localhost")), patch( +## "socket.gethostname", MagicMock(return_value="ip6-loopback") +## ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( +## "socket.getaddrinfo", +## MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]), +## ), patch( +## "salt.utils.files.fopen", mock_open() +## ), patch( +## "salt.utils.network.ip_addrs", +## MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1", "1.2.3.4"]), +## ): +## assert network.generate_minion_id() == "1.2.3.4" +## +## +## def test_gen_mac(): +## with patch("random.randint", return_value=1) as random_mock: +## assert random_mock.return_value == 1 +## ret = network.gen_mac("00:16:3E") +## expected_mac = "00:16:3E:01:01:01" +## assert ret == expected_mac +## +## +## def test_mac_str_to_bytes(): +## pytest.raises(ValueError, network.mac_str_to_bytes, "31337") +## pytest.raises(ValueError, network.mac_str_to_bytes, "0001020304056") +## pytest.raises(ValueError, network.mac_str_to_bytes, "00:01:02:03:04:056") +## pytest.raises(ValueError, network.mac_str_to_bytes, "a0:b0:c0:d0:e0:fg") +## assert b"\x10\x08\x06\x04\x02\x00" == network.mac_str_to_bytes("100806040200") +## 
assert b"\xf8\xe7\xd6\xc5\xb4\xa3" == network.mac_str_to_bytes("f8e7d6c5b4a3") +## +## +## @pytest.mark.slow_test +## def test_generate_minion_id_with_long_hostname(): +## """ +## Validate the fix for: +## +## https://github.com/saltstack/salt/issues/51160 +## """ +## long_name = "localhost-abcdefghijklmnopqrstuvwxyz-abcdefghijklmnopqrstuvwxyz" +## with patch("socket.gethostname", MagicMock(return_value=long_name)): +## # An exception is raised if unicode is passed to socket.getfqdn +## minion_id = network.generate_minion_id() +## assert minion_id != "", minion_id +## +## +## def test_filter_by_networks_with_no_filter(): +## ips = ["10.0.123.200", "10.10.10.10"] +## with pytest.raises(TypeError): +## network.filter_by_networks(ips) # pylint: disable=no-value-for-parameter +## +## +## def test_filter_by_networks_empty_filter(): +## ips = ["10.0.123.200", "10.10.10.10"] +## assert network.filter_by_networks(ips, []) == [] +## +## +## def test_filter_by_networks_ips_list(): +## ips = [ +## "10.0.123.200", +## "10.10.10.10", +## "193.124.233.5", +## "fe80::d210:cf3f:64e7:5423", +## ] +## networks = ["10.0.0.0/8", "fe80::/64"] +## assert network.filter_by_networks(ips, networks) == [ +## "10.0.123.200", +## "10.10.10.10", +## "fe80::d210:cf3f:64e7:5423", +## ] +## +## +## def test_filter_by_networks_interfaces_dict(): +## interfaces = { +## "wlan0": ["192.168.1.100", "217.5.140.67", "2001:db8::ff00:42:8329"], +## "eth0": [ +## "2001:0DB8:0:CD30:123:4567:89AB:CDEF", +## "192.168.1.101", +## "10.0.123.201", +## ], +## } +## assert network.filter_by_networks( +## interfaces, ["192.168.1.0/24", "2001:db8::/48"] +## ) == { +## "wlan0": ["192.168.1.100", "2001:db8::ff00:42:8329"], +## "eth0": ["2001:0DB8:0:CD30:123:4567:89AB:CDEF", "192.168.1.101"], +## } +## +## +## def test_filter_by_networks_catch_all(): +## ips = [ +## "10.0.123.200", +## "10.10.10.10", +## "193.124.233.5", +## "fe80::d210:cf3f:64e7:5423", +## ] +## assert ips == network.filter_by_networks(ips, 
["0.0.0.0/0", "::/0"]) +## +## +## def test_ip_networks(): +## # We don't need to test with each platform's ifconfig/iproute2 output, +## # since this test isn't testing getting the interfaces. We already have +## # tests for that. +## interface_data = network._interfaces_ifconfig(LINUX) +## +## # Without loopback +## ret = network.ip_networks(interface_data=interface_data) +## assert ret == ["10.10.8.0/22"], ret +## # Without loopback, specific interface +## ret = network.ip_networks(interface="eth0", interface_data=interface_data) +## assert ret == ["10.10.8.0/22"], ret +## # Without loopback, multiple specific interfaces +## ret = network.ip_networks(interface="eth0,lo", interface_data=interface_data) +## assert ret == ["10.10.8.0/22"], ret +## # Without loopback, specific interface (not present) +## ret = network.ip_networks(interface="eth1", interface_data=interface_data) +## assert ret == [], ret +## # With loopback +## ret = network.ip_networks(include_loopback=True, interface_data=interface_data) +## assert ret == ["10.10.8.0/22", "127.0.0.0/8"], ret +## # With loopback, specific interface +## ret = network.ip_networks( +## interface="eth0", include_loopback=True, interface_data=interface_data +## ) +## assert ret == ["10.10.8.0/22"], ret +## # With loopback, multiple specific interfaces +## ret = network.ip_networks( +## interface="eth0,lo", include_loopback=True, interface_data=interface_data +## ) +## assert ret == ["10.10.8.0/22", "127.0.0.0/8"], ret +## # With loopback, specific interface (not present) +## ret = network.ip_networks( +## interface="eth1", include_loopback=True, interface_data=interface_data +## ) +## assert ret == [], ret +## +## # Verbose, without loopback +## ret = network.ip_networks(verbose=True, interface_data=interface_data) +## assert ret == { +## "10.10.8.0/22": { +## "prefixlen": 22, +## "netmask": "255.255.252.0", +## "num_addresses": 1024, +## "address": "10.10.8.0", +## }, +## }, ret +## # Verbose, without loopback, specific 
interface +## ret = network.ip_networks( +## interface="eth0", verbose=True, interface_data=interface_data +## ) +## assert ret == { +## "10.10.8.0/22": { +## "prefixlen": 22, +## "netmask": "255.255.252.0", +## "num_addresses": 1024, +## "address": "10.10.8.0", +## }, +## }, ret +## # Verbose, without loopback, multiple specific interfaces +## ret = network.ip_networks( +## interface="eth0,lo", verbose=True, interface_data=interface_data +## ) +## assert ret == { +## "10.10.8.0/22": { +## "prefixlen": 22, +## "netmask": "255.255.252.0", +## "num_addresses": 1024, +## "address": "10.10.8.0", +## }, +## }, ret +## # Verbose, without loopback, specific interface (not present) +## ret = network.ip_networks( +## interface="eth1", verbose=True, interface_data=interface_data +## ) +## assert ret == {}, ret +## # Verbose, with loopback +## ret = network.ip_networks( +## include_loopback=True, verbose=True, interface_data=interface_data +## ) +## assert ret == { +## "10.10.8.0/22": { +## "prefixlen": 22, +## "netmask": "255.255.252.0", +## "num_addresses": 1024, +## "address": "10.10.8.0", +## }, +## "127.0.0.0/8": { +## "prefixlen": 8, +## "netmask": "255.0.0.0", +## "num_addresses": 16777216, +## "address": "127.0.0.0", +## }, +## }, ret +## # Verbose, with loopback, specific interface +## ret = network.ip_networks( +## interface="eth0", +## include_loopback=True, +## verbose=True, +## interface_data=interface_data, +## ) +## assert ret == { +## "10.10.8.0/22": { +## "prefixlen": 22, +## "netmask": "255.255.252.0", +## "num_addresses": 1024, +## "address": "10.10.8.0", +## }, +## }, ret +## # Verbose, with loopback, multiple specific interfaces +## ret = network.ip_networks( +## interface="eth0,lo", +## include_loopback=True, +## verbose=True, +## interface_data=interface_data, +## ) +## assert ret == { +## "10.10.8.0/22": { +## "prefixlen": 22, +## "netmask": "255.255.252.0", +## "num_addresses": 1024, +## "address": "10.10.8.0", +## }, +## "127.0.0.0/8": { +## 
"prefixlen": 8, +## "netmask": "255.0.0.0", +## "num_addresses": 16777216, +## "address": "127.0.0.0", +## }, +## }, ret +## # Verbose, with loopback, specific interface (not present) +## ret = network.ip_networks( +## interface="eth1", +## include_loopback=True, +## verbose=True, +## interface_data=interface_data, +## ) +## assert ret == {}, ret +## +## +## def test_ip_networks6(): +## # We don't need to test with each platform's ifconfig/iproute2 output, +## # since this test isn't testing getting the interfaces. We already have +## # tests for that. +## interface_data = network._interfaces_ifconfig(LINUX) +## +## # Without loopback +## ret = network.ip_networks6(interface_data=interface_data) +## assert ret == ["fe80::/64"], ret +## # Without loopback, specific interface +## ret = network.ip_networks6(interface="eth0", interface_data=interface_data) +## assert ret == ["fe80::/64"], ret +## # Without loopback, multiple specific interfaces +## ret = network.ip_networks6(interface="eth0,lo", interface_data=interface_data) +## assert ret == ["fe80::/64"], ret +## # Without loopback, specific interface (not present) +## ret = network.ip_networks6(interface="eth1", interface_data=interface_data) +## assert ret == [], ret +## # With loopback +## ret = network.ip_networks6(include_loopback=True, interface_data=interface_data) +## assert ret == ["::1/128", "fe80::/64"], ret +## # With loopback, specific interface +## ret = network.ip_networks6( +## interface="eth0", include_loopback=True, interface_data=interface_data +## ) +## assert ret == ["fe80::/64"], ret +## # With loopback, multiple specific interfaces +## ret = network.ip_networks6( +## interface="eth0,lo", include_loopback=True, interface_data=interface_data +## ) +## assert ret == ["::1/128", "fe80::/64"], ret +## # With loopback, specific interface (not present) +## ret = network.ip_networks6( +## interface="eth1", include_loopback=True, interface_data=interface_data +## ) +## assert ret == [], ret +## +## # 
Verbose, without loopback +## ret = network.ip_networks6(verbose=True, interface_data=interface_data) +## assert ret == { +## "fe80::/64": { +## "prefixlen": 64, +## "netmask": "ffff:ffff:ffff:ffff::", +## "num_addresses": 18446744073709551616, +## "address": "fe80::", +## }, +## }, ret +## # Verbose, without loopback, specific interface +## ret = network.ip_networks6( +## interface="eth0", verbose=True, interface_data=interface_data +## ) +## assert ret == { +## "fe80::/64": { +## "prefixlen": 64, +## "netmask": "ffff:ffff:ffff:ffff::", +## "num_addresses": 18446744073709551616, +## "address": "fe80::", +## }, +## }, ret +## # Verbose, without loopback, multiple specific interfaces +## ret = network.ip_networks6( +## interface="eth0,lo", verbose=True, interface_data=interface_data +## ) +## assert ret == { +## "fe80::/64": { +## "prefixlen": 64, +## "netmask": "ffff:ffff:ffff:ffff::", +## "num_addresses": 18446744073709551616, +## "address": "fe80::", +## }, +## }, ret +## # Verbose, without loopback, specific interface (not present) +## ret = network.ip_networks6( +## interface="eth1", verbose=True, interface_data=interface_data +## ) +## assert ret == {}, ret +## # Verbose, with loopback +## ret = network.ip_networks6( +## include_loopback=True, verbose=True, interface_data=interface_data +## ) +## assert ret == { +## "fe80::/64": { +## "prefixlen": 64, +## "netmask": "ffff:ffff:ffff:ffff::", +## "num_addresses": 18446744073709551616, +## "address": "fe80::", +## }, +## "::1/128": { +## "prefixlen": 128, +## "netmask": "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", +## "num_addresses": 1, +## "address": "::1", +## }, +## }, ret +## # Verbose, with loopback, specific interface +## ret = network.ip_networks6( +## interface="eth0", +## include_loopback=True, +## verbose=True, +## interface_data=interface_data, +## ) +## assert ret == { +## "fe80::/64": { +## "prefixlen": 64, +## "netmask": "ffff:ffff:ffff:ffff::", +## "num_addresses": 18446744073709551616, +## 
"address": "fe80::", +## }, +## }, ret +## # Verbose, with loopback, multiple specific interfaces +## ret = network.ip_networks6( +## interface="eth0,lo", +## include_loopback=True, +## verbose=True, +## interface_data=interface_data, +## ) +## assert ret == { +## "fe80::/64": { +## "prefixlen": 64, +## "netmask": "ffff:ffff:ffff:ffff::", +## "num_addresses": 18446744073709551616, +## "address": "fe80::", +## }, +## "::1/128": { +## "prefixlen": 128, +## "netmask": "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", +## "num_addresses": 1, +## "address": "::1", +## }, +## }, ret +## # Verbose, with loopback, specific interface (not present) +## ret = network.ip_networks6( +## interface="eth1", +## include_loopback=True, +## verbose=True, +## interface_data=interface_data, +## ) +## assert ret == {}, ret +## +## +## def test_get_fqhostname_return(): +## """ +## Test if proper hostname is used when RevDNS differ from hostname +## +## :return: +## """ +## with patch("socket.gethostname", MagicMock(return_value="hostname")), patch( +## "socket.getfqdn", +## MagicMock(return_value="very.long.and.complex.domain.name"), +## ), patch( +## "socket.getaddrinfo", +## MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]), +## ): +## assert network.get_fqhostname() == "hostname" +## +## +## def test_get_fqhostname_return_empty_hostname(): +## """ +## Test if proper hostname is used when hostname returns empty string +## """ +## host = "hostname" +## with patch("socket.gethostname", MagicMock(return_value=host)), patch( +## "socket.getfqdn", +## MagicMock(return_value="very.long.and.complex.domain.name"), +## ), patch( +## "socket.getaddrinfo", +## MagicMock( +## return_value=[ +## (2, 3, 0, host, ("127.0.1.1", 0)), +## (2, 3, 0, "", ("127.0.1.1", 0)), +## ] +## ), +## ): +## assert network.get_fqhostname() == host +## +## +## def test_ip_bracket(): +## test_ipv4 = "127.0.0.1" +## test_ipv6 = "::1" +## test_ipv6_uri = "[::1]" +## assert test_ipv4 == 
network.ip_bracket(test_ipv4) +## assert test_ipv6 == network.ip_bracket(test_ipv6_uri, strip=True) +## assert "[{}]".format(test_ipv6) == network.ip_bracket(test_ipv6) +## assert "[{}]".format(test_ipv6) == network.ip_bracket(test_ipv6_uri) +## +## ip_addr_obj = ipaddress.ip_address(test_ipv4) +## assert test_ipv4 == network.ip_bracket(ip_addr_obj) +## +## +## def test_junos_ifconfig_output_parsing(): +## ret = network._junos_interfaces_ifconfig("inet mtu 0 local=" + " " * 3456) +## assert ret == {"inet": {"up": False}} diff --git a/tests/unit/utils/test_network.py b/tests/unit/utils/test_network.py deleted file mode 100644 index f7d39729300..00000000000 --- a/tests/unit/utils/test_network.py +++ /dev/null @@ -1,1313 +0,0 @@ -import logging -import socket -import textwrap -import time - -import pytest - -import salt.exceptions -import salt.utils.network as network -from salt._compat import ipaddress -from tests.support.mock import MagicMock, create_autospec, mock_open, patch -from tests.support.unit import TestCase - -log = logging.getLogger(__name__) - -LINUX = """\ -eth0 Link encap:Ethernet HWaddr e0:3f:49:85:6a:af - inet addr:10.10.10.56 Bcast:10.10.10.255 Mask:255.255.252.0 - inet6 addr: fe80::e23f:49ff:fe85:6aaf/64 Scope:Link - UP BROADCAST RUNNING MULTICAST MTU:1500 Metric:1 - RX packets:643363 errors:0 dropped:0 overruns:0 frame:0 - TX packets:196539 errors:0 dropped:0 overruns:0 carrier:0 - collisions:0 txqueuelen:1000 - RX bytes:386388355 (368.4 MiB) TX bytes:25600939 (24.4 MiB) - -lo Link encap:Local Loopback - inet addr:127.0.0.1 Mask:255.0.0.0 - inet6 addr: ::1/128 Scope:Host - UP LOOPBACK RUNNING MTU:65536 Metric:1 - RX packets:548901 errors:0 dropped:0 overruns:0 frame:0 - TX packets:548901 errors:0 dropped:0 overruns:0 carrier:0 - collisions:0 txqueuelen:0 - RX bytes:613479895 (585.0 MiB) TX bytes:613479895 (585.0 MiB) -""" - -FREEBSD = """ -em0: flags=8843 metric 0 mtu 1500 - options=4219b - ether 00:30:48:ff:ff:ff - inet 10.10.10.250 netmask 
0xffffffe0 broadcast 10.10.10.255 - inet 10.10.10.56 netmask 0xffffffc0 broadcast 10.10.10.63 - media: Ethernet autoselect (1000baseT ) - status: active -em1: flags=8c02 metric 0 mtu 1500 - options=4219b - ether 00:30:48:aa:aa:aa - media: Ethernet autoselect - status: no carrier -plip0: flags=8810 metric 0 mtu 1500 -lo0: flags=8049 metric 0 mtu 16384 - options=3 - inet6 fe80::1%lo0 prefixlen 64 scopeid 0x8 - inet6 ::1 prefixlen 128 - inet 127.0.0.1 netmask 0xff000000 - nd6 options=3 -tun0: flags=8051 metric 0 mtu 1500 - options=80000 - inet 10.12.0.1 --> 10.12.0.2 netmask 0xffffffff - Opened by PID 1964 -""" - -SOLARIS = """\ -lo0: flags=2001000849 mtu 8232 index 1 - inet 127.0.0.1 netmask ff000000 -net0: flags=100001100943 mtu 1500 index 2 - inet 10.10.10.38 netmask ffffffe0 broadcast 10.10.10.63 -ilbint0: flags=110001100843 mtu 1500 index 3 - inet 10.6.0.11 netmask ffffff00 broadcast 10.6.0.255 -ilbext0: flags=110001100843 mtu 1500 index 4 - inet 10.10.11.11 netmask ffffffe0 broadcast 10.10.11.31 -ilbext0:1: flags=110001100843 mtu 1500 index 4 - inet 10.10.11.12 netmask ffffffe0 broadcast 10.10.11.31 -vpn0: flags=1000011008d1 mtu 1480 index 5 - inet tunnel src 10.10.11.12 tunnel dst 10.10.5.5 - tunnel hop limit 64 - inet 10.6.0.14 --> 10.6.0.15 netmask ff000000 -lo0: flags=2002000849 mtu 8252 index 1 - inet6 ::1/128 -net0: flags=120002004941 mtu 1500 index 2 - inet6 fe80::221:9bff:fefd:2a22/10 -ilbint0: flags=120002000840 mtu 1500 index 3 - inet6 ::/0 -ilbext0: flags=120002000840 mtu 1500 index 4 - inet6 ::/0 -vpn0: flags=120002200850 mtu 1480 index 5 - inet tunnel src 10.10.11.12 tunnel dst 10.10.5.5 - tunnel hop limit 64 - inet6 ::/0 --> fe80::b2d6:7c10 -""" - -NETBSD = """\ -vioif0: flags=0x8943 mtu 1500 - ec_capabilities=1 - ec_enabled=0 - address: 00:a0:98:e6:83:18 - inet 192.168.1.80/24 broadcast 192.168.1.255 flags 0x0 - inet6 fe80::2a0:98ff:fee6:8318%vioif0/64 flags 0x0 scopeid 0x1 -lo0: flags=0x8049 mtu 33624 - inet 127.0.0.1/8 flags 0x0 - inet6 ::1/128 
flags 0x20 - inet6 fe80::1%lo0/64 flags 0x0 scopeid 0x2 -""" - -FREEBSD_SOCKSTAT = """\ -USER COMMAND PID FD PROTO LOCAL ADDRESS FOREIGN ADDRESS -root python2.7 1294 41 tcp4 127.0.0.1:61115 127.0.0.1:4506 -""" - -FREEBSD_SOCKSTAT_WITH_FAT_PID = """\ -USER COMMAND PID FD PROTO LOCAL ADDRESS FOREIGN ADDRESS -salt-master python2.781106 35 tcp4 127.0.0.1:61115 127.0.0.1:4506 -""" - -OPENBSD_NETSTAT = """\ -Active Internet connections -Proto Recv-Q Send-Q Local Address Foreign Address (state) -tcp 0 0 127.0.0.1.61115 127.0.0.1.4506 ESTABLISHED -""" - -LINUX_NETLINK_SS_OUTPUT = """\ -State Recv-Q Send-Q Local Address:Port Peer Address:Port -TIME-WAIT 0 0 [::1]:8009 [::1]:40368 -LISTEN 0 128 127.0.0.1:5903 0.0.0.0:* -ESTAB 0 0 [::ffff:127.0.0.1]:4506 [::ffff:127.0.0.1]:32315 -ESTAB 0 0 192.168.122.1:4506 192.168.122.177:24545 -ESTAB 0 0 127.0.0.1:56726 127.0.0.1:4505 -ESTAB 0 0 ::ffff:1.2.3.4:5678 ::ffff:1.2.3.4:4505 -""" - -IPV4_SUBNETS = { - True: ("10.10.0.0/24",), - False: ("10.10.0.0", "10.10.0.0/33", "FOO", 9, "0.9.800.1000/24"), -} -IPV6_SUBNETS = { - True: ("::1/128",), - False: ("::1", "::1/129", "FOO", 9, "aj01::feac/64"), -} - - -class NetworkTestCase(TestCase): - def test_sanitize_host_ip(self): - ret = network.sanitize_host("10.1./2.$3") - self.assertEqual(ret, "10.1.2.3") - - def test_sanitize_host_name(self): - """ - Should not remove the underscore - """ - ret = network.sanitize_host("foo_bar") - self.assertEqual(ret, "foo_bar") - - def test_host_to_ips(self): - """ - NOTE: When this test fails it's usually because the IP address has - changed. In these cases, we just need to update the IP address in the - assertion. 
- """ - - def _side_effect(host, *args): - try: - return { - "github.com": [ - (2, 1, 6, "", ("192.30.255.112", 0)), - (2, 1, 6, "", ("192.30.255.113", 0)), - ], - "ipv6host.foo": [ - (socket.AF_INET6, 1, 6, "", ("2001:a71::1", 0, 0, 0)), - ], - }[host] - except KeyError: - raise socket.gaierror(-2, "Name or service not known") - - getaddrinfo_mock = MagicMock(side_effect=_side_effect) - with patch.object(socket, "getaddrinfo", getaddrinfo_mock): - # Test host that can be resolved - ret = network.host_to_ips("github.com") - self.assertEqual(ret, ["192.30.255.112", "192.30.255.113"]) - # Test ipv6 - ret = network.host_to_ips("ipv6host.foo") - self.assertEqual(ret, ["2001:a71::1"]) - # Test host that can't be resolved - ret = network.host_to_ips("someothersite.com") - self.assertEqual(ret, None) - - def test_generate_minion_id(self): - self.assertTrue(network.generate_minion_id()) - - def test__generate_minion_id_with_unicode_in_etc_hosts(self): - """ - Test that unicode in /etc/hosts doesn't raise an error when - _generate_minion_id() helper is called to gather the hosts. 
- """ - content = textwrap.dedent( - """\ - # 以下为主机名解析 - ## ccc - 127.0.0.1 localhost thisismyhostname # 本机 - """ - ) - fopen_mock = mock_open(read_data={"/etc/hosts": content}) - with patch("salt.utils.files.fopen", fopen_mock): - assert "thisismyhostname" in network._generate_minion_id() - - def test_is_ip(self): - self.assertTrue(network.is_ip("10.10.0.3")) - self.assertFalse(network.is_ip("0.9.800.1000")) - # Check 16-char-long unicode string - # https://github.com/saltstack/salt/issues/51258 - self.assertFalse(network.is_ipv6("sixteen-char-str")) - - def test_is_ipv4(self): - self.assertTrue(network.is_ipv4("10.10.0.3")) - self.assertFalse(network.is_ipv4("10.100.1")) - self.assertFalse(network.is_ipv4("2001:db8:0:1:1:1:1:1")) - # Check 16-char-long unicode string - # https://github.com/saltstack/salt/issues/51258 - self.assertFalse(network.is_ipv4("sixteen-char-str")) - - def test_is_ipv6(self): - self.assertTrue(network.is_ipv6("2001:db8:0:1:1:1:1:1")) - self.assertTrue(network.is_ipv6("0:0:0:0:0:0:0:1")) - self.assertTrue(network.is_ipv6("::1")) - self.assertTrue(network.is_ipv6("::")) - self.assertTrue(network.is_ipv6("2001:0db8:85a3:0000:0000:8a2e:0370:7334")) - self.assertTrue(network.is_ipv6("2001:0db8:85a3::8a2e:0370:7334")) - self.assertFalse(network.is_ipv6("2001:0db8:0370:7334")) - self.assertFalse(network.is_ipv6("2001:0db8:::0370:7334")) - self.assertFalse(network.is_ipv6("10.0.1.2")) - self.assertFalse(network.is_ipv6("2001.0db8.85a3.0000.0000.8a2e.0370.7334")) - # Check 16-char-long unicode string - # https://github.com/saltstack/salt/issues/51258 - self.assertFalse(network.is_ipv6("sixteen-char-str")) - - def test_ipv6(self): - self.assertTrue(network.ipv6("2001:db8:0:1:1:1:1:1")) - self.assertTrue(network.ipv6("0:0:0:0:0:0:0:1")) - self.assertTrue(network.ipv6("::1")) - self.assertTrue(network.ipv6("::")) - self.assertTrue(network.ipv6("2001:0db8:85a3:0000:0000:8a2e:0370:7334")) - self.assertTrue(network.ipv6("2001:0db8:85a3::8a2e:0370:7334")) 
- self.assertTrue(network.ipv6("2001:67c:2e8::/48")) - - def test_is_loopback(self): - self.assertTrue(network.is_loopback("127.0.1.1")) - self.assertTrue(network.is_loopback("::1")) - self.assertFalse(network.is_loopback("10.0.1.2")) - self.assertFalse(network.is_loopback("2001:db8:0:1:1:1:1:1")) - # Check 16-char-long unicode string - # https://github.com/saltstack/salt/issues/51258 - self.assertFalse(network.is_ipv6("sixteen-char-str")) - - def test_parse_host_port(self): - _ip = ipaddress.ip_address - good_host_ports = { - "10.10.0.3": (_ip("10.10.0.3").compressed, None), - "10.10.0.3:1234": (_ip("10.10.0.3").compressed, 1234), - "2001:0db8:85a3::8a2e:0370:7334": ( - _ip("2001:0db8:85a3::8a2e:0370:7334").compressed, - None, - ), - "[2001:0db8:85a3::8a2e:0370:7334]:1234": ( - _ip("2001:0db8:85a3::8a2e:0370:7334").compressed, - 1234, - ), - "2001:0db8:85a3::7334": (_ip("2001:0db8:85a3::7334").compressed, None), - "[2001:0db8:85a3::7334]:1234": ( - _ip("2001:0db8:85a3::7334").compressed, - 1234, - ), - } - bad_host_ports = [ - "10.10.0.3/24", - "10.10.0.3::1234", - "2001:0db8:0370:7334", - "2001:0db8:0370::7334]:1234", - "2001:0db8:0370:0:a:b:c:d:1234", - "host name", - "host name:1234", - "10.10.0.3:abcd", - ] - for host_port, assertion_value in good_host_ports.items(): - host = port = None - host, port = network.parse_host_port(host_port) - self.assertEqual((host, port), assertion_value) - - for host_port in bad_host_ports: - try: - self.assertRaises(ValueError, network.parse_host_port, host_port) - except AssertionError as _e_: - log.error( - 'bad host_port value: "%s" failed to trigger ValueError exception', - host_port, - ) - raise _e_ - - def test_dns_check(self): - hosts = [ - { - "host": "10.10.0.3", - "port": "", - "mocked": [(2, 1, 6, "", ("10.10.0.3", 0))], - "ret": "10.10.0.3", - }, - { - "host": "10.10.0.3", - "port": "1234", - "mocked": [(2, 1, 6, "", ("10.10.0.3", 0))], - "ret": "10.10.0.3", - }, - { - "host": "2001:0db8:85a3::8a2e:0370:7334", - 
"port": "", - "mocked": [(10, 1, 6, "", ("2001:db8:85a3::8a2e:370:7334", 0, 0, 0))], - "ret": "[2001:db8:85a3::8a2e:370:7334]", - }, - { - "host": "2001:0db8:85a3::8a2e:370:7334", - "port": "1234", - "mocked": [(10, 1, 6, "", ("2001:db8:85a3::8a2e:370:7334", 0, 0, 0))], - "ret": "[2001:db8:85a3::8a2e:370:7334]", - }, - { - "host": "salt-master", - "port": "1234", - "mocked": [(2, 1, 6, "", ("127.0.0.1", 0))], - "ret": "127.0.0.1", - }, - ] - for host in hosts: - with patch.object( - socket, - "getaddrinfo", - create_autospec(socket.getaddrinfo, return_value=host["mocked"]), - ): - with patch("socket.socket", create_autospec(socket.socket)): - ret = network.dns_check(host["host"], host["port"]) - self.assertEqual(ret, host["ret"]) - - def test_dns_check_ipv6_filter(self): - # raise exception to skip everything after the getaddrinfo call - with patch.object( - socket, - "getaddrinfo", - create_autospec(socket.getaddrinfo, side_effect=Exception), - ) as getaddrinfo: - for ipv6, param in [ - (None, socket.AF_UNSPEC), - (True, socket.AF_INET6), - (False, socket.AF_INET), - ]: - with self.assertRaises(Exception): - network.dns_check("foo", "1", ipv6=ipv6) - getaddrinfo.assert_called_with("foo", "1", param, socket.SOCK_STREAM) - - def test_dns_check_errors(self): - with patch.object( - socket, "getaddrinfo", create_autospec(socket.getaddrinfo, return_value=[]) - ): - with self.assertRaisesRegex( - salt.exceptions.SaltSystemExit, - "DNS lookup or connection check of 'foo' failed", - ): - network.dns_check("foo", "1") - - with patch.object( - socket, - "getaddrinfo", - create_autospec(socket.getaddrinfo, side_effect=TypeError), - ): - with self.assertRaisesRegex( - salt.exceptions.SaltSystemExit, "Invalid or unresolveable address" - ): - network.dns_check("foo", "1") - - def test_test_addrs(self): - # subset of real data from getaddrinfo against saltstack.com - addrinfo = [ - (30, 2, 17, "", ("2600:9000:21eb:a800:8:1031:abc0:93a1", 0, 0, 0)), - (30, 1, 6, "", 
("2600:9000:21eb:a800:8:1031:abc0:93a1", 0, 0, 0)), - (30, 2, 17, "", ("2600:9000:21eb:b400:8:1031:abc0:93a1", 0, 0, 0)), - (30, 1, 6, "", ("2600:9000:21eb:b400:8:1031:abc0:93a1", 0, 0, 0)), - (2, 1, 6, "", ("13.35.99.52", 0)), - (2, 2, 17, "", ("13.35.99.85", 0)), - (2, 1, 6, "", ("13.35.99.85", 0)), - (2, 2, 17, "", ("13.35.99.122", 0)), - ] - with patch("socket.socket", create_autospec(socket.socket)) as s: - # we connect to the first address - addrs = network._test_addrs(addrinfo, 80) - self.assertTrue(len(addrs) == 1) - self.assertTrue(addrs[0] == addrinfo[0][4][0]) - - # the first lookup fails, succeeds on next check - s.side_effect = [socket.error, MagicMock()] - addrs = network._test_addrs(addrinfo, 80) - self.assertTrue(len(addrs) == 1) - self.assertTrue(addrs[0] == addrinfo[2][4][0]) - - # attempt to connect to resolved address with default timeout - s.side_effect = socket.error - addrs = network._test_addrs(addrinfo, 80) - time.sleep(2) - self.assertFalse(len(addrs) == 0) - - # nothing can connect, but we've eliminated duplicates - s.side_effect = socket.error - addrs = network._test_addrs(addrinfo, 80) - self.assertTrue(len(addrs) == 5) - - def test_is_subnet(self): - for subnet_data in (IPV4_SUBNETS, IPV6_SUBNETS): - for item in subnet_data[True]: - log.debug("Testing that %s is a valid subnet", item) - self.assertTrue(network.is_subnet(item)) - for item in subnet_data[False]: - log.debug("Testing that %s is not a valid subnet", item) - self.assertFalse(network.is_subnet(item)) - - def test_is_ipv4_subnet(self): - for item in IPV4_SUBNETS[True]: - log.debug("Testing that %s is a valid subnet", item) - self.assertTrue(network.is_ipv4_subnet(item)) - for item in IPV4_SUBNETS[False]: - log.debug("Testing that %s is not a valid subnet", item) - self.assertFalse(network.is_ipv4_subnet(item)) - - def test_is_ipv6_subnet(self): - for item in IPV6_SUBNETS[True]: - log.debug("Testing that %s is a valid subnet", item) - 
self.assertTrue(network.is_ipv6_subnet(item)) - for item in IPV6_SUBNETS[False]: - log.debug("Testing that %s is not a valid subnet", item) - self.assertFalse(network.is_ipv6_subnet(item)) - - def test_cidr_to_ipv4_netmask(self): - self.assertEqual(network.cidr_to_ipv4_netmask(24), "255.255.255.0") - self.assertEqual(network.cidr_to_ipv4_netmask(21), "255.255.248.0") - self.assertEqual(network.cidr_to_ipv4_netmask(17), "255.255.128.0") - self.assertEqual(network.cidr_to_ipv4_netmask(9), "255.128.0.0") - self.assertEqual(network.cidr_to_ipv4_netmask(36), "") - self.assertEqual(network.cidr_to_ipv4_netmask("lol"), "") - - def test_number_of_set_bits_to_ipv4_netmask(self): - set_bits_to_netmask = network._number_of_set_bits_to_ipv4_netmask(0xFFFFFF00) - self.assertEqual(set_bits_to_netmask, "255.255.255.0") - set_bits_to_netmask = network._number_of_set_bits_to_ipv4_netmask(0xFFFF6400) - - def test_hex2ip(self): - self.assertEqual(network.hex2ip("0x4A7D2B63"), "74.125.43.99") - self.assertEqual(network.hex2ip("0x4A7D2B63", invert=True), "99.43.125.74") - self.assertEqual( - network.hex2ip("00000000000000000000FFFF7F000001"), "127.0.0.1" - ) - self.assertEqual( - network.hex2ip("0000000000000000FFFF00000100007F", invert=True), "127.0.0.1" - ) - self.assertEqual( - network.hex2ip("20010DB8000000000000000000000000"), "2001:db8::" - ) - self.assertEqual( - network.hex2ip("B80D0120000000000000000000000000", invert=True), - "2001:db8::", - ) - - def test_interfaces_ifconfig_linux(self): - interfaces = network._interfaces_ifconfig(LINUX) - self.assertEqual( - interfaces, - { - "eth0": { - "hwaddr": "e0:3f:49:85:6a:af", - "inet": [ - { - "address": "10.10.10.56", - "broadcast": "10.10.10.255", - "netmask": "255.255.252.0", - } - ], - "inet6": [ - { - "address": "fe80::e23f:49ff:fe85:6aaf", - "prefixlen": "64", - "scope": "link", - } - ], - "up": True, - }, - "lo": { - "inet": [{"address": "127.0.0.1", "netmask": "255.0.0.0"}], - "inet6": [{"address": "::1", "prefixlen": 
"128", "scope": "host"}], - "up": True, - }, - }, - ) - - def test_interfaces_ifconfig_freebsd(self): - interfaces = network._interfaces_ifconfig(FREEBSD) - self.assertEqual( - interfaces, - { - "": {"up": False}, - "em0": { - "hwaddr": "00:30:48:ff:ff:ff", - "inet": [ - { - "address": "10.10.10.250", - "broadcast": "10.10.10.255", - "netmask": "255.255.255.224", - }, - { - "address": "10.10.10.56", - "broadcast": "10.10.10.63", - "netmask": "255.255.255.192", - }, - ], - "up": True, - }, - "em1": {"hwaddr": "00:30:48:aa:aa:aa", "up": False}, - "lo0": { - "inet": [{"address": "127.0.0.1", "netmask": "255.0.0.0"}], - "inet6": [ - {"address": "fe80::1", "prefixlen": "64", "scope": "0x8"}, - {"address": "::1", "prefixlen": "128", "scope": None}, - ], - "up": True, - }, - "plip0": {"up": False}, - "tun0": { - "inet": [{"address": "10.12.0.1", "netmask": "255.255.255.255"}], - "up": True, - }, - }, - ) - - def test_interfaces_ifconfig_solaris(self): - with patch("salt.utils.platform.is_sunos", lambda: True): - interfaces = network._interfaces_ifconfig(SOLARIS) - expected_interfaces = { - "ilbint0": { - "inet6": [], - "inet": [ - { - "broadcast": "10.6.0.255", - "netmask": "255.255.255.0", - "address": "10.6.0.11", - } - ], - "up": True, - }, - "lo0": { - "inet6": [{"prefixlen": "128", "address": "::1"}], - "inet": [{"netmask": "255.0.0.0", "address": "127.0.0.1"}], - "up": True, - }, - "ilbext0": { - "inet6": [], - "inet": [ - { - "broadcast": "10.10.11.31", - "netmask": "255.255.255.224", - "address": "10.10.11.11", - }, - { - "broadcast": "10.10.11.31", - "netmask": "255.255.255.224", - "address": "10.10.11.12", - }, - ], - "up": True, - }, - "vpn0": { - "inet6": [], - "inet": [{"netmask": "255.0.0.0", "address": "10.6.0.14"}], - "up": True, - }, - "net0": { - "inet6": [ - {"prefixlen": "10", "address": "fe80::221:9bff:fefd:2a22"} - ], - "inet": [ - { - "broadcast": "10.10.10.63", - "netmask": "255.255.255.224", - "address": "10.10.10.38", - } - ], - "up": True, - }, 
- } - self.assertEqual(interfaces, expected_interfaces) - - def test_interfaces_ifconfig_netbsd(self): - interfaces = network._netbsd_interfaces_ifconfig(NETBSD) - self.assertEqual( - interfaces, - { - "lo0": { - "inet": [{"address": "127.0.0.1", "netmask": "255.0.0.0"}], - "inet6": [ - {"address": "fe80::1", "prefixlen": "64", "scope": "lo0"} - ], - "up": True, - }, - "vioif0": { - "hwaddr": "00:a0:98:e6:83:18", - "inet": [ - { - "address": "192.168.1.80", - "broadcast": "192.168.1.255", - "netmask": "255.255.255.0", - } - ], - "inet6": [ - { - "address": "fe80::2a0:98ff:fee6:8318", - "prefixlen": "64", - "scope": "vioif0", - } - ], - "up": True, - }, - }, - ) - - def test_freebsd_remotes_on(self): - with patch("salt.utils.platform.is_sunos", lambda: False): - with patch("salt.utils.platform.is_freebsd", lambda: True): - with patch("subprocess.check_output", return_value=FREEBSD_SOCKSTAT): - remotes = network._freebsd_remotes_on("4506", "remote") - self.assertEqual(remotes, {"127.0.0.1"}) - - def test_freebsd_remotes_on_with_fat_pid(self): - with patch("salt.utils.platform.is_sunos", lambda: False): - with patch("salt.utils.platform.is_freebsd", lambda: True): - with patch( - "subprocess.check_output", - return_value=FREEBSD_SOCKSTAT_WITH_FAT_PID, - ): - remotes = network._freebsd_remotes_on("4506", "remote") - self.assertEqual(remotes, {"127.0.0.1"}) - - def test_netlink_tool_remote_on_a(self): - with patch("salt.utils.platform.is_sunos", lambda: False): - with patch("salt.utils.platform.is_linux", lambda: True): - with patch( - "subprocess.check_output", return_value=LINUX_NETLINK_SS_OUTPUT - ): - remotes = network._netlink_tool_remote_on("4506", "local_port") - self.assertEqual(remotes, {"192.168.122.177", "::ffff:127.0.0.1"}) - - def test_netlink_tool_remote_on_b(self): - with patch("subprocess.check_output", return_value=LINUX_NETLINK_SS_OUTPUT): - remotes = network._netlink_tool_remote_on("4505", "remote_port") - self.assertEqual(remotes, {"127.0.0.1", 
"::ffff:1.2.3.4"}) - - def test_openbsd_remotes_on(self): - with patch("subprocess.check_output", return_value=OPENBSD_NETSTAT): - remotes = network._openbsd_remotes_on("4506", "remote") - self.assertEqual(remotes, {"127.0.0.1"}) - - def test_openbsd_remotes_on_issue_61966(self): - """ - Test that the command output is correctly converted to string before - treating it as such - """ - with patch("subprocess.check_output", return_value=OPENBSD_NETSTAT.encode()): - remotes = network._openbsd_remotes_on("4506", "remote") - self.assertEqual(remotes, {"127.0.0.1"}) - - def test_generate_minion_id_distinct(self): - """ - Test if minion IDs are distinct in the pool. - - :return: - """ - with patch("platform.node", MagicMock(return_value="nodename")), patch( - "socket.gethostname", MagicMock(return_value="hostname") - ), patch( - "socket.getfqdn", MagicMock(return_value="hostname.domainname.blank") - ), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "attrname", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock(return_value=["1.2.3.4", "5.6.7.8"]), - ): - self.assertEqual( - network._generate_minion_id(), - [ - "hostname.domainname.blank", - "nodename", - "hostname", - "1.2.3.4", - "5.6.7.8", - ], - ) - - def test_generate_minion_id_127_name(self): - """ - Test if minion IDs can be named 127.foo - - :return: - """ - with patch("platform.node", MagicMock(return_value="127")), patch( - "socket.gethostname", MagicMock(return_value="127") - ), patch( - "socket.getfqdn", MagicMock(return_value="127.domainname.blank") - ), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "attrname", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock(return_value=["1.2.3.4", "5.6.7.8"]), - ): - self.assertEqual( - network._generate_minion_id(), - ["127.domainname.blank", "127", "1.2.3.4", "5.6.7.8"], - ) 
- - def test_generate_minion_id_127_name_startswith(self): - """ - Test if minion IDs can be named starting from "127" - - :return: - """ - with patch("platform.node", MagicMock(return_value="127890")), patch( - "socket.gethostname", MagicMock(return_value="127890") - ), patch( - "socket.getfqdn", MagicMock(return_value="127890.domainname.blank") - ), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "attrname", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock(return_value=["1.2.3.4", "5.6.7.8"]), - ): - self.assertEqual( - network._generate_minion_id(), - ["127890.domainname.blank", "127890", "1.2.3.4", "5.6.7.8"], - ) - - def test_generate_minion_id_duplicate(self): - """ - Test if IP addresses in the minion IDs are distinct in the pool - - :return: - """ - with patch("platform.node", MagicMock(return_value="hostname")), patch( - "socket.gethostname", MagicMock(return_value="hostname") - ), patch("socket.getfqdn", MagicMock(return_value="hostname")), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock(return_value=["1.2.3.4", "1.2.3.4", "1.2.3.4"]), - ): - self.assertEqual(network._generate_minion_id(), ["hostname", "1.2.3.4"]) - - def test_generate_minion_id_platform_used(self): - """ - Test if platform.node is used for the first occurrence. - The platform.node is most common hostname resolver before anything else. 
- - :return: - """ - with patch( - "platform.node", MagicMock(return_value="very.long.and.complex.domain.name") - ), patch("socket.gethostname", MagicMock(return_value="hostname")), patch( - "socket.getfqdn", MagicMock(return_value="") - ), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock(return_value=["1.2.3.4", "1.2.3.4", "1.2.3.4"]), - ): - self.assertEqual( - network.generate_minion_id(), "very.long.and.complex.domain.name" - ) - - def test_generate_minion_id_platform_localhost_filtered(self): - """ - Test if localhost is filtered from the first occurrence. - - :return: - """ - with patch("platform.node", MagicMock(return_value="localhost")), patch( - "socket.gethostname", MagicMock(return_value="pick.me") - ), patch( - "socket.getfqdn", MagicMock(return_value="hostname.domainname.blank") - ), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock(return_value=["1.2.3.4", "1.2.3.4", "1.2.3.4"]), - ): - self.assertEqual(network.generate_minion_id(), "hostname.domainname.blank") - - def test_generate_minion_id_platform_localhost_filtered_all(self): - """ - Test if any of the localhost is filtered from everywhere. 
- - :return: - """ - with patch("platform.node", MagicMock(return_value="localhost")), patch( - "socket.gethostname", MagicMock(return_value="ip6-loopback") - ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock( - return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1", "1.2.3.4"] - ), - ): - self.assertEqual(network.generate_minion_id(), "1.2.3.4") - - def test_generate_minion_id_platform_localhost_only(self): - """ - Test if there is no other choice but localhost. - - :return: - """ - with patch("platform.node", MagicMock(return_value="localhost")), patch( - "socket.gethostname", MagicMock(return_value="ip6-loopback") - ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1"]), - ): - self.assertEqual(network.generate_minion_id(), "localhost") - - def test_generate_minion_id_platform_fqdn(self): - """ - Test if fqdn is picked up. 
- - :return: - """ - with patch("platform.node", MagicMock(return_value="localhost")), patch( - "socket.gethostname", MagicMock(return_value="ip6-loopback") - ), patch("socket.getfqdn", MagicMock(return_value="pick.me")), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1"]), - ): - self.assertEqual(network.generate_minion_id(), "pick.me") - - def test_generate_minion_id_platform_localhost_addrinfo(self): - """ - Test if addinfo is picked up. - - :return: - """ - with patch("platform.node", MagicMock(return_value="localhost")), patch( - "socket.gethostname", MagicMock(return_value="ip6-loopback") - ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "pick.me", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1"]), - ): - self.assertEqual(network.generate_minion_id(), "pick.me") - - def test_generate_minion_id_platform_ip_addr_only(self): - """ - Test if IP address is the only what is used as a Minion ID in case no DNS name. 
- - :return: - """ - with patch("platform.node", MagicMock(return_value="localhost")), patch( - "socket.gethostname", MagicMock(return_value="ip6-loopback") - ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock( - return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1", "1.2.3.4"] - ), - ): - self.assertEqual(network.generate_minion_id(), "1.2.3.4") - - def test_gen_mac(self): - with patch("random.randint", return_value=1) as random_mock: - self.assertEqual(random_mock.return_value, 1) - ret = network.gen_mac("00:16:3E") - expected_mac = "00:16:3E:01:01:01" - self.assertEqual(ret, expected_mac) - - def test_mac_str_to_bytes(self): - self.assertRaises(ValueError, network.mac_str_to_bytes, "31337") - self.assertRaises(ValueError, network.mac_str_to_bytes, "0001020304056") - self.assertRaises(ValueError, network.mac_str_to_bytes, "00:01:02:03:04:056") - self.assertRaises(ValueError, network.mac_str_to_bytes, "a0:b0:c0:d0:e0:fg") - self.assertEqual( - b"\x10\x08\x06\x04\x02\x00", network.mac_str_to_bytes("100806040200") - ) - self.assertEqual( - b"\xf8\xe7\xd6\xc5\xb4\xa3", network.mac_str_to_bytes("f8e7d6c5b4a3") - ) - - @pytest.mark.slow_test - def test_generate_minion_id_with_long_hostname(self): - """ - Validate the fix for: - - https://github.com/saltstack/salt/issues/51160 - """ - long_name = "localhost-abcdefghijklmnopqrstuvwxyz-abcdefghijklmnopqrstuvwxyz" - with patch("socket.gethostname", MagicMock(return_value=long_name)): - # An exception is raised if unicode is passed to socket.getfqdn - minion_id = network.generate_minion_id() - assert minion_id != "", minion_id - - def test_filter_by_networks_with_no_filter(self): - ips = ["10.0.123.200", "10.10.10.10"] - with pytest.raises(TypeError): - network.filter_by_networks(ips) # pylint: 
disable=no-value-for-parameter - - def test_filter_by_networks_empty_filter(self): - ips = ["10.0.123.200", "10.10.10.10"] - assert network.filter_by_networks(ips, []) == [] - - def test_filter_by_networks_ips_list(self): - ips = [ - "10.0.123.200", - "10.10.10.10", - "193.124.233.5", - "fe80::d210:cf3f:64e7:5423", - ] - networks = ["10.0.0.0/8", "fe80::/64"] - assert network.filter_by_networks(ips, networks) == [ - "10.0.123.200", - "10.10.10.10", - "fe80::d210:cf3f:64e7:5423", - ] - - def test_filter_by_networks_interfaces_dict(self): - interfaces = { - "wlan0": ["192.168.1.100", "217.5.140.67", "2001:db8::ff00:42:8329"], - "eth0": [ - "2001:0DB8:0:CD30:123:4567:89AB:CDEF", - "192.168.1.101", - "10.0.123.201", - ], - } - assert network.filter_by_networks( - interfaces, ["192.168.1.0/24", "2001:db8::/48"] - ) == { - "wlan0": ["192.168.1.100", "2001:db8::ff00:42:8329"], - "eth0": ["2001:0DB8:0:CD30:123:4567:89AB:CDEF", "192.168.1.101"], - } - - def test_filter_by_networks_catch_all(self): - ips = [ - "10.0.123.200", - "10.10.10.10", - "193.124.233.5", - "fe80::d210:cf3f:64e7:5423", - ] - assert ips == network.filter_by_networks(ips, ["0.0.0.0/0", "::/0"]) - - def test_ip_networks(self): - # We don't need to test with each platform's ifconfig/iproute2 output, - # since this test isn't testing getting the interfaces. We already have - # tests for that. 
- interface_data = network._interfaces_ifconfig(LINUX) - - # Without loopback - ret = network.ip_networks(interface_data=interface_data) - assert ret == ["10.10.8.0/22"], ret - # Without loopback, specific interface - ret = network.ip_networks(interface="eth0", interface_data=interface_data) - assert ret == ["10.10.8.0/22"], ret - # Without loopback, multiple specific interfaces - ret = network.ip_networks(interface="eth0,lo", interface_data=interface_data) - assert ret == ["10.10.8.0/22"], ret - # Without loopback, specific interface (not present) - ret = network.ip_networks(interface="eth1", interface_data=interface_data) - assert ret == [], ret - # With loopback - ret = network.ip_networks(include_loopback=True, interface_data=interface_data) - assert ret == ["10.10.8.0/22", "127.0.0.0/8"], ret - # With loopback, specific interface - ret = network.ip_networks( - interface="eth0", include_loopback=True, interface_data=interface_data - ) - assert ret == ["10.10.8.0/22"], ret - # With loopback, multiple specific interfaces - ret = network.ip_networks( - interface="eth0,lo", include_loopback=True, interface_data=interface_data - ) - assert ret == ["10.10.8.0/22", "127.0.0.0/8"], ret - # With loopback, specific interface (not present) - ret = network.ip_networks( - interface="eth1", include_loopback=True, interface_data=interface_data - ) - assert ret == [], ret - - # Verbose, without loopback - ret = network.ip_networks(verbose=True, interface_data=interface_data) - assert ret == { - "10.10.8.0/22": { - "prefixlen": 22, - "netmask": "255.255.252.0", - "num_addresses": 1024, - "address": "10.10.8.0", - }, - }, ret - # Verbose, without loopback, specific interface - ret = network.ip_networks( - interface="eth0", verbose=True, interface_data=interface_data - ) - assert ret == { - "10.10.8.0/22": { - "prefixlen": 22, - "netmask": "255.255.252.0", - "num_addresses": 1024, - "address": "10.10.8.0", - }, - }, ret - # Verbose, without loopback, multiple specific interfaces 
- ret = network.ip_networks( - interface="eth0,lo", verbose=True, interface_data=interface_data - ) - assert ret == { - "10.10.8.0/22": { - "prefixlen": 22, - "netmask": "255.255.252.0", - "num_addresses": 1024, - "address": "10.10.8.0", - }, - }, ret - # Verbose, without loopback, specific interface (not present) - ret = network.ip_networks( - interface="eth1", verbose=True, interface_data=interface_data - ) - assert ret == {}, ret - # Verbose, with loopback - ret = network.ip_networks( - include_loopback=True, verbose=True, interface_data=interface_data - ) - assert ret == { - "10.10.8.0/22": { - "prefixlen": 22, - "netmask": "255.255.252.0", - "num_addresses": 1024, - "address": "10.10.8.0", - }, - "127.0.0.0/8": { - "prefixlen": 8, - "netmask": "255.0.0.0", - "num_addresses": 16777216, - "address": "127.0.0.0", - }, - }, ret - # Verbose, with loopback, specific interface - ret = network.ip_networks( - interface="eth0", - include_loopback=True, - verbose=True, - interface_data=interface_data, - ) - assert ret == { - "10.10.8.0/22": { - "prefixlen": 22, - "netmask": "255.255.252.0", - "num_addresses": 1024, - "address": "10.10.8.0", - }, - }, ret - # Verbose, with loopback, multiple specific interfaces - ret = network.ip_networks( - interface="eth0,lo", - include_loopback=True, - verbose=True, - interface_data=interface_data, - ) - assert ret == { - "10.10.8.0/22": { - "prefixlen": 22, - "netmask": "255.255.252.0", - "num_addresses": 1024, - "address": "10.10.8.0", - }, - "127.0.0.0/8": { - "prefixlen": 8, - "netmask": "255.0.0.0", - "num_addresses": 16777216, - "address": "127.0.0.0", - }, - }, ret - # Verbose, with loopback, specific interface (not present) - ret = network.ip_networks( - interface="eth1", - include_loopback=True, - verbose=True, - interface_data=interface_data, - ) - assert ret == {}, ret - - def test_ip_networks6(self): - # We don't need to test with each platform's ifconfig/iproute2 output, - # since this test isn't testing getting the 
interfaces. We already have - # tests for that. - interface_data = network._interfaces_ifconfig(LINUX) - - # Without loopback - ret = network.ip_networks6(interface_data=interface_data) - assert ret == ["fe80::/64"], ret - # Without loopback, specific interface - ret = network.ip_networks6(interface="eth0", interface_data=interface_data) - assert ret == ["fe80::/64"], ret - # Without loopback, multiple specific interfaces - ret = network.ip_networks6(interface="eth0,lo", interface_data=interface_data) - assert ret == ["fe80::/64"], ret - # Without loopback, specific interface (not present) - ret = network.ip_networks6(interface="eth1", interface_data=interface_data) - assert ret == [], ret - # With loopback - ret = network.ip_networks6(include_loopback=True, interface_data=interface_data) - assert ret == ["::1/128", "fe80::/64"], ret - # With loopback, specific interface - ret = network.ip_networks6( - interface="eth0", include_loopback=True, interface_data=interface_data - ) - assert ret == ["fe80::/64"], ret - # With loopback, multiple specific interfaces - ret = network.ip_networks6( - interface="eth0,lo", include_loopback=True, interface_data=interface_data - ) - assert ret == ["::1/128", "fe80::/64"], ret - # With loopback, specific interface (not present) - ret = network.ip_networks6( - interface="eth1", include_loopback=True, interface_data=interface_data - ) - assert ret == [], ret - - # Verbose, without loopback - ret = network.ip_networks6(verbose=True, interface_data=interface_data) - assert ret == { - "fe80::/64": { - "prefixlen": 64, - "netmask": "ffff:ffff:ffff:ffff::", - "num_addresses": 18446744073709551616, - "address": "fe80::", - }, - }, ret - # Verbose, without loopback, specific interface - ret = network.ip_networks6( - interface="eth0", verbose=True, interface_data=interface_data - ) - assert ret == { - "fe80::/64": { - "prefixlen": 64, - "netmask": "ffff:ffff:ffff:ffff::", - "num_addresses": 18446744073709551616, - "address": "fe80::", - }, - 
}, ret - # Verbose, without loopback, multiple specific interfaces - ret = network.ip_networks6( - interface="eth0,lo", verbose=True, interface_data=interface_data - ) - assert ret == { - "fe80::/64": { - "prefixlen": 64, - "netmask": "ffff:ffff:ffff:ffff::", - "num_addresses": 18446744073709551616, - "address": "fe80::", - }, - }, ret - # Verbose, without loopback, specific interface (not present) - ret = network.ip_networks6( - interface="eth1", verbose=True, interface_data=interface_data - ) - assert ret == {}, ret - # Verbose, with loopback - ret = network.ip_networks6( - include_loopback=True, verbose=True, interface_data=interface_data - ) - assert ret == { - "fe80::/64": { - "prefixlen": 64, - "netmask": "ffff:ffff:ffff:ffff::", - "num_addresses": 18446744073709551616, - "address": "fe80::", - }, - "::1/128": { - "prefixlen": 128, - "netmask": "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", - "num_addresses": 1, - "address": "::1", - }, - }, ret - # Verbose, with loopback, specific interface - ret = network.ip_networks6( - interface="eth0", - include_loopback=True, - verbose=True, - interface_data=interface_data, - ) - assert ret == { - "fe80::/64": { - "prefixlen": 64, - "netmask": "ffff:ffff:ffff:ffff::", - "num_addresses": 18446744073709551616, - "address": "fe80::", - }, - }, ret - # Verbose, with loopback, multiple specific interfaces - ret = network.ip_networks6( - interface="eth0,lo", - include_loopback=True, - verbose=True, - interface_data=interface_data, - ) - assert ret == { - "fe80::/64": { - "prefixlen": 64, - "netmask": "ffff:ffff:ffff:ffff::", - "num_addresses": 18446744073709551616, - "address": "fe80::", - }, - "::1/128": { - "prefixlen": 128, - "netmask": "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", - "num_addresses": 1, - "address": "::1", - }, - }, ret - # Verbose, with loopback, specific interface (not present) - ret = network.ip_networks6( - interface="eth1", - include_loopback=True, - verbose=True, - interface_data=interface_data, - ) - 
assert ret == {}, ret - - def test_get_fqhostname_return(self): - """ - Test if proper hostname is used when RevDNS differ from hostname - - :return: - """ - with patch("socket.gethostname", MagicMock(return_value="hostname")), patch( - "socket.getfqdn", - MagicMock(return_value="very.long.and.complex.domain.name"), - ), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]), - ): - self.assertEqual(network.get_fqhostname(), "hostname") - - def test_get_fqhostname_return_empty_hostname(self): - """ - Test if proper hostname is used when hostname returns empty string - """ - host = "hostname" - with patch("socket.gethostname", MagicMock(return_value=host)), patch( - "socket.getfqdn", - MagicMock(return_value="very.long.and.complex.domain.name"), - ), patch( - "socket.getaddrinfo", - MagicMock( - return_value=[ - (2, 3, 0, host, ("127.0.1.1", 0)), - (2, 3, 0, "", ("127.0.1.1", 0)), - ] - ), - ): - self.assertEqual(network.get_fqhostname(), host) - - def test_ip_bracket(self): - test_ipv4 = "127.0.0.1" - test_ipv6 = "::1" - test_ipv6_uri = "[::1]" - self.assertEqual(test_ipv4, network.ip_bracket(test_ipv4)) - self.assertEqual(test_ipv6, network.ip_bracket(test_ipv6_uri, strip=True)) - self.assertEqual("[{}]".format(test_ipv6), network.ip_bracket(test_ipv6)) - self.assertEqual("[{}]".format(test_ipv6), network.ip_bracket(test_ipv6_uri)) - - ip_addr_obj = ipaddress.ip_address(test_ipv4) - self.assertEqual(test_ipv4, network.ip_bracket(ip_addr_obj)) From 4e80309923670ede0aa7748800e0d366f8f248fd Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Thu, 5 Oct 2023 13:55:51 -0600 Subject: [PATCH 063/196] Initial working tests after migration to pytests, and pre-commit pass --- tests/pytests/unit/utils/test_network.py | 39 ++++++++++++++---------- 1 file changed, 23 insertions(+), 16 deletions(-) diff --git a/tests/pytests/unit/utils/test_network.py b/tests/pytests/unit/utils/test_network.py index 
42078bd571a..1e4f1c95de9 100644 --- a/tests/pytests/unit/utils/test_network.py +++ b/tests/pytests/unit/utils/test_network.py @@ -157,31 +157,38 @@ def test_host_to_ips(): assertion. """ - _side_effect_ipv4 = { - "github.com": [ - (2, 1, 6, "", ("192.30.255.112", 0)), - (2, 1, 6, "", ("192.30.255.113", 0)), - ], - } + # pylint doesn't like the }[host] below, disable typecheck + # pylint: disable=all + def getaddrinfo_side_effect(host, *args): + try: + return { + "github.com": [ + (2, 1, 6, "", ("192.30.255.112", 0)), + (2, 1, 6, "", ("192.30.255.113", 0)), + ], + "ipv6host.foo": [ + (socket.AF_INET6, 1, 6, "", ("2001:a71::1", 0, 0, 0)), + ], + }[host] + except KeyError: + raise socket.gaierror(-2, "Name or service not known") - _side_effect_ipv6 = { - "ipv6host.foo": [ - (socket.AF_INET6, 1, 6, "", ("2001:a71::1", 0, 0, 0)), - ], - } - ## getaddrinfo_mock = MagicMock(side_effect=_side_effect) - ## with patch.object(socket, "getaddrinfo", getaddrinfo_mock): - with patch.object(socket, "getaddrinfo", MagicMock(side_effect=_side_effect_ipv4)): - # Test host that can be resolved, ipv4 + # pylint: enable=all + + getaddrinfo_mock = MagicMock(side_effect=getaddrinfo_side_effect) + with patch.object(socket, "getaddrinfo", getaddrinfo_mock): + # Test host that can be resolved ret = network.host_to_ips("github.com") + log.warning(f"DGM test_host_to_ips ipv4, ret '{ret}'") assert ret == ["192.30.255.112", "192.30.255.113"] - with patch.object(socket, "getaddrinfo", MagicMock(side_effect=_side_effect_ipv6)): # Test ipv6 ret = network.host_to_ips("ipv6host.foo") + log.warning(f"DGM test_host_to_ips ipv6, ret '{ret}'") assert ret == ["2001:a71::1"] # Test host that can't be resolved ret = network.host_to_ips("someothersite.com") + log.warning(f"DGM test_host_to_ips ipv6 2, ret '{ret}'") assert ret is None From 1a25bd7630b7c779147f053f89be139d97624904 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Thu, 5 Oct 2023 15:06:16 -0600 Subject: [PATCH 064/196] 
Added some tests for code-coverage --- tests/pytests/unit/utils/test_network.py | 1091 ++++++++++++---------- 1 file changed, 572 insertions(+), 519 deletions(-) diff --git a/tests/pytests/unit/utils/test_network.py b/tests/pytests/unit/utils/test_network.py index 1e4f1c95de9..b689993ebdd 100644 --- a/tests/pytests/unit/utils/test_network.py +++ b/tests/pytests/unit/utils/test_network.py @@ -11,6 +11,11 @@ import salt.utils.network as network from salt._compat import ipaddress from tests.support.mock import MagicMock, create_autospec, mock_open, patch +pytestmark = [ + pytest.mark.skip_on_windows, +] + + log = logging.getLogger(__name__) LINUX = """\ @@ -179,16 +184,13 @@ def test_host_to_ips(): with patch.object(socket, "getaddrinfo", getaddrinfo_mock): # Test host that can be resolved ret = network.host_to_ips("github.com") - log.warning(f"DGM test_host_to_ips ipv4, ret '{ret}'") assert ret == ["192.30.255.112", "192.30.255.113"] # Test ipv6 ret = network.host_to_ips("ipv6host.foo") - log.warning(f"DGM test_host_to_ips ipv6, ret '{ret}'") assert ret == ["2001:a71::1"] # Test host that can't be resolved ret = network.host_to_ips("someothersite.com") - log.warning(f"DGM test_host_to_ips ipv6 2, ret '{ret}'") assert ret is None @@ -830,519 +832,570 @@ def test_generate_minion_id_platform_localhost_filtered(): assert network.generate_minion_id() == "hostname.domainname.blank" -## def test_generate_minion_id_platform_localhost_filtered_all(): -## """ -## Test if any of the localhost is filtered from everywhere. 
-## -## :return: -## """ -## with patch("platform.node", MagicMock(return_value="localhost")), patch( -## "socket.gethostname", MagicMock(return_value="ip6-loopback") -## ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( -## "socket.getaddrinfo", -## MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]), -## ), patch( -## "salt.utils.files.fopen", mock_open() -## ), patch( -## "salt.utils.network.ip_addrs", -## MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1", "1.2.3.4"]), -## ): -## assert network.generate_minion_id() == "1.2.3.4" -## -## -## def test_generate_minion_id_platform_localhost_only(): -## """ -## Test if there is no other choice but localhost. -## -## :return: -## """ -## with patch("platform.node", MagicMock(return_value="localhost")), patch( -## "socket.gethostname", MagicMock(return_value="ip6-loopback") -## ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( -## "socket.getaddrinfo", -## MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]), -## ), patch( -## "salt.utils.files.fopen", mock_open() -## ), patch( -## "salt.utils.network.ip_addrs", -## MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1"]), -## ): -## assert network.generate_minion_id() == "localhost" -## -## -## def test_generate_minion_id_platform_fqdn(): -## """ -## Test if fqdn is picked up. 
-## -## :return: -## """ -## with patch("platform.node", MagicMock(return_value="localhost")), patch( -## "socket.gethostname", MagicMock(return_value="ip6-loopback") -## ), patch("socket.getfqdn", MagicMock(return_value="pick.me")), patch( -## "socket.getaddrinfo", -## MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]), -## ), patch( -## "salt.utils.files.fopen", mock_open() -## ), patch( -## "salt.utils.network.ip_addrs", -## MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1"]), -## ): -## assert network.generate_minion_id() == "pick.me" -## -## -## def test_generate_minion_id_platform_localhost_addrinfo(): -## """ -## Test if addinfo is picked up. -## -## :return: -## """ -## with patch("platform.node", MagicMock(return_value="localhost")), patch( -## "socket.gethostname", MagicMock(return_value="ip6-loopback") -## ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( -## "socket.getaddrinfo", -## MagicMock(return_value=[(2, 3, 0, "pick.me", ("127.0.1.1", 0))]), -## ), patch( -## "salt.utils.files.fopen", mock_open() -## ), patch( -## "salt.utils.network.ip_addrs", -## MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1"]), -## ): -## assert network.generate_minion_id() == "pick.me" -## -## -## def test_generate_minion_id_platform_ip_addr_only(): -## """ -## Test if IP address is the only what is used as a Minion ID in case no DNS name. 
-## -## :return: -## """ -## with patch("platform.node", MagicMock(return_value="localhost")), patch( -## "socket.gethostname", MagicMock(return_value="ip6-loopback") -## ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( -## "socket.getaddrinfo", -## MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]), -## ), patch( -## "salt.utils.files.fopen", mock_open() -## ), patch( -## "salt.utils.network.ip_addrs", -## MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1", "1.2.3.4"]), -## ): -## assert network.generate_minion_id() == "1.2.3.4" -## -## -## def test_gen_mac(): -## with patch("random.randint", return_value=1) as random_mock: -## assert random_mock.return_value == 1 -## ret = network.gen_mac("00:16:3E") -## expected_mac = "00:16:3E:01:01:01" -## assert ret == expected_mac -## -## -## def test_mac_str_to_bytes(): -## pytest.raises(ValueError, network.mac_str_to_bytes, "31337") -## pytest.raises(ValueError, network.mac_str_to_bytes, "0001020304056") -## pytest.raises(ValueError, network.mac_str_to_bytes, "00:01:02:03:04:056") -## pytest.raises(ValueError, network.mac_str_to_bytes, "a0:b0:c0:d0:e0:fg") -## assert b"\x10\x08\x06\x04\x02\x00" == network.mac_str_to_bytes("100806040200") -## assert b"\xf8\xe7\xd6\xc5\xb4\xa3" == network.mac_str_to_bytes("f8e7d6c5b4a3") -## -## -## @pytest.mark.slow_test -## def test_generate_minion_id_with_long_hostname(): -## """ -## Validate the fix for: -## -## https://github.com/saltstack/salt/issues/51160 -## """ -## long_name = "localhost-abcdefghijklmnopqrstuvwxyz-abcdefghijklmnopqrstuvwxyz" -## with patch("socket.gethostname", MagicMock(return_value=long_name)): -## # An exception is raised if unicode is passed to socket.getfqdn -## minion_id = network.generate_minion_id() -## assert minion_id != "", minion_id -## -## -## def test_filter_by_networks_with_no_filter(): -## ips = ["10.0.123.200", "10.10.10.10"] -## with pytest.raises(TypeError): -## 
network.filter_by_networks(ips) # pylint: disable=no-value-for-parameter -## -## -## def test_filter_by_networks_empty_filter(): -## ips = ["10.0.123.200", "10.10.10.10"] -## assert network.filter_by_networks(ips, []) == [] -## -## -## def test_filter_by_networks_ips_list(): -## ips = [ -## "10.0.123.200", -## "10.10.10.10", -## "193.124.233.5", -## "fe80::d210:cf3f:64e7:5423", -## ] -## networks = ["10.0.0.0/8", "fe80::/64"] -## assert network.filter_by_networks(ips, networks) == [ -## "10.0.123.200", -## "10.10.10.10", -## "fe80::d210:cf3f:64e7:5423", -## ] -## -## -## def test_filter_by_networks_interfaces_dict(): -## interfaces = { -## "wlan0": ["192.168.1.100", "217.5.140.67", "2001:db8::ff00:42:8329"], -## "eth0": [ -## "2001:0DB8:0:CD30:123:4567:89AB:CDEF", -## "192.168.1.101", -## "10.0.123.201", -## ], -## } -## assert network.filter_by_networks( -## interfaces, ["192.168.1.0/24", "2001:db8::/48"] -## ) == { -## "wlan0": ["192.168.1.100", "2001:db8::ff00:42:8329"], -## "eth0": ["2001:0DB8:0:CD30:123:4567:89AB:CDEF", "192.168.1.101"], -## } -## -## -## def test_filter_by_networks_catch_all(): -## ips = [ -## "10.0.123.200", -## "10.10.10.10", -## "193.124.233.5", -## "fe80::d210:cf3f:64e7:5423", -## ] -## assert ips == network.filter_by_networks(ips, ["0.0.0.0/0", "::/0"]) -## -## -## def test_ip_networks(): -## # We don't need to test with each platform's ifconfig/iproute2 output, -## # since this test isn't testing getting the interfaces. We already have -## # tests for that. 
-## interface_data = network._interfaces_ifconfig(LINUX) -## -## # Without loopback -## ret = network.ip_networks(interface_data=interface_data) -## assert ret == ["10.10.8.0/22"], ret -## # Without loopback, specific interface -## ret = network.ip_networks(interface="eth0", interface_data=interface_data) -## assert ret == ["10.10.8.0/22"], ret -## # Without loopback, multiple specific interfaces -## ret = network.ip_networks(interface="eth0,lo", interface_data=interface_data) -## assert ret == ["10.10.8.0/22"], ret -## # Without loopback, specific interface (not present) -## ret = network.ip_networks(interface="eth1", interface_data=interface_data) -## assert ret == [], ret -## # With loopback -## ret = network.ip_networks(include_loopback=True, interface_data=interface_data) -## assert ret == ["10.10.8.0/22", "127.0.0.0/8"], ret -## # With loopback, specific interface -## ret = network.ip_networks( -## interface="eth0", include_loopback=True, interface_data=interface_data -## ) -## assert ret == ["10.10.8.0/22"], ret -## # With loopback, multiple specific interfaces -## ret = network.ip_networks( -## interface="eth0,lo", include_loopback=True, interface_data=interface_data -## ) -## assert ret == ["10.10.8.0/22", "127.0.0.0/8"], ret -## # With loopback, specific interface (not present) -## ret = network.ip_networks( -## interface="eth1", include_loopback=True, interface_data=interface_data -## ) -## assert ret == [], ret -## -## # Verbose, without loopback -## ret = network.ip_networks(verbose=True, interface_data=interface_data) -## assert ret == { -## "10.10.8.0/22": { -## "prefixlen": 22, -## "netmask": "255.255.252.0", -## "num_addresses": 1024, -## "address": "10.10.8.0", -## }, -## }, ret -## # Verbose, without loopback, specific interface -## ret = network.ip_networks( -## interface="eth0", verbose=True, interface_data=interface_data -## ) -## assert ret == { -## "10.10.8.0/22": { -## "prefixlen": 22, -## "netmask": "255.255.252.0", -## "num_addresses": 
1024, -## "address": "10.10.8.0", -## }, -## }, ret -## # Verbose, without loopback, multiple specific interfaces -## ret = network.ip_networks( -## interface="eth0,lo", verbose=True, interface_data=interface_data -## ) -## assert ret == { -## "10.10.8.0/22": { -## "prefixlen": 22, -## "netmask": "255.255.252.0", -## "num_addresses": 1024, -## "address": "10.10.8.0", -## }, -## }, ret -## # Verbose, without loopback, specific interface (not present) -## ret = network.ip_networks( -## interface="eth1", verbose=True, interface_data=interface_data -## ) -## assert ret == {}, ret -## # Verbose, with loopback -## ret = network.ip_networks( -## include_loopback=True, verbose=True, interface_data=interface_data -## ) -## assert ret == { -## "10.10.8.0/22": { -## "prefixlen": 22, -## "netmask": "255.255.252.0", -## "num_addresses": 1024, -## "address": "10.10.8.0", -## }, -## "127.0.0.0/8": { -## "prefixlen": 8, -## "netmask": "255.0.0.0", -## "num_addresses": 16777216, -## "address": "127.0.0.0", -## }, -## }, ret -## # Verbose, with loopback, specific interface -## ret = network.ip_networks( -## interface="eth0", -## include_loopback=True, -## verbose=True, -## interface_data=interface_data, -## ) -## assert ret == { -## "10.10.8.0/22": { -## "prefixlen": 22, -## "netmask": "255.255.252.0", -## "num_addresses": 1024, -## "address": "10.10.8.0", -## }, -## }, ret -## # Verbose, with loopback, multiple specific interfaces -## ret = network.ip_networks( -## interface="eth0,lo", -## include_loopback=True, -## verbose=True, -## interface_data=interface_data, -## ) -## assert ret == { -## "10.10.8.0/22": { -## "prefixlen": 22, -## "netmask": "255.255.252.0", -## "num_addresses": 1024, -## "address": "10.10.8.0", -## }, -## "127.0.0.0/8": { -## "prefixlen": 8, -## "netmask": "255.0.0.0", -## "num_addresses": 16777216, -## "address": "127.0.0.0", -## }, -## }, ret -## # Verbose, with loopback, specific interface (not present) -## ret = network.ip_networks( -## interface="eth1", 
-## include_loopback=True, -## verbose=True, -## interface_data=interface_data, -## ) -## assert ret == {}, ret -## -## -## def test_ip_networks6(): -## # We don't need to test with each platform's ifconfig/iproute2 output, -## # since this test isn't testing getting the interfaces. We already have -## # tests for that. -## interface_data = network._interfaces_ifconfig(LINUX) -## -## # Without loopback -## ret = network.ip_networks6(interface_data=interface_data) -## assert ret == ["fe80::/64"], ret -## # Without loopback, specific interface -## ret = network.ip_networks6(interface="eth0", interface_data=interface_data) -## assert ret == ["fe80::/64"], ret -## # Without loopback, multiple specific interfaces -## ret = network.ip_networks6(interface="eth0,lo", interface_data=interface_data) -## assert ret == ["fe80::/64"], ret -## # Without loopback, specific interface (not present) -## ret = network.ip_networks6(interface="eth1", interface_data=interface_data) -## assert ret == [], ret -## # With loopback -## ret = network.ip_networks6(include_loopback=True, interface_data=interface_data) -## assert ret == ["::1/128", "fe80::/64"], ret -## # With loopback, specific interface -## ret = network.ip_networks6( -## interface="eth0", include_loopback=True, interface_data=interface_data -## ) -## assert ret == ["fe80::/64"], ret -## # With loopback, multiple specific interfaces -## ret = network.ip_networks6( -## interface="eth0,lo", include_loopback=True, interface_data=interface_data -## ) -## assert ret == ["::1/128", "fe80::/64"], ret -## # With loopback, specific interface (not present) -## ret = network.ip_networks6( -## interface="eth1", include_loopback=True, interface_data=interface_data -## ) -## assert ret == [], ret -## -## # Verbose, without loopback -## ret = network.ip_networks6(verbose=True, interface_data=interface_data) -## assert ret == { -## "fe80::/64": { -## "prefixlen": 64, -## "netmask": "ffff:ffff:ffff:ffff::", -## "num_addresses": 
18446744073709551616, -## "address": "fe80::", -## }, -## }, ret -## # Verbose, without loopback, specific interface -## ret = network.ip_networks6( -## interface="eth0", verbose=True, interface_data=interface_data -## ) -## assert ret == { -## "fe80::/64": { -## "prefixlen": 64, -## "netmask": "ffff:ffff:ffff:ffff::", -## "num_addresses": 18446744073709551616, -## "address": "fe80::", -## }, -## }, ret -## # Verbose, without loopback, multiple specific interfaces -## ret = network.ip_networks6( -## interface="eth0,lo", verbose=True, interface_data=interface_data -## ) -## assert ret == { -## "fe80::/64": { -## "prefixlen": 64, -## "netmask": "ffff:ffff:ffff:ffff::", -## "num_addresses": 18446744073709551616, -## "address": "fe80::", -## }, -## }, ret -## # Verbose, without loopback, specific interface (not present) -## ret = network.ip_networks6( -## interface="eth1", verbose=True, interface_data=interface_data -## ) -## assert ret == {}, ret -## # Verbose, with loopback -## ret = network.ip_networks6( -## include_loopback=True, verbose=True, interface_data=interface_data -## ) -## assert ret == { -## "fe80::/64": { -## "prefixlen": 64, -## "netmask": "ffff:ffff:ffff:ffff::", -## "num_addresses": 18446744073709551616, -## "address": "fe80::", -## }, -## "::1/128": { -## "prefixlen": 128, -## "netmask": "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", -## "num_addresses": 1, -## "address": "::1", -## }, -## }, ret -## # Verbose, with loopback, specific interface -## ret = network.ip_networks6( -## interface="eth0", -## include_loopback=True, -## verbose=True, -## interface_data=interface_data, -## ) -## assert ret == { -## "fe80::/64": { -## "prefixlen": 64, -## "netmask": "ffff:ffff:ffff:ffff::", -## "num_addresses": 18446744073709551616, -## "address": "fe80::", -## }, -## }, ret -## # Verbose, with loopback, multiple specific interfaces -## ret = network.ip_networks6( -## interface="eth0,lo", -## include_loopback=True, -## verbose=True, -## 
interface_data=interface_data, -## ) -## assert ret == { -## "fe80::/64": { -## "prefixlen": 64, -## "netmask": "ffff:ffff:ffff:ffff::", -## "num_addresses": 18446744073709551616, -## "address": "fe80::", -## }, -## "::1/128": { -## "prefixlen": 128, -## "netmask": "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", -## "num_addresses": 1, -## "address": "::1", -## }, -## }, ret -## # Verbose, with loopback, specific interface (not present) -## ret = network.ip_networks6( -## interface="eth1", -## include_loopback=True, -## verbose=True, -## interface_data=interface_data, -## ) -## assert ret == {}, ret -## -## -## def test_get_fqhostname_return(): -## """ -## Test if proper hostname is used when RevDNS differ from hostname -## -## :return: -## """ -## with patch("socket.gethostname", MagicMock(return_value="hostname")), patch( -## "socket.getfqdn", -## MagicMock(return_value="very.long.and.complex.domain.name"), -## ), patch( -## "socket.getaddrinfo", -## MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]), -## ): -## assert network.get_fqhostname() == "hostname" -## -## -## def test_get_fqhostname_return_empty_hostname(): -## """ -## Test if proper hostname is used when hostname returns empty string -## """ -## host = "hostname" -## with patch("socket.gethostname", MagicMock(return_value=host)), patch( -## "socket.getfqdn", -## MagicMock(return_value="very.long.and.complex.domain.name"), -## ), patch( -## "socket.getaddrinfo", -## MagicMock( -## return_value=[ -## (2, 3, 0, host, ("127.0.1.1", 0)), -## (2, 3, 0, "", ("127.0.1.1", 0)), -## ] -## ), -## ): -## assert network.get_fqhostname() == host -## -## -## def test_ip_bracket(): -## test_ipv4 = "127.0.0.1" -## test_ipv6 = "::1" -## test_ipv6_uri = "[::1]" -## assert test_ipv4 == network.ip_bracket(test_ipv4) -## assert test_ipv6 == network.ip_bracket(test_ipv6_uri, strip=True) -## assert "[{}]".format(test_ipv6) == network.ip_bracket(test_ipv6) -## assert "[{}]".format(test_ipv6) == 
network.ip_bracket(test_ipv6_uri) -## -## ip_addr_obj = ipaddress.ip_address(test_ipv4) -## assert test_ipv4 == network.ip_bracket(ip_addr_obj) -## -## -## def test_junos_ifconfig_output_parsing(): -## ret = network._junos_interfaces_ifconfig("inet mtu 0 local=" + " " * 3456) -## assert ret == {"inet": {"up": False}} +def test_generate_minion_id_platform_localhost_filtered_all(): + """ + Test if any of the localhost is filtered from everywhere. + + :return: + """ + with patch("platform.node", MagicMock(return_value="localhost")), patch( + "socket.gethostname", MagicMock(return_value="ip6-loopback") + ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1", "1.2.3.4"]), + ): + assert network.generate_minion_id() == "1.2.3.4" + + +def test_generate_minion_id_platform_localhost_only(): + """ + Test if there is no other choice but localhost. + + :return: + """ + with patch("platform.node", MagicMock(return_value="localhost")), patch( + "socket.gethostname", MagicMock(return_value="ip6-loopback") + ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1"]), + ): + assert network.generate_minion_id() == "localhost" + + +def test_generate_minion_id_platform_fqdn(): + """ + Test if fqdn is picked up. 
+ + :return: + """ + with patch("platform.node", MagicMock(return_value="localhost")), patch( + "socket.gethostname", MagicMock(return_value="ip6-loopback") + ), patch("socket.getfqdn", MagicMock(return_value="pick.me")), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1"]), + ): + assert network.generate_minion_id() == "pick.me" + + +def test_generate_minion_id_platform_localhost_addrinfo(): + """ + Test if addinfo is picked up. + + :return: + """ + with patch("platform.node", MagicMock(return_value="localhost")), patch( + "socket.gethostname", MagicMock(return_value="ip6-loopback") + ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "pick.me", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1"]), + ): + assert network.generate_minion_id() == "pick.me" + + +def test_generate_minion_id_platform_ip_addr_only(): + """ + Test if IP address is the only what is used as a Minion ID in case no DNS name. 
+ + :return: + """ + with patch("platform.node", MagicMock(return_value="localhost")), patch( + "socket.gethostname", MagicMock(return_value="ip6-loopback") + ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1", "1.2.3.4"]), + ): + assert network.generate_minion_id() == "1.2.3.4" + + +def test_gen_mac(): + with patch("random.randint", return_value=1) as random_mock: + assert random_mock.return_value == 1 + ret = network.gen_mac("00:16:3E") + expected_mac = "00:16:3E:01:01:01" + assert ret == expected_mac + + +def test_mac_str_to_bytes(): + pytest.raises(ValueError, network.mac_str_to_bytes, "31337") + pytest.raises(ValueError, network.mac_str_to_bytes, "0001020304056") + pytest.raises(ValueError, network.mac_str_to_bytes, "00:01:02:03:04:056") + pytest.raises(ValueError, network.mac_str_to_bytes, "a0:b0:c0:d0:e0:fg") + assert b"\x10\x08\x06\x04\x02\x00" == network.mac_str_to_bytes("100806040200") + assert b"\xf8\xe7\xd6\xc5\xb4\xa3" == network.mac_str_to_bytes("f8e7d6c5b4a3") + + +@pytest.mark.slow_test +def test_generate_minion_id_with_long_hostname(): + """ + Validate the fix for: + + https://github.com/saltstack/salt/issues/51160 + """ + long_name = "localhost-abcdefghijklmnopqrstuvwxyz-abcdefghijklmnopqrstuvwxyz" + with patch("socket.gethostname", MagicMock(return_value=long_name)): + # An exception is raised if unicode is passed to socket.getfqdn + minion_id = network.generate_minion_id() + assert minion_id != "", minion_id + + +def test_filter_by_networks_with_no_filter(): + ips = ["10.0.123.200", "10.10.10.10"] + with pytest.raises(TypeError): + network.filter_by_networks(ips) # pylint: disable=no-value-for-parameter + + +def test_filter_by_networks_empty_filter(): + ips = 
["10.0.123.200", "10.10.10.10"] + assert network.filter_by_networks(ips, []) == [] + + +def test_filter_by_networks_ips_list(): + ips = [ + "10.0.123.200", + "10.10.10.10", + "193.124.233.5", + "fe80::d210:cf3f:64e7:5423", + ] + networks = ["10.0.0.0/8", "fe80::/64"] + assert network.filter_by_networks(ips, networks) == [ + "10.0.123.200", + "10.10.10.10", + "fe80::d210:cf3f:64e7:5423", + ] + + +def test_filter_by_networks_interfaces_dict(): + interfaces = { + "wlan0": ["192.168.1.100", "217.5.140.67", "2001:db8::ff00:42:8329"], + "eth0": [ + "2001:0DB8:0:CD30:123:4567:89AB:CDEF", + "192.168.1.101", + "10.0.123.201", + ], + } + assert network.filter_by_networks( + interfaces, ["192.168.1.0/24", "2001:db8::/48"] + ) == { + "wlan0": ["192.168.1.100", "2001:db8::ff00:42:8329"], + "eth0": ["2001:0DB8:0:CD30:123:4567:89AB:CDEF", "192.168.1.101"], + } + + +def test_filter_by_networks_catch_all(): + ips = [ + "10.0.123.200", + "10.10.10.10", + "193.124.233.5", + "fe80::d210:cf3f:64e7:5423", + ] + assert ips == network.filter_by_networks(ips, ["0.0.0.0/0", "::/0"]) + + +def test_ip_networks(): + # We don't need to test with each platform's ifconfig/iproute2 output, + # since this test isn't testing getting the interfaces. We already have + # tests for that. 
+ interface_data = network._interfaces_ifconfig(LINUX) + + # Without loopback + ret = network.ip_networks(interface_data=interface_data) + assert ret == ["10.10.8.0/22"], ret + # Without loopback, specific interface + ret = network.ip_networks(interface="eth0", interface_data=interface_data) + assert ret == ["10.10.8.0/22"], ret + # Without loopback, multiple specific interfaces + ret = network.ip_networks(interface="eth0,lo", interface_data=interface_data) + assert ret == ["10.10.8.0/22"], ret + # Without loopback, specific interface (not present) + ret = network.ip_networks(interface="eth1", interface_data=interface_data) + assert ret == [], ret + # With loopback + ret = network.ip_networks(include_loopback=True, interface_data=interface_data) + assert ret == ["10.10.8.0/22", "127.0.0.0/8"], ret + # With loopback, specific interface + ret = network.ip_networks( + interface="eth0", include_loopback=True, interface_data=interface_data + ) + assert ret == ["10.10.8.0/22"], ret + # With loopback, multiple specific interfaces + ret = network.ip_networks( + interface="eth0,lo", include_loopback=True, interface_data=interface_data + ) + assert ret == ["10.10.8.0/22", "127.0.0.0/8"], ret + # With loopback, specific interface (not present) + ret = network.ip_networks( + interface="eth1", include_loopback=True, interface_data=interface_data + ) + assert ret == [], ret + + # Verbose, without loopback + ret = network.ip_networks(verbose=True, interface_data=interface_data) + assert ret == { + "10.10.8.0/22": { + "prefixlen": 22, + "netmask": "255.255.252.0", + "num_addresses": 1024, + "address": "10.10.8.0", + }, + }, ret + # Verbose, without loopback, specific interface + ret = network.ip_networks( + interface="eth0", verbose=True, interface_data=interface_data + ) + assert ret == { + "10.10.8.0/22": { + "prefixlen": 22, + "netmask": "255.255.252.0", + "num_addresses": 1024, + "address": "10.10.8.0", + }, + }, ret + # Verbose, without loopback, multiple specific interfaces 
+ ret = network.ip_networks( + interface="eth0,lo", verbose=True, interface_data=interface_data + ) + assert ret == { + "10.10.8.0/22": { + "prefixlen": 22, + "netmask": "255.255.252.0", + "num_addresses": 1024, + "address": "10.10.8.0", + }, + }, ret + # Verbose, without loopback, specific interface (not present) + ret = network.ip_networks( + interface="eth1", verbose=True, interface_data=interface_data + ) + assert ret == {}, ret + # Verbose, with loopback + ret = network.ip_networks( + include_loopback=True, verbose=True, interface_data=interface_data + ) + assert ret == { + "10.10.8.0/22": { + "prefixlen": 22, + "netmask": "255.255.252.0", + "num_addresses": 1024, + "address": "10.10.8.0", + }, + "127.0.0.0/8": { + "prefixlen": 8, + "netmask": "255.0.0.0", + "num_addresses": 16777216, + "address": "127.0.0.0", + }, + }, ret + # Verbose, with loopback, specific interface + ret = network.ip_networks( + interface="eth0", + include_loopback=True, + verbose=True, + interface_data=interface_data, + ) + assert ret == { + "10.10.8.0/22": { + "prefixlen": 22, + "netmask": "255.255.252.0", + "num_addresses": 1024, + "address": "10.10.8.0", + }, + }, ret + # Verbose, with loopback, multiple specific interfaces + ret = network.ip_networks( + interface="eth0,lo", + include_loopback=True, + verbose=True, + interface_data=interface_data, + ) + assert ret == { + "10.10.8.0/22": { + "prefixlen": 22, + "netmask": "255.255.252.0", + "num_addresses": 1024, + "address": "10.10.8.0", + }, + "127.0.0.0/8": { + "prefixlen": 8, + "netmask": "255.0.0.0", + "num_addresses": 16777216, + "address": "127.0.0.0", + }, + }, ret + # Verbose, with loopback, specific interface (not present) + ret = network.ip_networks( + interface="eth1", + include_loopback=True, + verbose=True, + interface_data=interface_data, + ) + assert ret == {}, ret + + +def test_ip_networks6(): + # We don't need to test with each platform's ifconfig/iproute2 output, + # since this test isn't testing getting the 
interfaces. We already have + # tests for that. + interface_data = network._interfaces_ifconfig(LINUX) + + # Without loopback + ret = network.ip_networks6(interface_data=interface_data) + assert ret == ["fe80::/64"], ret + # Without loopback, specific interface + ret = network.ip_networks6(interface="eth0", interface_data=interface_data) + assert ret == ["fe80::/64"], ret + # Without loopback, multiple specific interfaces + ret = network.ip_networks6(interface="eth0,lo", interface_data=interface_data) + assert ret == ["fe80::/64"], ret + # Without loopback, specific interface (not present) + ret = network.ip_networks6(interface="eth1", interface_data=interface_data) + assert ret == [], ret + # With loopback + ret = network.ip_networks6(include_loopback=True, interface_data=interface_data) + assert ret == ["::1/128", "fe80::/64"], ret + # With loopback, specific interface + ret = network.ip_networks6( + interface="eth0", include_loopback=True, interface_data=interface_data + ) + assert ret == ["fe80::/64"], ret + # With loopback, multiple specific interfaces + ret = network.ip_networks6( + interface="eth0,lo", include_loopback=True, interface_data=interface_data + ) + assert ret == ["::1/128", "fe80::/64"], ret + # With loopback, specific interface (not present) + ret = network.ip_networks6( + interface="eth1", include_loopback=True, interface_data=interface_data + ) + assert ret == [], ret + + # Verbose, without loopback + ret = network.ip_networks6(verbose=True, interface_data=interface_data) + assert ret == { + "fe80::/64": { + "prefixlen": 64, + "netmask": "ffff:ffff:ffff:ffff::", + "num_addresses": 18446744073709551616, + "address": "fe80::", + }, + }, ret + # Verbose, without loopback, specific interface + ret = network.ip_networks6( + interface="eth0", verbose=True, interface_data=interface_data + ) + assert ret == { + "fe80::/64": { + "prefixlen": 64, + "netmask": "ffff:ffff:ffff:ffff::", + "num_addresses": 18446744073709551616, + "address": "fe80::", + }, + 
}, ret + # Verbose, without loopback, multiple specific interfaces + ret = network.ip_networks6( + interface="eth0,lo", verbose=True, interface_data=interface_data + ) + assert ret == { + "fe80::/64": { + "prefixlen": 64, + "netmask": "ffff:ffff:ffff:ffff::", + "num_addresses": 18446744073709551616, + "address": "fe80::", + }, + }, ret + # Verbose, without loopback, specific interface (not present) + ret = network.ip_networks6( + interface="eth1", verbose=True, interface_data=interface_data + ) + assert ret == {}, ret + # Verbose, with loopback + ret = network.ip_networks6( + include_loopback=True, verbose=True, interface_data=interface_data + ) + assert ret == { + "fe80::/64": { + "prefixlen": 64, + "netmask": "ffff:ffff:ffff:ffff::", + "num_addresses": 18446744073709551616, + "address": "fe80::", + }, + "::1/128": { + "prefixlen": 128, + "netmask": "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", + "num_addresses": 1, + "address": "::1", + }, + }, ret + # Verbose, with loopback, specific interface + ret = network.ip_networks6( + interface="eth0", + include_loopback=True, + verbose=True, + interface_data=interface_data, + ) + assert ret == { + "fe80::/64": { + "prefixlen": 64, + "netmask": "ffff:ffff:ffff:ffff::", + "num_addresses": 18446744073709551616, + "address": "fe80::", + }, + }, ret + # Verbose, with loopback, multiple specific interfaces + ret = network.ip_networks6( + interface="eth0,lo", + include_loopback=True, + verbose=True, + interface_data=interface_data, + ) + assert ret == { + "fe80::/64": { + "prefixlen": 64, + "netmask": "ffff:ffff:ffff:ffff::", + "num_addresses": 18446744073709551616, + "address": "fe80::", + }, + "::1/128": { + "prefixlen": 128, + "netmask": "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", + "num_addresses": 1, + "address": "::1", + }, + }, ret + # Verbose, with loopback, specific interface (not present) + ret = network.ip_networks6( + interface="eth1", + include_loopback=True, + verbose=True, + interface_data=interface_data, + ) + 
assert ret == {}, ret + + +def test_get_fqhostname_return(): + """ + Test if proper hostname is used when RevDNS differ from hostname + + :return: + """ + with patch("socket.gethostname", MagicMock(return_value="hostname")), patch( + "socket.getfqdn", + MagicMock(return_value="very.long.and.complex.domain.name"), + ), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]), + ): + assert network.get_fqhostname() == "hostname" + + +def test_get_fqhostname_return_empty_hostname(): + """ + Test if proper hostname is used when hostname returns empty string + """ + host = "hostname" + with patch("socket.gethostname", MagicMock(return_value=host)), patch( + "socket.getfqdn", + MagicMock(return_value="very.long.and.complex.domain.name"), + ), patch( + "socket.getaddrinfo", + MagicMock( + return_value=[ + (2, 3, 0, host, ("127.0.1.1", 0)), + (2, 3, 0, "", ("127.0.1.1", 0)), + ] + ), + ): + assert network.get_fqhostname() == host + + +def test_ip_bracket(): + test_ipv4 = "127.0.0.1" + test_ipv6 = "::1" + test_ipv6_uri = "[::1]" + assert test_ipv4 == network.ip_bracket(test_ipv4) + assert test_ipv6 == network.ip_bracket(test_ipv6_uri, strip=True) + assert "[{}]".format(test_ipv6) == network.ip_bracket(test_ipv6) + assert "[{}]".format(test_ipv6) == network.ip_bracket(test_ipv6_uri) + + ip_addr_obj = ipaddress.ip_address(test_ipv4) + assert test_ipv4 == network.ip_bracket(ip_addr_obj) + + +def test_junos_ifconfig_output_parsing(): + ret = network._junos_interfaces_ifconfig("inet mtu 0 local=" + " " * 3456) + assert ret == {"inet": {"up": False}} + + +def test_isportopen_false(): + ret = network.isportopen("127.0.0.1", "66000") + assert ret is False + + +def test_isportopen(): + ret = network.isportopen("127.0.0.1", "22") + assert ret == 0 + + +def test_get_socket(): + ret = network.get_socket("127.0.0.1") + assert ret.family == socket.AF_INET + assert ret.type == socket.SOCK_STREAM + + ret = network.get_socket("2001:a71::1") + assert 
ret.family == socket.AF_INET6 + assert ret.type == socket.SOCK_STREAM + + +def test_ip_to_host(): + ret = network.ip_to_host("127.0.0.1") + assert ret == "localhost" + + ret = network.ip_to_host("2001:a71::1") + assert ret is None + + ret = network.ip_to_host("::1") + assert ret == "ip6-localhost" + + +def test_natural_ipv4_netmask(): + ret = network.natural_ipv4_netmask("192.168.0.115") + assert ret == "/24" + + ret = network.natural_ipv4_netmask("192.168.1.80") + assert ret == "/24" + + ret = network.natural_ipv4_netmask("10.10.10.250") + assert ret == "/8" + + ret = network.natural_ipv4_netmask("192.168.0.115", fmt="netmask") + assert ret == "255.255.255.0" + + ret = network.natural_ipv4_netmask("192.168.1.80", fmt="netmask") + assert ret == "255.255.255.0" + + ret = network.natural_ipv4_netmask("10.10.10.250", fmt="netmask") + assert ret == "255.0.0.0" From 0b810f016d4f4dd113e41a8e4fb50fc4f8a72ae3 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Tue, 10 Oct 2023 10:59:41 -0600 Subject: [PATCH 065/196] Adjusted test --- tests/pytests/unit/utils/test_network.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/pytests/unit/utils/test_network.py b/tests/pytests/unit/utils/test_network.py index b689993ebdd..3f013e19e0b 100644 --- a/tests/pytests/unit/utils/test_network.py +++ b/tests/pytests/unit/utils/test_network.py @@ -1378,7 +1378,7 @@ def test_ip_to_host(): assert ret is None ret = network.ip_to_host("::1") - assert ret == "ip6-localhost" + assert ret == "localhost" def test_natural_ipv4_netmask(): From 3702bcf8e799ccc4608c7e6ccef7cfd0825be379 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Tue, 10 Oct 2023 14:53:06 -0600 Subject: [PATCH 066/196] Added log to check ret for test --- tests/pytests/unit/utils/test_network.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/tests/pytests/unit/utils/test_network.py b/tests/pytests/unit/utils/test_network.py index 
3f013e19e0b..9d5711f89ad 100644 --- a/tests/pytests/unit/utils/test_network.py +++ b/tests/pytests/unit/utils/test_network.py @@ -1378,7 +1378,12 @@ def test_ip_to_host(): assert ret is None ret = network.ip_to_host("::1") - assert ret == "localhost" + ## if amzn2 + ## assert ret == "localhost6" + ## else if debian family: + ## assert ret == "ip6-localhost" + log.warning(f"DGM test_ip_to_host ret '{ret}'") + assert ret == "dog" def test_natural_ipv4_netmask(): From eac9121a876f483abda63e85c8d2d1b2c5042ea3 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Tue, 10 Oct 2023 16:28:30 -0600 Subject: [PATCH 067/196] Update tests --- tests/pytests/unit/utils/test_network.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/tests/pytests/unit/utils/test_network.py b/tests/pytests/unit/utils/test_network.py index 9d5711f89ad..702f3da84ad 100644 --- a/tests/pytests/unit/utils/test_network.py +++ b/tests/pytests/unit/utils/test_network.py @@ -1370,7 +1370,7 @@ def test_get_socket(): assert ret.type == socket.SOCK_STREAM -def test_ip_to_host(): +def test_ip_to_host(grains): ret = network.ip_to_host("127.0.0.1") assert ret == "localhost" @@ -1378,12 +1378,13 @@ def test_ip_to_host(): assert ret is None ret = network.ip_to_host("::1") - ## if amzn2 - ## assert ret == "localhost6" - ## else if debian family: - ## assert ret == "ip6-localhost" log.warning(f"DGM test_ip_to_host ret '{ret}'") - assert ret == "dog" + if grains["os"] == "Amazon": + assert ret == "localhost6" + elif grains["os_family"] == "Debian": + assert ret == "ip6-localhost" + elif grains["os_family"] == "RedHat": + assert ret == "localhost" def test_natural_ipv4_netmask(): From fbd2fb282a8b23e695f86dba7f1ee04d2c0c2c61 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Wed, 11 Oct 2023 12:15:11 -0600 Subject: [PATCH 068/196] Updated localhost IPv6 tests to allow for different Linux OSs --- tests/pytests/unit/utils/test_network.py | 11 
+++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/tests/pytests/unit/utils/test_network.py b/tests/pytests/unit/utils/test_network.py index 702f3da84ad..a413280862b 100644 --- a/tests/pytests/unit/utils/test_network.py +++ b/tests/pytests/unit/utils/test_network.py @@ -1378,12 +1378,19 @@ def test_ip_to_host(grains): assert ret is None ret = network.ip_to_host("::1") - log.warning(f"DGM test_ip_to_host ret '{ret}'") if grains["os"] == "Amazon": assert ret == "localhost6" elif grains["os_family"] == "Debian": - assert ret == "ip6-localhost" + if grains["osmajorrelease"] == "12": + assert ret == "localhost" + else: + assert ret == "ip6-localhost" elif grains["os_family"] == "RedHat": + if grains["oscodename"] == "Photon": + assert ret == "ipv6-localhost" + else: + assert ret == "localhost" + else: assert ret == "localhost" From 5f8130a59291c7fb4214588dbc6968c6cfe3e58e Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Thu, 12 Oct 2023 17:55:53 -0600 Subject: [PATCH 069/196] Updated tests and mark other old OS's as no coverage --- salt/utils/network.py | 10 +- tests/pytests/unit/utils/test_network.py | 231 +++++++++++++++++------ 2 files changed, 180 insertions(+), 61 deletions(-) diff --git a/salt/utils/network.py b/salt/utils/network.py index 2bea2cf1293..d327d6216c9 100644 --- a/salt/utils/network.py +++ b/salt/utils/network.py @@ -1004,6 +1004,7 @@ def _netbsd_interfaces_ifconfig(out): return ret +# pragma: no cover def _junos_interfaces_ifconfig(out): """ Uses ifconfig to return a dictionary of interfaces with various information @@ -1074,6 +1075,7 @@ def _junos_interfaces_ifconfig(out): return ret +# pragma: no cover def junos_interfaces(): """ Obtain interface information for Junos; ifconfig @@ -1239,6 +1241,7 @@ def _get_iface_info(iface): return None, error_msg +# pragma: no cover def _hw_addr_aix(iface): """ Return the hardware address (a.k.a. 
MAC address) for a given interface on AIX @@ -1277,7 +1280,7 @@ def hw_addr(iface): """ if salt.utils.platform.is_aix(): - return _hw_addr_aix + return _hw_addr_aix(iface) iface_info, error = _get_iface_info(iface) @@ -1746,6 +1749,7 @@ def _netlink_tool_remote_on(port, which_end): return remotes +# pragma: no cover def _sunos_remotes_on(port, which_end): """ SunOS specific helper function. @@ -1786,6 +1790,7 @@ def _sunos_remotes_on(port, which_end): return remotes +# pragma: no cover def _freebsd_remotes_on(port, which_end): """ Returns set of ipv4 host addresses of remote established connections @@ -1848,6 +1853,7 @@ def _freebsd_remotes_on(port, which_end): return remotes +# pragma: no cover def _netbsd_remotes_on(port, which_end): """ Returns set of ipv4 host addresses of remote established connections @@ -1909,6 +1915,7 @@ def _netbsd_remotes_on(port, which_end): return remotes +# pragma: no cover def _openbsd_remotes_on(port, which_end): """ OpenBSD specific helper function. @@ -2053,6 +2060,7 @@ def _linux_remotes_on(port, which_end): return remotes +# pragma: no cover def _aix_remotes_on(port, which_end): """ AIX specific helper function. 
diff --git a/tests/pytests/unit/utils/test_network.py b/tests/pytests/unit/utils/test_network.py index a413280862b..00fb6c9a95c 100644 --- a/tests/pytests/unit/utils/test_network.py +++ b/tests/pytests/unit/utils/test_network.py @@ -142,6 +142,72 @@ IPV6_SUBNETS = { } +@pytest.fixture(scope="module") +def linux_interfaces_dict(): + return { + "eth0": { + "hwaddr": "e0:3f:49:85:6a:af", + "inet": [ + { + "address": "10.10.10.56", + "broadcast": "10.10.10.255", + "netmask": "255.255.252.0", + } + ], + "inet6": [ + { + "address": "fe80::e23f:49ff:fe85:6aaf", + "prefixlen": "64", + "scope": "link", + } + ], + "up": True, + }, + "lo": { + "inet": [{"address": "127.0.0.1", "netmask": "255.0.0.0"}], + "inet6": [{"address": "::1", "prefixlen": "128", "scope": "host"}], + "up": True, + }, + } + + +@pytest.fixture(scope="module") +def freebsd_interfaces_dict(): + return { + "": {"up": False}, + "em0": { + "hwaddr": "00:30:48:ff:ff:ff", + "inet": [ + { + "address": "10.10.10.250", + "broadcast": "10.10.10.255", + "netmask": "255.255.255.224", + }, + { + "address": "10.10.10.56", + "broadcast": "10.10.10.63", + "netmask": "255.255.255.192", + }, + ], + "up": True, + }, + "em1": {"hwaddr": "00:30:48:aa:aa:aa", "up": False}, + "lo0": { + "inet": [{"address": "127.0.0.1", "netmask": "255.0.0.0"}], + "inet6": [ + {"address": "fe80::1", "prefixlen": "64", "scope": "0x8"}, + {"address": "::1", "prefixlen": "128", "scope": None}, + ], + "up": True, + }, + "plip0": {"up": False}, + "tun0": { + "inet": [{"address": "10.12.0.1", "netmask": "255.255.255.255"}], + "up": True, + }, + } + + def test_sanitize_host_ip(): ret = network.sanitize_host("10.1./2.$3") assert ret == "10.1.2.3" @@ -487,70 +553,14 @@ def test_hex2ip(): ) -def test_interfaces_ifconfig_linux(): +def test_interfaces_ifconfig_linux(linux_interfaces_dict): interfaces = network._interfaces_ifconfig(LINUX) - assert interfaces == { - "eth0": { - "hwaddr": "e0:3f:49:85:6a:af", - "inet": [ - { - "address": "10.10.10.56", - 
"broadcast": "10.10.10.255", - "netmask": "255.255.252.0", - } - ], - "inet6": [ - { - "address": "fe80::e23f:49ff:fe85:6aaf", - "prefixlen": "64", - "scope": "link", - } - ], - "up": True, - }, - "lo": { - "inet": [{"address": "127.0.0.1", "netmask": "255.0.0.0"}], - "inet6": [{"address": "::1", "prefixlen": "128", "scope": "host"}], - "up": True, - }, - } + assert interfaces == linux_interfaces_dict -def test_interfaces_ifconfig_freebsd(): +def test_interfaces_ifconfig_freebsd(freebsd_interfaces_dict): interfaces = network._interfaces_ifconfig(FREEBSD) - assert interfaces == { - "": {"up": False}, - "em0": { - "hwaddr": "00:30:48:ff:ff:ff", - "inet": [ - { - "address": "10.10.10.250", - "broadcast": "10.10.10.255", - "netmask": "255.255.255.224", - }, - { - "address": "10.10.10.56", - "broadcast": "10.10.10.63", - "netmask": "255.255.255.192", - }, - ], - "up": True, - }, - "em1": {"hwaddr": "00:30:48:aa:aa:aa", "up": False}, - "lo0": { - "inet": [{"address": "127.0.0.1", "netmask": "255.0.0.0"}], - "inet6": [ - {"address": "fe80::1", "prefixlen": "64", "scope": "0x8"}, - {"address": "::1", "prefixlen": "128", "scope": None}, - ], - "up": True, - }, - "plip0": {"up": False}, - "tun0": { - "inet": [{"address": "10.12.0.1", "netmask": "255.255.255.255"}], - "up": True, - }, - } + assert interfaces == freebsd_interfaces_dict def test_interfaces_ifconfig_solaris(): @@ -1412,3 +1422,104 @@ def test_natural_ipv4_netmask(): ret = network.natural_ipv4_netmask("10.10.10.250", fmt="netmask") assert ret == "255.0.0.0" + + +def test_rpad_ipv4_network(): + ret = network.rpad_ipv4_network("127.0") + assert ret == "127.0.0.0" + ret = network.rpad_ipv4_network("192.168.3") + assert ret == "192.168.3.0" + ret = network.rpad_ipv4_network("10.209") + assert ret == "10.209.0.0" + + +def test_hw_addr(linux_interfaces_dict, freebsd_interfaces_dict): + + with patch( + "salt.utils.network.linux_interfaces", + MagicMock(return_value=linux_interfaces_dict), + ): + hw_addrs = 
network.hw_addr("eth0") + assert hw_addrs == "e0:3f:49:85:6a:af" + + with patch( + "salt.utils.network.interfaces", MagicMock(return_value=freebsd_interfaces_dict) + ), patch("salt.utils.platform.is_netbsd", MagicMock(return_value=True)): + hw_addrs = network.hw_addr("em0") + assert hw_addrs == "00:30:48:ff:ff:ff" + + hw_addrs = network.hw_addr("em1") + assert hw_addrs == "00:30:48:aa:aa:aa" + + hw_addrs = network.hw_addr("dog") + assert ( + hw_addrs + == 'Interface "dog" not in available interfaces: "", "em0", "em1", "lo0", "plip0", "tun0"' + ) + + +def test_interface_and_ip(linux_interfaces_dict): + + with patch( + "salt.utils.network.linux_interfaces", + MagicMock(return_value=linux_interfaces_dict), + ): + ret = network.interface("eth0") + assert ret == [ + { + "address": "10.10.10.56", + "broadcast": "10.10.10.255", + "netmask": "255.255.252.0", + } + ] + + ret = network.interface("dog") + assert ret == 'Interface "dog" not in available interfaces: "eth0", "lo"' + + ret = network.interface_ip("eth0") + assert ret == "10.10.10.56" + + ret = network.interface_ip("dog") + assert ret == 'Interface "dog" not in available interfaces: "eth0", "lo"' + + +def test_subnets(linux_interfaces_dict): + + with patch( + "salt.utils.network.linux_interfaces", + MagicMock(return_value=linux_interfaces_dict), + ): + ret = network.subnets() + assert ret == ["10.10.8.0/22"] + + ret = network.subnets6() + assert ret == ["fe80::/64"] + + +def test_in_subnet(caplog): + assert network.in_subnet("fe80::/64", "fe80::e23f:49ff:fe85:6aaf") + + assert network.in_subnet("10.10.8.0/22", "10.10.10.56") + + assert not network.in_subnet("10.10.8.0/22") + + caplog.clear() + ret = network.in_subnet("10.10.8.0/40") + assert "Invalid CIDR '10.10.8.0/40'" in caplog.text + assert not ret + + +def test_ip_addrs(linux_interfaces_dict): + with patch( + "salt.utils.network.linux_interfaces", + MagicMock(return_value=linux_interfaces_dict), + ): + ret = network.ip_addrs("eth0") + assert ret == 
["10.10.10.56"] + + with patch( + "salt.utils.network.linux_interfaces", + MagicMock(return_value=linux_interfaces_dict), + ): + ret = network.ip_addrs6("eth0") + assert ret == ["fe80::e23f:49ff:fe85:6aaf"] From a4a11986b3c91e803d6c08bb4cd10ab365189e5a Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Mon, 16 Oct 2023 09:33:30 -0600 Subject: [PATCH 070/196] Added debugging for localhost tests --- tests/pytests/unit/utils/test_network.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/tests/pytests/unit/utils/test_network.py b/tests/pytests/unit/utils/test_network.py index 00fb6c9a95c..be8c763c490 100644 --- a/tests/pytests/unit/utils/test_network.py +++ b/tests/pytests/unit/utils/test_network.py @@ -1388,6 +1388,15 @@ def test_ip_to_host(grains): assert ret is None ret = network.ip_to_host("::1") + + ## DGM + dgm_grains_os = grains["os"] + dgm_grains_os_family = grains["os_family"] + dgm_grains_osmajorversion = grains["osmajorversion"] + dgm_grains_oscodename = grains["oscodename"] + dgm_strg = f"\nDGM localhost test grains os '{dgm_grains_os}', family '{dgm_grains_os_family}', major version '{dgm_grains_osmajorversion}', code name '{dgm_grains_oscodename}'\n\n" + print(dgm_strg) + if grains["os"] == "Amazon": assert ret == "localhost6" elif grains["os_family"] == "Debian": From 3fb20089bb2dce81fe4628dee30003b667c40907 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Tue, 17 Oct 2023 10:50:10 -0600 Subject: [PATCH 071/196] Cleaned up typo --- tests/pytests/unit/utils/test_network.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/pytests/unit/utils/test_network.py b/tests/pytests/unit/utils/test_network.py index be8c763c490..1e887a80de9 100644 --- a/tests/pytests/unit/utils/test_network.py +++ b/tests/pytests/unit/utils/test_network.py @@ -1392,9 +1392,9 @@ def test_ip_to_host(grains): ## DGM dgm_grains_os = grains["os"] dgm_grains_os_family = grains["os_family"] - 
dgm_grains_osmajorversion = grains["osmajorversion"] + dgm_grains_osmajorrelease = grains["osmajorrelease"] dgm_grains_oscodename = grains["oscodename"] - dgm_strg = f"\nDGM localhost test grains os '{dgm_grains_os}', family '{dgm_grains_os_family}', major version '{dgm_grains_osmajorversion}', code name '{dgm_grains_oscodename}'\n\n" + dgm_strg = f"\nDGM localhost test grains os '{dgm_grains_os}', family '{dgm_grains_os_family}', major release '{dgm_grains_osmajorrelease}', code name '{dgm_grains_oscodename}'\n\n" print(dgm_strg) if grains["os"] == "Amazon": From 66fd7a50968640f6e1739c4b2f2361b7e8ae1953 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Thu, 19 Oct 2023 14:36:59 -0600 Subject: [PATCH 072/196] Update localhost and IPv6 tests for Debian 12 and Arch --- tests/pytests/unit/utils/test_network.py | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/tests/pytests/unit/utils/test_network.py b/tests/pytests/unit/utils/test_network.py index 1e887a80de9..1b5e5e43b4d 100644 --- a/tests/pytests/unit/utils/test_network.py +++ b/tests/pytests/unit/utils/test_network.py @@ -1389,18 +1389,10 @@ def test_ip_to_host(grains): ret = network.ip_to_host("::1") - ## DGM - dgm_grains_os = grains["os"] - dgm_grains_os_family = grains["os_family"] - dgm_grains_osmajorrelease = grains["osmajorrelease"] - dgm_grains_oscodename = grains["oscodename"] - dgm_strg = f"\nDGM localhost test grains os '{dgm_grains_os}', family '{dgm_grains_os_family}', major release '{dgm_grains_osmajorrelease}', code name '{dgm_grains_oscodename}'\n\n" - print(dgm_strg) - if grains["os"] == "Amazon": assert ret == "localhost6" elif grains["os_family"] == "Debian": - if grains["osmajorrelease"] == "12": + if grains["osmajorrelease"] == 12: assert ret == "localhost" else: assert ret == "ip6-localhost" @@ -1409,6 +1401,8 @@ def test_ip_to_host(grains): assert ret == "ipv6-localhost" else: assert ret == "localhost" + elif grains["os_family"] == "Arch": 
+ assert ret == "ip6-localhost" else: assert ret == "localhost" From 6809e971464edfd178e7fabec79c6b06dec88c63 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Tue, 24 Oct 2023 17:46:20 -0600 Subject: [PATCH 073/196] Added debug logic to determine correct value for test localhost on IPv6 --- tests/pytests/unit/utils/test_network.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/tests/pytests/unit/utils/test_network.py b/tests/pytests/unit/utils/test_network.py index 1b5e5e43b4d..def9be64941 100644 --- a/tests/pytests/unit/utils/test_network.py +++ b/tests/pytests/unit/utils/test_network.py @@ -1389,6 +1389,13 @@ def test_ip_to_host(grains): ret = network.ip_to_host("::1") + dgm_os = grains["os"] + dgm_fam = grains["os_family"] + dgm_codename = grains["oscodename"] + print( + f"DGM grains os '{dgm_os}', os_family '{dgm_fam}', oscodename '{dgm_codename}', ret '{ret}'" + ) + if grains["os"] == "Amazon": assert ret == "localhost6" elif grains["os_family"] == "Debian": @@ -1402,7 +1409,7 @@ def test_ip_to_host(grains): else: assert ret == "localhost" elif grains["os_family"] == "Arch": - assert ret == "ip6-localhost" + assert ret == "localhost" else: assert ret == "localhost" From 903778d83f36845d78e8ff86afc20ed3a4f40d82 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Thu, 26 Oct 2023 15:31:29 -0600 Subject: [PATCH 074/196] Re-arranged the IPv6 localhost test and allowed for forms of Arch --- tests/pytests/unit/utils/test_network.py | 21 ++++++++------------- 1 file changed, 8 insertions(+), 13 deletions(-) diff --git a/tests/pytests/unit/utils/test_network.py b/tests/pytests/unit/utils/test_network.py index def9be64941..c1586c70616 100644 --- a/tests/pytests/unit/utils/test_network.py +++ b/tests/pytests/unit/utils/test_network.py @@ -1388,14 +1388,6 @@ def test_ip_to_host(grains): assert ret is None ret = network.ip_to_host("::1") - - dgm_os = grains["os"] - dgm_fam = grains["os_family"] - 
dgm_codename = grains["oscodename"] - print( - f"DGM grains os '{dgm_os}', os_family '{dgm_fam}', oscodename '{dgm_codename}', ret '{ret}'" - ) - if grains["os"] == "Amazon": assert ret == "localhost6" elif grains["os_family"] == "Debian": @@ -1403,13 +1395,16 @@ def test_ip_to_host(grains): assert ret == "localhost" else: assert ret == "ip6-localhost" + elif grains["oscodename"] == "Photon": + assert ret == "ipv6-localhost" elif grains["os_family"] == "RedHat": - if grains["oscodename"] == "Photon": - assert ret == "ipv6-localhost" - else: - assert ret == "localhost" - elif grains["os_family"] == "Arch": assert ret == "localhost" + elif grains["os_family"] == "Arch": + if grains.get("osmajorrelease", None) is None: + # running doesn't have osmajorrelease grains + assert ret == "localhost" + else: + assert ret == "ip6-localhost" else: assert ret == "localhost" From beea32d839ce1c8dc32bc50d321c9a59cd634ade Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Fri, 27 Oct 2023 15:49:54 -0600 Subject: [PATCH 075/196] Allow for Photon returning the wrong thing for IPv4 localhost test --- tests/pytests/unit/utils/test_network.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/tests/pytests/unit/utils/test_network.py b/tests/pytests/unit/utils/test_network.py index c1586c70616..7aeb4004cb5 100644 --- a/tests/pytests/unit/utils/test_network.py +++ b/tests/pytests/unit/utils/test_network.py @@ -1382,7 +1382,11 @@ def test_get_socket(): def test_ip_to_host(grains): ret = network.ip_to_host("127.0.0.1") - assert ret == "localhost" + if grains["oscodename"] == "Photon": + # Photon returns this for IPv4 + assert ret == "ipv6-localhost" + else: + assert ret == "localhost" ret = network.ip_to_host("2001:a71::1") assert ret is None @@ -1395,10 +1399,11 @@ def test_ip_to_host(grains): assert ret == "localhost" else: assert ret == "ip6-localhost" - elif grains["oscodename"] == "Photon": - assert ret == "ipv6-localhost" elif 
grains["os_family"] == "RedHat": - assert ret == "localhost" + if grains["oscodename"] == "Photon": + assert ret == "ipv6-localhost" + else: + assert ret == "localhost" elif grains["os_family"] == "Arch": if grains.get("osmajorrelease", None) is None: # running doesn't have osmajorrelease grains From ac2bedab23f8e225c0bdc4e4644738707a100cfe Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Mon, 6 Nov 2023 15:26:19 -0700 Subject: [PATCH 076/196] Update tests for salt/utils/network.py as per reviewer, and fix netmask bug --- salt/utils/network.py | 33 +- tests/pytests/unit/utils/test_network.py | 390 ++++++++++++----------- 2 files changed, 217 insertions(+), 206 deletions(-) diff --git a/salt/utils/network.py b/salt/utils/network.py index d327d6216c9..9566f433444 100644 --- a/salt/utils/network.py +++ b/salt/utils/network.py @@ -674,6 +674,7 @@ def cidr_to_ipv4_netmask(cidr_bits): else: netmask += "{:d}".format(256 - (2 ** (8 - cidr_bits))) cidr_bits = 0 + return netmask @@ -682,8 +683,14 @@ def _number_of_set_bits_to_ipv4_netmask(set_bits): Returns an IPv4 netmask from the integer representation of that mask. Ex. 
0xffffff00 -> '255.255.255.0' + 0xffff6400 -> '255.255.100.0' """ - return cidr_to_ipv4_netmask(_number_of_set_bits(set_bits)) + # Note: previously used cidr but that is counting number of bits in set_bits + # and can lead to wrong netmaks values, for example: + # 0xFFFF6400 is 255.255.100.0, 0x64 is 100 decimal + # but if convert to cidr first, it gives 19 bits, get 255.255.224.0 - WRONG + # leveraging Python ip_address library for different method of conversion + return str(ipaddress.ip_address(set_bits)) def _number_of_set_bits(x): @@ -1004,8 +1011,7 @@ def _netbsd_interfaces_ifconfig(out): return ret -# pragma: no cover -def _junos_interfaces_ifconfig(out): +def _junos_interfaces_ifconfig(out): # pragma: no cover """ Uses ifconfig to return a dictionary of interfaces with various information about each (up/down state, ip address, netmask, and hwaddr) @@ -1075,8 +1081,7 @@ def _junos_interfaces_ifconfig(out): return ret -# pragma: no cover -def junos_interfaces(): +def junos_interfaces(): # pragma: no cover """ Obtain interface information for Junos; ifconfig output diverged from other BSD variants (Netmask is now part of the @@ -1241,8 +1246,7 @@ def _get_iface_info(iface): return None, error_msg -# pragma: no cover -def _hw_addr_aix(iface): +def _hw_addr_aix(iface): # pragma: no cover """ Return the hardware address (a.k.a. MAC address) for a given interface on AIX MAC address not available in through interfaces @@ -1749,8 +1753,7 @@ def _netlink_tool_remote_on(port, which_end): return remotes -# pragma: no cover -def _sunos_remotes_on(port, which_end): +def _sunos_remotes_on(port, which_end): # pragma: no cover """ SunOS specific helper function. 
Returns set of ipv4 host addresses of remote established connections @@ -1790,8 +1793,7 @@ def _sunos_remotes_on(port, which_end): return remotes -# pragma: no cover -def _freebsd_remotes_on(port, which_end): +def _freebsd_remotes_on(port, which_end): # pragma: no cover """ Returns set of ipv4 host addresses of remote established connections on local tcp port port. @@ -1853,8 +1855,7 @@ def _freebsd_remotes_on(port, which_end): return remotes -# pragma: no cover -def _netbsd_remotes_on(port, which_end): +def _netbsd_remotes_on(port, which_end): # pragma: no cover """ Returns set of ipv4 host addresses of remote established connections on local tcp port port. @@ -1915,8 +1916,7 @@ def _netbsd_remotes_on(port, which_end): return remotes -# pragma: no cover -def _openbsd_remotes_on(port, which_end): +def _openbsd_remotes_on(port, which_end): # pragma: no cover """ OpenBSD specific helper function. Returns set of ipv4 host addresses of remote established connections @@ -2060,8 +2060,7 @@ def _linux_remotes_on(port, which_end): return remotes -# pragma: no cover -def _aix_remotes_on(port, which_end): +def _aix_remotes_on(port, which_end): # pragma: no cover """ AIX specific helper function. 
Returns set of ipv4 host addresses of remote established connections diff --git a/tests/pytests/unit/utils/test_network.py b/tests/pytests/unit/utils/test_network.py index 7aeb4004cb5..b6e080e1b28 100644 --- a/tests/pytests/unit/utils/test_network.py +++ b/tests/pytests/unit/utils/test_network.py @@ -142,6 +142,9 @@ IPV6_SUBNETS = { } +_ip = ipaddress.ip_address + + @pytest.fixture(scope="module") def linux_interfaces_dict(): return { @@ -289,71 +292,108 @@ def test_is_ip(): assert not network.is_ipv6("sixteen-char-str") -def test_is_ipv4(): - assert network.is_ipv4("10.10.0.3") - assert not network.is_ipv4("10.100.1") - assert not network.is_ipv4("2001:db8:0:1:1:1:1:1") - # Check 16-char-long unicode string - # https://github.com/saltstack/salt/issues/51258 - assert not network.is_ipv4("sixteen-char-str") +@pytest.mark.parametrize( + "addr,expected", + ( + ("10.10.0.3", True), + ("10.100.1", False), + ("2001:db8:0:1:1:1:1:1", False), + # Check 16-char-long unicode string + # https://github.com/saltstack/salt/issues/51258 + ("sixteen-char-str", False), + ), +) +def test_is_ipv4(addr, expected): + assert network.is_ipv4(addr) is expected -def test_is_ipv6(): - assert network.is_ipv6("2001:db8:0:1:1:1:1:1") - assert network.is_ipv6("0:0:0:0:0:0:0:1") - assert network.is_ipv6("::1") - assert network.is_ipv6("::") - assert network.is_ipv6("2001:0db8:85a3:0000:0000:8a2e:0370:7334") - assert network.is_ipv6("2001:0db8:85a3::8a2e:0370:7334") - assert not network.is_ipv6("2001:0db8:0370:7334") - assert not network.is_ipv6("2001:0db8:::0370:7334") - assert not network.is_ipv6("10.0.1.2") - assert not network.is_ipv6("2001.0db8.85a3.0000.0000.8a2e.0370.7334") - # Check 16-char-long unicode string - # https://github.com/saltstack/salt/issues/51258 - assert not network.is_ipv6("sixteen-char-str") +@pytest.mark.parametrize( + "addr,expected", + ( + ("2001:db8:0:1:1:1:1:1", True), + ("0:0:0:0:0:0:0:1", True), + ("::1", True), + ("::", True), + 
("2001:0db8:85a3:0000:0000:8a2e:0370:7334", True), + ("2001:0db8:85a3::8a2e:0370:7334", True), + ("2001:0db8:0370:7334", False), + ("2001:0db8:::0370:7334", False), + ("10.0.1.2", False), + ("2001.0db8.85a3.0000.0000.8a2e.0370.7334", False), + # Check 16-char-long unicode string + # https://github.com/saltstack/salt/issues/51258 + ("sixteen-char-str", False), + ), +) +def test_is_ipv6(addr, expected): + assert network.is_ipv6(addr) is expected -def test_ipv6(): - assert network.ipv6("2001:db8:0:1:1:1:1:1") - assert network.ipv6("0:0:0:0:0:0:0:1") - assert network.ipv6("::1") - assert network.ipv6("::") - assert network.ipv6("2001:0db8:85a3:0000:0000:8a2e:0370:7334") - assert network.ipv6("2001:0db8:85a3::8a2e:0370:7334") - assert network.ipv6("2001:67c:2e8::/48") +@pytest.mark.parametrize( + "addr,expected", + ( + ("2001:db8:0:1:1:1:1:1", "2001:db8:0:1:1:1:1:1"), + ("0:0:0:0:0:0:0:1", "::1"), + ("::1", "::1"), + ("::", "::"), + ("2001:0db8:85a3:0000:0000:8a2e:0370:7334", "2001:db8:85a3::8a2e:370:7334"), + ("2001:0db8:85a3::8a2e:0370:7334", "2001:db8:85a3::8a2e:370:7334"), + ("2001:67c:2e8::/48", "2001:67c:2e8::/48"), + ), +) +def test_ipv6(addr, expected): + assert network.ipv6(addr) == expected -def test_is_loopback(): - assert network.is_loopback("127.0.1.1") - assert network.is_loopback("::1") - assert not network.is_loopback("10.0.1.2") - assert not network.is_loopback("2001:db8:0:1:1:1:1:1") - # Check 16-char-long unicode string - # https://github.com/saltstack/salt/issues/51258 - assert not network.is_ipv6("sixteen-char-str") +@pytest.mark.parametrize( + "addr,expected", + ( + ("127.0.1.1", True), + ("::1", True), + ("10.0.1.2", False), + ("2001:db8:0:1:1:1:1:1", False), + ), +) +def test_is_loopback(addr, expected): + assert network.is_loopback(addr) is expected -def test_parse_host_port(): - _ip = ipaddress.ip_address - good_host_ports = { - "10.10.0.3": (_ip("10.10.0.3").compressed, None), - "10.10.0.3:1234": (_ip("10.10.0.3").compressed, 1234), - 
"2001:0db8:85a3::8a2e:0370:7334": ( - _ip("2001:0db8:85a3::8a2e:0370:7334").compressed, - None, +@pytest.mark.parametrize( + "addr,expected", + ( + ("10.10.0.3", (_ip("10.10.0.3").compressed, None)), + ("10.10.0.3:1234", (_ip("10.10.0.3").compressed, 1234)), + ( + "2001:0db8:85a3::8a2e:0370:7334", + ( + _ip("2001:0db8:85a3::8a2e:0370:7334").compressed, + None, + ), ), - "[2001:0db8:85a3::8a2e:0370:7334]:1234": ( - _ip("2001:0db8:85a3::8a2e:0370:7334").compressed, - 1234, + ( + "[2001:0db8:85a3::8a2e:0370:7334]:1234", + ( + _ip("2001:0db8:85a3::8a2e:0370:7334").compressed, + 1234, + ), ), - "2001:0db8:85a3::7334": (_ip("2001:0db8:85a3::7334").compressed, None), - "[2001:0db8:85a3::7334]:1234": ( - _ip("2001:0db8:85a3::7334").compressed, - 1234, + ("2001:0db8:85a3::7334", (_ip("2001:0db8:85a3::7334").compressed, None)), + ( + "[2001:0db8:85a3::7334]:1234", + ( + _ip("2001:0db8:85a3::7334").compressed, + 1234, + ), ), - } - bad_host_ports = [ + ), +) +def test_parse_host_port_good(addr, expected): + assert network.parse_host_port(addr) == expected + + +@pytest.mark.parametrize( + "addr", + ( "10.10.0.3/24", "10.10.0.3::1234", "2001:0db8:0370:7334", @@ -362,21 +402,11 @@ def test_parse_host_port(): "host name", "host name:1234", "10.10.0.3:abcd", - ] - for host_port, assertion_value in good_host_ports.items(): - host = port = None - host, port = network.parse_host_port(host_port) - assert (host, port) == assertion_value - - for host_port in bad_host_ports: - try: - pytest.raises(ValueError, network.parse_host_port, host_port) - except AssertionError as _e_: - log.error( - 'bad host_port value: "%s" failed to trigger ValueError exception', - host_port, - ) - raise _e_ + ), +) +def test_parse_host_port_bad_raises_value_error(addr): + with pytest.raises(ValueError): + network.parse_host_port(addr) def test_dns_check(): @@ -525,32 +555,41 @@ def test_is_ipv6_subnet(): assert not network.is_ipv6_subnet(item) -def test_cidr_to_ipv4_netmask(): - assert 
network.cidr_to_ipv4_netmask(24) == "255.255.255.0" - assert network.cidr_to_ipv4_netmask(21) == "255.255.248.0" - assert network.cidr_to_ipv4_netmask(17) == "255.255.128.0" - assert network.cidr_to_ipv4_netmask(9) == "255.128.0.0" - assert network.cidr_to_ipv4_netmask(36) == "" - assert network.cidr_to_ipv4_netmask("lol") == "" +@pytest.mark.parametrize( + "addr,expected", + ( + (24, "255.255.255.0"), + (21, "255.255.248.0"), + (17, "255.255.128.0"), + (9, "255.128.0.0"), + (36, ""), + ("lol", ""), + ), +) +def test_cidr_to_ipv4_netmask(addr, expected): + assert network.cidr_to_ipv4_netmask(addr) == expected def test_number_of_set_bits_to_ipv4_netmask(): set_bits_to_netmask = network._number_of_set_bits_to_ipv4_netmask(0xFFFFFF00) assert set_bits_to_netmask == "255.255.255.0" set_bits_to_netmask = network._number_of_set_bits_to_ipv4_netmask(0xFFFF6400) + assert set_bits_to_netmask == "255.255.100.0" -def test_hex2ip(): - assert network.hex2ip("0x4A7D2B63") == "74.125.43.99" - assert network.hex2ip("0x4A7D2B63", invert=True) == "99.43.125.74" - assert network.hex2ip("00000000000000000000FFFF7F000001") == "127.0.0.1" - assert ( - network.hex2ip("0000000000000000FFFF00000100007F", invert=True) == "127.0.0.1" - ) - assert network.hex2ip("20010DB8000000000000000000000000") == "2001:db8::" - assert ( - network.hex2ip("B80D0120000000000000000000000000", invert=True) == "2001:db8::" - ) +@pytest.mark.parametrize( + "hex_num,inversion,expected", + ( + ("0x4A7D2B63", False, "74.125.43.99"), + ("0x4A7D2B63", True, "99.43.125.74"), + ("00000000000000000000FFFF7F000001", False, "127.0.0.1"), + ("0000000000000000FFFF00000100007F", True, "127.0.0.1"), + ("20010DB8000000000000000000000000", False, "2001:db8::"), + ("B80D0120000000000000000000000000", True, "2001:db8::"), + ), +) +def test_hex2ip(hex_num, inversion, expected): + assert network.hex2ip(hex_num, inversion) == expected def test_interfaces_ifconfig_linux(linux_interfaces_dict): @@ -564,7 +603,7 @@ def 
test_interfaces_ifconfig_freebsd(freebsd_interfaces_dict): def test_interfaces_ifconfig_solaris(): - with patch("salt.utils.platform.is_sunos", lambda: True): + with patch("salt.utils.platform.is_sunos", return_value=True): interfaces = network._interfaces_ifconfig(SOLARIS) expected_interfaces = { "ilbint0": { @@ -649,16 +688,16 @@ def test_interfaces_ifconfig_netbsd(): def test_freebsd_remotes_on(): - with patch("salt.utils.platform.is_sunos", lambda: False): - with patch("salt.utils.platform.is_freebsd", lambda: True): + with patch("salt.utils.platform.is_sunos", return_value=False): + with patch("salt.utils.platform.is_freebsd", return_value=True): with patch("subprocess.check_output", return_value=FREEBSD_SOCKSTAT): remotes = network._freebsd_remotes_on("4506", "remote") assert remotes == {"127.0.0.1"} def test_freebsd_remotes_on_with_fat_pid(): - with patch("salt.utils.platform.is_sunos", lambda: False): - with patch("salt.utils.platform.is_freebsd", lambda: True): + with patch("salt.utils.platform.is_sunos", return_value=False): + with patch("salt.utils.platform.is_freebsd", return_value=True): with patch( "subprocess.check_output", return_value=FREEBSD_SOCKSTAT_WITH_FAT_PID, @@ -668,8 +707,8 @@ def test_freebsd_remotes_on_with_fat_pid(): def test_netlink_tool_remote_on_a(): - with patch("salt.utils.platform.is_sunos", lambda: False): - with patch("salt.utils.platform.is_linux", lambda: True): + with patch("salt.utils.platform.is_sunos", return_value=False): + with patch("salt.utils.platform.is_linux", return_value=True): with patch("subprocess.check_output", return_value=LINUX_NETLINK_SS_OUTPUT): remotes = network._netlink_tool_remote_on("4506", "local_port") assert remotes == {"192.168.122.177", "::ffff:127.0.0.1"} @@ -700,8 +739,6 @@ def test_openbsd_remotes_on_issue_61966(): def test_generate_minion_id_distinct(): """ Test if minion IDs are distinct in the pool. 
- - :return: """ with patch("platform.node", MagicMock(return_value="nodename")), patch( "socket.gethostname", MagicMock(return_value="hostname") @@ -728,8 +765,6 @@ def test_generate_minion_id_distinct(): def test_generate_minion_id_127_name(): """ Test if minion IDs can be named 127.foo - - :return: """ with patch("platform.node", MagicMock(return_value="127")), patch( "socket.gethostname", MagicMock(return_value="127") @@ -753,8 +788,6 @@ def test_generate_minion_id_127_name(): def test_generate_minion_id_127_name_startswith(): """ Test if minion IDs can be named starting from "127" - - :return: """ with patch("platform.node", MagicMock(return_value="127890")), patch( "socket.gethostname", MagicMock(return_value="127890") @@ -780,8 +813,6 @@ def test_generate_minion_id_127_name_startswith(): def test_generate_minion_id_duplicate(): """ Test if IP addresses in the minion IDs are distinct in the pool - - :return: """ with patch("platform.node", MagicMock(return_value="hostname")), patch( "socket.gethostname", MagicMock(return_value="hostname") @@ -801,8 +832,6 @@ def test_generate_minion_id_platform_used(): """ Test if platform.node is used for the first occurrence. The platform.node is most common hostname resolver before anything else. - - :return: """ with patch( "platform.node", MagicMock(return_value="very.long.and.complex.domain.name") @@ -823,8 +852,6 @@ def test_generate_minion_id_platform_used(): def test_generate_minion_id_platform_localhost_filtered(): """ Test if localhost is filtered from the first occurrence. - - :return: """ with patch("platform.node", MagicMock(return_value="localhost")), patch( "socket.gethostname", MagicMock(return_value="pick.me") @@ -845,8 +872,6 @@ def test_generate_minion_id_platform_localhost_filtered(): def test_generate_minion_id_platform_localhost_filtered_all(): """ Test if any of the localhost is filtered from everywhere. 
- - :return: """ with patch("platform.node", MagicMock(return_value="localhost")), patch( "socket.gethostname", MagicMock(return_value="ip6-loopback") @@ -865,8 +890,6 @@ def test_generate_minion_id_platform_localhost_filtered_all(): def test_generate_minion_id_platform_localhost_only(): """ Test if there is no other choice but localhost. - - :return: """ with patch("platform.node", MagicMock(return_value="localhost")), patch( "socket.gethostname", MagicMock(return_value="ip6-loopback") @@ -885,8 +908,6 @@ def test_generate_minion_id_platform_localhost_only(): def test_generate_minion_id_platform_fqdn(): """ Test if fqdn is picked up. - - :return: """ with patch("platform.node", MagicMock(return_value="localhost")), patch( "socket.gethostname", MagicMock(return_value="ip6-loopback") @@ -905,8 +926,6 @@ def test_generate_minion_id_platform_fqdn(): def test_generate_minion_id_platform_localhost_addrinfo(): """ Test if addinfo is picked up. - - :return: """ with patch("platform.node", MagicMock(return_value="localhost")), patch( "socket.gethostname", MagicMock(return_value="ip6-loopback") @@ -925,8 +944,6 @@ def test_generate_minion_id_platform_localhost_addrinfo(): def test_generate_minion_id_platform_ip_addr_only(): """ Test if IP address is the only what is used as a Minion ID in case no DNS name. 
- - :return: """ with patch("platform.node", MagicMock(return_value="localhost")), patch( "socket.gethostname", MagicMock(return_value="ip6-loopback") @@ -970,7 +987,7 @@ def test_generate_minion_id_with_long_hostname(): with patch("socket.gethostname", MagicMock(return_value=long_name)): # An exception is raised if unicode is passed to socket.getfqdn minion_id = network.generate_minion_id() - assert minion_id != "", minion_id + assert minion_id != "" def test_filter_by_networks_with_no_filter(): @@ -1034,34 +1051,34 @@ def test_ip_networks(): # Without loopback ret = network.ip_networks(interface_data=interface_data) - assert ret == ["10.10.8.0/22"], ret + assert ret == ["10.10.8.0/22"] # Without loopback, specific interface ret = network.ip_networks(interface="eth0", interface_data=interface_data) - assert ret == ["10.10.8.0/22"], ret + assert ret == ["10.10.8.0/22"] # Without loopback, multiple specific interfaces ret = network.ip_networks(interface="eth0,lo", interface_data=interface_data) - assert ret == ["10.10.8.0/22"], ret + assert ret == ["10.10.8.0/22"] # Without loopback, specific interface (not present) ret = network.ip_networks(interface="eth1", interface_data=interface_data) - assert ret == [], ret + assert ret == [] # With loopback ret = network.ip_networks(include_loopback=True, interface_data=interface_data) - assert ret == ["10.10.8.0/22", "127.0.0.0/8"], ret + assert ret == ["10.10.8.0/22", "127.0.0.0/8"] # With loopback, specific interface ret = network.ip_networks( interface="eth0", include_loopback=True, interface_data=interface_data ) - assert ret == ["10.10.8.0/22"], ret + assert ret == ["10.10.8.0/22"] # With loopback, multiple specific interfaces ret = network.ip_networks( interface="eth0,lo", include_loopback=True, interface_data=interface_data ) - assert ret == ["10.10.8.0/22", "127.0.0.0/8"], ret + assert ret == ["10.10.8.0/22", "127.0.0.0/8"] # With loopback, specific interface (not present) ret = network.ip_networks( interface="eth1", 
include_loopback=True, interface_data=interface_data ) - assert ret == [], ret + assert ret == [] # Verbose, without loopback ret = network.ip_networks(verbose=True, interface_data=interface_data) @@ -1072,7 +1089,7 @@ def test_ip_networks(): "num_addresses": 1024, "address": "10.10.8.0", }, - }, ret + } # Verbose, without loopback, specific interface ret = network.ip_networks( interface="eth0", verbose=True, interface_data=interface_data @@ -1084,7 +1101,7 @@ def test_ip_networks(): "num_addresses": 1024, "address": "10.10.8.0", }, - }, ret + } # Verbose, without loopback, multiple specific interfaces ret = network.ip_networks( interface="eth0,lo", verbose=True, interface_data=interface_data @@ -1096,12 +1113,12 @@ def test_ip_networks(): "num_addresses": 1024, "address": "10.10.8.0", }, - }, ret + } # Verbose, without loopback, specific interface (not present) ret = network.ip_networks( interface="eth1", verbose=True, interface_data=interface_data ) - assert ret == {}, ret + assert ret == {} # Verbose, with loopback ret = network.ip_networks( include_loopback=True, verbose=True, interface_data=interface_data @@ -1119,7 +1136,7 @@ def test_ip_networks(): "num_addresses": 16777216, "address": "127.0.0.0", }, - }, ret + } # Verbose, with loopback, specific interface ret = network.ip_networks( interface="eth0", @@ -1134,7 +1151,7 @@ def test_ip_networks(): "num_addresses": 1024, "address": "10.10.8.0", }, - }, ret + } # Verbose, with loopback, multiple specific interfaces ret = network.ip_networks( interface="eth0,lo", @@ -1155,7 +1172,7 @@ def test_ip_networks(): "num_addresses": 16777216, "address": "127.0.0.0", }, - }, ret + } # Verbose, with loopback, specific interface (not present) ret = network.ip_networks( interface="eth1", @@ -1163,7 +1180,7 @@ def test_ip_networks(): verbose=True, interface_data=interface_data, ) - assert ret == {}, ret + assert ret == {} def test_ip_networks6(): @@ -1174,34 +1191,34 @@ def test_ip_networks6(): # Without loopback ret = 
network.ip_networks6(interface_data=interface_data) - assert ret == ["fe80::/64"], ret + assert ret == ["fe80::/64"] # Without loopback, specific interface ret = network.ip_networks6(interface="eth0", interface_data=interface_data) - assert ret == ["fe80::/64"], ret + assert ret == ["fe80::/64"] # Without loopback, multiple specific interfaces ret = network.ip_networks6(interface="eth0,lo", interface_data=interface_data) - assert ret == ["fe80::/64"], ret + assert ret == ["fe80::/64"] # Without loopback, specific interface (not present) ret = network.ip_networks6(interface="eth1", interface_data=interface_data) - assert ret == [], ret + assert ret == [] # With loopback ret = network.ip_networks6(include_loopback=True, interface_data=interface_data) - assert ret == ["::1/128", "fe80::/64"], ret + assert ret == ["::1/128", "fe80::/64"] # With loopback, specific interface ret = network.ip_networks6( interface="eth0", include_loopback=True, interface_data=interface_data ) - assert ret == ["fe80::/64"], ret + assert ret == ["fe80::/64"] # With loopback, multiple specific interfaces ret = network.ip_networks6( interface="eth0,lo", include_loopback=True, interface_data=interface_data ) - assert ret == ["::1/128", "fe80::/64"], ret + assert ret == ["::1/128", "fe80::/64"] # With loopback, specific interface (not present) ret = network.ip_networks6( interface="eth1", include_loopback=True, interface_data=interface_data ) - assert ret == [], ret + assert ret == [] # Verbose, without loopback ret = network.ip_networks6(verbose=True, interface_data=interface_data) @@ -1212,7 +1229,7 @@ def test_ip_networks6(): "num_addresses": 18446744073709551616, "address": "fe80::", }, - }, ret + } # Verbose, without loopback, specific interface ret = network.ip_networks6( interface="eth0", verbose=True, interface_data=interface_data @@ -1224,7 +1241,7 @@ def test_ip_networks6(): "num_addresses": 18446744073709551616, "address": "fe80::", }, - }, ret + } # Verbose, without loopback, 
multiple specific interfaces ret = network.ip_networks6( interface="eth0,lo", verbose=True, interface_data=interface_data @@ -1236,12 +1253,12 @@ def test_ip_networks6(): "num_addresses": 18446744073709551616, "address": "fe80::", }, - }, ret + } # Verbose, without loopback, specific interface (not present) ret = network.ip_networks6( interface="eth1", verbose=True, interface_data=interface_data ) - assert ret == {}, ret + assert ret == {} # Verbose, with loopback ret = network.ip_networks6( include_loopback=True, verbose=True, interface_data=interface_data @@ -1259,7 +1276,7 @@ def test_ip_networks6(): "num_addresses": 1, "address": "::1", }, - }, ret + } # Verbose, with loopback, specific interface ret = network.ip_networks6( interface="eth0", @@ -1274,7 +1291,7 @@ def test_ip_networks6(): "num_addresses": 18446744073709551616, "address": "fe80::", }, - }, ret + } # Verbose, with loopback, multiple specific interfaces ret = network.ip_networks6( interface="eth0,lo", @@ -1295,7 +1312,7 @@ def test_ip_networks6(): "num_addresses": 1, "address": "::1", }, - }, ret + } # Verbose, with loopback, specific interface (not present) ret = network.ip_networks6( interface="eth1", @@ -1303,14 +1320,12 @@ def test_ip_networks6(): verbose=True, interface_data=interface_data, ) - assert ret == {}, ret + assert ret == {} def test_get_fqhostname_return(): """ Test if proper hostname is used when RevDNS differ from hostname - - :return: """ with patch("socket.gethostname", MagicMock(return_value="hostname")), patch( "socket.getfqdn", @@ -1342,17 +1357,18 @@ def test_get_fqhostname_return_empty_hostname(): assert network.get_fqhostname() == host -def test_ip_bracket(): - test_ipv4 = "127.0.0.1" - test_ipv6 = "::1" - test_ipv6_uri = "[::1]" - assert test_ipv4 == network.ip_bracket(test_ipv4) - assert test_ipv6 == network.ip_bracket(test_ipv6_uri, strip=True) - assert "[{}]".format(test_ipv6) == network.ip_bracket(test_ipv6) - assert "[{}]".format(test_ipv6) == 
network.ip_bracket(test_ipv6_uri) - - ip_addr_obj = ipaddress.ip_address(test_ipv4) - assert test_ipv4 == network.ip_bracket(ip_addr_obj) +@pytest.mark.parametrize( + "addr,expected,strip", + ( + ("127.0.0.1", "127.0.0.1", False), + ("[::1]", "::1", True), + ("::1", "[::1]", False), + ("[::1]", "[::1]", False), + (ipaddress.ip_address("127.0.0.1"), "127.0.0.1", False), + ), +) +def test_ip_bracket(addr, expected, strip): + assert network.ip_bracket(addr, strip=strip) == expected def test_junos_ifconfig_output_parsing(): @@ -1414,33 +1430,31 @@ def test_ip_to_host(grains): assert ret == "localhost" -def test_natural_ipv4_netmask(): - ret = network.natural_ipv4_netmask("192.168.0.115") - assert ret == "/24" - - ret = network.natural_ipv4_netmask("192.168.1.80") - assert ret == "/24" - - ret = network.natural_ipv4_netmask("10.10.10.250") - assert ret == "/8" - - ret = network.natural_ipv4_netmask("192.168.0.115", fmt="netmask") - assert ret == "255.255.255.0" - - ret = network.natural_ipv4_netmask("192.168.1.80", fmt="netmask") - assert ret == "255.255.255.0" - - ret = network.natural_ipv4_netmask("10.10.10.250", fmt="netmask") - assert ret == "255.0.0.0" +@pytest.mark.parametrize( + "addr,fmtr,expected", + ( + ("192.168.0.115", "prefixlen", "/24"), + ("192.168.1.80", "prefixlen", "/24"), + ("10.10.10.250", "prefixlen", "/8"), + ("192.168.0.115", "netmask", "255.255.255.0"), + ("192.168.1.80", "netmask", "255.255.255.0"), + ("10.10.10.250", "netmask", "255.0.0.0"), + ), +) +def test_natural_ipv4_netmask(addr, fmtr, expected): + assert network.natural_ipv4_netmask(addr, fmt=fmtr) == expected -def test_rpad_ipv4_network(): - ret = network.rpad_ipv4_network("127.0") - assert ret == "127.0.0.0" - ret = network.rpad_ipv4_network("192.168.3") - assert ret == "192.168.3.0" - ret = network.rpad_ipv4_network("10.209") - assert ret == "10.209.0.0" +@pytest.mark.parametrize( + "addr,expected", + ( + ("127.0", "127.0.0.0"), + ("192.168.3", "192.168.3.0"), + ("10.209", 
"10.209.0.0"), + ), +) +def test_rpad_ipv4_network(addr, expected): + assert network.rpad_ipv4_network(addr) == expected def test_hw_addr(linux_interfaces_dict, freebsd_interfaces_dict): @@ -1508,9 +1522,7 @@ def test_subnets(linux_interfaces_dict): def test_in_subnet(caplog): assert network.in_subnet("fe80::/64", "fe80::e23f:49ff:fe85:6aaf") - assert network.in_subnet("10.10.8.0/22", "10.10.10.56") - assert not network.in_subnet("10.10.8.0/22") caplog.clear() From 350f04e0583c21671f6b3fb62ca783d3581ef065 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Tue, 7 Nov 2023 07:48:44 -0700 Subject: [PATCH 077/196] Updated test per reviewer comments --- tests/pytests/unit/utils/test_network.py | 34 ++++++++++-------------- 1 file changed, 14 insertions(+), 20 deletions(-) diff --git a/tests/pytests/unit/utils/test_network.py b/tests/pytests/unit/utils/test_network.py index b6e080e1b28..c807502b6e2 100644 --- a/tests/pytests/unit/utils/test_network.py +++ b/tests/pytests/unit/utils/test_network.py @@ -231,23 +231,17 @@ def test_host_to_ips(): assertion. 
""" - # pylint doesn't like the }[host] below, disable typecheck - # pylint: disable=all def getaddrinfo_side_effect(host, *args): - try: - return { - "github.com": [ - (2, 1, 6, "", ("192.30.255.112", 0)), - (2, 1, 6, "", ("192.30.255.113", 0)), - ], - "ipv6host.foo": [ - (socket.AF_INET6, 1, 6, "", ("2001:a71::1", 0, 0, 0)), - ], - }[host] - except KeyError: - raise socket.gaierror(-2, "Name or service not known") - - # pylint: enable=all + if host == "github.com": + return [ + (2, 1, 6, "", ("192.30.255.112", 0)), + (2, 1, 6, "", ("192.30.255.113", 0)), + ] + if host == "ipv6host.foo": + return [ + (socket.AF_INET6, 1, 6, "", ("2001:a71::1", 0, 0, 0)), + ] + raise socket.gaierror(-2, "Name or service not known") getaddrinfo_mock = MagicMock(side_effect=getaddrinfo_side_effect) with patch.object(socket, "getaddrinfo", getaddrinfo_mock): @@ -1025,12 +1019,12 @@ def test_filter_by_networks_interfaces_dict(): "10.0.123.201", ], } - assert network.filter_by_networks( - interfaces, ["192.168.1.0/24", "2001:db8::/48"] - ) == { + expected = { "wlan0": ["192.168.1.100", "2001:db8::ff00:42:8329"], "eth0": ["2001:0DB8:0:CD30:123:4567:89AB:CDEF", "192.168.1.101"], } + ret = network.filter_by_networks(interfaces, ["192.168.1.0/24", "2001:db8::/48"]) + assert ret == expected def test_filter_by_networks_catch_all(): @@ -1040,7 +1034,7 @@ def test_filter_by_networks_catch_all(): "193.124.233.5", "fe80::d210:cf3f:64e7:5423", ] - assert ips == network.filter_by_networks(ips, ["0.0.0.0/0", "::/0"]) + assert network.filter_by_networks(ips, ["0.0.0.0/0", "::/0"]) == ips def test_ip_networks(): From 14fdf4993979e64d3c53fd045cf606c9576f2a57 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Tue, 7 Nov 2023 08:54:41 -0700 Subject: [PATCH 078/196] Updated for further reviewer comments --- tests/pytests/unit/utils/test_network.py | 100 +++++++++++++---------- 1 file changed, 55 insertions(+), 45 deletions(-) diff --git a/tests/pytests/unit/utils/test_network.py 
b/tests/pytests/unit/utils/test_network.py index c807502b6e2..be97a9f200b 100644 --- a/tests/pytests/unit/utils/test_network.py +++ b/tests/pytests/unit/utils/test_network.py @@ -1,7 +1,6 @@ import logging import socket import textwrap -import time import pytest @@ -283,7 +282,7 @@ def test_is_ip(): assert not network.is_ip("0.9.800.1000") # Check 16-char-long unicode string # https://github.com/saltstack/salt/issues/51258 - assert not network.is_ipv6("sixteen-char-str") + assert not network.is_ip("sixteen-char-str") @pytest.mark.parametrize( @@ -403,48 +402,60 @@ def test_parse_host_port_bad_raises_value_error(addr): network.parse_host_port(addr) -def test_dns_check(): - hosts = [ - { - "host": "10.10.0.3", - "port": "", - "mocked": [(2, 1, 6, "", ("10.10.0.3", 0))], - "ret": "10.10.0.3", - }, - { - "host": "10.10.0.3", - "port": "1234", - "mocked": [(2, 1, 6, "", ("10.10.0.3", 0))], - "ret": "10.10.0.3", - }, - { - "host": "2001:0db8:85a3::8a2e:0370:7334", - "port": "", - "mocked": [(10, 1, 6, "", ("2001:db8:85a3::8a2e:370:7334", 0, 0, 0))], - "ret": "[2001:db8:85a3::8a2e:370:7334]", - }, - { - "host": "2001:0db8:85a3::8a2e:370:7334", - "port": "1234", - "mocked": [(10, 1, 6, "", ("2001:db8:85a3::8a2e:370:7334", 0, 0, 0))], - "ret": "[2001:db8:85a3::8a2e:370:7334]", - }, - { - "host": "salt-master", - "port": "1234", - "mocked": [(2, 1, 6, "", ("127.0.0.1", 0))], - "ret": "127.0.0.1", - }, - ] - for host in hosts: - with patch.object( - socket, - "getaddrinfo", - create_autospec(socket.getaddrinfo, return_value=host["mocked"]), - ): - with patch("socket.socket", create_autospec(socket.socket)): - ret = network.dns_check(host["host"], host["port"]) - assert ret == host["ret"] +@pytest.mark.parametrize( + "host", + ( + ( + { + "host": "10.10.0.3", + "port": "", + "mocked": [(2, 1, 6, "", ("10.10.0.3", 0))], + "ret": "10.10.0.3", + } + ), + ( + { + "host": "10.10.0.3", + "port": "1234", + "mocked": [(2, 1, 6, "", ("10.10.0.3", 0))], + "ret": "10.10.0.3", + } + ), + 
( + { + "host": "2001:0db8:85a3::8a2e:0370:7334", + "port": "", + "mocked": [(10, 1, 6, "", ("2001:db8:85a3::8a2e:370:7334", 0, 0, 0))], + "ret": "[2001:db8:85a3::8a2e:370:7334]", + } + ), + ( + { + "host": "2001:0db8:85a3::8a2e:370:7334", + "port": "1234", + "mocked": [(10, 1, 6, "", ("2001:db8:85a3::8a2e:370:7334", 0, 0, 0))], + "ret": "[2001:db8:85a3::8a2e:370:7334]", + } + ), + ( + { + "host": "salt-master", + "port": "1234", + "mocked": [(2, 1, 6, "", ("127.0.0.1", 0))], + "ret": "127.0.0.1", + } + ), + ), +) +def test_dns_check(host): + with patch.object( + socket, + "getaddrinfo", + create_autospec(socket.getaddrinfo, return_value=host["mocked"]), + ): + with patch("socket.socket", create_autospec(socket.socket)): + ret = network.dns_check(host["host"], host["port"]) + assert ret == host["ret"] def test_dns_check_ipv6_filter(): @@ -512,7 +523,6 @@ def test_test_addrs(): # attempt to connect to resolved address with default timeout s.side_effect = socket.error addrs = network._test_addrs(addrinfo, 80) - time.sleep(2) assert not len(addrs) == 0 # nothing can connect, but we've eliminated duplicates From 8ffa21355de8bbd3d89bd37a81380db1c440f6d5 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Tue, 7 Nov 2023 09:04:55 -0700 Subject: [PATCH 079/196] Parameterize as per reviewer comments --- tests/pytests/unit/utils/test_network.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/tests/pytests/unit/utils/test_network.py b/tests/pytests/unit/utils/test_network.py index be97a9f200b..50f0d817e1f 100644 --- a/tests/pytests/unit/utils/test_network.py +++ b/tests/pytests/unit/utils/test_network.py @@ -971,11 +971,20 @@ def test_gen_mac(): assert ret == expected_mac +@pytest.mark.parametrize( + "mac_addr", + ( + ("31337"), + ("0001020304056"), + ("00:01:02:03:04:056"), + ("a0:b0:c0:d0:e0:fg"), + ), +) +def test_mac_str_to_bytes_exceptions(mac_addr): + pytest.raises(ValueError, network.mac_str_to_bytes, 
mac_addr) + + def test_mac_str_to_bytes(): - pytest.raises(ValueError, network.mac_str_to_bytes, "31337") - pytest.raises(ValueError, network.mac_str_to_bytes, "0001020304056") - pytest.raises(ValueError, network.mac_str_to_bytes, "00:01:02:03:04:056") - pytest.raises(ValueError, network.mac_str_to_bytes, "a0:b0:c0:d0:e0:fg") assert b"\x10\x08\x06\x04\x02\x00" == network.mac_str_to_bytes("100806040200") assert b"\xf8\xe7\xd6\xc5\xb4\xa3" == network.mac_str_to_bytes("f8e7d6c5b4a3") From c9c0ad0b468e09cf55e7ff0ac6fd6fe556d6d7f2 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Tue, 7 Nov 2023 11:35:25 -0700 Subject: [PATCH 080/196] Further test refractoring from unittest to pytest per reviewer comments --- tests/pytests/unit/utils/test_network.py | 113 +++++++++++++++-------- 1 file changed, 75 insertions(+), 38 deletions(-) diff --git a/tests/pytests/unit/utils/test_network.py b/tests/pytests/unit/utils/test_network.py index 50f0d817e1f..12d545b0154 100644 --- a/tests/pytests/unit/utils/test_network.py +++ b/tests/pytests/unit/utils/test_network.py @@ -277,12 +277,18 @@ def test__generate_minion_id_with_unicode_in_etc_hosts(): assert "thisismyhostname" in network._generate_minion_id() -def test_is_ip(): - assert network.is_ip("10.10.0.3") - assert not network.is_ip("0.9.800.1000") - # Check 16-char-long unicode string - # https://github.com/saltstack/salt/issues/51258 - assert not network.is_ip("sixteen-char-str") +@pytest.mark.parametrize( + "addr,expected", + ( + ("10.10.0.3", True), + ("0.9.800.1000", False), + # Check 16-char-long unicode string + # https://github.com/saltstack/salt/issues/51258 + ("sixteen-char-str", False), + ), +) +def test_is_ip(addr, expected): + assert network.is_ip(addr) is expected @pytest.mark.parametrize( @@ -553,10 +559,10 @@ def test_is_ipv4_subnet(): def test_is_ipv6_subnet(): for item in IPV6_SUBNETS[True]: log.debug("Testing that %s is a valid subnet", item) - assert network.is_ipv6_subnet(item) + assert 
network.is_ipv6_subnet(item) is True for item in IPV6_SUBNETS[False]: log.debug("Testing that %s is not a valid subnet", item) - assert not network.is_ipv6_subnet(item) + assert network.is_ipv6_subnet(item) is False @pytest.mark.parametrize( @@ -608,7 +614,6 @@ def test_interfaces_ifconfig_freebsd(freebsd_interfaces_dict): def test_interfaces_ifconfig_solaris(): with patch("salt.utils.platform.is_sunos", return_value=True): - interfaces = network._interfaces_ifconfig(SOLARIS) expected_interfaces = { "ilbint0": { "inet6": [], @@ -659,12 +664,12 @@ def test_interfaces_ifconfig_solaris(): "up": True, }, } + interfaces = network._interfaces_ifconfig(SOLARIS) assert interfaces == expected_interfaces def test_interfaces_ifconfig_netbsd(): - interfaces = network._netbsd_interfaces_ifconfig(NETBSD) - assert interfaces == { + expected_interfaces = { "lo0": { "inet": [{"address": "127.0.0.1", "netmask": "255.0.0.0"}], "inet6": [{"address": "fe80::1", "prefixlen": "64", "scope": "lo0"}], @@ -689,6 +694,8 @@ def test_interfaces_ifconfig_netbsd(): "up": True, }, } + interfaces = network._netbsd_interfaces_ifconfig(NETBSD) + assert interfaces == expected_interfaces def test_freebsd_remotes_on(): @@ -793,6 +800,12 @@ def test_generate_minion_id_127_name_startswith(): """ Test if minion IDs can be named starting from "127" """ + expected = [ + "127890.domainname.blank", + "127890", + "1.2.3.4", + "5.6.7.8", + ] with patch("platform.node", MagicMock(return_value="127890")), patch( "socket.gethostname", MagicMock(return_value="127890") ), patch( @@ -806,18 +819,14 @@ def test_generate_minion_id_127_name_startswith(): "salt.utils.network.ip_addrs", MagicMock(return_value=["1.2.3.4", "5.6.7.8"]), ): - assert network._generate_minion_id() == [ - "127890.domainname.blank", - "127890", - "1.2.3.4", - "5.6.7.8", - ] + assert network._generate_minion_id() == expected def test_generate_minion_id_duplicate(): """ Test if IP addresses in the minion IDs are distinct in the pool """ + expected 
= ["hostname", "1.2.3.4"] with patch("platform.node", MagicMock(return_value="hostname")), patch( "socket.gethostname", MagicMock(return_value="hostname") ), patch("socket.getfqdn", MagicMock(return_value="hostname")), patch( @@ -829,7 +838,7 @@ def test_generate_minion_id_duplicate(): "salt.utils.network.ip_addrs", MagicMock(return_value=["1.2.3.4", "1.2.3.4", "1.2.3.4"]), ): - assert network._generate_minion_id() == ["hostname", "1.2.3.4"] + assert network._generate_minion_id() == expected def test_generate_minion_id_platform_used(): @@ -964,10 +973,10 @@ def test_generate_minion_id_platform_ip_addr_only(): def test_gen_mac(): + expected_mac = "00:16:3E:01:01:01" with patch("random.randint", return_value=1) as random_mock: assert random_mock.return_value == 1 ret = network.gen_mac("00:16:3E") - expected_mac = "00:16:3E:01:01:01" assert ret == expected_mac @@ -981,12 +990,13 @@ def test_gen_mac(): ), ) def test_mac_str_to_bytes_exceptions(mac_addr): - pytest.raises(ValueError, network.mac_str_to_bytes, mac_addr) + with pytest.raises(ValueError): + network.mac_str_to_bytes(mac_addr) def test_mac_str_to_bytes(): - assert b"\x10\x08\x06\x04\x02\x00" == network.mac_str_to_bytes("100806040200") - assert b"\xf8\xe7\xd6\xc5\xb4\xa3" == network.mac_str_to_bytes("f8e7d6c5b4a3") + assert network.mac_str_to_bytes("100806040200") == b"\x10\x08\x06\x04\x02\x00" + assert network.mac_str_to_bytes("f8e7d6c5b4a3") == b"\xf8\xe7\xd6\xc5\xb4\xa3" @pytest.mark.slow_test @@ -1021,12 +1031,13 @@ def test_filter_by_networks_ips_list(): "193.124.233.5", "fe80::d210:cf3f:64e7:5423", ] - networks = ["10.0.0.0/8", "fe80::/64"] - assert network.filter_by_networks(ips, networks) == [ + expected = [ "10.0.123.200", "10.10.10.10", "fe80::d210:cf3f:64e7:5423", ] + networks = ["10.0.0.0/8", "fe80::/64"] + assert network.filter_by_networks(ips, networks) == expected def test_filter_by_networks_interfaces_dict(): @@ -1095,7 +1106,7 @@ def test_ip_networks(): # Verbose, without loopback ret = 
network.ip_networks(verbose=True, interface_data=interface_data) - assert ret == { + expected_ret1 = { "10.10.8.0/22": { "prefixlen": 22, "netmask": "255.255.252.0", @@ -1103,11 +1114,13 @@ def test_ip_networks(): "address": "10.10.8.0", }, } + assert ret == expected_ret1 + # Verbose, without loopback, specific interface ret = network.ip_networks( interface="eth0", verbose=True, interface_data=interface_data ) - assert ret == { + expected_ret2 = { "10.10.8.0/22": { "prefixlen": 22, "netmask": "255.255.252.0", @@ -1115,11 +1128,13 @@ def test_ip_networks(): "address": "10.10.8.0", }, } + assert ret == expected_ret2 + # Verbose, without loopback, multiple specific interfaces ret = network.ip_networks( interface="eth0,lo", verbose=True, interface_data=interface_data ) - assert ret == { + expected_ret3 = { "10.10.8.0/22": { "prefixlen": 22, "netmask": "255.255.252.0", @@ -1127,6 +1142,8 @@ def test_ip_networks(): "address": "10.10.8.0", }, } + assert ret == expected_ret3 + # Verbose, without loopback, specific interface (not present) ret = network.ip_networks( interface="eth1", verbose=True, interface_data=interface_data @@ -1136,7 +1153,7 @@ def test_ip_networks(): ret = network.ip_networks( include_loopback=True, verbose=True, interface_data=interface_data ) - assert ret == { + expected_ret4 = { "10.10.8.0/22": { "prefixlen": 22, "netmask": "255.255.252.0", @@ -1150,6 +1167,8 @@ def test_ip_networks(): "address": "127.0.0.0", }, } + assert ret == expected_ret4 + # Verbose, with loopback, specific interface ret = network.ip_networks( interface="eth0", @@ -1157,7 +1176,7 @@ def test_ip_networks(): verbose=True, interface_data=interface_data, ) - assert ret == { + expected_ret5 = { "10.10.8.0/22": { "prefixlen": 22, "netmask": "255.255.252.0", @@ -1165,6 +1184,8 @@ def test_ip_networks(): "address": "10.10.8.0", }, } + assert ret == expected_ret5 + # Verbose, with loopback, multiple specific interfaces ret = network.ip_networks( interface="eth0,lo", @@ -1172,7 +1193,7 
@@ def test_ip_networks(): verbose=True, interface_data=interface_data, ) - assert ret == { + expected_ret6 = { "10.10.8.0/22": { "prefixlen": 22, "netmask": "255.255.252.0", @@ -1186,6 +1207,8 @@ def test_ip_networks(): "address": "127.0.0.0", }, } + assert ret == expected_ret6 + # Verbose, with loopback, specific interface (not present) ret = network.ip_networks( interface="eth1", @@ -1235,7 +1258,7 @@ def test_ip_networks6(): # Verbose, without loopback ret = network.ip_networks6(verbose=True, interface_data=interface_data) - assert ret == { + expected_ret1 = { "fe80::/64": { "prefixlen": 64, "netmask": "ffff:ffff:ffff:ffff::", @@ -1243,11 +1266,13 @@ def test_ip_networks6(): "address": "fe80::", }, } + assert ret == expected_ret1 + # Verbose, without loopback, specific interface ret = network.ip_networks6( interface="eth0", verbose=True, interface_data=interface_data ) - assert ret == { + expected_ret2 = { "fe80::/64": { "prefixlen": 64, "netmask": "ffff:ffff:ffff:ffff::", @@ -1255,11 +1280,13 @@ def test_ip_networks6(): "address": "fe80::", }, } + assert ret == expected_ret2 + # Verbose, without loopback, multiple specific interfaces ret = network.ip_networks6( interface="eth0,lo", verbose=True, interface_data=interface_data ) - assert ret == { + expected_ret3 = { "fe80::/64": { "prefixlen": 64, "netmask": "ffff:ffff:ffff:ffff::", @@ -1267,16 +1294,19 @@ def test_ip_networks6(): "address": "fe80::", }, } + assert ret == expected_ret3 + # Verbose, without loopback, specific interface (not present) ret = network.ip_networks6( interface="eth1", verbose=True, interface_data=interface_data ) assert ret == {} + # Verbose, with loopback ret = network.ip_networks6( include_loopback=True, verbose=True, interface_data=interface_data ) - assert ret == { + expected_ret4 = { "fe80::/64": { "prefixlen": 64, "netmask": "ffff:ffff:ffff:ffff::", @@ -1290,6 +1320,8 @@ def test_ip_networks6(): "address": "::1", }, } + assert ret == expected_ret4 + # Verbose, with loopback, 
specific interface ret = network.ip_networks6( interface="eth0", @@ -1297,7 +1329,7 @@ def test_ip_networks6(): verbose=True, interface_data=interface_data, ) - assert ret == { + expected_ret5 = { "fe80::/64": { "prefixlen": 64, "netmask": "ffff:ffff:ffff:ffff::", @@ -1305,6 +1337,8 @@ def test_ip_networks6(): "address": "fe80::", }, } + assert ret == expected_ret5 + # Verbose, with loopback, multiple specific interfaces ret = network.ip_networks6( interface="eth0,lo", @@ -1312,7 +1346,7 @@ def test_ip_networks6(): verbose=True, interface_data=interface_data, ) - assert ret == { + expected_ret6 = { "fe80::/64": { "prefixlen": 64, "netmask": "ffff:ffff:ffff:ffff::", @@ -1326,6 +1360,8 @@ def test_ip_networks6(): "address": "::1", }, } + assert ret == expected_ret6 + # Verbose, with loopback, specific interface (not present) ret = network.ip_networks6( interface="eth1", @@ -1501,14 +1537,15 @@ def test_interface_and_ip(linux_interfaces_dict): "salt.utils.network.linux_interfaces", MagicMock(return_value=linux_interfaces_dict), ): - ret = network.interface("eth0") - assert ret == [ + expected = [ { "address": "10.10.10.56", "broadcast": "10.10.10.255", "netmask": "255.255.252.0", } ] + ret = network.interface("eth0") + assert ret == expected ret = network.interface("dog") assert ret == 'Interface "dog" not in available interfaces: "eth0", "lo"' From 1bbe6489fde9c15805595cbb02a147714dbf67ff Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Wed, 8 Nov 2023 17:30:58 -0700 Subject: [PATCH 081/196] Ensure quoted filespec when using egrep to allow for regex with selinux --- changelog/65340.fixed.md | 1 + salt/modules/selinux.py | 2 +- tests/pytests/unit/modules/test_selinux.py | 35 ++++++++++++++++++++++ 3 files changed, 37 insertions(+), 1 deletion(-) create mode 100644 changelog/65340.fixed.md diff --git a/changelog/65340.fixed.md b/changelog/65340.fixed.md new file mode 100644 index 00000000000..ed26da9f3cd --- /dev/null +++ 
b/changelog/65340.fixed.md @@ -0,0 +1 @@ +Fix regex for filespec adding/deleting fcontext policy in selinux diff --git a/salt/modules/selinux.py b/salt/modules/selinux.py index 7c09783da70..c12db3d9e19 100644 --- a/salt/modules/selinux.py +++ b/salt/modules/selinux.py @@ -617,7 +617,7 @@ def _fcontext_add_or_delete_policy( if "add" == action: # need to use --modify if context for name file exists, otherwise ValueError filespec = re.escape(name) - cmd = f"semanage fcontext -l | egrep {filespec}" + cmd = f"semanage fcontext -l | egrep '{filespec}'" current_entry_text = __salt__["cmd.shell"](cmd, ignore_retcode=True) if current_entry_text != "": action = "modify" diff --git a/tests/pytests/unit/modules/test_selinux.py b/tests/pytests/unit/modules/test_selinux.py index 05d3ca25e24..a48287b7648 100644 --- a/tests/pytests/unit/modules/test_selinux.py +++ b/tests/pytests/unit/modules/test_selinux.py @@ -1,3 +1,5 @@ +import re + import pytest import salt.modules.selinux as selinux @@ -376,3 +378,36 @@ SELINUXTYPE=targeted for line in writes: if line.startswith("SELINUX="): assert line == "SELINUX=disabled" + + +@pytest.mark.parametrize( + "name,sel_type", + ( + ("/srv/ssl/ldap/.*[.]key", "slapd_cert_t"), + ("/srv/ssl/ldap(/.*[.](pem|crt))?", "cert_t"), + ), +) +def test_selinux_add_policy_regex(name, sel_type): + """ + Test adding policy with regex components parsing the stdout response of restorecon used in fcontext_policy_applied, new style. 
+ """ + mock_cmd_shell = MagicMock(return_value={"retcode": 0}) + mock_cmd_run_all = MagicMock(return_value={"retcode": 0}) + + with patch.dict(selinux.__salt__, {"cmd.shell": mock_cmd_shell}), patch.dict( + selinux.__salt__, {"cmd.run_all": mock_cmd_run_all} + ): + selinux.fcontext_add_policy(name, sel_type=sel_type) + filespec = re.escape(name) + filespec_test = f"'{filespec}'" + expected_cmd_shell = f"semanage fcontext -l | egrep {filespec_test}" + mock_cmd_shell.assert_called_once_with( + f"{expected_cmd_shell}", + ignore_retcode=True, + ) + expected_cmd_run_all = ( + f"semanage fcontext --modify --type {sel_type} {filespec}" + ) + mock_cmd_run_all.assert_called_once_with( + f"{expected_cmd_run_all}", + ) From ee75a65837a96a02ae25b8ccb76f5aabf608b7c5 Mon Sep 17 00:00:00 2001 From: David Murphy < dmurphy@saltstack.com> Date: Thu, 9 Nov 2023 11:03:47 -0700 Subject: [PATCH 082/196] Updated test per reviewer's comments --- tests/pytests/unit/modules/test_selinux.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/tests/pytests/unit/modules/test_selinux.py b/tests/pytests/unit/modules/test_selinux.py index a48287b7648..b67a1b52577 100644 --- a/tests/pytests/unit/modules/test_selinux.py +++ b/tests/pytests/unit/modules/test_selinux.py @@ -399,15 +399,14 @@ def test_selinux_add_policy_regex(name, sel_type): ): selinux.fcontext_add_policy(name, sel_type=sel_type) filespec = re.escape(name) - filespec_test = f"'{filespec}'" - expected_cmd_shell = f"semanage fcontext -l | egrep {filespec_test}" + expected_cmd_shell = f"semanage fcontext -l | egrep '{filespec}'" mock_cmd_shell.assert_called_once_with( - f"{expected_cmd_shell}", + expected_cmd_shell, ignore_retcode=True, ) expected_cmd_run_all = ( f"semanage fcontext --modify --type {sel_type} {filespec}" ) mock_cmd_run_all.assert_called_once_with( - f"{expected_cmd_run_all}", + expected_cmd_run_all, ) From 068c5e87779357ea859aedd53cb5a6d24d894f8d Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Mon, 13 Nov 2023 17:49:30 -0700 Subject: [PATCH 083/196] Upgrade relenv to 0.14.0 --- .github/workflows/ci.yml | 28 ++++++++++++++-------------- .github/workflows/nightly.yml | 28 ++++++++++++++-------------- .github/workflows/scheduled.yml | 28 ++++++++++++++-------------- .github/workflows/staging.yml | 28 ++++++++++++++-------------- changelog/65316.fixed.md | 4 ++++ cicd/shared-gh-workflows-context.yml | 2 +- 6 files changed, 61 insertions(+), 57 deletions(-) create mode 100644 changelog/65316.fixed.md diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index b91e9f780cb..40876355901 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -444,7 +444,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" build-deps-onedir-windows: @@ -458,7 +458,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" build-deps-onedir-macos: @@ -472,7 +472,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" build-salt-onedir-linux: @@ -488,7 +488,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" build-salt-onedir-windows: @@ -504,7 +504,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" build-salt-onedir-macos: @@ -520,7 +520,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" build-rpm-pkgs-onedir: @@ -532,7 +532,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "onedir" @@ -545,7 +545,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "src" @@ -558,7 +558,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "onedir" @@ -571,7 +571,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: 
"3.10.13" source: "src" @@ -584,7 +584,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "onedir" @@ -597,7 +597,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "src" @@ -610,7 +610,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "onedir" @@ -623,7 +623,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "src" diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 12405289210..8a4fa2f3cc0 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -493,7 +493,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" build-deps-onedir-windows: @@ -507,7 +507,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" build-deps-onedir-macos: @@ -521,7 +521,7 @@ 
jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" build-salt-onedir-linux: @@ -537,7 +537,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" build-salt-onedir-windows: @@ -553,7 +553,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" build-salt-onedir-macos: @@ -569,7 +569,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" build-rpm-pkgs-onedir: @@ -581,7 +581,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "onedir" @@ -594,7 +594,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: 
"3.10.13" source: "src" @@ -607,7 +607,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "onedir" @@ -620,7 +620,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "src" @@ -633,7 +633,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "onedir" environment: nightly @@ -649,7 +649,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "src" environment: nightly @@ -665,7 +665,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "onedir" environment: nightly @@ -681,7 +681,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "src" environment: nightly diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index cf7d7af20df..6d70db27ef5 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -478,7 +478,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ 
fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" build-deps-onedir-windows: @@ -492,7 +492,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" build-deps-onedir-macos: @@ -506,7 +506,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" build-salt-onedir-linux: @@ -522,7 +522,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" build-salt-onedir-windows: @@ -538,7 +538,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" build-salt-onedir-macos: @@ -554,7 +554,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ 
fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" build-rpm-pkgs-onedir: @@ -566,7 +566,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "onedir" @@ -579,7 +579,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "src" @@ -592,7 +592,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "onedir" @@ -605,7 +605,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "src" @@ -618,7 +618,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "onedir" @@ -631,7 +631,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "src" @@ -644,7 +644,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "onedir" @@ -657,7 +657,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "src" diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index a06ed67a46f..d8fea8dd363 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -488,7 +488,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" build-deps-onedir-windows: @@ -502,7 +502,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" build-deps-onedir-macos: @@ -516,7 +516,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" build-salt-onedir-linux: @@ -532,7 +532,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" build-salt-onedir-windows: @@ -548,7 +548,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" build-salt-onedir-macos: @@ -564,7 +564,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" build-rpm-pkgs-onedir: @@ -576,7 +576,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "onedir" @@ -589,7 +589,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "src" @@ -602,7 +602,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "onedir" @@ -615,7 +615,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "src" @@ -628,7 +628,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "onedir" environment: staging @@ -644,7 +644,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: 
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "src" environment: staging @@ -660,7 +660,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "onedir" environment: staging @@ -676,7 +676,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.13.11" + relenv-version: "0.14.0" python-version: "3.10.13" source: "src" environment: staging diff --git a/changelog/65316.fixed.md b/changelog/65316.fixed.md new file mode 100644 index 00000000000..da51ae73aa0 --- /dev/null +++ b/changelog/65316.fixed.md @@ -0,0 +1,4 @@ +Uprade relenv to 0.14.0 + - Update openssl to address CVE-2023-5363. + - Fix bug in openssl setup when openssl binary can't be found. + - Add M1 mac support. diff --git a/cicd/shared-gh-workflows-context.yml b/cicd/shared-gh-workflows-context.yml index 74eebe098ca..b99248add22 100644 --- a/cicd/shared-gh-workflows-context.yml +++ b/cicd/shared-gh-workflows-context.yml @@ -1,3 +1,3 @@ nox_version: "2022.8.7" python_version: "3.10.13" -relenv_version: "0.13.11" +relenv_version: "0.14.0" From 84c6b703c273acf32fd1eb2514a17d5fdb5456b3 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Mon, 13 Nov 2023 23:09:07 -0700 Subject: [PATCH 084/196] Relenv 0.14.1 --- .github/workflows/ci.yml | 28 ++++++++++++++-------------- .github/workflows/nightly.yml | 28 ++++++++++++++-------------- .github/workflows/scheduled.yml | 28 ++++++++++++++-------------- .github/workflows/staging.yml | 28 ++++++++++++++-------------- changelog/65316.fixed.md | 2 +- cicd/shared-gh-workflows-context.yml | 2 +- 6 files changed, 58 insertions(+), 58 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 40876355901..2944a516550 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -444,7 +444,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" build-deps-onedir-windows: @@ -458,7 +458,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" build-deps-onedir-macos: @@ -472,7 +472,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" build-salt-onedir-linux: @@ -488,7 +488,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} 
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" build-salt-onedir-windows: @@ -504,7 +504,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" build-salt-onedir-macos: @@ -520,7 +520,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" build-rpm-pkgs-onedir: @@ -532,7 +532,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "onedir" @@ -545,7 +545,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "src" @@ -558,7 +558,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "onedir" @@ -571,7 +571,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "src" @@ -584,7 +584,7 @@ jobs: uses: 
./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "onedir" @@ -597,7 +597,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "src" @@ -610,7 +610,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "onedir" @@ -623,7 +623,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "src" diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 8a4fa2f3cc0..aa7e4ec6331 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -493,7 +493,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" build-deps-onedir-windows: @@ -507,7 +507,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" build-deps-onedir-macos: @@ -521,7 +521,7 @@ jobs: salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" build-salt-onedir-linux: @@ -537,7 +537,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" build-salt-onedir-windows: @@ -553,7 +553,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" build-salt-onedir-macos: @@ -569,7 +569,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" build-rpm-pkgs-onedir: @@ -581,7 +581,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "onedir" @@ -594,7 +594,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "src" @@ -607,7 
+607,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "onedir" @@ -620,7 +620,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "src" @@ -633,7 +633,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "onedir" environment: nightly @@ -649,7 +649,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "src" environment: nightly @@ -665,7 +665,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "onedir" environment: nightly @@ -681,7 +681,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "src" environment: nightly diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index 6d70db27ef5..1c3e639b6ea 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -478,7 +478,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ 
fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" build-deps-onedir-windows: @@ -492,7 +492,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" build-deps-onedir-macos: @@ -506,7 +506,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" build-salt-onedir-linux: @@ -522,7 +522,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" build-salt-onedir-windows: @@ -538,7 +538,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" build-salt-onedir-macos: @@ -554,7 +554,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] 
}} - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" build-rpm-pkgs-onedir: @@ -566,7 +566,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "onedir" @@ -579,7 +579,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "src" @@ -592,7 +592,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "onedir" @@ -605,7 +605,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "src" @@ -618,7 +618,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "onedir" @@ -631,7 +631,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "src" @@ -644,7 +644,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "onedir" @@ -657,7 +657,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" 
+ relenv-version: "0.14.1" python-version: "3.10.13" source: "src" diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index d8fea8dd363..8db7fab9e41 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -488,7 +488,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" build-deps-onedir-windows: @@ -502,7 +502,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" build-deps-onedir-macos: @@ -516,7 +516,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" build-salt-onedir-linux: @@ -532,7 +532,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" build-salt-onedir-windows: @@ -548,7 +548,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} 
github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" build-salt-onedir-macos: @@ -564,7 +564,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" build-rpm-pkgs-onedir: @@ -576,7 +576,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "onedir" @@ -589,7 +589,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "src" @@ -602,7 +602,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "onedir" @@ -615,7 +615,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "src" @@ -628,7 +628,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "onedir" environment: staging @@ -644,7 +644,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: 
"0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "src" environment: staging @@ -660,7 +660,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "onedir" environment: staging @@ -676,7 +676,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.0" + relenv-version: "0.14.1" python-version: "3.10.13" source: "src" environment: staging diff --git a/changelog/65316.fixed.md b/changelog/65316.fixed.md index da51ae73aa0..4b1d151abef 100644 --- a/changelog/65316.fixed.md +++ b/changelog/65316.fixed.md @@ -1,4 +1,4 @@ -Uprade relenv to 0.14.0 +Uprade relenv to 0.14.1 - Update openssl to address CVE-2023-5363. - Fix bug in openssl setup when openssl binary can't be found. - Add M1 mac support. diff --git a/cicd/shared-gh-workflows-context.yml b/cicd/shared-gh-workflows-context.yml index b99248add22..ca40fb1c643 100644 --- a/cicd/shared-gh-workflows-context.yml +++ b/cicd/shared-gh-workflows-context.yml @@ -1,3 +1,3 @@ nox_version: "2022.8.7" python_version: "3.10.13" -relenv_version: "0.14.0" +relenv_version: "0.14.1" From be5ef66a3a9b0f3f1369d6181ac22e95c96f3c9e Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Thu, 2 Nov 2023 15:11:39 -0700 Subject: [PATCH 085/196] Connect callback closes it's request channel --- changelog/65464.fixed.md | 1 + salt/channel/client.py | 2 +- .../pytests/functional/channel/test_client.py | 23 +++++++++++++++++++ 3 files changed, 25 insertions(+), 1 deletion(-) create mode 100644 changelog/65464.fixed.md create mode 100644 tests/pytests/functional/channel/test_client.py diff --git a/changelog/65464.fixed.md b/changelog/65464.fixed.md new file mode 100644 index 00000000000..a931b6a6445 --- /dev/null +++ b/changelog/65464.fixed.md @@ -0,0 +1 @@ +Publish channel connect callback method properly closes it's request channel. diff --git a/salt/channel/client.py b/salt/channel/client.py index 5d07a04ad63..88fbad3ff0b 100644 --- a/salt/channel/client.py +++ b/salt/channel/client.py @@ -564,7 +564,7 @@ class AsyncPubChannel: log.info("fire_master failed", exc_info=True) finally: # SyncWrapper will call either close() or destroy(), whichever is available - del req_channel + req_channel.close() else: self._reconnected = True except Exception as exc: # pylint: disable=broad-except diff --git a/tests/pytests/functional/channel/test_client.py b/tests/pytests/functional/channel/test_client.py new file mode 100644 index 00000000000..43a9dea0c81 --- /dev/null +++ b/tests/pytests/functional/channel/test_client.py @@ -0,0 +1,23 @@ +import salt.channel.client +from tests.support.mock import MagicMock, patch + + +async def test_async_pub_channel_connect_cb(minion_opts): + """ + Validate connect_callback closes the request channel it creates. 
+ """ + minion_opts["master_uri"] = "tcp://127.0.0.1:4506" + minion_opts["master_ip"] = "127.0.0.1" + channel = salt.channel.client.AsyncPubChannel.factory(minion_opts) + + async def send_id(*args): + return + + channel.send_id = send_id + channel._reconnected = True + + mock = MagicMock(salt.channel.client.AsyncReqChannel) + with patch("salt.channel.client.AsyncReqChannel.factory", return_value=mock): + await channel.connect_callback(None) + mock.send.assert_called_once() + mock.close.assert_called_once() From 6615b5a5301a8ba70e4a074d6650f81125b6c5ff Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Mon, 6 Nov 2023 15:01:39 -0700 Subject: [PATCH 086/196] Use context manager for request channel --- salt/channel/client.py | 23 ++++++++++------------- 1 file changed, 10 insertions(+), 13 deletions(-) diff --git a/salt/channel/client.py b/salt/channel/client.py index 88fbad3ff0b..0ca3cb7b76d 100644 --- a/salt/channel/client.py +++ b/salt/channel/client.py @@ -552,19 +552,16 @@ class AsyncPubChannel: "data": data, "tag": tag, } - req_channel = AsyncReqChannel.factory(self.opts) - try: - yield req_channel.send(load, timeout=60) - except salt.exceptions.SaltReqTimeoutError: - log.info( - "fire_master failed: master could not be contacted. Request timed" - " out." - ) - except Exception: # pylint: disable=broad-except - log.info("fire_master failed", exc_info=True) - finally: - # SyncWrapper will call either close() or destroy(), whichever is available - req_channel.close() + with AsyncReqChannel.factory(self.opts) as channel: + try: + yield channel.send(load, timeout=60) + except salt.exceptions.SaltReqTimeoutError: + log.info( + "fire_master failed: master could not be contacted. Request timed" + " out." 
+ ) + except Exception: # pylint: disable=broad-except + log.info("fire_master failed", exc_info=True) else: self._reconnected = True except Exception as exc: # pylint: disable=broad-except From 6e5a301ca61964bb64c4f5631bbd045d96b4b0da Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Wed, 8 Nov 2023 14:42:56 -0700 Subject: [PATCH 087/196] Fix test when using context manager is used --- tests/pytests/functional/channel/test_client.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tests/pytests/functional/channel/test_client.py b/tests/pytests/functional/channel/test_client.py index 43a9dea0c81..daaeb490669 100644 --- a/tests/pytests/functional/channel/test_client.py +++ b/tests/pytests/functional/channel/test_client.py @@ -17,7 +17,9 @@ async def test_async_pub_channel_connect_cb(minion_opts): channel._reconnected = True mock = MagicMock(salt.channel.client.AsyncReqChannel) + mock.__enter__ = lambda self: mock + with patch("salt.channel.client.AsyncReqChannel.factory", return_value=mock): await channel.connect_callback(None) mock.send.assert_called_once() - mock.close.assert_called_once() + mock.__exit__.assert_called_once() From 2ddd5fce46652778e68ea066093d79209115641a Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Tue, 14 Nov 2023 02:52:55 -0700 Subject: [PATCH 088/196] Use context manager to ensure channel is closed properly --- .../pytests/functional/channel/test_client.py | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/tests/pytests/functional/channel/test_client.py b/tests/pytests/functional/channel/test_client.py index daaeb490669..145ad95b771 100644 --- a/tests/pytests/functional/channel/test_client.py +++ b/tests/pytests/functional/channel/test_client.py @@ -8,18 +8,18 @@ async def test_async_pub_channel_connect_cb(minion_opts): """ minion_opts["master_uri"] = "tcp://127.0.0.1:4506" minion_opts["master_ip"] = "127.0.0.1" - channel = salt.channel.client.AsyncPubChannel.factory(minion_opts) + with salt.channel.client.AsyncPubChannel.factory(minion_opts) as channel: - async def send_id(*args): - return + async def send_id(*args): + return - channel.send_id = send_id - channel._reconnected = True + channel.send_id = send_id + channel._reconnected = True - mock = MagicMock(salt.channel.client.AsyncReqChannel) - mock.__enter__ = lambda self: mock + mock = MagicMock(salt.channel.client.AsyncReqChannel) + mock.__enter__ = lambda self: mock - with patch("salt.channel.client.AsyncReqChannel.factory", return_value=mock): - await channel.connect_callback(None) - mock.send.assert_called_once() - mock.__exit__.assert_called_once() + with patch("salt.channel.client.AsyncReqChannel.factory", return_value=mock): + await channel.connect_callback(None) + mock.send.assert_called_once() + mock.__exit__.assert_called_once() From 551443ca7f1a82b33bb7f19cc10dccfd9fe5486d Mon Sep 17 00:00:00 2001 From: Salt Project Packaging Date: Tue, 7 Nov 2023 19:55:44 +0000 Subject: [PATCH 089/196] Update the bootstrap script to v2023.11.07 (cherry picked from commit 6531c36679d59c9523dddeaa61f3d23169a9daa6) --- salt/cloud/deploy/bootstrap-salt.sh | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git 
a/salt/cloud/deploy/bootstrap-salt.sh b/salt/cloud/deploy/bootstrap-salt.sh index b937fbb7ef7..f66aeea3a8a 100644 --- a/salt/cloud/deploy/bootstrap-salt.sh +++ b/salt/cloud/deploy/bootstrap-salt.sh @@ -23,7 +23,7 @@ #====================================================================================================================== set -o nounset # Treat unset variables as an error -__ScriptVersion="2023.07.25" +__ScriptVersion="2023.11.07" __ScriptName="bootstrap-salt.sh" __ScriptFullName="$0" @@ -1523,7 +1523,7 @@ __check_dpkg_architecture() { else # Saltstack official repository has arm64 metadata beginning with Debian 11, # use amd64 repositories on arm64 for anything older, since all pkgs are arch-independent - if [ "$DISTRO_NAME_L" = "debian" ] || [ "$DISTRO_MAJOR_VERSION" -lt 11 ]; then + if [ "$DISTRO_NAME_L" = "debian" ] && [ "$DISTRO_MAJOR_VERSION" -lt 11 ]; then __REPO_ARCH="amd64" else __REPO_ARCH="arm64" @@ -1709,6 +1709,14 @@ __debian_codename_translation() { "11") DISTRO_CODENAME="bullseye" ;; + "12") + DISTRO_CODENAME="bookworm" + # FIXME - TEMPORARY + # use bullseye packages until bookworm packages are available + DISTRO_CODENAME="bullseye" + DISTRO_MAJOR_VERSION=11 + rv=11 + ;; *) DISTRO_CODENAME="stretch" ;; @@ -2196,7 +2204,7 @@ __dnf_install_noinput() { #--- FUNCTION ------------------------------------------------------------------------------------------------------- # NAME: __tdnf_install_noinput -# DESCRIPTION: (DRY) dnf install with noinput options +# DESCRIPTION: (DRY) tdnf install with noinput options #---------------------------------------------------------------------------------------------------------------------- __tdnf_install_noinput() { @@ -7033,15 +7041,17 @@ install_photon_git_deps() { "${__python}" -m pip install "${dep}" || return 1 done else - __PACKAGES="python${PY_PKG_VER}-devel python${PY_PKG_VER}-pip python${PY_PKG_VER}-setuptools gcc" + __PACKAGES="python${PY_PKG_VER}-devel python${PY_PKG_VER}-pip 
python${PY_PKG_VER}-setuptools gcc glibc-devel linux-devel.x86_64" # shellcheck disable=SC2086 __tdnf_install_noinput ${__PACKAGES} || return 1 fi - # Need newer version of setuptools on Photon - _setuptools_dep="setuptools>=${_MINIMUM_SETUPTOOLS_VERSION}" - echodebug "Running '${_PY_EXE} -m pip --upgrade install ${_setuptools_dep}'" - ${_PY_EXE} -m pip install --upgrade "${_setuptools_dep}" + if [ "${DISTRO_MAJOR_VERSION}" -gt 3 ]; then + # Need newer version of setuptools on Photon + _setuptools_dep="setuptools>=${_MINIMUM_SETUPTOOLS_VERSION}" + echodebug "Running '${_PY_EXE} -m pip --upgrade install ${_setuptools_dep}'" + ${_PY_EXE} -m pip install --upgrade "${_setuptools_dep}" + fi # Let's trigger config_salt() if [ "$_TEMP_CONFIG_DIR" = "null" ]; then From 02b147ae5953997668b916644688ed724ebc73be Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Fri, 3 Nov 2023 12:51:31 +0000 Subject: [PATCH 090/196] Add a FIPS enabled test run under PhotonOS 4 to the CI process Signed-off-by: Pedro Algarvio --- .github/workflows/ci.yml | 25 +++++++++++++++++++ .github/workflows/nightly.yml | 25 +++++++++++++++++++ .github/workflows/scheduled.yml | 25 +++++++++++++++++++ .github/workflows/staging.yml | 24 ++++++++++++++++++ .../workflows/templates/test-salt.yml.jinja | 16 +++++++++--- .github/workflows/test-action.yml | 21 ++++++++++------ tests/conftest.py | 15 +++++++++-- tests/pytests/conftest.py | 8 +++++- tools/pre_commit.py | 19 ++++++++++---- tools/vm.py | 10 +++++++- 10 files changed, 167 insertions(+), 21 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 2944a516550..e94c6a8332b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -2632,6 +2632,29 @@ jobs: workflow-slug: ci default-timeout: 180 + photonos-4-fips: + name: Photon OS 4 Test(FIPS) + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - 
photonos-4-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: photonos-4 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 + fips: true + combine-all-code-coverage: name: Combine Code Coverage if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }} @@ -2709,6 +2732,7 @@ jobs: - ubuntu-2004-arm64 - ubuntu-2204 - ubuntu-2204-arm64 + - photonos-4-fips steps: - uses: actions/checkout@v4 @@ -2905,6 +2929,7 @@ jobs: - ubuntu-2004-arm64 - ubuntu-2204 - ubuntu-2204-arm64 + - photonos-4-fips - amazonlinux-2-pkg-tests - amazonlinux-2-arm64-pkg-tests - amazonlinux-2023-pkg-tests diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index aa7e4ec6331..26b7819954b 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -2693,6 +2693,29 @@ jobs: workflow-slug: nightly default-timeout: 360 + photonos-4-fips: + name: Photon OS 4 Test(FIPS) + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - photonos-4-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: photonos-4 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + 
skip-code-coverage: false + skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 + fips: true + combine-all-code-coverage: name: Combine Code Coverage if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }} @@ -2770,6 +2793,7 @@ jobs: - ubuntu-2004-arm64 - ubuntu-2204 - ubuntu-2204-arm64 + - photonos-4-fips steps: - uses: actions/checkout@v4 @@ -3665,6 +3689,7 @@ jobs: - ubuntu-2004-arm64 - ubuntu-2204 - ubuntu-2204-arm64 + - photonos-4-fips steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index 1c3e639b6ea..eb226cab407 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -2666,6 +2666,29 @@ jobs: workflow-slug: scheduled default-timeout: 360 + photonos-4-fips: + name: Photon OS 4 Test(FIPS) + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - photonos-4-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: photonos-4 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 + fips: true + combine-all-code-coverage: name: Combine Code Coverage if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }} @@ -2743,6 +2766,7 @@ jobs: - ubuntu-2004-arm64 - ubuntu-2204 - ubuntu-2204-arm64 + - photonos-4-fips steps: - uses: actions/checkout@v4 @@ -2941,6 +2965,7 @@ jobs: - ubuntu-2004-arm64 - ubuntu-2204 - ubuntu-2204-arm64 + - photonos-4-fips - amazonlinux-2-pkg-tests - amazonlinux-2-arm64-pkg-tests - 
amazonlinux-2023-pkg-tests diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 8db7fab9e41..84d3b9445a6 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -2688,6 +2688,29 @@ jobs: workflow-slug: staging default-timeout: 180 + photonos-4-fips: + name: Photon OS 4 Test(FIPS) + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - photonos-4-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: photonos-4 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 + fips: true + build-src-repo: name: Build Repository environment: staging @@ -3644,6 +3667,7 @@ jobs: - ubuntu-2004-arm64 - ubuntu-2204 - ubuntu-2204-arm64 + - photonos-4-fips - amazonlinux-2-pkg-tests - amazonlinux-2-arm64-pkg-tests - amazonlinux-2023-pkg-tests diff --git a/.github/workflows/templates/test-salt.yml.jinja b/.github/workflows/templates/test-salt.yml.jinja index be3e7fd5fa4..a84d7e25aad 100644 --- a/.github/workflows/templates/test-salt.yml.jinja +++ b/.github/workflows/templates/test-salt.yml.jinja @@ -60,11 +60,16 @@ <%- endfor %> - <%- for slug, display_name, arch in test_salt_listing["linux"] %> + <%- for slug, display_name, arch, fips in test_salt_listing["linux"] %> + <%- if fips %> + <%- set job_name = slug + "-fips" %> + <%- else %> + <%- set job_name = slug %> + <%- endif %> - <{ slug.replace(".", "") }>: - <%- do test_salt_needs.append(slug.replace(".", "")) %> - name: <{ display_name }> Test + <{ job_name.replace(".", "") }>: + <%- do 
test_salt_needs.append(job_name.replace(".", "")) %> + name: <{ display_name }> Test<% if fips %>(FIPS)<% endif %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -84,5 +89,8 @@ skip-junit-reports: <{ skip_junit_reports_check }> workflow-slug: <{ workflow_slug }> default-timeout: <{ timeout_value }> + <%- if fips %> + fips: true + <%- endif %> <%- endfor %> diff --git a/.github/workflows/test-action.yml b/.github/workflows/test-action.yml index 53e7bbfa894..3db429ae34a 100644 --- a/.github/workflows/test-action.yml +++ b/.github/workflows/test-action.yml @@ -41,6 +41,11 @@ on: type: string description: The python version to run tests with default: "3.10" + fips: + required: false + type: boolean + default: false + description: Test run with FIPS enabled package-name: required: false type: string @@ -207,7 +212,7 @@ jobs: if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] == false }} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ - --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.distro-slug }} \ + --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ ${{ matrix.tests-chunk }} -- --suppress-no-test-exit-code \ --from-filenames=testrun-changed-files.txt @@ -216,7 +221,7 @@ jobs: if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] == false }} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ - --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.distro-slug }} \ + --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.fips && '--fips ' || 
'' }}${{ inputs.distro-slug }} \ ${{ matrix.tests-chunk }} -- --no-fast-tests --slow-tests --suppress-no-test-exit-code \ --from-filenames=testrun-changed-files.txt @@ -225,7 +230,7 @@ jobs: if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] == false }} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ - --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.distro-slug }} \ + --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ ${{ matrix.tests-chunk }} -- --no-fast-tests --core-tests --suppress-no-test-exit-code \ --from-filenames=testrun-changed-files.txt @@ -235,14 +240,14 @@ jobs: run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ (inputs.skip-code-coverage && matrix.tests-chunk != 'unit') && '--skip-code-coverage' || '' }} \ - ${{ inputs.distro-slug }} ${{ matrix.tests-chunk }} + ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} ${{ matrix.tests-chunk }} - name: Run Slow Tests id: run-slow-tests if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] }} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ - --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.distro-slug }} \ + --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ ${{ matrix.tests-chunk }} -- --no-fast-tests --slow-tests - name: Run Core Tests @@ -250,7 +255,7 @@ jobs: if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] }} run: | 
tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ - --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.distro-slug }} \ + --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ ${{ matrix.tests-chunk }} -- --no-fast-tests --core-tests - name: Run Flaky Tests @@ -258,7 +263,7 @@ jobs: if: ${{ fromJSON(inputs.testrun)['selected_tests']['flaky'] }} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ - --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.distro-slug }} \ + --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ ${{ matrix.tests-chunk }} -- --no-fast-tests --flaky-jail - name: Run Full Tests @@ -267,7 +272,7 @@ jobs: run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ (inputs.skip-code-coverage && matrix.tests-chunk != 'unit') && '--skip-code-coverage' || '' }} \ - -E TEST_GROUP ${{ inputs.distro-slug }} ${{ matrix.tests-chunk }} -- --slow-tests --core-tests \ + -E TEST_GROUP ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} ${{ matrix.tests-chunk }} -- --slow-tests --core-tests \ --test-group-count=${{ matrix.test-group-count || 1 }} --test-group=${{ matrix.test-group || 1 }} - name: Combine Coverage Reports diff --git a/tests/conftest.py b/tests/conftest.py index edfa61ad422..f0d8d71b496 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -67,6 +67,9 @@ else: # Flag coverage to track suprocesses by pointing it to the right .coveragerc file os.environ["COVERAGE_PROCESS_START"] = str(COVERAGERC_FILE) +# Variable defining a FIPS test run or not 
+FIPS_TESTRUN = os.environ.get("FIPS_TESTRUN", "0") == "1" + # Define the pytest plugins we rely on pytest_plugins = ["helpers_namespace"] @@ -1054,7 +1057,10 @@ def salt_syndic_master_factory( config_defaults["syndic_master"] = "localhost" config_defaults["transport"] = request.config.getoption("--transport") - config_overrides = {"log_level_logfile": "quiet"} + config_overrides = { + "log_level_logfile": "quiet", + "fips_mode": FIPS_TESTRUN, + } ext_pillar = [] if salt.utils.platform.is_windows(): ext_pillar.append( @@ -1167,7 +1173,10 @@ def salt_master_factory( config_defaults["syndic_master"] = "localhost" config_defaults["transport"] = salt_syndic_master_factory.config["transport"] - config_overrides = {"log_level_logfile": "quiet"} + config_overrides = { + "log_level_logfile": "quiet", + "fips_mode": FIPS_TESTRUN, + } ext_pillar = [] if salt.utils.platform.is_windows(): ext_pillar.append( @@ -1275,6 +1284,7 @@ def salt_minion_factory(salt_master_factory): "log_level_logfile": "quiet", "file_roots": salt_master_factory.config["file_roots"].copy(), "pillar_roots": salt_master_factory.config["pillar_roots"].copy(), + "fips_mode": FIPS_TESTRUN, } virtualenv_binary = get_virtualenv_binary_path() @@ -1306,6 +1316,7 @@ def salt_sub_minion_factory(salt_master_factory): "log_level_logfile": "quiet", "file_roots": salt_master_factory.config["file_roots"].copy(), "pillar_roots": salt_master_factory.config["pillar_roots"].copy(), + "fips_mode": FIPS_TESTRUN, } virtualenv_binary = get_virtualenv_binary_path() diff --git a/tests/pytests/conftest.py b/tests/pytests/conftest.py index 8f354841c1f..79807df8055 100644 --- a/tests/pytests/conftest.py +++ b/tests/pytests/conftest.py @@ -23,6 +23,7 @@ import salt.ext.tornado.ioloop import salt.utils.files import salt.utils.platform from salt.serializers import yaml +from tests.conftest import FIPS_TESTRUN from tests.support.helpers import Webserver, get_virtualenv_binary_path from tests.support.pytest.helpers import TestAccount 
from tests.support.runtests import RUNTIME_VARS @@ -186,7 +187,10 @@ def salt_master_factory( os.path.join(RUNTIME_VARS.FILES, "returners") ) config_defaults["event_return"] = "runtests_noop" - config_overrides = {"pytest-master": {"log": {"level": "DEBUG"}}} + config_overrides = { + "pytest-master": {"log": {"level": "DEBUG"}}, + "fips_mode": FIPS_TESTRUN, + } ext_pillar = [] if salt.utils.platform.is_windows(): ext_pillar.append( @@ -315,6 +319,7 @@ def salt_minion_factory(salt_master_factory, salt_minion_id, sdb_etcd_port, vaul config_overrides = { "file_roots": salt_master_factory.config["file_roots"].copy(), "pillar_roots": salt_master_factory.config["pillar_roots"].copy(), + "fips_mode": FIPS_TESTRUN, } virtualenv_binary = get_virtualenv_binary_path() @@ -345,6 +350,7 @@ def salt_sub_minion_factory(salt_master_factory, salt_sub_minion_id): config_overrides = { "file_roots": salt_master_factory.config["file_roots"].copy(), "pillar_roots": salt_master_factory.config["pillar_roots"].copy(), + "fips_mode": FIPS_TESTRUN, } virtualenv_binary = get_virtualenv_binary_path() diff --git a/tools/pre_commit.py b/tools/pre_commit.py index 7e86b69fdb4..f671b69c859 100644 --- a/tools/pre_commit.py +++ b/tools/pre_commit.py @@ -85,7 +85,7 @@ def generate_workflows(ctx: Context): }, } test_salt_listing = { - "linux": ( + "linux": [ ("almalinux-8", "Alma Linux 8", "x86_64"), ("almalinux-9", "Alma Linux 9", "x86_64"), ("amazonlinux-2", "Amazon Linux 2", "x86_64"), @@ -114,14 +114,23 @@ def generate_workflows(ctx: Context): ("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64"), ("ubuntu-22.04", "Ubuntu 22.04", "x86_64"), ("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64"), - ), - "macos": (("macos-12", "macOS 12", "x86_64"),), - "windows": ( + ], + "macos": [ + ("macos-12", "macOS 12", "x86_64"), + ], + "windows": [ ("windows-2016", "Windows 2016", "amd64"), ("windows-2019", "Windows 2019", "amd64"), ("windows-2022", "Windows 2022", "amd64"), - ), + ], } + for idx, (slug, 
display_name, arch) in enumerate(test_salt_listing["linux"][:]): + fips = False + test_salt_listing["linux"][idx] = (slug, display_name, arch, fips) # type: ignore[assignment] + if slug == "photonos-4": + fips = True + test_salt_listing["linux"].append((slug, display_name, arch, fips)) # type: ignore[arg-type] + test_salt_pkg_listing = { "linux": ( ("amazonlinux-2", "Amazon Linux 2", "x86_64", "rpm"), diff --git a/tools/vm.py b/tools/vm.py index 33a230b7de3..ca3717aa909 100644 --- a/tools/vm.py +++ b/tools/vm.py @@ -306,6 +306,7 @@ def test( print_system_info: bool = False, skip_code_coverage: bool = False, envvars: list[str] = None, + fips: bool = False, ): """ Run test in the VM. @@ -341,6 +342,9 @@ def test( if "photonos" in name: skip_known_failures = os.environ.get("SKIP_INITIAL_PHOTONOS_FAILURES", "1") env["SKIP_INITIAL_PHOTONOS_FAILURES"] = skip_known_failures + if fips: + env["FIPS_TESTRUN"] = "1" + vm.run(["tdnf", "install", "-y", "openssl-fips-provider"], sudo=True) if envvars: for key in envvars: if key not in os.environ: @@ -853,6 +857,9 @@ class VM: forward_agent = "no" else: forward_agent = "yes" + ciphers = "" + if "photonos" in self.name: + ciphers = "Ciphers=aes256-gcm@openssh.com,aes256-cbc,aes256-ctr,chacha20-poly1305@openssh.com,aes128-ctr,aes192-ctr,aes128-gcm@openssh.com" ssh_config = textwrap.dedent( f"""\ Host {self.name} @@ -864,7 +871,8 @@ class VM: StrictHostKeyChecking=no UserKnownHostsFile=/dev/null ForwardAgent={forward_agent} - PasswordAuthentication no + PasswordAuthentication=no + {ciphers} """ ) self.ssh_config_file.write_text(ssh_config) From 3c76698d545af2cf41fdac5b3835a6cb0b8e49d9 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 9 Nov 2023 17:52:47 +0000 Subject: [PATCH 091/196] Also run package tests under FIPS Signed-off-by: Pedro Algarvio --- .github/workflows/ci.yml | 26 ++++++++- .github/workflows/nightly.yml | 26 ++++++++- .github/workflows/scheduled.yml | 26 ++++++++- .github/workflows/staging.yml | 26 ++++++++- 
.../templates/test-salt-pkg.yml.jinja | 14 +++-- .github/workflows/test-packages-action.yml | 8 ++- pkg/tests/conftest.py | 5 ++ tools/pre_commit.py | 58 ++++++++++++------- 8 files changed, 157 insertions(+), 32 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e94c6a8332b..f311fa76b62 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1665,7 +1665,7 @@ jobs: testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} photonos-5-arm64-pkg-tests: - name: Photon OS 5 Arm64 Package Test + name: Photon OS 4 Arm64 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -1686,6 +1686,29 @@ jobs: skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + photonos-4-pkg-tests-fips: + name: Photon OS 4 Package Test(FIPS) + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - photonos-4-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: photonos-4 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true + ubuntu-2004-pkg-tests: name: Ubuntu 20.04 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -2948,6 +2971,7 @@ jobs: - photonos-4-arm64-pkg-tests - photonos-5-pkg-tests - photonos-5-arm64-pkg-tests + - photonos-4-pkg-tests-fips - ubuntu-2004-pkg-tests - ubuntu-2004-arm64-pkg-tests - ubuntu-2204-pkg-tests diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 26b7819954b..cdb0f2ef654 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -1726,7 +1726,7 @@ jobs: testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} photonos-5-arm64-pkg-tests: - name: Photon OS 5 Arm64 Package Test + name: Photon OS 4 Arm64 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -1747,6 +1747,29 @@ jobs: skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + photonos-4-pkg-tests-fips: + name: Photon OS 4 Package Test(FIPS) + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - photonos-4-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: photonos-4 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true + ubuntu-2004-pkg-tests: name: Ubuntu 20.04 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -3769,6 +3792,7 @@ jobs: - 
photonos-4-arm64-pkg-tests - photonos-5-pkg-tests - photonos-5-arm64-pkg-tests + - photonos-4-pkg-tests-fips - ubuntu-2004-pkg-tests - ubuntu-2004-arm64-pkg-tests - ubuntu-2204-pkg-tests diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index eb226cab407..af0b7200770 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -1699,7 +1699,7 @@ jobs: testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} photonos-5-arm64-pkg-tests: - name: Photon OS 5 Arm64 Package Test + name: Photon OS 4 Arm64 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -1720,6 +1720,29 @@ jobs: skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + photonos-4-pkg-tests-fips: + name: Photon OS 4 Package Test(FIPS) + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - photonos-4-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: photonos-4 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true + ubuntu-2004-pkg-tests: name: Ubuntu 20.04 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -2984,6 +3007,7 @@ jobs: - photonos-4-arm64-pkg-tests - photonos-5-pkg-tests - photonos-5-arm64-pkg-tests + - photonos-4-pkg-tests-fips - 
ubuntu-2004-pkg-tests - ubuntu-2004-arm64-pkg-tests - ubuntu-2204-pkg-tests diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 84d3b9445a6..c894a7fdcf4 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -1721,7 +1721,7 @@ jobs: testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} photonos-5-arm64-pkg-tests: - name: Photon OS 5 Arm64 Package Test + name: Photon OS 4 Arm64 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -1742,6 +1742,29 @@ jobs: skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + photonos-4-pkg-tests-fips: + name: Photon OS 4 Package Test(FIPS) + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - photonos-4-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: photonos-4 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true + ubuntu-2004-pkg-tests: name: Ubuntu 20.04 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -3686,6 +3709,7 @@ jobs: - photonos-4-arm64-pkg-tests - photonos-5-pkg-tests - photonos-5-arm64-pkg-tests + - photonos-4-pkg-tests-fips - ubuntu-2004-pkg-tests - ubuntu-2004-arm64-pkg-tests - ubuntu-2204-pkg-tests diff --git 
a/.github/workflows/templates/test-salt-pkg.yml.jinja b/.github/workflows/templates/test-salt-pkg.yml.jinja index ad9d122f7ad..43b736d5414 100644 --- a/.github/workflows/templates/test-salt-pkg.yml.jinja +++ b/.github/workflows/templates/test-salt-pkg.yml.jinja @@ -1,10 +1,13 @@ - <%- for slug, display_name, arch, pkg_type in test_salt_pkg_listing["linux"] %> - - <%- set job_name = "{}-pkg-tests".format(slug.replace(".", "")) %> + <%- for slug, display_name, arch, pkg_type, fips in test_salt_pkg_listing["linux"] %> + <%- if fips == "fips" %> + <%- set job_name = "{}-pkg-tests-fips".format(slug.replace(".", "")) %> + <%- else %> + <%- set job_name = "{}-pkg-tests".format(slug.replace(".", "")) %> + <%- endif %> <{ job_name }>: <%- do test_salt_pkg_needs.append(job_name) %> - name: <{ display_name }> Package Test + name: <{ display_name }> Package Test<% if fips == "fips" %>(FIPS)<% endif %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -24,6 +27,9 @@ skip-code-coverage: <{ skip_test_coverage_check }> skip-junit-reports: <{ skip_junit_reports_check }> testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + <%- if fips == "fips" %> + fips: true + <%- endif %> <%- endfor %> diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index 1418bc93e30..9c8a210ce00 100644 --- a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -40,6 +40,11 @@ on: type: string description: The python version to run tests with default: "3.10" + fips: + required: false + type: boolean + default: false + description: Test run with FIPS enabled package-name: required: false type: string @@ -190,12 +195,11 @@ jobs: run: | tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \ --nox-session=${{ inputs.nox-session }}-pkgs ${{ inputs.distro-slug }} -- 
${{ matrix.test-chunk }} \ - ${{ matrix.version && format('--prev-version {0}', matrix.version) || ''}} - name: Run Package Tests run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ - --nox-session=${{ inputs.nox-session }}-pkgs --rerun-failures ${{ inputs.distro-slug }} -- ${{ matrix.test-chunk }} \ + --nox-session=${{ inputs.nox-session }}-pkgs --rerun-failures ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} -- ${{ matrix.test-chunk }} \ ${{ matrix.version && format('--prev-version {0}', matrix.version) || ''}} - name: Download Test Run Artifacts diff --git a/pkg/tests/conftest.py b/pkg/tests/conftest.py index 36c60b0e57b..63610564cef 100644 --- a/pkg/tests/conftest.py +++ b/pkg/tests/conftest.py @@ -25,6 +25,9 @@ from tests.support.sminion import create_sminion log = logging.getLogger(__name__) +# Variable defining a FIPS test run or not +FIPS_TESTRUN = os.environ.get("FIPS_TESTRUN", "0") == "1" + @pytest.fixture(scope="session") def version(install_salt): @@ -336,6 +339,7 @@ def salt_master(salt_factories, install_salt, state_tree, pillar_tree): "rest_cherrypy": {"port": 8000, "disable_ssl": True}, "netapi_enable_clients": ["local"], "external_auth": {"auto": {"saltdev": [".*"]}}, + "fips_mode": FIPS_TESTRUN, } test_user = False master_config = install_salt.config_path / "master" @@ -469,6 +473,7 @@ def salt_minion(salt_factories, salt_master, install_salt): "id": minion_id, "file_roots": salt_master.config["file_roots"].copy(), "pillar_roots": salt_master.config["pillar_roots"].copy(), + "fips_mode": FIPS_TESTRUN, } if platform.is_windows(): config_overrides[ diff --git a/tools/pre_commit.py b/tools/pre_commit.py index f671b69c859..fad8ed6d2bc 100644 --- a/tools/pre_commit.py +++ b/tools/pre_commit.py @@ -133,28 +133,41 @@ def generate_workflows(ctx: Context): test_salt_pkg_listing = { "linux": ( - ("amazonlinux-2", "Amazon Linux 2", "x86_64", "rpm"), - ("amazonlinux-2-arm64", 
"Amazon Linux 2 Arm64", "aarch64", "rpm"), - ("amazonlinux-2023", "Amazon Linux 2023", "x86_64", "rpm"), - ("amazonlinux-2023-arm64", "Amazon Linux 2023 Arm64", "aarch64", "rpm"), - ("centos-7", "CentOS 7", "x86_64", "rpm"), - ("centosstream-8", "CentOS Stream 8", "x86_64", "rpm"), - ("centosstream-9", "CentOS Stream 9", "x86_64", "rpm"), - ("debian-10", "Debian 10", "x86_64", "deb"), - ("debian-11", "Debian 11", "x86_64", "deb"), - ("debian-11-arm64", "Debian 11 Arm64", "aarch64", "deb"), - ("debian-12", "Debian 12", "x86_64", "deb"), - ("debian-12-arm64", "Debian 12 Arm64", "aarch64", "deb"), - ("photonos-3", "Photon OS 3", "x86_64", "rpm"), - ("photonos-3-arm64", "Photon OS 3 Arm64", "aarch64", "rpm"), - ("photonos-4", "Photon OS 4", "x86_64", "rpm"), - ("photonos-4-arm64", "Photon OS 4 Arm64", "aarch64", "rpm"), - ("photonos-5", "Photon OS 5", "x86_64", "rpm"), - ("photonos-5-arm64", "Photon OS 5 Arm64", "aarch64", "rpm"), - ("ubuntu-20.04", "Ubuntu 20.04", "x86_64", "deb"), - ("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64", "deb"), - ("ubuntu-22.04", "Ubuntu 22.04", "x86_64", "deb"), - ("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64", "deb"), + ("amazonlinux-2", "Amazon Linux 2", "x86_64", "rpm", "no-fips"), + ( + "amazonlinux-2-arm64", + "Amazon Linux 2 Arm64", + "aarch64", + "rpm", + "no-fips", + ), + ("amazonlinux-2023", "Amazon Linux 2023", "x86_64", "rpm", "no-fips"), + ( + "amazonlinux-2023-arm64", + "Amazon Linux 2023 Arm64", + "aarch64", + "rpm", + "no-fips", + ), + ("centos-7", "CentOS 7", "x86_64", "rpm", "no-fips"), + ("centosstream-8", "CentOS Stream 8", "x86_64", "rpm", "no-fips"), + ("centosstream-9", "CentOS Stream 9", "x86_64", "rpm", "no-fips"), + ("debian-10", "Debian 10", "x86_64", "deb", "no-fips"), + ("debian-11", "Debian 11", "x86_64", "deb", "no-fips"), + ("debian-11-arm64", "Debian 11 Arm64", "aarch64", "deb", "no-fips"), + ("debian-12", "Debian 12", "x86_64", "deb", "no-fips"), + ("debian-12-arm64", "Debian 12 Arm64", 
"aarch64", "deb", "no-fips"), + ("photonos-3", "Photon OS 3", "x86_64", "rpm", "no-fips"), + ("photonos-3-arm64", "Photon OS 3 Arm64", "aarch64", "rpm", "no-fips"), + ("photonos-4", "Photon OS 4", "x86_64", "rpm", "no-fips"), + ("photonos-4-arm64", "Photon OS 4 Arm64", "aarch64", "rpm", "no-fips"), + ("photonos-5", "Photon OS 5", "x86_64", "rpm", "no-fips"), + ("photonos-5-arm64", "Photon OS 4 Arm64", "aarch64", "rpm", "no-fips"), + ("photonos-4", "Photon OS 4", "x86_64", "rpm", "fips"), + ("ubuntu-20.04", "Ubuntu 20.04", "x86_64", "deb", "no-fips"), + ("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64", "deb", "no-fips"), + ("ubuntu-22.04", "Ubuntu 22.04", "x86_64", "deb", "no-fips"), + ("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64", "deb", "no-fips"), ), "macos": (("macos-12", "macOS 12", "x86_64"),), "windows": ( @@ -163,6 +176,7 @@ def generate_workflows(ctx: Context): ("windows-2022", "Windows 2022", "amd64"), ), } + build_ci_deps_listing = { "linux": [ ("almalinux-8", "Alma Linux 8", "x86_64"), From 10e9e6c98b2953aef12b57bee42423d5df1bdf02 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 9 Nov 2023 17:55:06 +0000 Subject: [PATCH 092/196] Don't use separate jobs, just use the matrix Signed-off-by: Pedro Algarvio --- .github/workflows/ci.yml | 57 +++------------ .github/workflows/nightly.yml | 57 +++------------ .github/workflows/scheduled.yml | 57 +++------------ .github/workflows/staging.yml | 56 +++------------ .../templates/test-salt-pkg.yml.jinja | 8 +-- .../workflows/templates/test-salt.yml.jinja | 14 ++-- .github/workflows/test-action.yml | 18 ++--- .../workflows/test-packages-action-macos.yml | 10 +-- .github/workflows/test-packages-action.yml | 18 ++--- tools/ci.py | 35 +++++++++- tools/pre_commit.py | 69 +++++++++---------- 11 files changed, 126 insertions(+), 273 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index f311fa76b62..18ca2dd740a 100644 --- a/.github/workflows/ci.yml +++ 
b/.github/workflows/ci.yml @@ -1619,6 +1619,7 @@ jobs: skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true photonos-4-arm64-pkg-tests: name: Photon OS 4 Arm64 Package Test @@ -1641,6 +1642,7 @@ jobs: skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true photonos-5-pkg-tests: name: Photon OS 5 Package Test @@ -1663,9 +1665,10 @@ jobs: skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true photonos-5-arm64-pkg-tests: - name: Photon OS 4 Arm64 Package Test + name: Photon OS 5 Arm64 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -1685,28 +1688,6 @@ jobs: skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - - photonos-4-pkg-tests-fips: - name: Photon OS 4 Package Test(FIPS) - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-rpm-pkgs-onedir - - photonos-4-ci-deps - uses: ./.github/workflows/test-packages-action.yml - with: - distro-slug: photonos-4 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - skip-junit-reports: ${{ github.event_name == 'pull_request' }} - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} fips: true ubuntu-2004-pkg-tests: @@ -2500,6 +2481,7 @@ jobs: skip-junit-reports: ${{ github.event_name == 'pull_request' }} workflow-slug: ci default-timeout: 180 + fips: true photonos-4-arm64: name: Photon OS 4 Arm64 Test @@ -2522,6 +2504,7 @@ jobs: skip-junit-reports: ${{ github.event_name == 'pull_request' }} workflow-slug: ci default-timeout: 180 + fips: true photonos-5: name: Photon OS 5 Test @@ -2544,6 +2527,7 @@ jobs: skip-junit-reports: ${{ github.event_name == 'pull_request' }} workflow-slug: ci default-timeout: 180 + fips: true photonos-5-arm64: name: Photon OS 5 Arm64 Test @@ -2566,6 +2550,7 @@ jobs: skip-junit-reports: ${{ github.event_name == 'pull_request' }} workflow-slug: ci default-timeout: 180 + fips: true ubuntu-2004: name: Ubuntu 20.04 Test @@ -2655,29 +2640,6 @@ jobs: workflow-slug: ci default-timeout: 180 - photonos-4-fips: - name: Photon OS 4 Test(FIPS) - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - photonos-4-ci-deps - uses: ./.github/workflows/test-action.yml - with: - distro-slug: photonos-4 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - nox-version: 2022.8.7 - python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} - 
skip-junit-reports: ${{ github.event_name == 'pull_request' }} - workflow-slug: ci - default-timeout: 180 - fips: true - combine-all-code-coverage: name: Combine Code Coverage if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }} @@ -2755,7 +2717,6 @@ jobs: - ubuntu-2004-arm64 - ubuntu-2204 - ubuntu-2204-arm64 - - photonos-4-fips steps: - uses: actions/checkout@v4 @@ -2952,7 +2913,6 @@ jobs: - ubuntu-2004-arm64 - ubuntu-2204 - ubuntu-2204-arm64 - - photonos-4-fips - amazonlinux-2-pkg-tests - amazonlinux-2-arm64-pkg-tests - amazonlinux-2023-pkg-tests @@ -2971,7 +2931,6 @@ jobs: - photonos-4-arm64-pkg-tests - photonos-5-pkg-tests - photonos-5-arm64-pkg-tests - - photonos-4-pkg-tests-fips - ubuntu-2004-pkg-tests - ubuntu-2004-arm64-pkg-tests - ubuntu-2204-pkg-tests diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index cdb0f2ef654..00fec20dc09 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -1680,6 +1680,7 @@ jobs: skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true photonos-4-arm64-pkg-tests: name: Photon OS 4 Arm64 Package Test @@ -1702,6 +1703,7 @@ jobs: skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true photonos-5-pkg-tests: name: Photon OS 5 Package Test @@ -1724,9 +1726,10 @@ jobs: skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true photonos-5-arm64-pkg-tests: - name: Photon OS 4 Arm64 Package Test + name: Photon OS 5 Arm64 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -1746,28 +1749,6 @@ jobs: skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ 
needs.prepare-workflow.outputs.testing-releases }} - - photonos-4-pkg-tests-fips: - name: Photon OS 4 Package Test(FIPS) - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-rpm-pkgs-onedir - - photonos-4-ci-deps - uses: ./.github/workflows/test-packages-action.yml - with: - distro-slug: photonos-4 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: false - skip-junit-reports: false - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} fips: true ubuntu-2004-pkg-tests: @@ -2561,6 +2542,7 @@ jobs: skip-junit-reports: false workflow-slug: nightly default-timeout: 360 + fips: true photonos-4-arm64: name: Photon OS 4 Arm64 Test @@ -2583,6 +2565,7 @@ jobs: skip-junit-reports: false workflow-slug: nightly default-timeout: 360 + fips: true photonos-5: name: Photon OS 5 Test @@ -2605,6 +2588,7 @@ jobs: skip-junit-reports: false workflow-slug: nightly default-timeout: 360 + fips: true photonos-5-arm64: name: Photon OS 5 Arm64 Test @@ -2627,6 +2611,7 @@ jobs: skip-junit-reports: false workflow-slug: nightly default-timeout: 360 + fips: true ubuntu-2004: name: Ubuntu 20.04 Test @@ -2716,29 +2701,6 @@ jobs: workflow-slug: nightly default-timeout: 360 - photonos-4-fips: - name: Photon OS 4 Test(FIPS) - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - photonos-4-ci-deps - uses: ./.github/workflows/test-action.yml - with: - distro-slug: photonos-4 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - nox-version: 2022.8.7 - python-version: "3.10" - testrun: ${{ 
needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: false - skip-junit-reports: false - workflow-slug: nightly - default-timeout: 360 - fips: true - combine-all-code-coverage: name: Combine Code Coverage if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }} @@ -2816,7 +2778,6 @@ jobs: - ubuntu-2004-arm64 - ubuntu-2204 - ubuntu-2204-arm64 - - photonos-4-fips steps: - uses: actions/checkout@v4 @@ -3712,7 +3673,6 @@ jobs: - ubuntu-2004-arm64 - ubuntu-2204 - ubuntu-2204-arm64 - - photonos-4-fips steps: - uses: actions/checkout@v4 @@ -3792,7 +3752,6 @@ jobs: - photonos-4-arm64-pkg-tests - photonos-5-pkg-tests - photonos-5-arm64-pkg-tests - - photonos-4-pkg-tests-fips - ubuntu-2004-pkg-tests - ubuntu-2004-arm64-pkg-tests - ubuntu-2204-pkg-tests diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index af0b7200770..47e76e06312 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -1653,6 +1653,7 @@ jobs: skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true photonos-4-arm64-pkg-tests: name: Photon OS 4 Arm64 Package Test @@ -1675,6 +1676,7 @@ jobs: skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true photonos-5-pkg-tests: name: Photon OS 5 Package Test @@ -1697,9 +1699,10 @@ jobs: skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true photonos-5-arm64-pkg-tests: - name: Photon OS 4 Arm64 Package Test + name: Photon OS 5 Arm64 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: 
- prepare-workflow @@ -1719,28 +1722,6 @@ jobs: skip-code-coverage: false skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - - photonos-4-pkg-tests-fips: - name: Photon OS 4 Package Test(FIPS) - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-rpm-pkgs-onedir - - photonos-4-ci-deps - uses: ./.github/workflows/test-packages-action.yml - with: - distro-slug: photonos-4 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: false - skip-junit-reports: false - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} fips: true ubuntu-2004-pkg-tests: @@ -2534,6 +2515,7 @@ jobs: skip-junit-reports: false workflow-slug: scheduled default-timeout: 360 + fips: true photonos-4-arm64: name: Photon OS 4 Arm64 Test @@ -2556,6 +2538,7 @@ jobs: skip-junit-reports: false workflow-slug: scheduled default-timeout: 360 + fips: true photonos-5: name: Photon OS 5 Test @@ -2578,6 +2561,7 @@ jobs: skip-junit-reports: false workflow-slug: scheduled default-timeout: 360 + fips: true photonos-5-arm64: name: Photon OS 5 Arm64 Test @@ -2600,6 +2584,7 @@ jobs: skip-junit-reports: false workflow-slug: scheduled default-timeout: 360 + fips: true ubuntu-2004: name: Ubuntu 20.04 Test @@ -2689,29 +2674,6 @@ jobs: workflow-slug: scheduled default-timeout: 360 - photonos-4-fips: - name: Photon OS 4 Test(FIPS) - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - photonos-4-ci-deps - uses: ./.github/workflows/test-action.yml - with: - distro-slug: photonos-4 - 
nox-session: ci-test-onedir - platform: linux - arch: x86_64 - nox-version: 2022.8.7 - python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: false - skip-junit-reports: false - workflow-slug: scheduled - default-timeout: 360 - fips: true - combine-all-code-coverage: name: Combine Code Coverage if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }} @@ -2789,7 +2751,6 @@ jobs: - ubuntu-2004-arm64 - ubuntu-2204 - ubuntu-2204-arm64 - - photonos-4-fips steps: - uses: actions/checkout@v4 @@ -2988,7 +2949,6 @@ jobs: - ubuntu-2004-arm64 - ubuntu-2204 - ubuntu-2204-arm64 - - photonos-4-fips - amazonlinux-2-pkg-tests - amazonlinux-2-arm64-pkg-tests - amazonlinux-2023-pkg-tests @@ -3007,7 +2967,6 @@ jobs: - photonos-4-arm64-pkg-tests - photonos-5-pkg-tests - photonos-5-arm64-pkg-tests - - photonos-4-pkg-tests-fips - ubuntu-2004-pkg-tests - ubuntu-2004-arm64-pkg-tests - ubuntu-2204-pkg-tests diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index c894a7fdcf4..c2a5ac7469d 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -1675,6 +1675,7 @@ jobs: skip-code-coverage: true skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true photonos-4-arm64-pkg-tests: name: Photon OS 4 Arm64 Package Test @@ -1697,6 +1698,7 @@ jobs: skip-code-coverage: true skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true photonos-5-pkg-tests: name: Photon OS 5 Package Test @@ -1719,9 +1721,10 @@ jobs: skip-code-coverage: true skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true photonos-5-arm64-pkg-tests: - name: Photon OS 4 Arm64 Package Test + name: Photon OS 
5 Arm64 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -1741,28 +1744,6 @@ jobs: skip-code-coverage: true skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - - photonos-4-pkg-tests-fips: - name: Photon OS 4 Package Test(FIPS) - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - build-rpm-pkgs-onedir - - photonos-4-ci-deps - uses: ./.github/workflows/test-packages-action.yml - with: - distro-slug: photonos-4 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - nox-version: 2022.8.7 - python-version: "3.10" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: true - skip-junit-reports: true - testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} fips: true ubuntu-2004-pkg-tests: @@ -2556,6 +2537,7 @@ jobs: skip-junit-reports: true workflow-slug: staging default-timeout: 180 + fips: true photonos-4-arm64: name: Photon OS 4 Arm64 Test @@ -2578,6 +2560,7 @@ jobs: skip-junit-reports: true workflow-slug: staging default-timeout: 180 + fips: true photonos-5: name: Photon OS 5 Test @@ -2600,6 +2583,7 @@ jobs: skip-junit-reports: true workflow-slug: staging default-timeout: 180 + fips: true photonos-5-arm64: name: Photon OS 5 Arm64 Test @@ -2622,6 +2606,7 @@ jobs: skip-junit-reports: true workflow-slug: staging default-timeout: 180 + fips: true ubuntu-2004: name: Ubuntu 20.04 Test @@ -2711,29 +2696,6 @@ jobs: workflow-slug: staging default-timeout: 180 - photonos-4-fips: - name: Photon OS 4 Test(FIPS) - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} - needs: - - prepare-workflow - - photonos-4-ci-deps - uses: ./.github/workflows/test-action.yml - with: - distro-slug: photonos-4 - nox-session: ci-test-onedir - platform: linux - arch: x86_64 - nox-version: 2022.8.7 - python-version: "3.10" - testrun: ${{ needs.prepare-workflow.outputs.testrun }} - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - skip-code-coverage: true - skip-junit-reports: true - workflow-slug: staging - default-timeout: 180 - fips: true - build-src-repo: name: Build Repository environment: staging @@ -3690,7 +3652,6 @@ jobs: - ubuntu-2004-arm64 - ubuntu-2204 - ubuntu-2204-arm64 - - photonos-4-fips - amazonlinux-2-pkg-tests - amazonlinux-2-arm64-pkg-tests - amazonlinux-2023-pkg-tests @@ -3709,7 +3670,6 @@ jobs: - photonos-4-arm64-pkg-tests - photonos-5-pkg-tests - photonos-5-arm64-pkg-tests - - photonos-4-pkg-tests-fips - ubuntu-2004-pkg-tests - ubuntu-2004-arm64-pkg-tests - ubuntu-2204-pkg-tests diff --git a/.github/workflows/templates/test-salt-pkg.yml.jinja b/.github/workflows/templates/test-salt-pkg.yml.jinja index 43b736d5414..eb8b43d071f 100644 --- a/.github/workflows/templates/test-salt-pkg.yml.jinja +++ b/.github/workflows/templates/test-salt-pkg.yml.jinja @@ -1,13 +1,9 @@ <%- for slug, display_name, arch, pkg_type, fips in test_salt_pkg_listing["linux"] %> - <%- if fips == "fips" %> - <%- set job_name = "{}-pkg-tests-fips".format(slug.replace(".", "")) %> - <%- else %> - <%- set job_name = "{}-pkg-tests".format(slug.replace(".", "")) %> - <%- endif %> + <%- set job_name = "{}-pkg-tests".format(slug.replace(".", "")) %> <{ job_name }>: <%- do test_salt_pkg_needs.append(job_name) %> - name: <{ display_name }> Package Test<% if fips == "fips" %>(FIPS)<% endif %> + name: <{ display_name }> Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow diff --git a/.github/workflows/templates/test-salt.yml.jinja b/.github/workflows/templates/test-salt.yml.jinja index a84d7e25aad..8e9ec9effde 100644 --- a/.github/workflows/templates/test-salt.yml.jinja +++ b/.github/workflows/templates/test-salt.yml.jinja @@ -59,17 +59,11 @@ <%- endfor %> - <%- for slug, display_name, arch, fips in test_salt_listing["linux"] %> - <%- if fips %> - <%- set job_name = slug + "-fips" %> - <%- else %> - <%- set job_name = slug %> - <%- endif %> - <{ job_name.replace(".", "") }>: - <%- do test_salt_needs.append(job_name.replace(".", "")) %> - name: <{ display_name }> Test<% if fips %>(FIPS)<% endif %> + <{ slug.replace(".", "") }>: + <%- do test_salt_needs.append(slug.replace(".", "")) %> + name: <{ display_name }> Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow @@ -89,7 +83,7 @@ skip-junit-reports: <{ skip_junit_reports_check }> workflow-slug: <{ workflow_slug }> default-timeout: <{ timeout_value }> - <%- if fips %> + <%- if fips == "fips" %> fips: true <%- endif %> diff --git a/.github/workflows/test-action.yml b/.github/workflows/test-action.yml index 3db429ae34a..b60a17af0a6 100644 --- a/.github/workflows/test-action.yml +++ b/.github/workflows/test-action.yml @@ -105,7 +105,7 @@ jobs: - name: Generate Test Matrix id: generate-matrix run: | - tools ci matrix --workflow=${{ inputs.workflow-slug }} ${{ fromJSON(inputs.testrun)['type'] == 'full' && '--full ' || '' }}${{ inputs.distro-slug }} + tools ci matrix --workflow=${{ inputs.workflow-slug }} ${{ fromJSON(inputs.testrun)['type'] == 'full' && '--full ' || '' }}${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} test: name: Test @@ -212,7 +212,7 @@ jobs: if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] == false 
}} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ - --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ + --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ ${{ matrix.tests-chunk }} -- --suppress-no-test-exit-code \ --from-filenames=testrun-changed-files.txt @@ -221,7 +221,7 @@ jobs: if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] == false }} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ - --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ + --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ ${{ matrix.tests-chunk }} -- --no-fast-tests --slow-tests --suppress-no-test-exit-code \ --from-filenames=testrun-changed-files.txt @@ -230,7 +230,7 @@ jobs: if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] == false }} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ - --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ + --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ ${{ matrix.tests-chunk }} -- --no-fast-tests --core-tests --suppress-no-test-exit-code \ --from-filenames=testrun-changed-files.txt @@ -240,14 +240,14 @@ jobs: run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ 
--nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ (inputs.skip-code-coverage && matrix.tests-chunk != 'unit') && '--skip-code-coverage' || '' }} \ - ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} ${{ matrix.tests-chunk }} + ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} ${{ matrix.tests-chunk }} - name: Run Slow Tests id: run-slow-tests if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] }} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ - --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ + --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ ${{ matrix.tests-chunk }} -- --no-fast-tests --slow-tests - name: Run Core Tests @@ -255,7 +255,7 @@ jobs: if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] }} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ - --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ + --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ ${{ matrix.tests-chunk }} -- --no-fast-tests --core-tests - name: Run Flaky Tests @@ -263,7 +263,7 @@ jobs: if: ${{ fromJSON(inputs.testrun)['selected_tests']['flaky'] }} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ - --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ + --nox-session=${{ inputs.nox-session }} 
--rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ ${{ matrix.tests-chunk }} -- --no-fast-tests --flaky-jail - name: Run Full Tests @@ -272,7 +272,7 @@ jobs: run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ (inputs.skip-code-coverage && matrix.tests-chunk != 'unit') && '--skip-code-coverage' || '' }} \ - -E TEST_GROUP ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} ${{ matrix.tests-chunk }} -- --slow-tests --core-tests \ + -E TEST_GROUP ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} ${{ matrix.tests-chunk }} -- --slow-tests --core-tests \ --test-group-count=${{ matrix.test-group-count || 1 }} --test-group=${{ matrix.test-group || 1 }} - name: Combine Coverage Reports diff --git a/.github/workflows/test-packages-action-macos.yml b/.github/workflows/test-packages-action-macos.yml index 41048bd4e34..378adf90d1c 100644 --- a/.github/workflows/test-packages-action-macos.yml +++ b/.github/workflows/test-packages-action-macos.yml @@ -172,7 +172,7 @@ jobs: GITHUB_ACTIONS_PIPELINE: "1" SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" run: | - sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.test-chunk }} \ + sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }} \ ${{ matrix.version && format('--prev-version {0}', matrix.version) || ''}} - name: Run Package Tests @@ -186,7 +186,7 @@ jobs: SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" COVERAGE_CONTEXT: ${{ inputs.distro-slug }} run: | - sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.test-chunk }} \ + sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }} \ ${{ matrix.version && format('--prev-version {0}', matrix.version) || ''}} - name: Fix file ownership @@ -206,7 +206,7 @@ jobs: if: always() uses: 
actions/upload-artifact@v3 with: - name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.test-chunk }} + name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.tests-chunk }} path: | artifacts !artifacts/salt/* @@ -232,7 +232,7 @@ jobs: id: download-test-run-artifacts uses: actions/download-artifact@v3 with: - name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.test-chunk }} + name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.tests-chunk }} path: artifacts - name: Show Test Run Artifacts @@ -254,6 +254,6 @@ jobs: # always run even if the previous steps fails if: always() && inputs.skip-junit-reports == false && steps.download-test-run-artifacts.outcome == 'success' with: - check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ matrix.test-chunk }}) + check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ matrix.tests-chunk }}) report_paths: 'artifacts/xml-unittests-output/*.xml' annotate_only: true diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index 9c8a210ce00..0f80439d36d 100644 --- a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -99,7 +99,8 @@ jobs: - name: Generate Package Test Matrix id: generate-pkg-matrix run: | - tools ci pkg-matrix ${{ inputs.distro-slug }} ${{ inputs.pkg-type }} --testing-releases ${{ join(fromJSON(inputs.testing-releases), ' ') }} + tools ci pkg-matrix ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ + ${{ inputs.pkg-type }} --testing-releases ${{ join(fromJSON(inputs.testing-releases), ' ') }} test: @@ -186,7 +187,7 @@ jobs: tools --timestamps vm decompress-dependencies ${{ inputs.distro-slug }} - name: Downgrade importlib-metadata - if: ${{ contains(fromJSON('["amazonlinux-2", "centos-7", "debian-10"]'), inputs.distro-slug) && contains(fromJSON('["upgrade-classic", "downgrade-classic"]'), matrix.test-chunk) }} + if: ${{ 
contains(fromJSON('["amazonlinux-2", "centos-7", "debian-10"]'), inputs.distro-slug) && contains(fromJSON('["upgrade-classic", "downgrade-classic"]'), matrix.tests-chunk) }} run: | # This step can go away once we stop testing classic packages upgrade/downgrades to/from 3005.x tools --timestamps vm ssh ${{ inputs.distro-slug }} -- "sudo python3 -m pip install -U 'importlib-metadata<=4.13.0' 'virtualenv<=20.21.1'" @@ -194,12 +195,13 @@ jobs: - name: Show System Info & Test Plan run: | tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \ - --nox-session=${{ inputs.nox-session }}-pkgs ${{ inputs.distro-slug }} -- ${{ matrix.test-chunk }} \ + --nox-session=${{ inputs.nox-session }}-pkgs ${{ inputs.distro-slug }} -- ${{ matrix.tests-chunk }} \ + ${{ matrix.version && format('--prev-version {0}', matrix.version) || ''}} - name: Run Package Tests run: | - tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ - --nox-session=${{ inputs.nox-session }}-pkgs --rerun-failures ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} -- ${{ matrix.test-chunk }} \ + tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install ${{ matrix.fips && '--fips ' || '' }}\ + --nox-session=${{ inputs.nox-session }}-pkgs --rerun-failures ${{ inputs.distro-slug }} -- ${{ matrix.tests-chunk }} \ ${{ matrix.version && format('--prev-version {0}', matrix.version) || ''}} - name: Download Test Run Artifacts @@ -221,7 +223,7 @@ jobs: if: always() && steps.download-artifacts-from-vm.outcome == 'success' uses: actions/upload-artifact@v3 with: - name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.test-chunk }} + name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.tests-chunk }} path: | artifacts !artifacts/salt/* @@ -250,7 +252,7 @@ jobs: id: download-test-run-artifacts uses: actions/download-artifact@v3 with: - name: pkg-testrun-artifacts-${{ 
inputs.distro-slug }}-${{ matrix.test-chunk }} + name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.tests-chunk }} path: artifacts - name: Show Test Run Artifacts @@ -263,6 +265,6 @@ jobs: # always run even if the previous steps fails if: always() && inputs.skip-junit-reports == false && steps.download-test-run-artifacts.outcome == 'success' with: - check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ matrix.test-chunk }}) + check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ matrix.tests-chunk }}) report_paths: 'artifacts/xml-unittests-output/*.xml' annotate_only: true diff --git a/tools/ci.py b/tools/ci.py index 4e81f3e8411..59ef3e38db9 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -625,9 +625,18 @@ def define_testrun(ctx: Context, event_name: str, changed_files: pathlib.Path): "workflow": { "help": "Which workflow is running", }, + "fips": { + "help": "Include FIPS entries in the matrix", + }, }, ) -def matrix(ctx: Context, distro_slug: str, full: bool = False, workflow: str = "ci"): +def matrix( + ctx: Context, + distro_slug: str, + full: bool = False, + workflow: str = "ci", + fips: bool = False, +): """ Generate the test matrix. 
""" @@ -674,8 +683,22 @@ def matrix(ctx: Context, distro_slug: str, full: bool = False, workflow: str = " "test-group-count": splits, } ) + if ( + fips is True + and transport != "tcp" + and distro_slug.startswith(("photonos-4", "photonos-5")) + ): + # Repeat the last one, but with fips + _matrix.append({"fips": "fips", **_matrix[-1]}) else: _matrix.append({"transport": transport, "tests-chunk": chunk}) + if ( + fips is True + and transport != "tcp" + and distro_slug.startswith(("photonos-4", "photonos-5")) + ): + # Repeat the last one, but with fips + _matrix.append({"fips": "fips", **_matrix[-1]}) ctx.info("Generated matrix:") ctx.print(_matrix, soft_wrap=True) @@ -701,6 +724,9 @@ def matrix(ctx: Context, distro_slug: str, full: bool = False, workflow: str = " "nargs": "+", "required": True, }, + "fips": { + "help": "Include FIPS entries in the matrix", + }, }, ) def pkg_matrix( @@ -708,6 +734,7 @@ def pkg_matrix( distro_slug: str, pkg_type: str, testing_releases: list[tools.utils.Version] = None, + fips: bool = False, ): """ Generate the test matrix. 
@@ -836,10 +863,14 @@ def pkg_matrix( continue _matrix.append( { - "test-chunk": session, + "tests-chunk": session, "version": version, } ) + if fips is True and distro_slug.startswith(("photonos-4", "photonos-5")): + # Repeat the last one, but with fips + _matrix.append({"fips": "fips", **_matrix[-1]}) + ctx.info("Generated matrix:") ctx.print(_matrix, soft_wrap=True) diff --git a/tools/pre_commit.py b/tools/pre_commit.py index fad8ed6d2bc..9819b0717c0 100644 --- a/tools/pre_commit.py +++ b/tools/pre_commit.py @@ -86,34 +86,34 @@ def generate_workflows(ctx: Context): } test_salt_listing = { "linux": [ - ("almalinux-8", "Alma Linux 8", "x86_64"), - ("almalinux-9", "Alma Linux 9", "x86_64"), - ("amazonlinux-2", "Amazon Linux 2", "x86_64"), - ("amazonlinux-2-arm64", "Amazon Linux 2 Arm64", "aarch64"), - ("amazonlinux-2023", "Amazon Linux 2023", "x86_64"), - ("amazonlinux-2023-arm64", "Amazon Linux 2023 Arm64", "aarch64"), - ("archlinux-lts", "Arch Linux LTS", "x86_64"), - ("centos-7", "CentOS 7", "x86_64"), - ("centosstream-8", "CentOS Stream 8", "x86_64"), - ("centosstream-9", "CentOS Stream 9", "x86_64"), - ("debian-10", "Debian 10", "x86_64"), - ("debian-11", "Debian 11", "x86_64"), - ("debian-11-arm64", "Debian 11 Arm64", "aarch64"), - ("debian-12", "Debian 12", "x86_64"), - ("debian-12-arm64", "Debian 12 Arm64", "aarch64"), - ("fedora-37", "Fedora 37", "x86_64"), - ("fedora-38", "Fedora 38", "x86_64"), - ("opensuse-15", "Opensuse 15", "x86_64"), - ("photonos-3", "Photon OS 3", "x86_64"), - ("photonos-3-arm64", "Photon OS 3 Arm64", "aarch64"), - ("photonos-4", "Photon OS 4", "x86_64"), - ("photonos-4-arm64", "Photon OS 4 Arm64", "aarch64"), - ("photonos-5", "Photon OS 5", "x86_64"), - ("photonos-5-arm64", "Photon OS 5 Arm64", "aarch64"), - ("ubuntu-20.04", "Ubuntu 20.04", "x86_64"), - ("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64"), - ("ubuntu-22.04", "Ubuntu 22.04", "x86_64"), - ("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64"), + ("almalinux-8", 
"Alma Linux 8", "x86_64", "no-fips"), + ("almalinux-9", "Alma Linux 9", "x86_64", "no-fips"), + ("amazonlinux-2", "Amazon Linux 2", "x86_64", "no-fips"), + ("amazonlinux-2-arm64", "Amazon Linux 2 Arm64", "aarch64", "no-fips"), + ("amazonlinux-2023", "Amazon Linux 2023", "x86_64", "no-fips"), + ("amazonlinux-2023-arm64", "Amazon Linux 2023 Arm64", "aarch64", "no-fips"), + ("archlinux-lts", "Arch Linux LTS", "x86_64", "no-fips"), + ("centos-7", "CentOS 7", "x86_64", "no-fips"), + ("centosstream-8", "CentOS Stream 8", "x86_64", "no-fips"), + ("centosstream-9", "CentOS Stream 9", "x86_64", "no-fips"), + ("debian-10", "Debian 10", "x86_64", "no-fips"), + ("debian-11", "Debian 11", "x86_64", "no-fips"), + ("debian-11-arm64", "Debian 11 Arm64", "aarch64", "no-fips"), + ("debian-12", "Debian 12", "x86_64", "no-fips"), + ("debian-12-arm64", "Debian 12 Arm64", "aarch64", "no-fips"), + ("fedora-37", "Fedora 37", "x86_64", "no-fips"), + ("fedora-38", "Fedora 38", "x86_64", "no-fips"), + ("opensuse-15", "Opensuse 15", "x86_64", "no-fips"), + ("photonos-3", "Photon OS 3", "x86_64", "no-fips"), + ("photonos-3-arm64", "Photon OS 3 Arm64", "aarch64", "no-fips"), + ("photonos-4", "Photon OS 4", "x86_64", "fips"), + ("photonos-4-arm64", "Photon OS 4 Arm64", "aarch64", "fips"), + ("photonos-5", "Photon OS 5", "x86_64", "fips"), + ("photonos-5-arm64", "Photon OS 5 Arm64", "aarch64", "fips"), + ("ubuntu-20.04", "Ubuntu 20.04", "x86_64", "no-fips"), + ("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64", "no-fips"), + ("ubuntu-22.04", "Ubuntu 22.04", "x86_64", "no-fips"), + ("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64", "no-fips"), ], "macos": [ ("macos-12", "macOS 12", "x86_64"), @@ -124,12 +124,6 @@ def generate_workflows(ctx: Context): ("windows-2022", "Windows 2022", "amd64"), ], } - for idx, (slug, display_name, arch) in enumerate(test_salt_listing["linux"][:]): - fips = False - test_salt_listing["linux"][idx] = (slug, display_name, arch, fips) # type: ignore[assignment] 
- if slug == "photonos-4": - fips = True - test_salt_listing["linux"].append((slug, display_name, arch, fips)) # type: ignore[arg-type] test_salt_pkg_listing = { "linux": ( @@ -159,11 +153,10 @@ def generate_workflows(ctx: Context): ("debian-12-arm64", "Debian 12 Arm64", "aarch64", "deb", "no-fips"), ("photonos-3", "Photon OS 3", "x86_64", "rpm", "no-fips"), ("photonos-3-arm64", "Photon OS 3 Arm64", "aarch64", "rpm", "no-fips"), - ("photonos-4", "Photon OS 4", "x86_64", "rpm", "no-fips"), - ("photonos-4-arm64", "Photon OS 4 Arm64", "aarch64", "rpm", "no-fips"), - ("photonos-5", "Photon OS 5", "x86_64", "rpm", "no-fips"), - ("photonos-5-arm64", "Photon OS 4 Arm64", "aarch64", "rpm", "no-fips"), ("photonos-4", "Photon OS 4", "x86_64", "rpm", "fips"), + ("photonos-4-arm64", "Photon OS 4 Arm64", "aarch64", "rpm", "fips"), + ("photonos-5", "Photon OS 5", "x86_64", "rpm", "fips"), + ("photonos-5-arm64", "Photon OS 5 Arm64", "aarch64", "rpm", "fips"), ("ubuntu-20.04", "Ubuntu 20.04", "x86_64", "deb", "no-fips"), ("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64", "deb", "no-fips"), ("ubuntu-22.04", "Ubuntu 22.04", "x86_64", "deb", "no-fips"), From d9a2ae0d3e10269196b9c44ab3af7c5f5c744258 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 19 Oct 2023 15:49:38 +0100 Subject: [PATCH 093/196] More explicit variable name Signed-off-by: Pedro Algarvio --- .github/workflows/ci.yml | 64 +++++++++---------- .github/workflows/nightly.yml | 64 +++++++++---------- .github/workflows/scheduled.yml | 64 +++++++++---------- .github/workflows/staging.yml | 64 +++++++++---------- .../workflows/templates/test-salt.yml.jinja | 6 +- .github/workflows/test-action-macos.yml | 12 ++-- .github/workflows/test-action.yml | 4 +- 7 files changed, 139 insertions(+), 139 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 18ca2dd740a..902076cea4e 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1945,7 +1945,7 @@ jobs: platform: windows 
arch: amd64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -1967,7 +1967,7 @@ jobs: platform: windows arch: amd64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -1989,7 +1989,7 @@ jobs: platform: windows arch: amd64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2011,7 +2011,7 @@ jobs: platform: darwin arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2033,7 +2033,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2055,7 +2055,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2077,7 +2077,7 @@ jobs: platform: linux 
arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2099,7 +2099,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2121,7 +2121,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2143,7 +2143,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2165,7 +2165,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2187,7 +2187,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2209,7 +2209,7 @@ jobs: platform: linux 
arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2231,7 +2231,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2253,7 +2253,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2275,7 +2275,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2297,7 +2297,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2319,7 +2319,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2341,7 +2341,7 @@ jobs: platform: linux 
arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2363,7 +2363,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2385,7 +2385,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2407,7 +2407,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2429,7 +2429,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2451,7 +2451,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2473,7 +2473,7 @@ jobs: platform: linux 
arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2496,7 +2496,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2519,7 +2519,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2542,7 +2542,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2565,7 +2565,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2587,7 +2587,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2609,7 +2609,7 @@ jobs: platform: 
linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2631,7 +2631,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 00fec20dc09..d3c963f61e1 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -2006,7 +2006,7 @@ jobs: platform: windows arch: amd64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2028,7 +2028,7 @@ jobs: platform: windows arch: amd64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2050,7 +2050,7 @@ jobs: platform: windows arch: amd64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2072,7 +2072,7 @@ jobs: platform: darwin arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ 
needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2094,7 +2094,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2116,7 +2116,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2138,7 +2138,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2160,7 +2160,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2182,7 +2182,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2204,7 +2204,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ 
needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2226,7 +2226,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2248,7 +2248,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2270,7 +2270,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2292,7 +2292,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2314,7 +2314,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2336,7 +2336,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ 
needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2358,7 +2358,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2380,7 +2380,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2402,7 +2402,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2424,7 +2424,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2446,7 +2446,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2468,7 +2468,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ 
needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2490,7 +2490,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2512,7 +2512,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2534,7 +2534,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2557,7 +2557,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2580,7 +2580,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2603,7 +2603,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: 
${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2626,7 +2626,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2648,7 +2648,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2670,7 +2670,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2692,7 +2692,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index 47e76e06312..a093a8fdfa8 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -1979,7 +1979,7 @@ jobs: platform: windows arch: amd64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2001,7 +2001,7 @@ jobs: platform: windows arch: amd64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2023,7 +2023,7 @@ jobs: platform: windows arch: amd64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2045,7 +2045,7 @@ jobs: platform: darwin arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2067,7 +2067,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2089,7 +2089,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2111,7 +2111,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: 
${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2133,7 +2133,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2155,7 +2155,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2177,7 +2177,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2199,7 +2199,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2221,7 +2221,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2243,7 +2243,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: 
${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2265,7 +2265,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2287,7 +2287,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2309,7 +2309,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2331,7 +2331,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2353,7 +2353,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2375,7 +2375,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: 
${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2397,7 +2397,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2419,7 +2419,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2441,7 +2441,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2463,7 +2463,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2485,7 +2485,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2507,7 +2507,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: 
${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2530,7 +2530,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2553,7 +2553,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2576,7 +2576,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2599,7 +2599,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2621,7 +2621,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2643,7 +2643,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" 
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2665,7 +2665,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index c2a5ac7469d..c89eebc1032 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -2001,7 +2001,7 @@ jobs: platform: windows arch: amd64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2023,7 +2023,7 @@ jobs: platform: windows arch: amd64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2045,7 +2045,7 @@ jobs: platform: windows arch: amd64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2067,7 +2067,7 @@ jobs: platform: darwin arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2089,7 +2089,7 @@ jobs: platform: linux arch: x86_64 nox-version: 
2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2111,7 +2111,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2133,7 +2133,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2155,7 +2155,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2177,7 +2177,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2199,7 +2199,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2221,7 +2221,7 @@ jobs: platform: linux arch: x86_64 nox-version: 
2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2243,7 +2243,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2265,7 +2265,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2287,7 +2287,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2309,7 +2309,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2331,7 +2331,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2353,7 +2353,7 @@ jobs: platform: linux arch: aarch64 nox-version: 
2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2375,7 +2375,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2397,7 +2397,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2419,7 +2419,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2441,7 +2441,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2463,7 +2463,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2485,7 +2485,7 @@ jobs: platform: linux arch: x86_64 nox-version: 
2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2507,7 +2507,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2529,7 +2529,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2552,7 +2552,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2575,7 +2575,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2598,7 +2598,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2621,7 +2621,7 @@ jobs: platform: linux arch: x86_64 
nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2643,7 +2643,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2665,7 +2665,7 @@ jobs: platform: linux arch: x86_64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 @@ -2687,7 +2687,7 @@ jobs: platform: linux arch: aarch64 nox-version: 2022.8.7 - python-version: "3.10" + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 diff --git a/.github/workflows/templates/test-salt.yml.jinja b/.github/workflows/templates/test-salt.yml.jinja index 8e9ec9effde..e99773276aa 100644 --- a/.github/workflows/templates/test-salt.yml.jinja +++ b/.github/workflows/templates/test-salt.yml.jinja @@ -20,7 +20,7 @@ platform: windows arch: amd64 nox-version: <{ nox_version }> - python-version: "<{ gh_actions_workflows_python_version }>" + gh-actions-python-version: "<{ gh_actions_workflows_python_version }>" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }> @@ -48,7 +48,7 @@ platform: darwin arch: 
x86_64 nox-version: <{ nox_version }> - python-version: "<{ gh_actions_workflows_python_version }>" + gh-actions-python-version: "<{ gh_actions_workflows_python_version }>" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }> @@ -75,7 +75,7 @@ platform: linux arch: <{ arch }> nox-version: <{ nox_version }> - python-version: "<{ gh_actions_workflows_python_version }>" + gh-actions-python-version: "<{ gh_actions_workflows_python_version }>" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }> diff --git a/.github/workflows/test-action-macos.yml b/.github/workflows/test-action-macos.yml index 6eb610302c0..383bc3efe44 100644 --- a/.github/workflows/test-action-macos.yml +++ b/.github/workflows/test-action-macos.yml @@ -16,7 +16,7 @@ on: required: true type: string description: JSON string containing information about what and how to run the test suite - python-version: + gh-actions-python-version: required: false type: string description: The python version to run tests with @@ -147,16 +147,16 @@ jobs: uses: actions/cache@v3 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ + key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.gh-actions-python-version }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }} # If we get a cache miss here it means the dependencies step failed to save the cache fail-on-cache-miss: true - - name: Set up Python ${{ inputs.python-version }} + - name: Set up Python ${{ 
inputs.gh-actions-python-version }} uses: actions/setup-python@v4 with: - python-version: "${{ inputs.python-version }}" + python-version: "${{ inputs.gh-actions-python-version }}" - name: Install Nox run: | @@ -401,10 +401,10 @@ jobs: run: | tree -a artifacts - - name: Set up Python ${{ inputs.python-version }} + - name: Set up Python ${{ inputs.gh-actions-python-version }} uses: actions/setup-python@v4 with: - python-version: "${{ inputs.python-version }}" + python-version: "${{ inputs.gh-actions-python-version }}" - name: Install Nox run: | diff --git a/.github/workflows/test-action.yml b/.github/workflows/test-action.yml index b60a17af0a6..706f4a0d6b5 100644 --- a/.github/workflows/test-action.yml +++ b/.github/workflows/test-action.yml @@ -36,7 +36,7 @@ on: required: true type: string description: The nox version to install - python-version: + gh-actions-python-version: required: false type: string description: The python version to run tests with @@ -157,7 +157,7 @@ jobs: uses: actions/cache@v3 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ + key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.gh-actions-python-version }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }} # If we get a cache miss here it means the dependencies step failed to save the cache From 3f3154ed4103af93d12f177f0566cce6cf83c213 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Fri, 20 Oct 2023 06:24:47 +0100 Subject: [PATCH 094/196] Use ``sha256`` as the default ``hash_type``. 
It has been the default since Salt v2016.9 Signed-off-by: Pedro Algarvio --- changelog/65287.fixed.md | 1 + salt/config/__init__.py | 6 ++++-- salt/fileclient.py | 9 +++++---- salt/fileserver/hgfs.py | 4 ++-- salt/fileserver/svnfs.py | 3 ++- salt/modules/guestfs.py | 3 ++- salt/modules/test.py | 3 ++- salt/modules/timezone.py | 3 ++- salt/netapi/rest_tornado/__init__.py | 5 ++++- salt/pillar/hg_pillar.py | 3 ++- salt/tokens/localfs.py | 3 ++- salt/tokens/rediscluster.py | 4 ++-- salt/utils/extmods.py | 3 ++- salt/utils/gitfs.py | 4 ++-- .../netapi/rest_tornado/test_websockets_handler.py | 5 ++++- 15 files changed, 38 insertions(+), 21 deletions(-) create mode 100644 changelog/65287.fixed.md diff --git a/changelog/65287.fixed.md b/changelog/65287.fixed.md new file mode 100644 index 00000000000..e075d251820 --- /dev/null +++ b/changelog/65287.fixed.md @@ -0,0 +1 @@ +Use ``sha256`` as the default ``hash_type``. It has been the default since Salt v2016.9 diff --git a/salt/config/__init__.py b/salt/config/__init__.py index d3478340bb6..f946bc7f010 100644 --- a/salt/config/__init__.py +++ b/salt/config/__init__.py @@ -49,6 +49,8 @@ log = logging.getLogger(__name__) _DFLT_REFSPECS = ["+refs/heads/*:refs/remotes/origin/*", "+refs/tags/*:refs/tags/*"] DEFAULT_INTERVAL = 60 +DEFAULT_HASH_TYPE = "sha256" + if salt.utils.platform.is_windows(): # Since an 'ipc_mode' of 'ipc' will never work on Windows due to lack of @@ -1139,7 +1141,7 @@ DEFAULT_MINION_OPTS = immutabletypes.freeze( "gitfs_refspecs": _DFLT_REFSPECS, "gitfs_disable_saltenv_mapping": False, "unique_jid": False, - "hash_type": "sha256", + "hash_type": DEFAULT_HASH_TYPE, "optimization_order": [0, 1, 2], "disable_modules": [], "disable_returners": [], @@ -1464,7 +1466,7 @@ DEFAULT_MASTER_OPTS = immutabletypes.freeze( "fileserver_ignoresymlinks": False, "fileserver_verify_config": True, "max_open_files": 100000, - "hash_type": "sha256", + "hash_type": DEFAULT_HASH_TYPE, "optimization_order": [0, 1, 2], "conf_file": 
os.path.join(salt.syspaths.CONFIG_DIR, "master"), "open_mode": False, diff --git a/salt/fileclient.py b/salt/fileclient.py index 0114eae21ea..b7966b2029b 100644 --- a/salt/fileclient.py +++ b/salt/fileclient.py @@ -32,6 +32,7 @@ import salt.utils.templates import salt.utils.url import salt.utils.verify import salt.utils.versions +from salt.config import DEFAULT_HASH_TYPE from salt.exceptions import CommandExecutionError, MinionError, SaltClientError from salt.ext.tornado.httputil import ( HTTPHeaders, @@ -1053,7 +1054,7 @@ class PillarClient(Client): # Local file path fnd_path = fnd - hash_type = self.opts.get("hash_type", "md5") + hash_type = self.opts.get("hash_type", DEFAULT_HASH_TYPE) ret["hsum"] = salt.utils.hashutils.get_hash(fnd_path, form=hash_type) ret["hash_type"] = hash_type return ret @@ -1084,7 +1085,7 @@ class PillarClient(Client): except Exception: # pylint: disable=broad-except fnd_stat = None - hash_type = self.opts.get("hash_type", "md5") + hash_type = self.opts.get("hash_type", DEFAULT_HASH_TYPE) ret["hsum"] = salt.utils.hashutils.get_hash(fnd_path, form=hash_type) ret["hash_type"] = hash_type return ret, fnd_stat @@ -1303,7 +1304,7 @@ class RemoteClient(Client): hsum = salt.utils.hashutils.get_hash( dest, salt.utils.stringutils.to_str( - data.get("hash_type", b"md5") + data.get("hash_type", DEFAULT_HASH_TYPE) ), ) if hsum != data["hsum"]: @@ -1417,7 +1418,7 @@ class RemoteClient(Client): return {}, None else: ret = {} - hash_type = self.opts.get("hash_type", "md5") + hash_type = self.opts.get("hash_type", DEFAULT_HASH_TYPE) ret["hsum"] = salt.utils.hashutils.get_hash(path, form=hash_type) ret["hash_type"] = hash_type return ret diff --git a/salt/fileserver/hgfs.py b/salt/fileserver/hgfs.py index baafa46bd8c..a7f548ac6a9 100644 --- a/salt/fileserver/hgfs.py +++ b/salt/fileserver/hgfs.py @@ -35,7 +35,6 @@ will set the desired branch method. 
Possible values are: ``branches``, - python bindings for mercurial (``python-hglib``) """ - import copy import errno import fnmatch @@ -54,6 +53,7 @@ import salt.utils.hashutils import salt.utils.stringutils import salt.utils.url import salt.utils.versions +from salt.config import DEFAULT_HASH_TYPE from salt.exceptions import FileserverConfigError from salt.utils.event import tagify @@ -308,7 +308,7 @@ def init(): # mountpoint not specified pass - hash_type = getattr(hashlib, __opts__.get("hash_type", "md5")) + hash_type = getattr(hashlib, __opts__.get("hash_type", DEFAULT_HASH_TYPE)) repo_hash = hash_type(repo_url.encode("utf-8")).hexdigest() rp_ = os.path.join(bp_, repo_hash) if not os.path.isdir(rp_): diff --git a/salt/fileserver/svnfs.py b/salt/fileserver/svnfs.py index c45365fafb6..48843f22e67 100644 --- a/salt/fileserver/svnfs.py +++ b/salt/fileserver/svnfs.py @@ -49,6 +49,7 @@ import salt.utils.path import salt.utils.stringutils import salt.utils.url import salt.utils.versions +from salt.config import DEFAULT_HASH_TYPE from salt.exceptions import FileserverConfigError from salt.utils.event import tagify @@ -192,7 +193,7 @@ def init(): # mountpoint not specified pass - hash_type = getattr(hashlib, __opts__.get("hash_type", "md5")) + hash_type = getattr(hashlib, __opts__.get("hash_type", DEFAULT_HASH_TYPE)) repo_hash = hash_type(repo_url).hexdigest() rp_ = os.path.join(bp_, repo_hash) if not os.path.isdir(rp_): diff --git a/salt/modules/guestfs.py b/salt/modules/guestfs.py index 1d03ab693f2..2395bd2a1c3 100644 --- a/salt/modules/guestfs.py +++ b/salt/modules/guestfs.py @@ -11,6 +11,7 @@ import tempfile import time import salt.utils.path +from salt.config import DEFAULT_HASH_TYPE log = logging.getLogger(__name__) @@ -51,7 +52,7 @@ def mount(location, access="rw", root=None): while True: if os.listdir(root): # Stuff is in there, don't use it - hash_type = getattr(hashlib, __opts__.get("hash_type", "md5")) + hash_type = getattr(hashlib, __opts__.get("hash_type", 
DEFAULT_HASH_TYPE)) rand = hash_type(os.urandom(32)).hexdigest() root = os.path.join( tempfile.gettempdir(), diff --git a/salt/modules/test.py b/salt/modules/test.py index 62d96f52118..fe4c8ec9ae1 100644 --- a/salt/modules/test.py +++ b/salt/modules/test.py @@ -18,6 +18,7 @@ import salt.utils.hashutils import salt.utils.platform import salt.utils.versions import salt.version +from salt.config import DEFAULT_HASH_TYPE from salt.utils.decorators import depends __proxyenabled__ = ["*"] @@ -528,7 +529,7 @@ def random_hash(size=9999999999, hash_type=None): salt '*' test.random_hash hash_type=sha512 """ if not hash_type: - hash_type = __opts__.get("hash_type", "md5") + hash_type = __opts__.get("hash_type", DEFAULT_HASH_TYPE) return salt.utils.hashutils.random_hash(size=size, hash_type=hash_type) diff --git a/salt/modules/timezone.py b/salt/modules/timezone.py index 8c05d42cbb4..4904c8dcc6e 100644 --- a/salt/modules/timezone.py +++ b/salt/modules/timezone.py @@ -16,6 +16,7 @@ import salt.utils.itertools import salt.utils.path import salt.utils.platform import salt.utils.stringutils +from salt.config import DEFAULT_HASH_TYPE from salt.exceptions import CommandExecutionError, SaltInvocationError log = logging.getLogger(__name__) @@ -121,7 +122,7 @@ def _get_zone_etc_localtime(): tzfile, ) # Regular file. Try to match the hash. 
- hash_type = __opts__.get("hash_type", "md5") + hash_type = __opts__.get("hash_type", DEFAULT_HASH_TYPE) tzfile_hash = salt.utils.hashutils.get_hash(tzfile, hash_type) # Not a link, just a copy of the tzdata file for root, dirs, files in salt.utils.path.os_walk(tzdir): diff --git a/salt/netapi/rest_tornado/__init__.py b/salt/netapi/rest_tornado/__init__.py index 67336d0adaa..9ab2569c822 100644 --- a/salt/netapi/rest_tornado/__init__.py +++ b/salt/netapi/rest_tornado/__init__.py @@ -3,6 +3,7 @@ import logging import os import salt.auth +from salt.config import DEFAULT_HASH_TYPE from salt.utils.versions import Version __virtualname__ = os.path.abspath(__file__).rsplit(os.sep)[-2] or "rest_tornado" @@ -59,7 +60,9 @@ def get_application(opts): from . import saltnado_websockets token_pattern = r"([0-9A-Fa-f]{{{0}}})".format( - len(getattr(hashlib, opts.get("hash_type", "md5"))().hexdigest()) + len( + getattr(hashlib, opts.get("hash_type", DEFAULT_HASH_TYPE))().hexdigest() + ) ) all_events_pattern = r"/all_events/{}".format(token_pattern) formatted_events_pattern = r"/formatted_events/{}".format(token_pattern) diff --git a/salt/pillar/hg_pillar.py b/salt/pillar/hg_pillar.py index 3a183a04568..b4ce24ac8a6 100644 --- a/salt/pillar/hg_pillar.py +++ b/salt/pillar/hg_pillar.py @@ -23,6 +23,7 @@ import os import salt.pillar import salt.utils.stringutils +from salt.config import DEFAULT_HASH_TYPE try: import hglib @@ -90,7 +91,7 @@ class Repo: """Initialize a hg repo (or open it if it already exists)""" self.repo_uri = repo_uri cachedir = os.path.join(__opts__["cachedir"], "hg_pillar") - hash_type = getattr(hashlib, __opts__.get("hash_type", "md5")) + hash_type = getattr(hashlib, __opts__.get("hash_type", DEFAULT_HASH_TYPE)) repo_hash = hash_type(salt.utils.stringutils.to_bytes(repo_uri)).hexdigest() self.working_dir = os.path.join(cachedir, repo_hash) if not os.path.isdir(self.working_dir): diff --git a/salt/tokens/localfs.py b/salt/tokens/localfs.py index 
99a239d62f1..61c2d945ad3 100644 --- a/salt/tokens/localfs.py +++ b/salt/tokens/localfs.py @@ -11,6 +11,7 @@ import salt.payload import salt.utils.files import salt.utils.path import salt.utils.verify +from salt.config import DEFAULT_HASH_TYPE log = logging.getLogger(__name__) @@ -27,7 +28,7 @@ def mk_token(opts, tdata): :param tdata: Token data to be stored with 'token' attribute of this dict set to the token. :returns: tdata with token if successful. Empty dict if failed. """ - hash_type = getattr(hashlib, opts.get("hash_type", "md5")) + hash_type = getattr(hashlib, opts.get("hash_type", DEFAULT_HASH_TYPE)) tok = str(hash_type(os.urandom(512)).hexdigest()) t_path = os.path.join(opts["token_dir"], tok) temp_t_path = "{}.tmp".format(t_path) diff --git a/salt/tokens/rediscluster.py b/salt/tokens/rediscluster.py index 241fe64b869..dc9bb44d3ea 100644 --- a/salt/tokens/rediscluster.py +++ b/salt/tokens/rediscluster.py @@ -13,12 +13,12 @@ Default values for these configs are as follow: :depends: - redis-py-cluster Python package """ - import hashlib import logging import os import salt.payload +from salt.config import DEFAULT_HASH_TYPE try: import rediscluster @@ -74,7 +74,7 @@ def mk_token(opts, tdata): redis_client = _redis_client(opts) if not redis_client: return {} - hash_type = getattr(hashlib, opts.get("hash_type", "md5")) + hash_type = getattr(hashlib, opts.get("hash_type", DEFAULT_HASH_TYPE)) tok = str(hash_type(os.urandom(512)).hexdigest()) try: while redis_client.get(tok) is not None: diff --git a/salt/utils/extmods.py b/salt/utils/extmods.py index 24204f40f8f..6a4d5c14440 100644 --- a/salt/utils/extmods.py +++ b/salt/utils/extmods.py @@ -11,6 +11,7 @@ import salt.utils.files import salt.utils.hashutils import salt.utils.path import salt.utils.url +from salt.config import DEFAULT_HASH_TYPE log = logging.getLogger(__name__) @@ -123,7 +124,7 @@ def sync( log.info("Copying '%s' to '%s'", fn_, dest) if os.path.isfile(dest): # The file is present, if the sum differs 
replace it - hash_type = opts.get("hash_type", "md5") + hash_type = opts.get("hash_type", DEFAULT_HASH_TYPE) src_digest = salt.utils.hashutils.get_hash(fn_, hash_type) dst_digest = salt.utils.hashutils.get_hash(dest, hash_type) if src_digest != dst_digest: diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index f15b8316e75..a197921f6ef 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -2,7 +2,6 @@ Classes which provide the shared base for GitFS, git_pillar, and winrepo """ - import base64 import contextlib import copy @@ -37,6 +36,7 @@ import salt.utils.stringutils import salt.utils.url import salt.utils.user import salt.utils.versions +from salt.config import DEFAULT_HASH_TYPE from salt.config import DEFAULT_MASTER_OPTS as _DEFAULT_MASTER_OPTS from salt.exceptions import FileserverConfigError, GitLockError, get_error_message from salt.utils.event import tagify @@ -458,7 +458,7 @@ class GitProvider: if hasattr(self, "name"): self._cache_basehash = self.name else: - hash_type = getattr(hashlib, self.opts.get("hash_type", "md5")) + hash_type = getattr(hashlib, self.opts.get("hash_type", DEFAULT_HASH_TYPE)) # We loaded this data from yaml configuration files, so, its safe # to use UTF-8 self._cache_basehash = str( diff --git a/tests/pytests/functional/netapi/rest_tornado/test_websockets_handler.py b/tests/pytests/functional/netapi/rest_tornado/test_websockets_handler.py index d039e75d29b..7469897a811 100644 --- a/tests/pytests/functional/netapi/rest_tornado/test_websockets_handler.py +++ b/tests/pytests/functional/netapi/rest_tornado/test_websockets_handler.py @@ -6,6 +6,7 @@ import pytest import salt.netapi.rest_tornado as rest_tornado import salt.utils.json import salt.utils.yaml +from salt.config import DEFAULT_HASH_TYPE from salt.ext.tornado.httpclient import HTTPError, HTTPRequest from salt.ext.tornado.websocket import websocket_connect @@ -51,7 +52,9 @@ async def test_websocket_handler_bad_token(client_config, http_server): A bad token should 
returns a 401 during a websocket connect """ token = "A" * len( - getattr(hashlib, client_config.get("hash_type", "md5"))().hexdigest() + getattr( + hashlib, client_config.get("hash_type", DEFAULT_HASH_TYPE) + )().hexdigest() ) url = "ws://127.0.0.1:{}/all_events/{}".format(http_server.port, token) From 3ed6e052626fe23eab5f3d481c3f453089240d17 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Fri, 20 Oct 2023 16:59:02 +0100 Subject: [PATCH 095/196] Don't use `hashlib.md5` Signed-off-by: Pedro Algarvio --- tests/integration/modules/test_cp.py | 4 ++-- tests/pytests/functional/states/test_archive.py | 2 +- tests/pytests/functional/states/test_file.py | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/integration/modules/test_cp.py b/tests/integration/modules/test_cp.py index cd3e4c2f5ad..af873bb6784 100644 --- a/tests/integration/modules/test_cp.py +++ b/tests/integration/modules/test_cp.py @@ -89,12 +89,12 @@ class CPModuleTest(ModuleCase): """ src = os.path.join(RUNTIME_VARS.FILES, "file", "base", "file.big") with salt.utils.files.fopen(src, "rb") as fp_: - hash_str = hashlib.md5(fp_.read()).hexdigest() + hash_str = hashlib.sha256(fp_.read()).hexdigest() self.run_function("cp.get_file", ["salt://file.big", tgt], gzip=5) with salt.utils.files.fopen(tgt, "rb") as scene: data = scene.read() - self.assertEqual(hash_str, hashlib.md5(data).hexdigest()) + self.assertEqual(hash_str, hashlib.sha256(data).hexdigest()) data = salt.utils.stringutils.to_unicode(data) self.assertIn("KNIGHT: They're nervous, sire.", data) self.assertNotIn("bacon", data) diff --git a/tests/pytests/functional/states/test_archive.py b/tests/pytests/functional/states/test_archive.py index 5f97f071fbf..8d1e4755850 100644 --- a/tests/pytests/functional/states/test_archive.py +++ b/tests/pytests/functional/states/test_archive.py @@ -41,7 +41,7 @@ class TestRequestHandler(http.server.SimpleHTTPRequestHandler): ) as reqfp: return_data = reqfp.read() # We're using this checksum as 
the etag to show file changes - checksum = hashlib.md5(return_data).hexdigest() + checksum = hashlib.sha256(return_data).hexdigest() if none_match == checksum: # Status code 304 Not Modified is returned if the file is unchanged status_code = 304 diff --git a/tests/pytests/functional/states/test_file.py b/tests/pytests/functional/states/test_file.py index 5e637acf93f..9de115a0131 100644 --- a/tests/pytests/functional/states/test_file.py +++ b/tests/pytests/functional/states/test_file.py @@ -41,7 +41,7 @@ class RequestHandler(http.server.SimpleHTTPRequestHandler): ) as reqfp: return_text = reqfp.read().encode("utf-8") # We're using this checksum as the etag to show file changes - checksum = hashlib.md5(return_text).hexdigest() + checksum = hashlib.sha256(return_text).hexdigest() if none_match == checksum: # Status code 304 Not Modified is returned if the file is unchanged status_code = 304 From 265ec5becf3e3610c94a24e50c2740f43e2e2027 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Sat, 21 Oct 2023 09:00:32 +0100 Subject: [PATCH 096/196] Cannot currently create virtual environments on a FIPS enabled platforms See https://github.com/saltstack/salt/issues/65444 Signed-off-by: Pedro Algarvio --- tests/pytests/functional/states/test_pip_state.py | 4 ++++ tests/pytests/functional/states/test_virtualenv_mod.py | 1 + tests/support/helpers.py | 5 +++++ 3 files changed, 10 insertions(+) diff --git a/tests/pytests/functional/states/test_pip_state.py b/tests/pytests/functional/states/test_pip_state.py index 3fc6ac7a1df..551c1472feb 100644 --- a/tests/pytests/functional/states/test_pip_state.py +++ b/tests/pytests/functional/states/test_pip_state.py @@ -25,6 +25,10 @@ except ImportError: log = logging.getLogger(__name__) +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + def _win_user_where(username, password, program): cmd = "cmd.exe /c where {}".format(program) diff --git a/tests/pytests/functional/states/test_virtualenv_mod.py 
b/tests/pytests/functional/states/test_virtualenv_mod.py index 7432152aced..af08c5dec21 100644 --- a/tests/pytests/functional/states/test_virtualenv_mod.py +++ b/tests/pytests/functional/states/test_virtualenv_mod.py @@ -9,6 +9,7 @@ log = logging.getLogger(__name__) pytestmark = [ pytest.mark.slow_test, + pytest.mark.skip_on_fips_enabled_platform, pytest.mark.skip_if_binaries_missing(*KNOWN_BINARY_NAMES, check_all=False), ] diff --git a/tests/support/helpers.py b/tests/support/helpers.py index 3556e08853b..f3a73090fa7 100644 --- a/tests/support/helpers.py +++ b/tests/support/helpers.py @@ -33,6 +33,7 @@ import types import attr import pytest +import pytestskipmarkers.utils.platform from pytestshellutils.exceptions import ProcessFailed from pytestshellutils.utils import ports from pytestshellutils.utils.processes import ProcessResult @@ -1644,6 +1645,10 @@ class VirtualEnv: return pathlib.Path(self.venv_python).parent def __enter__(self): + if pytestskipmarkers.utils.platform.is_fips_enabled(): + pytest.skip( + "Test cannot currently create virtual environments on a FIPS enabled platform" + ) try: self._create_virtualenv() except subprocess.CalledProcessError: From 47ace5bec9c943520d4f92de437ee3749b029ae2 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Sun, 5 Nov 2023 19:16:17 +0000 Subject: [PATCH 097/196] Replace `md5` with `sha256` for file checksum comparissons Signed-off-by: Pedro Algarvio --- salt/modules/container_resource.py | 16 +++++++--------- salt/modules/dockermod.py | 12 ++++++------ 2 files changed, 13 insertions(+), 15 deletions(-) diff --git a/salt/modules/container_resource.py b/salt/modules/container_resource.py index ceec72a7b20..0a44ce3e518 100644 --- a/salt/modules/container_resource.py +++ b/salt/modules/container_resource.py @@ -69,15 +69,13 @@ def _nsenter(pid): return f"nsenter --target {pid} --mount --uts --ipc --net --pid" -def _get_md5(name, path, run_func): +def _get_sha256(name, path, run_func): """ - Get the MD5 checksum of a file 
from a container + Get the sha256 checksum of a file from a container """ - output = run_func(name, f"md5sum {shlex.quote(path)}", ignore_retcode=True)[ - "stdout" - ] + ret = run_func(name, f"sha256sum {shlex.quote(path)}", ignore_retcode=True) try: - return output.split()[0] + return ret["stdout"].split()[0] except IndexError: # Destination file does not exist or could not be accessed return None @@ -368,8 +366,8 @@ def copy_to( ) # Before we try to replace the file, compare checksums. - source_md5 = __salt__["file.get_sum"](local_file, "md5") - if source_md5 == _get_md5(name, dest, run_all): + source_sha256 = __salt__["file.get_sum"](local_file, "sha256") + if source_sha256 == _get_sha256(name, dest, run_all): log.debug("%s and %s:%s are the same file, skipping copy", source, name, dest) return True @@ -399,4 +397,4 @@ def copy_to( local_file, name, PATH, dest ) __salt__["cmd.run"](copy_cmd, python_shell=True, output_loglevel="quiet") - return source_md5 == _get_md5(name, dest, run_all) + return source_sha256 == _get_sha256(name, dest, run_all) diff --git a/salt/modules/dockermod.py b/salt/modules/dockermod.py index b58fd1b32bc..415c03d24b7 100644 --- a/salt/modules/dockermod.py +++ b/salt/modules/dockermod.py @@ -525,11 +525,11 @@ def _clear_context(): pass -def _get_md5(name, path): +def _get_sha256(name, path): """ - Get the MD5 checksum of a file from a container + Get the sha256 checksum of a file from a container """ - output = run_stdout(name, f"md5sum {shlex.quote(path)}", ignore_retcode=True) + output = run_stdout(name, f"sha256sum {shlex.quote(path)}", ignore_retcode=True) try: return output.split()[0] except IndexError: @@ -3628,8 +3628,8 @@ def copy_from(name, source, dest, overwrite=False, makedirs=False): raise SaltInvocationError(f"Source file {source} does not exist") # Before we try to replace the file, compare checksums. 
- source_md5 = _get_md5(name, source) - if source_md5 == __salt__["file.get_sum"](dest, "md5"): + source_sha256 = _get_sha256(name, source) + if source_sha256 == __salt__["file.get_sum"](dest, "sha256"): log.debug("%s:%s and %s are the same file, skipping copy", name, source, dest) return True @@ -3641,7 +3641,7 @@ def copy_from(name, source, dest, overwrite=False, makedirs=False): src_path = f"{name}:{source}" cmd = ["docker", "cp", src_path, dest_dir] __salt__["cmd.run"](cmd, python_shell=False) - return source_md5 == __salt__["file.get_sum"](dest, "md5") + return source_sha256 == __salt__["file.get_sum"](dest, "sha256") # Docker cp gets a file from the container, alias this to copy_from From 023b1504d0838e8bbe76dfc616717552cceefc8b Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 6 Nov 2023 13:24:23 +0000 Subject: [PATCH 098/196] Remove unused variables Signed-off-by: Pedro Algarvio --- tests/pytests/functional/states/test_module.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/tests/pytests/functional/states/test_module.py b/tests/pytests/functional/states/test_module.py index b9afb4f0926..b3ee27f7285 100644 --- a/tests/pytests/functional/states/test_module.py +++ b/tests/pytests/functional/states/test_module.py @@ -10,8 +10,6 @@ log = logging.getLogger(__name__) @pytest.mark.core_test def test_issue_58763(tmp_path, modules, state_tree, caplog): - venv_dir = tmp_path / "issue-2028-pip-installed" - sls_contents = dedent( """ run_old: @@ -42,8 +40,6 @@ def test_issue_58763(tmp_path, modules, state_tree, caplog): @pytest.mark.core_test def test_issue_58763_a(tmp_path, modules, state_tree, caplog): - venv_dir = tmp_path / "issue-2028-pip-installed" - sls_contents = dedent( """ test.random_hash: @@ -68,8 +64,6 @@ def test_issue_58763_a(tmp_path, modules, state_tree, caplog): @pytest.mark.core_test def test_issue_58763_b(tmp_path, modules, state_tree, caplog): - venv_dir = tmp_path / "issue-2028-pip-installed" - sls_contents = dedent( """ 
test.ping: @@ -90,8 +84,6 @@ def test_issue_58763_b(tmp_path, modules, state_tree, caplog): @pytest.mark.core_test def test_issue_62988_a(tmp_path, modules, state_tree, caplog): - venv_dir = tmp_path / "issue-2028-pip-installed" - sls_contents = dedent( """ test_foo: @@ -120,8 +112,6 @@ def test_issue_62988_a(tmp_path, modules, state_tree, caplog): @pytest.mark.core_test def test_issue_62988_b(tmp_path, modules, state_tree, caplog): - venv_dir = tmp_path / "issue-2028-pip-installed" - sls_contents = dedent( """ test_foo: From dc365fdce3bf80edd89f2eb4ac39182bda06e27a Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 6 Nov 2023 13:27:32 +0000 Subject: [PATCH 099/196] Switch to `sha256` as the `hash_type` Signed-off-by: Pedro Algarvio --- tests/pytests/functional/states/test_module.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/tests/pytests/functional/states/test_module.py b/tests/pytests/functional/states/test_module.py index b3ee27f7285..019c085c87b 100644 --- a/tests/pytests/functional/states/test_module.py +++ b/tests/pytests/functional/states/test_module.py @@ -16,13 +16,13 @@ def test_issue_58763(tmp_path, modules, state_tree, caplog): module.run: - name: test.random_hash - size: 10 - - hash_type: md5 + - hash_type: sha256 run_new: module.run: - test.random_hash: - size: 10 - - hash_type: md5 + - hash_type: sha256 """ ) with pytest.helpers.temp_file("issue-58763.sls", sls_contents, state_tree): @@ -45,7 +45,7 @@ def test_issue_58763_a(tmp_path, modules, state_tree, caplog): test.random_hash: module.run: - size: 10 - - hash_type: md5 + - hash_type: sha256 """ ) with pytest.helpers.temp_file("issue-58763.sls", sls_contents, state_tree): @@ -93,7 +93,7 @@ def test_issue_62988_a(tmp_path, modules, state_tree, caplog): module.wait: - test.random_hash: - size: 10 - - hash_type: md5 + - hash_type: sha256 - watch: - test: test_foo """ @@ -123,7 +123,7 @@ def test_issue_62988_b(tmp_path, modules, state_tree, caplog): module.wait: 
- test.random_hash: - size: 10 - - hash_type: md5 + - hash_type: sha256 """ ) with pytest.helpers.temp_file("issue-62988.sls", sls_contents, state_tree): From c5db6bf7669171f1ba25043e1c3b78d3bbe52ae6 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Tue, 7 Nov 2023 12:23:40 +0000 Subject: [PATCH 100/196] Flush the logging handler just to be sure Signed-off-by: Pedro Algarvio --- tests/pytests/unit/client/ssh/test_single.py | 25 +++++++++++--------- 1 file changed, 14 insertions(+), 11 deletions(-) diff --git a/tests/pytests/unit/client/ssh/test_single.py b/tests/pytests/unit/client/ssh/test_single.py index c88a1c2127f..c5b733372af 100644 --- a/tests/pytests/unit/client/ssh/test_single.py +++ b/tests/pytests/unit/client/ssh/test_single.py @@ -19,17 +19,13 @@ log = logging.getLogger(__name__) @pytest.fixture -def opts(tmp_path): - return { - "argv": [ - "ssh.set_auth_key", - "root", - "hobn+amNAXSBTiOXEqlBjGB...rsa root@master", - ], - "__role": "master", - "cachedir": str(tmp_path), - "extension_modules": str(tmp_path / "extmods"), - } +def opts(master_opts): + master_opts["argv"] = [ + "ssh.set_auth_key", + "root", + "hobn+amNAXSBTiOXEqlBjGB...rsa root@master", + ] + return master_opts @pytest.fixture @@ -411,6 +407,10 @@ def test_run_ssh_pre_flight_no_connect(opts, target, tmp_path, caplog): with caplog.at_level(logging.TRACE): with patch_send, patch_exec_cmd, patch_tmp: ret = single.run_ssh_pre_flight() + + # Flush the logging handler just to be sure + caplog.handler.flush() + assert "Copying the pre flight script" in caplog.text assert "Could not copy the pre flight script to target" in caplog.text assert ret == ret_send @@ -503,6 +503,9 @@ def test_run_ssh_pre_flight_connect(opts, target, tmp_path, caplog): with patch_send, patch_exec_cmd, patch_tmp: ret = single.run_ssh_pre_flight() + # Flush the logging handler just to be sure + caplog.handler.flush() + assert "Executing the pre flight script on target" in caplog.text assert ret == ret_exec_cmd assert 
send_mock.call_args_list[0][0][0] == tmp_file From 066afb90f0ae6b79035006a0512fd022c76bba15 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Fri, 27 Oct 2023 16:29:05 +0100 Subject: [PATCH 101/196] Skip tests which can't run, or even pass on FIPS enabled platforms Signed-off-by: Pedro Algarvio --- .pylintrc | 3 +- salt/pillar/sql_base.py | 8 +-- .../cloud/clouds/test_digitalocean.py | 4 +- .../integration/externalapi/test_venafiapi.py | 9 +-- tests/integration/states/test_archive.py | 10 +++ tests/pytests/functional/cache/test_consul.py | 1 + .../modules/state/test_jinja_filters.py | 7 ++- .../pytests/functional/modules/test_mysql.py | 1 + .../functional/modules/test_x509_v2.py | 3 +- .../pytests/functional/states/test_x509_v2.py | 61 ++++++++++--------- .../transport/ipc/test_pub_server_channel.py | 3 +- .../tcp/test_load_balanced_server.py | 1 + .../zeromq/test_pub_server_channel.py | 1 + .../integration/daemons/test_memory_leak.py | 1 + .../pytests/integration/ssh/test_saltcheck.py | 8 +++ .../integration/states/test_x509_v2.py | 1 + tests/pytests/unit/cloud/test_cloud.py | 1 + tests/pytests/unit/cloud/test_map.py | 2 + tests/pytests/unit/modules/test_hashutil.py | 1 + tests/pytests/unit/modules/test_postgres.py | 5 ++ .../unit/states/postgresql/test_group.py | 5 ++ .../unit/states/postgresql/test_user.py | 3 + .../unit/states/test_boto_cloudwatch_event.py | 1 + tests/pytests/unit/states/test_boto_iot.py | 1 + .../utils/jinja/test_custom_extensions.py | 2 +- .../unit/utils/jinja/test_get_template.py | 1 - tests/support/pytest/mysql.py | 5 ++ .../unit/modules/test_boto3_elasticsearch.py | 4 ++ tests/unit/modules/test_boto3_route53.py | 4 ++ tests/unit/modules/test_boto_apigateway.py | 4 ++ tests/unit/modules/test_boto_cloudtrail.py | 4 ++ .../modules/test_boto_cloudwatch_event.py | 4 ++ .../unit/modules/test_boto_cognitoidentity.py | 4 ++ .../modules/test_boto_elasticsearch_domain.py | 4 ++ tests/unit/modules/test_boto_iot.py | 4 ++ 
tests/unit/modules/test_boto_lambda.py | 4 ++ tests/unit/modules/test_boto_s3_bucket.py | 4 ++ tests/unit/modules/test_virt.py | 12 ++-- tests/unit/modules/test_zcbuildout.py | 3 +- tests/unit/states/test_boto_apigateway.py | 4 ++ .../unit/states/test_boto_cognitoidentity.py | 4 ++ tests/unit/states/test_zcbuildout.py | 3 +- tests/unit/utils/test_boto3mod.py | 4 ++ tests/unit/utils/test_botomod.py | 5 ++ tests/unit/utils/test_find.py | 1 + tests/unit/utils/test_hashutils.py | 3 + 46 files changed, 168 insertions(+), 60 deletions(-) diff --git a/.pylintrc b/.pylintrc index be586e1ed34..3991b5df08e 100644 --- a/.pylintrc +++ b/.pylintrc @@ -698,7 +698,8 @@ allowed-3rd-party-modules=msgpack, ptscripts, packaging, looseversion, - pytestskipmarkers + pytestskipmarkers, + cryptography [EXCEPTIONS] diff --git a/salt/pillar/sql_base.py b/salt/pillar/sql_base.py index 372dced91cc..3edd3ad0a87 100644 --- a/salt/pillar/sql_base.py +++ b/salt/pillar/sql_base.py @@ -198,22 +198,20 @@ More complete example for MySQL (to also show configuration) with_lists: [1,3] """ -import abc # Added in python2.6 so always available +import abc import logging from salt.utils.dictupdate import update from salt.utils.odict import OrderedDict +log = logging.getLogger(__name__) + # Please don't strip redundant parentheses from this file. # I have added some for clarity. # tests/unit/pillar/mysql_test.py may help understand this code. 
-# Set up logging -log = logging.getLogger(__name__) - - # This ext_pillar is abstract and cannot be used directory def __virtual__(): return False diff --git a/tests/integration/cloud/clouds/test_digitalocean.py b/tests/integration/cloud/clouds/test_digitalocean.py index e92f57d8aa2..64ad0f17426 100644 --- a/tests/integration/cloud/clouds/test_digitalocean.py +++ b/tests/integration/cloud/clouds/test_digitalocean.py @@ -1,10 +1,11 @@ """ Integration tests for DigitalOcean APIv2 """ - import base64 import hashlib +import pytest + import salt.crypt import salt.utils.stringutils from tests.integration.cloud.helpers.cloud_test_base import TIMEOUT, CloudTest @@ -43,6 +44,7 @@ class DigitalOceanTest(CloudTest): _list_sizes = self.run_cloud("--list-sizes {}".format(self.PROVIDER)) self.assertIn("16gb", [i.strip() for i in _list_sizes]) + @pytest.mark.skip_on_fips_enabled_platform def test_key_management(self): """ Test key management diff --git a/tests/integration/externalapi/test_venafiapi.py b/tests/integration/externalapi/test_venafiapi.py index ad08605430f..c9d44dce50c 100644 --- a/tests/integration/externalapi/test_venafiapi.py +++ b/tests/integration/externalapi/test_venafiapi.py @@ -43,13 +43,10 @@ class VenafiTest(ShellCase): @with_random_name @pytest.mark.slow_test + @pytest.mark.skip_on_fips_enabled_platform def test_request(self, name): cn = "{}.example.com".format(name) - # Provide python27 compatibility - if not isinstance(cn, str): - cn = cn.decode() - ret = self.run_run_plus( fun="venafi.request", minion_id=cn, @@ -126,10 +123,6 @@ xlAKgaU6i03jOm5+sww5L2YVMi1eeBN+kx7o94ogpRemC/EUidvl1PUJ6+e7an9V csr_path = f.name cn = "test-csr-32313131.venafi.example.com" - # Provide python27 compatibility - if not isinstance(cn, str): - cn = cn.decode() - ret = self.run_run_plus( fun="venafi.request", minion_id=cn, csr_path=csr_path, zone="fake" ) diff --git a/tests/integration/states/test_archive.py b/tests/integration/states/test_archive.py index 
7d2dba52210..d940db5ecd2 100644 --- a/tests/integration/states/test_archive.py +++ b/tests/integration/states/test_archive.py @@ -106,6 +106,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin): self._check_extracted(self.untar_file) + @pytest.mark.skip_on_fips_enabled_platform def test_archive_extracted_with_source_hash(self): """ test archive.extracted without skip_verify @@ -127,6 +128,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin): self._check_extracted(self.untar_file) @pytest.mark.skip_if_not_root + @pytest.mark.skip_on_fips_enabled_platform def test_archive_extracted_with_root_user_and_group(self): """ test archive.extracted with user and group set to "root" @@ -151,6 +153,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin): self._check_extracted(self.untar_file) @pytest.mark.slow_test + @pytest.mark.skip_on_fips_enabled_platform def test_archive_extracted_with_strip_in_options(self): """ test archive.extracted with --strip in options @@ -170,6 +173,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin): self._check_extracted(os.path.join(ARCHIVE_DIR, "README")) + @pytest.mark.skip_on_fips_enabled_platform def test_archive_extracted_with_strip_components_in_options(self): """ test archive.extracted with --strip-components in options @@ -190,6 +194,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin): self._check_extracted(os.path.join(ARCHIVE_DIR, "README")) @pytest.mark.slow_test + @pytest.mark.skip_on_fips_enabled_platform def test_archive_extracted_without_archive_format(self): """ test archive.extracted with no archive_format option @@ -206,6 +211,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin): self._check_extracted(self.untar_file) + @pytest.mark.skip_on_fips_enabled_platform def test_archive_extracted_with_cmd_unzip_false(self): """ test archive.extracted using use_cmd_unzip argument as false @@ -240,6 +246,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin): self._check_extracted(self.untar_file) + 
@pytest.mark.skip_on_fips_enabled_platform def test_local_archive_extracted_skip_verify(self): """ test archive.extracted with local file, bad hash and skip_verify @@ -258,6 +265,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin): self._check_extracted(self.untar_file) @pytest.mark.slow_test + @pytest.mark.skip_on_fips_enabled_platform def test_local_archive_extracted_with_source_hash(self): """ test archive.extracted with local file and valid hash @@ -275,6 +283,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin): self._check_extracted(self.untar_file) @pytest.mark.slow_test + @pytest.mark.skip_on_fips_enabled_platform def test_local_archive_extracted_with_bad_source_hash(self): """ test archive.extracted with local file and bad hash @@ -289,6 +298,7 @@ class ArchiveTest(ModuleCase, SaltReturnAssertsMixin): self.assertSaltFalseReturn(ret) + @pytest.mark.skip_on_fips_enabled_platform def test_local_archive_extracted_with_uppercase_source_hash(self): """ test archive.extracted with local file and bad hash diff --git a/tests/pytests/functional/cache/test_consul.py b/tests/pytests/functional/cache/test_consul.py index 3a38e495a93..0a42913b6c2 100644 --- a/tests/pytests/functional/cache/test_consul.py +++ b/tests/pytests/functional/cache/test_consul.py @@ -14,6 +14,7 @@ docker = pytest.importorskip("docker") log = logging.getLogger(__name__) pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, pytest.mark.slow_test, pytest.mark.skip_if_binaries_missing("dockerd"), ] diff --git a/tests/pytests/functional/modules/state/test_jinja_filters.py b/tests/pytests/functional/modules/state/test_jinja_filters.py index 220310aaaf0..59777cee196 100644 --- a/tests/pytests/functional/modules/state/test_jinja_filters.py +++ b/tests/pytests/functional/modules/state/test_jinja_filters.py @@ -6,6 +6,7 @@ import os import attr import pytest +from pytestskipmarkers.utils import platform import salt.utils.files import salt.utils.path @@ -932,7 +933,11 @@ def 
_filter_id(value): ids=_filter_id, ) def filter(request): - return request.param + _filter = request.param + if platform.is_fips_enabled(): + if _filter.name in ("md5", "random_hash"): + pytest.skip("Test cannot run on a FIPS enabled platform") + return _filter def test_filter(state, state_tree, filter, grains): diff --git a/tests/pytests/functional/modules/test_mysql.py b/tests/pytests/functional/modules/test_mysql.py index c37a508588b..d920bbdbc03 100644 --- a/tests/pytests/functional/modules/test_mysql.py +++ b/tests/pytests/functional/modules/test_mysql.py @@ -19,6 +19,7 @@ pytestmark = [ pytest.mark.skipif( mysqlmod.MySQLdb is None, reason="No python mysql client installed." ), + pytest.mark.skip_on_fips_enabled_platform, ] diff --git a/tests/pytests/functional/modules/test_x509_v2.py b/tests/pytests/functional/modules/test_x509_v2.py index 42b55d66a6c..dfb973af108 100644 --- a/tests/pytests/functional/modules/test_x509_v2.py +++ b/tests/pytests/functional/modules/test_x509_v2.py @@ -23,7 +23,8 @@ except ImportError: CRYPTOGRAPHY_VERSION = tuple(int(x) for x in cryptography.__version__.split(".")) pytestmark = [ - pytest.mark.skipif(HAS_LIBS is False, reason="Needs cryptography library") + pytest.mark.skip_on_fips_enabled_platform, + pytest.mark.skipif(HAS_LIBS is False, reason="Needs cryptography library"), ] diff --git a/tests/pytests/functional/states/test_x509_v2.py b/tests/pytests/functional/states/test_x509_v2.py index 7409e6683ed..3cd09d7d840 100644 --- a/tests/pytests/functional/states/test_x509_v2.py +++ b/tests/pytests/functional/states/test_x509_v2.py @@ -1,5 +1,5 @@ import base64 -from pathlib import Path +import pathlib import pytest @@ -26,6 +26,7 @@ CRYPTOGRAPHY_VERSION = tuple(int(x) for x in cryptography.__version__.split(".") pytestmark = [ pytest.mark.slow_test, pytest.mark.skipif(HAS_LIBS is False, reason="Needs cryptography library"), + pytest.mark.skip_on_fips_enabled_platform, ] @@ -703,7 +704,7 @@ def existing_pk(x509, pk_args, 
request): @pytest.fixture(params=["existing_cert"]) def existing_symlink(request): existing = request.getfixturevalue(request.param) - test_file = Path(existing).with_name("symlink") + test_file = pathlib.Path(existing).with_name("symlink") test_file.symlink_to(existing) yield test_file # cleanup is done by tmp_path @@ -884,7 +885,7 @@ def test_certificate_managed_test_true(x509, cert_args, rsa_privkey, ca_key): ret = x509.certificate_managed(**cert_args) assert ret.result is None assert ret.changes - assert not Path(cert_args["name"]).exists() + assert not pathlib.Path(cert_args["name"]).exists() @pytest.mark.usefixtures("existing_cert") @@ -1324,7 +1325,7 @@ def test_certificate_managed_file_managed_create_false( ret = x509.certificate_managed(**cert_args) assert ret.result is True assert not ret.changes - assert not Path(cert_args["name"]).exists() + assert not pathlib.Path(cert_args["name"]).exists() @pytest.mark.usefixtures("existing_cert") @@ -1397,7 +1398,7 @@ def test_certificate_managed_follow_symlinks( """ cert_args["name"] = str(existing_symlink) cert_args["encoding"] = encoding - assert Path(cert_args["name"]).is_symlink() + assert pathlib.Path(cert_args["name"]).is_symlink() cert_args["follow_symlinks"] = follow ret = x509.certificate_managed(**cert_args) assert bool(ret.changes) == (not follow) @@ -1417,13 +1418,13 @@ def test_certificate_managed_follow_symlinks_changes( the checking of the existing file is performed by the x509 module """ cert_args["name"] = str(existing_symlink) - assert Path(cert_args["name"]).is_symlink() + assert pathlib.Path(cert_args["name"]).is_symlink() cert_args["follow_symlinks"] = follow cert_args["encoding"] = encoding cert_args["CN"] = "new" ret = x509.certificate_managed(**cert_args) assert ret.changes - assert Path(ret.name).is_symlink() == follow + assert pathlib.Path(ret.name).is_symlink() == follow @pytest.mark.parametrize("encoding", ["pem", "der"]) @@ -1436,7 +1437,7 @@ def 
test_certificate_managed_file_managed_error( cert_args["private_key"] = rsa_privkey cert_args["makedirs"] = False cert_args["encoding"] = encoding - cert_args["name"] = str(Path(cert_args["name"]).parent / "missing" / "cert") + cert_args["name"] = str(pathlib.Path(cert_args["name"]).parent / "missing" / "cert") ret = x509.certificate_managed(**cert_args) assert ret.result is False assert "Could not create file, see file.managed output" in ret.comment @@ -1504,7 +1505,7 @@ def test_crl_managed_test_true(x509, crl_args, crl_revoked): assert ret.result is None assert ret.changes assert ret.result is None - assert not Path(crl_args["name"]).exists() + assert not pathlib.Path(crl_args["name"]).exists() @pytest.mark.usefixtures("existing_crl") @@ -1708,7 +1709,7 @@ def test_crl_managed_file_managed_create_false(x509, crl_args): ret = x509.crl_managed(**crl_args) assert ret.result is True assert not ret.changes - assert not Path(crl_args["name"]).exists() + assert not pathlib.Path(crl_args["name"]).exists() @pytest.mark.usefixtures("existing_crl") @@ -1782,7 +1783,7 @@ def test_crl_managed_follow_symlinks( """ crl_args["name"] = str(existing_symlink) crl_args["encoding"] = encoding - assert Path(crl_args["name"]).is_symlink() + assert pathlib.Path(crl_args["name"]).is_symlink() crl_args["follow_symlinks"] = follow ret = x509.crl_managed(**crl_args) assert bool(ret.changes) == (not follow) @@ -1802,13 +1803,13 @@ def test_crl_managed_follow_symlinks_changes( the checking of the existing file is performed by the x509 module """ crl_args["name"] = str(existing_symlink) - assert Path(crl_args["name"]).is_symlink() + assert pathlib.Path(crl_args["name"]).is_symlink() crl_args["follow_symlinks"] = follow crl_args["encoding"] = encoding crl_args["revoked"] = crl_revoked ret = x509.crl_managed(**crl_args) assert ret.changes - assert Path(ret.name).is_symlink() == follow + assert pathlib.Path(ret.name).is_symlink() == follow @pytest.mark.parametrize("encoding", ["pem", "der"]) @@ 
-1818,7 +1819,7 @@ def test_crl_managed_file_managed_error(x509, crl_args, encoding): """ crl_args["makedirs"] = False crl_args["encoding"] = encoding - crl_args["name"] = str(Path(crl_args["name"]).parent / "missing" / "crl") + crl_args["name"] = str(pathlib.Path(crl_args["name"]).parent / "missing" / "crl") ret = x509.crl_managed(**crl_args) assert ret.result is False assert "Could not create file, see file.managed output" in ret.comment @@ -1866,7 +1867,7 @@ def test_csr_managed_test_true(x509, csr_args, rsa_privkey): ret = x509.csr_managed(**csr_args) assert ret.result is None assert ret.changes - assert not Path(csr_args["name"]).exists() + assert not pathlib.Path(csr_args["name"]).exists() @pytest.mark.usefixtures("existing_csr") @@ -2002,7 +2003,7 @@ def test_csr_managed_file_managed_create_false(x509, csr_args): ret = x509.csr_managed(**csr_args) assert ret.result is True assert not ret.changes - assert not Path(csr_args["name"]).exists() + assert not pathlib.Path(csr_args["name"]).exists() @pytest.mark.usefixtures("existing_csr") @@ -2066,12 +2067,12 @@ def test_csr_managed_follow_symlinks( the checking of the existing file is performed by the x509 module """ csr_args["name"] = str(existing_symlink) - assert Path(csr_args["name"]).is_symlink() + assert pathlib.Path(csr_args["name"]).is_symlink() csr_args["follow_symlinks"] = follow csr_args["encoding"] = encoding ret = x509.csr_managed(**csr_args) assert bool(ret.changes) == (not follow) - assert Path(ret.name).is_symlink() == follow + assert pathlib.Path(ret.name).is_symlink() == follow @pytest.mark.parametrize( @@ -2088,14 +2089,14 @@ def test_csr_managed_follow_symlinks_changes( the checking of the existing file is performed by the x509 module """ csr_args["name"] = str(existing_symlink) - assert Path(csr_args["name"]).is_symlink() + assert pathlib.Path(csr_args["name"]).is_symlink() csr_args["follow_symlinks"] = follow csr_args["encoding"] = encoding csr_args["CN"] = "new" ret = 
x509.csr_managed(**csr_args) assert ret.result assert ret.changes - assert Path(ret.name).is_symlink() == follow + assert pathlib.Path(ret.name).is_symlink() == follow @pytest.mark.parametrize("encoding", ["pem", "der"]) @@ -2105,7 +2106,7 @@ def test_csr_managed_file_managed_error(x509, csr_args, encoding): """ csr_args["makedirs"] = False csr_args["encoding"] = encoding - csr_args["name"] = str(Path(csr_args["name"]).parent / "missing" / "csr") + csr_args["name"] = str(pathlib.Path(csr_args["name"]).parent / "missing" / "csr") ret = x509.csr_managed(**csr_args) assert ret.result is False assert "Could not create file, see file.managed output" in ret.comment @@ -2312,7 +2313,7 @@ def test_private_key_managed_file_managed_create_false(x509, pk_args): ret = x509.private_key_managed(**pk_args) assert ret.result is True assert not ret.changes - assert not Path(pk_args["name"]).exists() + assert not pathlib.Path(pk_args["name"]).exists() @pytest.mark.usefixtures("existing_pk") @@ -2361,7 +2362,7 @@ def test_private_key_managed_follow_symlinks( """ pk_args["name"] = str(existing_symlink) pk_args["encoding"] = encoding - assert Path(pk_args["name"]).is_symlink() + assert pathlib.Path(pk_args["name"]).is_symlink() pk_args["follow_symlinks"] = follow ret = x509.private_key_managed(**pk_args) assert bool(ret.changes) == (not follow) @@ -2381,13 +2382,13 @@ def test_private_key_managed_follow_symlinks_changes( the checking of the existing file is performed by the x509 module """ pk_args["name"] = str(existing_symlink) - assert Path(pk_args["name"]).is_symlink() + assert pathlib.Path(pk_args["name"]).is_symlink() pk_args["follow_symlinks"] = follow pk_args["encoding"] = encoding pk_args["algo"] = "ec" ret = x509.private_key_managed(**pk_args) assert ret.changes - assert Path(ret.name).is_symlink() == follow + assert pathlib.Path(ret.name).is_symlink() == follow @pytest.mark.usefixtures("existing_pk") @@ -2415,7 +2416,7 @@ def test_private_key_managed_file_managed_error(x509, 
pk_args, encoding): """ pk_args["makedirs"] = False pk_args["encoding"] = encoding - pk_args["name"] = str(Path(pk_args["name"]).parent / "missing" / "pk") + pk_args["name"] = str(pathlib.Path(pk_args["name"]).parent / "missing" / "pk") ret = x509.private_key_managed(**pk_args) assert ret.result is False assert "Could not create file, see file.managed output" in ret.comment @@ -2693,7 +2694,7 @@ def _assert_cert_basic( def _get_cert(cert, encoding="pem", passphrase=None): try: - p = Path(cert) + p = pathlib.Path(cert) if p.exists(): cert = p.read_bytes() except Exception: # pylint: disable=broad-except @@ -2775,7 +2776,7 @@ def _assert_not_changed(ret): def _get_crl(crl, encoding="pem"): try: - p = Path(crl) + p = pathlib.Path(crl) if p.exists(): crl = p.read_bytes() except Exception: # pylint: disable=broad-except @@ -2793,7 +2794,7 @@ def _get_crl(crl, encoding="pem"): def _get_csr(csr, encoding="pem"): try: - p = Path(csr) + p = pathlib.Path(csr) if p.exists(): csr = p.read_bytes() except Exception: # pylint: disable=broad-except @@ -2811,7 +2812,7 @@ def _get_csr(csr, encoding="pem"): def _get_privkey(pk, encoding="pem", passphrase=None): try: - p = Path(pk) + p = pathlib.Path(pk) if p.exists(): pk = p.read_bytes() except Exception: # pylint: disable=broad-except diff --git a/tests/pytests/functional/transport/ipc/test_pub_server_channel.py b/tests/pytests/functional/transport/ipc/test_pub_server_channel.py index f9360297aa4..63d7239968d 100644 --- a/tests/pytests/functional/transport/ipc/test_pub_server_channel.py +++ b/tests/pytests/functional/transport/ipc/test_pub_server_channel.py @@ -13,9 +13,10 @@ log = logging.getLogger(__name__) pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, pytest.mark.skip_on_spawning_platform( reason="These tests are currently broken on spawning platforms. 
Need to be rewritten.", - ) + ), ] diff --git a/tests/pytests/functional/transport/tcp/test_load_balanced_server.py b/tests/pytests/functional/transport/tcp/test_load_balanced_server.py index cfc25f917e5..9ab429b1ff4 100644 --- a/tests/pytests/functional/transport/tcp/test_load_balanced_server.py +++ b/tests/pytests/functional/transport/tcp/test_load_balanced_server.py @@ -12,6 +12,7 @@ pytestmark = [ ] +@pytest.mark.skip_on_fips_enabled_platform def test_tcp_load_balancer_server(master_opts, io_loop): messages = [] diff --git a/tests/pytests/functional/transport/zeromq/test_pub_server_channel.py b/tests/pytests/functional/transport/zeromq/test_pub_server_channel.py index 27a315fda91..2a357c7c5db 100644 --- a/tests/pytests/functional/transport/zeromq/test_pub_server_channel.py +++ b/tests/pytests/functional/transport/zeromq/test_pub_server_channel.py @@ -12,6 +12,7 @@ log = logging.getLogger(__name__) pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, pytest.mark.skip_on_freebsd(reason="Temporarily skipped on FreeBSD."), pytest.mark.skip_on_spawning_platform( reason="These tests are currently broken on spawning platforms. 
Need to be rewritten.", diff --git a/tests/pytests/integration/daemons/test_memory_leak.py b/tests/pytests/integration/daemons/test_memory_leak.py index 1b782760418..fb608fc1864 100644 --- a/tests/pytests/integration/daemons/test_memory_leak.py +++ b/tests/pytests/integration/daemons/test_memory_leak.py @@ -44,6 +44,7 @@ def file_add_delete_sls(testfile_path, base_env_state_tree_root_dir): yield sls_name +@pytest.mark.skip_on_fips_enabled_platform @pytest.mark.skip_on_darwin(reason="MacOS is a spawning platform, won't work") @pytest.mark.flaky(max_runs=4) def test_memory_leak(salt_cli, salt_minion, file_add_delete_sls): diff --git a/tests/pytests/integration/ssh/test_saltcheck.py b/tests/pytests/integration/ssh/test_saltcheck.py index 51068850265..a4cd6f3d8e0 100644 --- a/tests/pytests/integration/ssh/test_saltcheck.py +++ b/tests/pytests/integration/ssh/test_saltcheck.py @@ -1,4 +1,5 @@ import pytest +from pytestskipmarkers.utils import platform pytestmark = [ pytest.mark.slow_test, @@ -6,6 +7,12 @@ pytestmark = [ ] +@pytest.fixture +def _skip_on_fips_and_arm64(grains): + if platform.is_fips_enabled() and grains["cpuarch"] == "aarch64": + pytest.skip("Test cannot run on a FIPS enabled platform") + + def test_saltcheck_run_test(salt_ssh_cli): """ test saltcheck.run_test with salt-ssh @@ -23,6 +30,7 @@ def test_saltcheck_run_test(salt_ssh_cli): assert ret.data["status"] == "Pass" +@pytest.mark.usefixtures("_skip_on_fips_and_arm64") def test_saltcheck_state(salt_ssh_cli): """ saltcheck.run_state_tests diff --git a/tests/pytests/integration/states/test_x509_v2.py b/tests/pytests/integration/states/test_x509_v2.py index be01852919b..b13a2a8922a 100644 --- a/tests/pytests/integration/states/test_x509_v2.py +++ b/tests/pytests/integration/states/test_x509_v2.py @@ -666,6 +666,7 @@ def test_privkey_new_with_prereq(x509_salt_call_cli, tmp_path): assert not _belongs_to(cert_new, pk_cur) +@pytest.mark.skip_on_fips_enabled_platform 
@pytest.mark.usefixtures("privkey_new_pkcs12") @pytest.mark.skipif( CRYPTOGRAPHY_VERSION[0] < 36, diff --git a/tests/pytests/unit/cloud/test_cloud.py b/tests/pytests/unit/cloud/test_cloud.py index bd8595dcf86..ecdab4de575 100644 --- a/tests/pytests/unit/cloud/test_cloud.py +++ b/tests/pytests/unit/cloud/test_cloud.py @@ -126,6 +126,7 @@ def test_vm_config_merger(): assert expected == vm +@pytest.mark.skip_on_fips_enabled_platform def test_cloud_run_profile_create_returns_boolean(master_config): master_config["profiles"] = {"test_profile": {"provider": "test_provider:saltify"}} diff --git a/tests/pytests/unit/cloud/test_map.py b/tests/pytests/unit/cloud/test_map.py index 06f71b6d6e5..ce2999003e7 100644 --- a/tests/pytests/unit/cloud/test_map.py +++ b/tests/pytests/unit/cloud/test_map.py @@ -99,6 +99,8 @@ def salt_cloud_config_file(salt_master_factory): return os.path.join(salt_master_factory.config_dir, "cloud") +# The cloud map merge uses python's multiprocessing manager which authenticates using HMAC and MD5 +@pytest.mark.skip_on_fips_enabled_platform def test_cloud_map_merge_conf(salt_cloud_config_file, grains): """ Ensure that nested values can be selectivly overridden in a map file diff --git a/tests/pytests/unit/modules/test_hashutil.py b/tests/pytests/unit/modules/test_hashutil.py index d8f2195c174..c91e99ce6b7 100644 --- a/tests/pytests/unit/modules/test_hashutil.py +++ b/tests/pytests/unit/modules/test_hashutil.py @@ -61,6 +61,7 @@ def test_base64_decodestring(the_string, the_string_base64): assert hashutil.base64_decodestring(the_string_base64) == the_string +@pytest.mark.skip_on_fips_enabled_platform def test_md5_digest(the_string, the_string_md5): assert hashutil.md5_digest(the_string) == the_string_md5 diff --git a/tests/pytests/unit/modules/test_postgres.py b/tests/pytests/unit/modules/test_postgres.py index b9178fa038e..b828e8204b9 100644 --- a/tests/pytests/unit/modules/test_postgres.py +++ b/tests/pytests/unit/modules/test_postgres.py @@ -2,6 +2,7 
@@ import datetime import re import pytest +from pytestskipmarkers.utils import platform import salt.modules.config as configmod import salt.modules.postgres as postgres @@ -117,6 +118,8 @@ def idfn(val): ids=idfn, ) def test_verify_password(role, password, verifier, method, result): + if platform.is_fips_enabled() and (method == "md5" or verifier == md5_pw): + pytest.skip("Test cannot run on a FIPS enabled platform") assert postgres._verify_password(role, password, verifier, method) == result @@ -971,6 +974,7 @@ def test_user_update3(): ) +@pytest.mark.skip_on_fips_enabled_platform def test_user_update_encrypted_passwd(): with patch( "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) @@ -1226,6 +1230,7 @@ def test_create_extension_newerthan(): assert not postgres.create_extension("foo", ext_version="a", schema="b") +@pytest.mark.skip_on_fips_enabled_platform def test_encrypt_passwords(): assert postgres._maybe_encrypt_password("foo", "bar", False) == "bar" assert ( diff --git a/tests/pytests/unit/states/postgresql/test_group.py b/tests/pytests/unit/states/postgresql/test_group.py index 2eb77bf4c0f..6957ce54540 100644 --- a/tests/pytests/unit/states/postgresql/test_group.py +++ b/tests/pytests/unit/states/postgresql/test_group.py @@ -1,4 +1,5 @@ import pytest +from pytestskipmarkers.utils import platform import salt.modules.postgres as postgres import salt.states.postgres_group as postgres_group @@ -19,6 +20,8 @@ def fixture_db_args(): @pytest.fixture(name="md5_pw") def fixture_md5_pw(): + if platform.is_fips_enabled(): + pytest.skip("Test cannot run on a FIPS enabled platform") # 'md5' + md5('password' + 'groupname') return "md58b14c378fab8ef0dc227f4e6d6787a87" @@ -79,6 +82,7 @@ def configure_loader_modules(mocks): # ========== +@pytest.mark.skip_on_fips_enabled_platform def test_present_create_basic(mocks, db_args): assert postgres_group.present("groupname") == { "name": "groupname", @@ -343,6 +347,7 @@ def 
test_present_update_md5_password(mocks, existing_group, md5_pw, db_args): ) +@pytest.mark.skip_on_fips_enabled_platform def test_present_update_error(mocks, existing_group): existing_group["password"] = "md500000000000000000000000000000000" mocks["postgres.role_get"].return_value = existing_group diff --git a/tests/pytests/unit/states/postgresql/test_user.py b/tests/pytests/unit/states/postgresql/test_user.py index 46d76535144..1d5dba9b1bb 100644 --- a/tests/pytests/unit/states/postgresql/test_user.py +++ b/tests/pytests/unit/states/postgresql/test_user.py @@ -1,4 +1,5 @@ import pytest +from pytestskipmarkers.utils import platform import salt.modules.postgres as postgres import salt.states.postgres_user as postgres_user @@ -25,6 +26,8 @@ def fixture_db_args(): @pytest.fixture(name="md5_pw") def fixture_md5_pw(): # 'md5' + md5('password' + 'username') + if platform.is_fips_enabled(): + pytest.skip("Test cannot run on a FIPS enabled platform") return "md55a231fcdb710d73268c4f44283487ba2" diff --git a/tests/pytests/unit/states/test_boto_cloudwatch_event.py b/tests/pytests/unit/states/test_boto_cloudwatch_event.py index 2974947e60e..684744464e7 100644 --- a/tests/pytests/unit/states/test_boto_cloudwatch_event.py +++ b/tests/pytests/unit/states/test_boto_cloudwatch_event.py @@ -17,6 +17,7 @@ log = logging.getLogger(__name__) pytestmark = [ pytest.mark.slow_test, + pytest.mark.skip_on_fips_enabled_platform, ] diff --git a/tests/pytests/unit/states/test_boto_iot.py b/tests/pytests/unit/states/test_boto_iot.py index 594cd9982bb..6da6628b655 100644 --- a/tests/pytests/unit/states/test_boto_iot.py +++ b/tests/pytests/unit/states/test_boto_iot.py @@ -18,6 +18,7 @@ log = logging.getLogger(__name__) pytestmark = [ pytest.mark.slow_test, + pytest.mark.skip_on_fips_enabled_platform, ] diff --git a/tests/pytests/unit/utils/jinja/test_custom_extensions.py b/tests/pytests/unit/utils/jinja/test_custom_extensions.py index 4d004230fcb..d213b69709d 100644 --- 
a/tests/pytests/unit/utils/jinja/test_custom_extensions.py +++ b/tests/pytests/unit/utils/jinja/test_custom_extensions.py @@ -46,7 +46,6 @@ def minion_opts(tmp_path, minion_opts): "file_roots": {"test": [str(tmp_path / "templates")]}, "pillar_roots": {"test": [str(tmp_path / "templates")]}, "fileserver_backend": ["roots"], - "hash_type": "md5", "extension_modules": os.path.join( os.path.dirname(os.path.abspath(__file__)), "extmods" ), @@ -1041,6 +1040,7 @@ def test_method_call(minion_opts, local_salt): assert rendered == "None" +@pytest.mark.skip_on_fips_enabled_platform def test_md5(minion_opts, local_salt): """ Test the `md5` Jinja filter. diff --git a/tests/pytests/unit/utils/jinja/test_get_template.py b/tests/pytests/unit/utils/jinja/test_get_template.py index 35fc188b812..cdba34fa171 100644 --- a/tests/pytests/unit/utils/jinja/test_get_template.py +++ b/tests/pytests/unit/utils/jinja/test_get_template.py @@ -61,7 +61,6 @@ def minion_opts(tmp_path, minion_opts): "file_roots": {"test": [str(tmp_path / "files" / "test")]}, "pillar_roots": {"test": [str(tmp_path / "files" / "test")]}, "fileserver_backend": ["roots"], - "hash_type": "md5", "extension_modules": os.path.join( os.path.dirname(os.path.abspath(__file__)), "extmods" ), diff --git a/tests/support/pytest/mysql.py b/tests/support/pytest/mysql.py index 337a4f8e642..ac3b6601d7f 100644 --- a/tests/support/pytest/mysql.py +++ b/tests/support/pytest/mysql.py @@ -3,6 +3,7 @@ import time import attr import pytest +from pytestskipmarkers.utils import platform from saltfactories.utils import random_string # This `pytest.importorskip` here actually works because this module @@ -102,6 +103,10 @@ def mysql_image(request): @pytest.fixture(scope="module") def create_mysql_combo(mysql_image): + if platform.is_fips_enabled(): + if mysql_image.name in ("mysql-server", "percona") and mysql_image.tag == "8.0": + pytest.skip(f"These tests fail on {mysql_image.name}:{mysql_image.tag}") + return MySQLCombo( 
mysql_name=mysql_image.name, mysql_version=mysql_image.tag, diff --git a/tests/unit/modules/test_boto3_elasticsearch.py b/tests/unit/modules/test_boto3_elasticsearch.py index 6b82c0abba7..0e60a9e0746 100644 --- a/tests/unit/modules/test_boto3_elasticsearch.py +++ b/tests/unit/modules/test_boto3_elasticsearch.py @@ -28,6 +28,10 @@ except ImportError: # https://github.com/boto/boto/commit/33ac26b416fbb48a60602542b4ce15dcc7029f12 REQUIRED_BOTO3_VERSION = "1.2.1" +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + def __virtual__(): """ diff --git a/tests/unit/modules/test_boto3_route53.py b/tests/unit/modules/test_boto3_route53.py index 9d421471942..5e7332fbb35 100644 --- a/tests/unit/modules/test_boto3_route53.py +++ b/tests/unit/modules/test_boto3_route53.py @@ -25,6 +25,10 @@ except ImportError: # https://github.com/boto/boto/commit/33ac26b416fbb48a60602542b4ce15dcc7029f12 REQUIRED_BOTO3_VERSION = "1.2.1" +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + def __virtual__(): """ diff --git a/tests/unit/modules/test_boto_apigateway.py b/tests/unit/modules/test_boto_apigateway.py index 5f3d2a49822..e6bb33a47dc 100644 --- a/tests/unit/modules/test_boto_apigateway.py +++ b/tests/unit/modules/test_boto_apigateway.py @@ -23,6 +23,10 @@ try: except ImportError: HAS_BOTO = False +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module diff --git a/tests/unit/modules/test_boto_cloudtrail.py b/tests/unit/modules/test_boto_cloudtrail.py index de31ff955a0..3b6488b3129 100644 --- a/tests/unit/modules/test_boto_cloudtrail.py +++ b/tests/unit/modules/test_boto_cloudtrail.py @@ -22,6 +22,10 @@ try: except ImportError: HAS_BOTO = False +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module,unused-import # the boto_cloudtrail module relies on the connect_to_region() method diff --git a/tests/unit/modules/test_boto_cloudwatch_event.py 
b/tests/unit/modules/test_boto_cloudwatch_event.py index 82d158104aa..4d37747b8f7 100644 --- a/tests/unit/modules/test_boto_cloudwatch_event.py +++ b/tests/unit/modules/test_boto_cloudwatch_event.py @@ -22,6 +22,10 @@ try: except ImportError: HAS_BOTO = False +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module,unused-import log = logging.getLogger(__name__) diff --git a/tests/unit/modules/test_boto_cognitoidentity.py b/tests/unit/modules/test_boto_cognitoidentity.py index 1e213a169ac..51ae9075a0b 100644 --- a/tests/unit/modules/test_boto_cognitoidentity.py +++ b/tests/unit/modules/test_boto_cognitoidentity.py @@ -21,6 +21,10 @@ try: except ImportError: HAS_BOTO = False +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module diff --git a/tests/unit/modules/test_boto_elasticsearch_domain.py b/tests/unit/modules/test_boto_elasticsearch_domain.py index 5c5845aa25b..e0329df5cec 100644 --- a/tests/unit/modules/test_boto_elasticsearch_domain.py +++ b/tests/unit/modules/test_boto_elasticsearch_domain.py @@ -21,6 +21,10 @@ try: except ImportError: HAS_BOTO = False +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module diff --git a/tests/unit/modules/test_boto_iot.py b/tests/unit/modules/test_boto_iot.py index 7c96244ce08..8c61d86dd9b 100644 --- a/tests/unit/modules/test_boto_iot.py +++ b/tests/unit/modules/test_boto_iot.py @@ -23,6 +23,10 @@ try: except ImportError: HAS_BOTO = False +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module,unused-import # the boto_iot module relies on the connect_to_region() method diff --git a/tests/unit/modules/test_boto_lambda.py b/tests/unit/modules/test_boto_lambda.py index d32dc9345b6..157e559207d 100644 --- a/tests/unit/modules/test_boto_lambda.py +++ b/tests/unit/modules/test_boto_lambda.py @@ -26,6 
+26,10 @@ try: except ImportError: HAS_BOTO = False +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module # the boto_lambda module relies on the connect_to_region() method diff --git a/tests/unit/modules/test_boto_s3_bucket.py b/tests/unit/modules/test_boto_s3_bucket.py index 8e418a8293c..90d868d1141 100644 --- a/tests/unit/modules/test_boto_s3_bucket.py +++ b/tests/unit/modules/test_boto_s3_bucket.py @@ -22,6 +22,10 @@ try: except ImportError: HAS_BOTO = False +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module,unused-import # the boto_s3_bucket module relies on the connect_to_region() method diff --git a/tests/unit/modules/test_virt.py b/tests/unit/modules/test_virt.py index 7e72d07b8e7..2fee41f8bd9 100644 --- a/tests/unit/modules/test_virt.py +++ b/tests/unit/modules/test_virt.py @@ -2,32 +2,27 @@ virt execution module unit tests """ -# pylint: disable=3rd-party-module-not-gated - - import datetime import os import shutil import tempfile import xml.etree.ElementTree as ET +import pytest + import salt.config import salt.modules.config as config import salt.modules.virt as virt import salt.syspaths import salt.utils.yaml from salt.exceptions import CommandExecutionError, SaltInvocationError - -# pylint: disable=import-error from tests.support.helpers import dedent from tests.support.mixins import LoaderModuleMockMixin from tests.support.mock import MagicMock, patch from tests.support.unit import TestCase -# pylint: disable=invalid-name,protected-access,attribute-defined-outside-init,too-many-public-methods,unused-argument - -class LibvirtMock(MagicMock): # pylint: disable=too-many-ancestors +class LibvirtMock(MagicMock): """ Libvirt library mock """ @@ -1882,6 +1877,7 @@ class VirtTestCase(TestCase, LoaderModuleMockMixin): ], ) + @pytest.mark.skip_on_fips_enabled_platform def test_init(self): """ Test init() function diff --git 
a/tests/unit/modules/test_zcbuildout.py b/tests/unit/modules/test_zcbuildout.py index ac98435ffa0..db7a862f727 100644 --- a/tests/unit/modules/test_zcbuildout.py +++ b/tests/unit/modules/test_zcbuildout.py @@ -20,12 +20,13 @@ from tests.support.runtests import RUNTIME_VARS from tests.support.unit import TestCase pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, pytest.mark.skip_on_windows( reason=( "Special steps are required for proper SSL validation because " "`easy_install` is too old(and deprecated)." ) - ) + ), ] KNOWN_VIRTUALENV_BINARY_NAMES = ( diff --git a/tests/unit/states/test_boto_apigateway.py b/tests/unit/states/test_boto_apigateway.py index 51c85d6058a..7cf95a43442 100644 --- a/tests/unit/states/test_boto_apigateway.py +++ b/tests/unit/states/test_boto_apigateway.py @@ -28,6 +28,10 @@ try: except ImportError: HAS_BOTO = False +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module diff --git a/tests/unit/states/test_boto_cognitoidentity.py b/tests/unit/states/test_boto_cognitoidentity.py index 4354df0546f..f84a055dd2d 100644 --- a/tests/unit/states/test_boto_cognitoidentity.py +++ b/tests/unit/states/test_boto_cognitoidentity.py @@ -25,6 +25,10 @@ try: except ImportError: HAS_BOTO = False +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module diff --git a/tests/unit/states/test_zcbuildout.py b/tests/unit/states/test_zcbuildout.py index b5f919ac6b2..7cafbba6a62 100644 --- a/tests/unit/states/test_zcbuildout.py +++ b/tests/unit/states/test_zcbuildout.py @@ -11,12 +11,13 @@ from tests.support.runtests import RUNTIME_VARS from tests.unit.modules.test_zcbuildout import KNOWN_VIRTUALENV_BINARY_NAMES, Base pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, pytest.mark.skip_on_windows( reason=( "Special steps are required for proper SSL validation because " "`easy_install` is too old(and deprecated)." 
) - ) + ), ] diff --git a/tests/unit/utils/test_boto3mod.py b/tests/unit/utils/test_boto3mod.py index 74f6478e272..0a9509ab598 100644 --- a/tests/unit/utils/test_boto3mod.py +++ b/tests/unit/utils/test_boto3mod.py @@ -24,6 +24,10 @@ except ImportError: REQUIRED_BOTO3_VERSION = "1.2.1" +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + @pytest.mark.skipif(HAS_BOTO3 is False, reason="The boto module must be installed.") @pytest.mark.skipif( diff --git a/tests/unit/utils/test_botomod.py b/tests/unit/utils/test_botomod.py index bf3ca37a837..3e67cbec698 100644 --- a/tests/unit/utils/test_botomod.py +++ b/tests/unit/utils/test_botomod.py @@ -53,6 +53,11 @@ except ImportError: return stub_function +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + + required_boto_version = "2.0.0" required_boto3_version = "1.2.1" region = "us-east-1" diff --git a/tests/unit/utils/test_find.py b/tests/unit/utils/test_find.py index bc81c48554d..1960d4a3510 100644 --- a/tests/unit/utils/test_find.py +++ b/tests/unit/utils/test_find.py @@ -332,6 +332,7 @@ class TestPrintOption(TestCase): option = salt.utils.find.PrintOption("print", "path user") self.assertEqual(option.requires(), salt.utils.find._REQUIRES_STAT) + @pytest.mark.skip_on_fips_enabled_platform def test_print_option_execute(self): hello_file = os.path.join(self.tmpdir, "hello.txt") with salt.utils.files.fopen(hello_file, "w") as fp_: diff --git a/tests/unit/utils/test_hashutils.py b/tests/unit/utils/test_hashutils.py index 5cf11c114ef..b9a685957a5 100644 --- a/tests/unit/utils/test_hashutils.py +++ b/tests/unit/utils/test_hashutils.py @@ -1,3 +1,5 @@ +import pytest + import salt.utils.hashutils from tests.support.unit import TestCase @@ -87,6 +89,7 @@ class HashutilsTestCase(TestCase): self.bytes, ) + @pytest.mark.skip_on_fips_enabled_platform def test_md5_digest(self): """ Ensure that this function converts the value passed to bytes before From a09afcba6824e9884461041cf4336bd4a80212ad Mon Sep 17 
00:00:00 2001 From: Pedro Algarvio Date: Tue, 7 Nov 2023 12:54:27 +0000 Subject: [PATCH 102/196] Use `-eq 0` instead of `== 0` in shell script logic Signed-off-by: Pedro Algarvio --- .../integration/ssh/test_pre_flight.py | 72 +++++++++---------- 1 file changed, 32 insertions(+), 40 deletions(-) diff --git a/tests/pytests/integration/ssh/test_pre_flight.py b/tests/pytests/integration/ssh/test_pre_flight.py index 09c65d29430..c2fc14094e8 100644 --- a/tests/pytests/integration/ssh/test_pre_flight.py +++ b/tests/pytests/integration/ssh/test_pre_flight.py @@ -19,7 +19,9 @@ from saltfactories.utils import random_string import salt.utils.files -pytestmark = pytest.mark.skip_on_windows(reason="Salt-ssh not available on Windows") +pytestmark = [ + pytest.mark.skip_on_windows(reason="Salt-ssh not available on Windows"), +] def _custom_roster(roster_file, roster_data): @@ -33,33 +35,39 @@ def _custom_roster(roster_file, roster_data): @pytest.fixture def _create_roster(salt_ssh_roster_file, tmp_path): - ret = {} - ret["roster"] = salt_ssh_roster_file - ret["data"] = {"ssh_pre_flight": str(tmp_path / "ssh_pre_flight.sh")} - ret["test_script"] = str(tmp_path / "test-pre-flight-script-worked.txt") - ret["thin_dir"] = tmp_path / "thin_dir" + thin_dir = tmp_path / "thin-dir" + ret = { + "roster": salt_ssh_roster_file, + "data": { + "ssh_pre_flight": str(tmp_path / "ssh_pre_flight.sh"), + }, + "test_script": str(tmp_path / "test-pre-flight-script-worked.txt"), + "thin_dir": str(thin_dir), + } with salt.utils.files.fopen(salt_ssh_roster_file, "r") as fp: data = salt.utils.yaml.safe_load(fp) + pre_flight_script = ret["data"]["ssh_pre_flight"] data["localhost"]["ssh_pre_flight"] = pre_flight_script - data["localhost"]["thin_dir"] = str(ret["thin_dir"]) + data["localhost"]["thin_dir"] = ret["thin_dir"] with salt.utils.files.fopen(salt_ssh_roster_file, "w") as fp: yaml.safe_dump(data, fp) with salt.utils.files.fopen(pre_flight_script, "w") as fp: fp.write("touch 
{}".format(ret["test_script"])) - yield ret - if ret["thin_dir"].exists(): - shutil.rmtree(ret["thin_dir"]) + try: + yield ret + finally: + if thin_dir.exists(): + shutil.rmtree(thin_dir) @pytest.mark.slow_test def test_ssh_pre_flight(salt_ssh_cli, caplog, _create_roster): """ - test ssh when ssh_pre_flight is set - ensure the script runs successfully + test ssh when ssh_pre_flight is set ensure the script runs successfully """ ret = salt_ssh_cli.run("test.ping") assert ret.returncode == 0 @@ -70,8 +78,7 @@ def test_ssh_pre_flight(salt_ssh_cli, caplog, _create_roster): @pytest.mark.slow_test def test_ssh_run_pre_flight(salt_ssh_cli, _create_roster): """ - test ssh when --pre-flight is passed to salt-ssh - to ensure the script runs successfully + test ssh when --pre-flight is passed to salt-ssh to ensure the script runs successfully """ # make sure we previously ran a command so the thin dir exists ret = salt_ssh_cli.run("test.ping") @@ -85,10 +92,7 @@ def test_ssh_run_pre_flight(salt_ssh_cli, _create_roster): assert not pathlib.Path(_create_roster["test_script"]).exists() # Now ensure - ret = salt_ssh_cli.run( - "test.ping", - "--pre-flight", - ) + ret = salt_ssh_cli.run("test.ping", "--pre-flight") assert ret.returncode == 0 assert pathlib.Path(_create_roster["test_script"]).exists() @@ -115,18 +119,15 @@ def test_ssh_run_pre_flight_args(salt_ssh_cli, _create_roster): assert ret.returncode == 0 assert test_script_1.exists() assert test_script_2.exists() - pathlib.Path(test_script_1).unlink() - pathlib.Path(test_script_2).unlink() + test_script_1.unlink() + test_script_2.unlink() ret = salt_ssh_cli.run("test.ping") assert ret.returncode == 0 assert not test_script_1.exists() assert not test_script_2.exists() - ret = salt_ssh_cli.run( - "test.ping", - "--pre-flight", - ) + ret = salt_ssh_cli.run("test.ping", "--pre-flight") assert ret.returncode == 0 assert test_script_1.exists() assert test_script_2.exists() @@ -166,17 +167,14 @@ def 
test_ssh_run_pre_flight_args_prevent_injection( test_script_2.unlink() assert not injected_file.is_file() - ret = salt_ssh_cli.run( - "test.ping", - "--pre-flight", - ) + ret = salt_ssh_cli.run("test.ping", "--pre-flight") assert ret.returncode == 0 assert test_script_1.exists() assert test_script_2.exists() - assert not pathlib.Path( - injected_file - ).is_file(), "File injection suceeded. This shouldn't happend" + assert ( + not injected_file.is_file() + ), "File injection suceeded. This shouldn't happend" @pytest.mark.flaky(max_runs=4) @@ -189,10 +187,7 @@ def test_ssh_run_pre_flight_failure(salt_ssh_cli, _create_roster): with salt.utils.files.fopen(_create_roster["data"]["ssh_pre_flight"], "w") as fp_: fp_.write("exit 2") - ret = salt_ssh_cli.run( - "test.ping", - "--pre-flight", - ) + ret = salt_ssh_cli.run("test.ping", "--pre-flight") assert ret.data["retcode"] == 2 @@ -255,7 +250,7 @@ def test_ssh_pre_flight_perms(salt_ssh_cli, caplog, _create_roster, account): x=1 while [ $x -le 200000 ]; do SCRIPT=`bash {str(tmp_preflight)} 2> /dev/null; echo $?` - if [ ${{SCRIPT}} == 0 ]; then + if [ ${{SCRIPT}} -eq 0 ]; then break fi x=$(( $x + 1 )) @@ -301,10 +296,7 @@ def test_ssh_run_pre_flight_target_file_perms(salt_ssh_cli, _create_roster, tmp_ """ ) - ret = salt_ssh_cli.run( - "test.ping", - "--pre-flight", - ) + ret = salt_ssh_cli.run("test.ping", "--pre-flight") assert ret.returncode == 0 with salt.utils.files.fopen(perms_file) as fp: data = fp.read() From c13898620ae06eb1ae1e984efb764554c304d59b Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 8 Nov 2023 17:16:04 +0000 Subject: [PATCH 103/196] Let's just skip on Aarch64 instead Signed-off-by: Pedro Algarvio --- tests/pytests/integration/ssh/test_saltcheck.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/tests/pytests/integration/ssh/test_saltcheck.py b/tests/pytests/integration/ssh/test_saltcheck.py index a4cd6f3d8e0..a19fe9f1270 100644 --- 
a/tests/pytests/integration/ssh/test_saltcheck.py +++ b/tests/pytests/integration/ssh/test_saltcheck.py @@ -1,5 +1,4 @@ import pytest -from pytestskipmarkers.utils import platform pytestmark = [ pytest.mark.slow_test, @@ -7,12 +6,6 @@ pytestmark = [ ] -@pytest.fixture -def _skip_on_fips_and_arm64(grains): - if platform.is_fips_enabled() and grains["cpuarch"] == "aarch64": - pytest.skip("Test cannot run on a FIPS enabled platform") - - def test_saltcheck_run_test(salt_ssh_cli): """ test saltcheck.run_test with salt-ssh @@ -30,7 +23,7 @@ def test_saltcheck_run_test(salt_ssh_cli): assert ret.data["status"] == "Pass" -@pytest.mark.usefixtures("_skip_on_fips_and_arm64") +@pytest.mark.skip_on_aarch64 def test_saltcheck_state(salt_ssh_cli): """ saltcheck.run_state_tests From 2fb207753e348fbd0d33444c58ca5113ea972193 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 9 Nov 2023 14:36:37 +0000 Subject: [PATCH 104/196] Add a few more platform slugs which will get tested with the TCP transport Signed-off-by: Pedro Algarvio --- tools/ci.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/tools/ci.py b/tools/ci.py index 59ef3e38db9..7e5d098e446 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -660,6 +660,9 @@ def matrix( if transport == "tcp": if distro_slug not in ( "centosstream-9", + "centosstream-9-arm64", + "photonos-5", + "photonos-5-arm64", "ubuntu-22.04", "ubuntu-22.04-arm64", ): @@ -683,19 +686,15 @@ def matrix( "test-group-count": splits, } ) - if ( - fips is True - and transport != "tcp" - and distro_slug.startswith(("photonos-4", "photonos-5")) + if fips is True and distro_slug.startswith( + ("photonos-4", "photonos-5") ): # Repeat the last one, but with fips _matrix.append({"fips": "fips", **_matrix[-1]}) else: _matrix.append({"transport": transport, "tests-chunk": chunk}) - if ( - fips is True - and transport != "tcp" - and distro_slug.startswith(("photonos-4", "photonos-5")) + if fips is True and distro_slug.startswith( + 
("photonos-4", "photonos-5") ): # Repeat the last one, but with fips _matrix.append({"fips": "fips", **_matrix[-1]}) From 238a744bcb899ce00a920c90dc24d1e00a1e8072 Mon Sep 17 00:00:00 2001 From: Joe Groocock Date: Mon, 18 Sep 2023 12:29:22 +0100 Subject: [PATCH 105/196] Fix vt.Terminal failing test: test_log_sanitize Fixes failing test added in a09b4f445052be66f0ac53fd01fa02bfa5b82ea6 We can't assume tests are run at debug level, so this ensures the test passes regardless of what logging level is currently set by capturing the output in caplog at DEBUG which stream_stdout/stream_stderr uses by default. Signed-off-by: Joe Groocock --- tests/pytests/unit/utils/test_vt.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tests/pytests/unit/utils/test_vt.py b/tests/pytests/unit/utils/test_vt.py index 438a6eb09c0..c31b25e623c 100644 --- a/tests/pytests/unit/utils/test_vt.py +++ b/tests/pytests/unit/utils/test_vt.py @@ -1,3 +1,4 @@ +import logging import os import signal @@ -43,10 +44,13 @@ def test_log_sanitize(test_cmd, caplog): cmd, log_stdout=True, log_stderr=True, + log_stdout_level="debug", + log_stderr_level="debug", log_sanitize=password, stream_stdout=False, stream_stderr=False, ) - ret = term.recv() + with caplog.at_level(logging.DEBUG): + ret = term.recv() assert password not in caplog.text assert "******" in caplog.text From 6374c0fbf466f5d675d78b508a7b830732efc4e5 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Wed, 15 Nov 2023 02:32:12 -0700 Subject: [PATCH 106/196] Bump relenv to 0.14.2 --- .github/workflows/ci.yml | 28 ++++++++++++++-------------- .github/workflows/nightly.yml | 28 ++++++++++++++-------------- .github/workflows/scheduled.yml | 28 ++++++++++++++-------------- .github/workflows/staging.yml | 28 ++++++++++++++-------------- cicd/shared-gh-workflows-context.yml | 2 +- 5 files changed, 57 insertions(+), 57 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 902076cea4e..ca3f32c1086 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -444,7 +444,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" build-deps-onedir-windows: @@ -458,7 +458,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" build-deps-onedir-macos: @@ -472,7 +472,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" build-salt-onedir-linux: @@ -488,7 +488,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ 
fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" build-salt-onedir-windows: @@ -504,7 +504,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" build-salt-onedir-macos: @@ -520,7 +520,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" build-rpm-pkgs-onedir: @@ -532,7 +532,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -545,7 +545,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -558,7 +558,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -571,7 +571,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -584,7 +584,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml 
with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -597,7 +597,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -610,7 +610,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -623,7 +623,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index d3c963f61e1..62fa68b30c0 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -493,7 +493,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" build-deps-onedir-windows: @@ -507,7 +507,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" build-deps-onedir-macos: @@ -521,7 +521,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" build-salt-onedir-linux: @@ -537,7 +537,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" build-salt-onedir-windows: @@ -553,7 +553,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" build-salt-onedir-macos: @@ -569,7 +569,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" build-rpm-pkgs-onedir: @@ -581,7 +581,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -594,7 +594,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -607,7 +607,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: 
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -620,7 +620,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -633,7 +633,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" environment: nightly @@ -649,7 +649,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" environment: nightly @@ -665,7 +665,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" environment: nightly @@ -681,7 +681,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" environment: nightly diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index a093a8fdfa8..6d43a7a5c8c 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -478,7 +478,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.1" + relenv-version: 
"0.14.2" python-version: "3.10.13" build-deps-onedir-windows: @@ -492,7 +492,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" build-deps-onedir-macos: @@ -506,7 +506,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" build-salt-onedir-linux: @@ -522,7 +522,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" build-salt-onedir-windows: @@ -538,7 +538,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" build-salt-onedir-macos: @@ -554,7 +554,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" build-rpm-pkgs-onedir: @@ 
-566,7 +566,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -579,7 +579,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -592,7 +592,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -605,7 +605,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -618,7 +618,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -631,7 +631,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -644,7 +644,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -657,7 +657,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" diff --git a/.github/workflows/staging.yml 
b/.github/workflows/staging.yml index c89eebc1032..c185f9cb127 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -488,7 +488,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" build-deps-onedir-windows: @@ -502,7 +502,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" build-deps-onedir-macos: @@ -516,7 +516,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" build-salt-onedir-linux: @@ -532,7 +532,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" build-salt-onedir-windows: @@ -548,7 +548,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.1" + 
relenv-version: "0.14.2" python-version: "3.10.13" build-salt-onedir-macos: @@ -564,7 +564,7 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" build-rpm-pkgs-onedir: @@ -576,7 +576,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -589,7 +589,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -602,7 +602,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -615,7 +615,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -628,7 +628,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" environment: staging @@ -644,7 +644,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" environment: staging @@ -660,7 +660,7 @@ jobs: 
uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" environment: staging @@ -676,7 +676,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - relenv-version: "0.14.1" + relenv-version: "0.14.2" python-version: "3.10.13" source: "src" environment: staging diff --git a/cicd/shared-gh-workflows-context.yml b/cicd/shared-gh-workflows-context.yml index ca40fb1c643..c2691494b37 100644 --- a/cicd/shared-gh-workflows-context.yml +++ b/cicd/shared-gh-workflows-context.yml @@ -1,3 +1,3 @@ nox_version: "2022.8.7" python_version: "3.10.13" -relenv_version: "0.14.1" +relenv_version: "0.14.2" From 6034b9841368a0035a8370c9e7e6e69faf9872ed Mon Sep 17 00:00:00 2001 From: "Daniel A. Wozniak" Date: Wed, 15 Nov 2023 02:33:25 -0700 Subject: [PATCH 107/196] Update changelog --- changelog/65316.fixed.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelog/65316.fixed.md b/changelog/65316.fixed.md index 4b1d151abef..f5f9e197e30 100644 --- a/changelog/65316.fixed.md +++ b/changelog/65316.fixed.md @@ -1,4 +1,4 @@ -Uprade relenv to 0.14.1 +Uprade relenv to 0.14.2 - Update openssl to address CVE-2023-5363. - Fix bug in openssl setup when openssl binary can't be found. - Add M1 mac support. 
From 109a62c7796599a500308e5f1b4bb8a16887f0bb Mon Sep 17 00:00:00 2001 From: nicholasmhughes Date: Tue, 14 Nov 2023 16:25:13 -0500 Subject: [PATCH 108/196] fixes saltstack/salt#65501 file.comment ignore_missing not working with multiline char (cherry picked from commit c5fbfa1fe74da3aa6a736653635cb857a74e8bc0) # Conflicts: # salt/states/file.py --- changelog/65501.fixed.md | 1 + salt/states/file.py | 36 +++++++++---------- .../functional/states/file/test_comment.py | 15 +++++++- 3 files changed, 33 insertions(+), 19 deletions(-) create mode 100644 changelog/65501.fixed.md diff --git a/changelog/65501.fixed.md b/changelog/65501.fixed.md new file mode 100644 index 00000000000..31592c67e70 --- /dev/null +++ b/changelog/65501.fixed.md @@ -0,0 +1 @@ +Fix file.comment ignore_missing not working with multiline char diff --git a/salt/states/file.py b/salt/states/file.py index 9508a4c2faf..9fce51867b9 100644 --- a/salt/states/file.py +++ b/salt/states/file.py @@ -439,7 +439,7 @@ def _gen_recurse_managed_files( exclude_pat=None, maxdepth=None, include_empty=False, - **kwargs + **kwargs, ): """ Generate the list of files managed by a recurse state @@ -1342,7 +1342,7 @@ def hardlink( user=None, group=None, dir_mode=None, - **kwargs + **kwargs, ): """ Create a hard link @@ -1548,7 +1548,7 @@ def symlink( atomic=False, disallow_copy_and_unlink=False, inherit_user_and_group=False, - **kwargs + **kwargs, ): """ Create a symbolic link (symlink, soft link) @@ -1986,7 +1986,7 @@ def tidied( age_size_logical_operator="OR", age_size_only=None, rmlinks=True, - **kwargs + **kwargs, ): """ .. 
versionchanged:: 3005,3006.0 @@ -2305,7 +2305,7 @@ def managed( win_perms_reset=False, verify_ssl=True, use_etag=False, - **kwargs + **kwargs, ): r""" Manage a given file, this function allows for a file to be downloaded from @@ -3207,7 +3207,7 @@ def managed( serange=serange, verify_ssl=verify_ssl, follow_symlinks=follow_symlinks, - **kwargs + **kwargs, ) if salt.utils.platform.is_windows(): @@ -3270,7 +3270,7 @@ def managed( skip_verify, verify_ssl=verify_ssl, use_etag=use_etag, - **kwargs + **kwargs, ) except Exception as exc: # pylint: disable=broad-except ret["changes"] = {} @@ -3325,7 +3325,7 @@ def managed( setype=setype, serange=serange, use_etag=use_etag, - **kwargs + **kwargs, ) except Exception as exc: # pylint: disable=broad-except ret["changes"] = {} @@ -3404,7 +3404,7 @@ def managed( setype=setype, serange=serange, use_etag=use_etag, - **kwargs + **kwargs, ) except Exception as exc: # pylint: disable=broad-except ret["changes"] = {} @@ -3492,7 +3492,7 @@ def directory( win_deny_perms=None, win_inheritance=True, win_perms_reset=False, - **kwargs + **kwargs, ): r""" Ensure that a named directory is present and has the right perms @@ -4206,7 +4206,7 @@ def recurse( win_perms=None, win_deny_perms=None, win_inheritance=True, - **kwargs + **kwargs, ): """ Recurse through a subdirectory on the master and copy said subdirectory @@ -4577,7 +4577,7 @@ def recurse( context=context, defaults=defaults, backup=backup, - **pass_kwargs + **pass_kwargs, ) merge_ret(path, _ret) @@ -6158,7 +6158,7 @@ def comment(name, regex, char="#", backup=".bak", ignore_missing=False): # remove (?i)-like flags, ^ and $ unanchor_regex = re.sub(r"^(\(\?[iLmsux]\))?\^?(.*?)\$?$", r"\2", regex) - uncomment_regex = r"^(?!\s*{}).*".format(char) + unanchor_regex + uncomment_regex = rf"^(?!\s*{char})\s*" + unanchor_regex comment_regex = char + unanchor_regex # Make sure the pattern appears in the file before continuing @@ -6902,7 +6902,7 @@ def patch( reject_file=None, strip=None, 
saltenv=None, - **kwargs + **kwargs, ): """ Ensure that a patch has been applied to the specified file or directory @@ -7400,7 +7400,7 @@ def copy_( mode=None, dir_mode=None, subdir=False, - **kwargs + **kwargs, ): """ If the file defined by the ``source`` option exists on the minion, copy it @@ -7842,7 +7842,7 @@ def serialize( serializer=None, serializer_opts=None, deserializer_opts=None, - **kwargs + **kwargs, ): """ Serializes dataset and store it into managed file. Useful for sharing @@ -8178,7 +8178,7 @@ def serialize( saltenv=__env__, contents=contents, skip_verify=False, - **kwargs + **kwargs, ) if ret["changes"]: @@ -8559,7 +8559,7 @@ def shortcut( backupname=None, makedirs=False, user=None, - **kwargs + **kwargs, ): """ Create a Windows shortcut diff --git a/tests/pytests/functional/states/file/test_comment.py b/tests/pytests/functional/states/file/test_comment.py index 377e6b1b0e6..b7a7c8a7c95 100644 --- a/tests/pytests/functional/states/file/test_comment.py +++ b/tests/pytests/functional/states/file/test_comment.py @@ -106,7 +106,7 @@ def test_issue_2401_file_comment(modules, tmp_path): tmp_file.write_text("hello\nworld\n") # create the sls template template_lines = [ - "{}:".format(tmp_file), + f"{tmp_file}:", " file.comment:", " - regex: ^world", ] @@ -122,3 +122,16 @@ def test_issue_2401_file_comment(modules, tmp_path): for state_run in ret: assert state_run.result is True assert "Pattern already commented" in state_run.comment + + +def test_issue_65501(file, tmp_path): + tmp_file = tmp_path / "issue-65501.txt" + tmp_file.write_text("first\n#PermitRootLogin prohibit-password\nlast") + ret = file.comment( + name=str(tmp_file), + regex="^PermitRootLogin[ \t]+.*$", + char="# NEXT LINE COMMENT SALTSTACK openssh-server_comment_permitrootlogin_sshd_config\n# ", + ignore_missing=True, + ) + assert ret.result is True + assert ret.comment == "Pattern not found and ignore_missing set to True" From d76b82558ac65247d42a5d1835429d126c080ceb Mon Sep 17 00:00:00 
2001 From: MKLeb Date: Fri, 15 Sep 2023 09:47:02 -0400 Subject: [PATCH 109/196] Add jobs for rpm distros to produce arm64 repos explicitly and remove the condition that changes aarch64 to arm64 --- .github/workflows/nightly.yml | 29 ++++++++++++++++++- .github/workflows/staging.yml | 29 ++++++++++++++++++- .../templates/build-rpm-repo.yml.jinja | 11 ++++++- tools/pkg/repo/create.py | 4 --- 4 files changed, 66 insertions(+), 7 deletions(-) diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 62fa68b30c0..e4e4f890efc 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -3126,6 +3126,9 @@ jobs: arch: x86_64 - pkg-type: rpm distro: amazon + version: "2" + arch: arm64 + - distro: amazon version: "2" arch: aarch64 - pkg-type: rpm @@ -3142,6 +3145,9 @@ jobs: arch: x86_64 - pkg-type: rpm distro: redhat + version: "7" + arch: arm64 + - distro: redhat version: "7" arch: aarch64 - pkg-type: rpm @@ -3150,6 +3156,9 @@ jobs: arch: x86_64 - pkg-type: rpm distro: redhat + version: "8" + arch: arm64 + - distro: redhat version: "8" arch: aarch64 - pkg-type: rpm @@ -3158,6 +3167,9 @@ jobs: arch: x86_64 - pkg-type: rpm distro: redhat + version: "9" + arch: arm64 + - distro: redhat version: "9" arch: aarch64 - pkg-type: rpm @@ -3166,6 +3178,9 @@ jobs: arch: x86_64 - pkg-type: rpm distro: fedora + version: "36" + arch: arm64 + - distro: fedora version: "36" arch: aarch64 - pkg-type: rpm @@ -3174,6 +3189,9 @@ jobs: arch: x86_64 - pkg-type: rpm distro: fedora + version: "37" + arch: arm64 + - distro: fedora version: "37" arch: aarch64 - pkg-type: rpm @@ -3182,6 +3200,9 @@ jobs: arch: x86_64 - pkg-type: rpm distro: fedora + version: "38" + arch: arm64 + - distro: fedora version: "38" arch: aarch64 - pkg-type: rpm @@ -3190,6 +3211,9 @@ jobs: arch: x86_64 - pkg-type: rpm distro: photon + version: "3" + arch: arm64 + - distro: photon version: "3" arch: aarch64 - pkg-type: rpm @@ -3198,6 +3222,9 @@ jobs: arch: x86_64 - pkg-type: rpm 
distro: photon + version: "4" + arch: arm64 + - distro: photon version: "4" arch: aarch64 - pkg-type: rpm @@ -3229,7 +3256,7 @@ jobs: - name: Download RPM Packages uses: actions/download-artifact@v3 with: - name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-${{ matrix.arch }}-rpm + name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}-rpm path: artifacts/pkgs/incoming - name: Setup GnuPG diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index c185f9cb127..c09bbbcd56a 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -2941,6 +2941,9 @@ jobs: arch: x86_64 - pkg-type: rpm distro: amazon + version: "2" + arch: arm64 + - distro: amazon version: "2" arch: aarch64 - pkg-type: rpm @@ -2957,6 +2960,9 @@ jobs: arch: x86_64 - pkg-type: rpm distro: redhat + version: "7" + arch: arm64 + - distro: redhat version: "7" arch: aarch64 - pkg-type: rpm @@ -2965,6 +2971,9 @@ jobs: arch: x86_64 - pkg-type: rpm distro: redhat + version: "8" + arch: arm64 + - distro: redhat version: "8" arch: aarch64 - pkg-type: rpm @@ -2973,6 +2982,9 @@ jobs: arch: x86_64 - pkg-type: rpm distro: redhat + version: "9" + arch: arm64 + - distro: redhat version: "9" arch: aarch64 - pkg-type: rpm @@ -2981,6 +2993,9 @@ jobs: arch: x86_64 - pkg-type: rpm distro: fedora + version: "36" + arch: arm64 + - distro: fedora version: "36" arch: aarch64 - pkg-type: rpm @@ -2989,6 +3004,9 @@ jobs: arch: x86_64 - pkg-type: rpm distro: fedora + version: "37" + arch: arm64 + - distro: fedora version: "37" arch: aarch64 - pkg-type: rpm @@ -2997,6 +3015,9 @@ jobs: arch: x86_64 - pkg-type: rpm distro: fedora + version: "38" + arch: arm64 + - distro: fedora version: "38" arch: aarch64 - pkg-type: rpm @@ -3005,6 +3026,9 @@ jobs: arch: x86_64 - pkg-type: rpm distro: photon + version: "3" + arch: arm64 + - distro: photon version: "3" arch: aarch64 - pkg-type: rpm @@ -3013,6 +3037,9 @@ jobs: arch: 
x86_64 - pkg-type: rpm distro: photon + version: "4" + arch: arm64 + - distro: photon version: "4" arch: aarch64 - pkg-type: rpm @@ -3044,7 +3071,7 @@ jobs: - name: Download RPM Packages uses: actions/download-artifact@v3 with: - name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-${{ matrix.arch }}-rpm + name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}-rpm path: artifacts/pkgs/incoming - name: Setup GnuPG diff --git a/.github/workflows/templates/build-rpm-repo.yml.jinja b/.github/workflows/templates/build-rpm-repo.yml.jinja index 208f2096301..46c427c09b8 100644 --- a/.github/workflows/templates/build-rpm-repo.yml.jinja +++ b/.github/workflows/templates/build-rpm-repo.yml.jinja @@ -5,24 +5,33 @@ include: <%- for distro, version, arch in ( ("amazon", "2", "x86_64"), + ("amazon", "2", "arm64"), ("amazon", "2", "aarch64"), ("amazon", "2023", "x86_64"), ("amazon", "2023", "aarch64"), ("redhat", "7", "x86_64"), + ("redhat", "7", "arm64"), ("redhat", "7", "aarch64"), ("redhat", "8", "x86_64"), + ("redhat", "8", "arm64"), ("redhat", "8", "aarch64"), ("redhat", "9", "x86_64"), + ("redhat", "9", "arm64"), ("redhat", "9", "aarch64"), ("fedora", "36", "x86_64"), + ("fedora", "36", "arm64"), ("fedora", "36", "aarch64"), ("fedora", "37", "x86_64"), + ("fedora", "37", "arm64"), ("fedora", "37", "aarch64"), ("fedora", "38", "x86_64"), + ("fedora", "38", "arm64"), ("fedora", "38", "aarch64"), ("photon", "3", "x86_64"), + ("photon", "3", "arm64"), ("photon", "3", "aarch64"), ("photon", "4", "x86_64"), + ("photon", "4", "arm64"), ("photon", "4", "aarch64"), ("photon", "5", "x86_64"), ("photon", "5", "aarch64"), @@ -53,7 +62,7 @@ - name: Download RPM Packages uses: actions/download-artifact@v3 with: - name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-${{ matrix.arch }}-rpm + name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch 
}}-rpm path: artifacts/pkgs/incoming - name: Setup GnuPG diff --git a/tools/pkg/repo/create.py b/tools/pkg/repo/create.py index b1cc0471f9e..8dfbf9dc459 100644 --- a/tools/pkg/repo/create.py +++ b/tools/pkg/repo/create.py @@ -396,10 +396,6 @@ def rpm( ctx.error(f"Support for {display_name} is missing.") ctx.exit(1) - if distro_arch == "aarch64": - ctx.info(f"The {distro_arch} arch is an alias for 'arm64'. Adjusting.") - distro_arch = "arm64" - ctx.info("Creating repository directory structure ...") create_repo_path = create_top_level_repo_path( ctx, From ed2ecf48c6b3911cb761524cd0f8a3ea82c4dee3 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Mon, 18 Sep 2023 16:49:44 -0400 Subject: [PATCH 110/196] Remove condition to switch from `arm64` to `aarch64` in the download tests --- pkg/tests/download/test_pkg_download.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/pkg/tests/download/test_pkg_download.py b/pkg/tests/download/test_pkg_download.py index c3cd24a8e66..81542ec4583 100644 --- a/pkg/tests/download/test_pkg_download.py +++ b/pkg/tests/download/test_pkg_download.py @@ -255,8 +255,6 @@ def setup_redhat_family( repo_subpath, ): arch = os.environ.get("SALT_REPO_ARCH") or "x86_64" - if arch == "aarch64": - arch = "arm64" if repo_subpath == "minor": repo_url_base = ( From 527cc3f344dbc22786092ec6000e515360434bf2 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Mon, 25 Sep 2023 18:34:39 -0400 Subject: [PATCH 111/196] Scrape the buckets for the package files to determine what releases we can test upgrade and downgrades for a given operating system --- tools/ci.py | 208 +++++++++++++++++++++++++--------------------------- 1 file changed, 98 insertions(+), 110 deletions(-) diff --git a/tools/ci.py b/tools/ci.py index 7e5d098e446..3a7cbfc61e3 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -24,6 +24,17 @@ if sys.version_info < (3, 11): else: from typing import NotRequired, TypedDict # pylint: disable=no-name-in-module +try: + import boto3 +except ImportError: + print( + "\nPlease 
run 'python -m pip install -r " + "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info), + file=sys.stderr, + flush=True, + ) + raise + log = logging.getLogger(__name__) # Define the command group @@ -743,49 +754,6 @@ def pkg_matrix( ctx.warn("The 'GITHUB_OUTPUT' variable is not set.") if TYPE_CHECKING: assert testing_releases - _matrix = [] - sessions = [ - "install", - ] - # OSs that where never included in 3005 - # We cannot test an upgrade for this OS on this version - not_3005 = ["amazonlinux-2-arm64", "photonos-5", "photonos-5-arm64"] - # OSs that where never included in 3006 - # We cannot test an upgrade for this OS on this version - not_3006 = ["photonos-5", "photonos-5-arm64"] - if ( - distro_slug - not in [ - "amazon-2023", - "amazon-2023-arm64", - "debian-11-arm64", - # TODO: remove debian 12 once debian 12 pkgs are released - "debian-12-arm64", - "debian-12", - # TODO: remove amazon 2023 once amazon 2023 pkgs are released - "amazonlinux-2023", - "amazonlinux-2023-arm64", - "ubuntu-20.04-arm64", - "ubuntu-22.04-arm64", - "photonos-3", - "photonos-3-arm64", - "photonos-4", - "photonos-4-arm64", - "photonos-5", - "photonos-5-arm64", - "amazonlinux-2-arm64", - "amazonlinux-2023", - "amazonlinux-2023-arm64", - ] - and pkg_type != "MSI" - ): - # These OS's never had arm64 packages built for them - # with the tiamat onedir packages. - # we will need to ensure when we release 3006.0 - # we allow for 3006.0 jobs to run, because then - # we will have arm64 onedir packages to upgrade from - sessions.append("upgrade") - sessions.append("downgrade") still_testing_3005 = False for release_version in testing_releases: @@ -797,78 +765,98 @@ def pkg_matrix( if still_testing_3005 is False: ctx.error( f"No longer testing 3005.x releases please update {__file__} " - "and remove this error and the logic above the error" + "and remove this error and the logic above the error. There may " + "be other places that need code removed as well." 
) ctx.exit(1) - # TODO: Remove this block when we reach version 3009.0, we will no longer be testing upgrades from classic packages - if ( - distro_slug - not in [ - "amazon-2023", - "amazon-2023-arm64", - "centosstream-9", - "debian-11-arm64", - "debian-12-arm64", - "debian-12", - "amazonlinux-2023", - "amazonlinux-2023-arm64", - "ubuntu-22.04", - "ubuntu-22.04-arm64", - "photonos-3", - "photonos-3-arm64", - "photonos-4", - "photonos-4-arm64", - "photonos-5", - "photonos-5-arm64", - ] - and pkg_type != "MSI" - ): - # Packages for these OSs where never built for classic previously - sessions.append("upgrade-classic") - sessions.append("downgrade-classic") + adj_versions = [] + for ver in testing_releases: + if ver < tools.utils.Version("3006.0"): + adj_versions.append((ver, "classic")) + adj_versions.append((ver, "tiamat")) + else: + adj_versions.append((ver, "relenv")) + ctx.info(f"Will look for the following versions: {adj_versions}") - for session in sessions: - versions: list[str | None] = [None] - if session in ("upgrade", "downgrade"): - versions = [str(version) for version in testing_releases] - elif session in ("upgrade-classic", "downgrade-classic"): - versions = [ - str(version) - for version in testing_releases - if version < tools.utils.Version("3006.0") - ] - for version in versions: - if ( - version - and distro_slug in not_3005 - and version < tools.utils.Version("3006.0") - ): - # We never build packages for these OSs in 3005 - continue - elif ( - version - and distro_slug in not_3006 - and version < tools.utils.Version("3007.0") - ): - # We never build packages for these OSs in 3006 - continue - if ( - version - and distro_slug.startswith("amazonlinux-2023") - and version < tools.utils.Version("3006.6") - ): - # We never build packages for AmazonLinux 2023 prior to 3006.5 - continue - _matrix.append( - { - "tests-chunk": session, - "version": version, - } + # Filter out the prefixes to look under + if "macos-" in distro_slug: + # We don't have 
golden images for macos, handle these separately + prefixes = { + "classic": "osx/", + "tiamat": "salt/py3/macos/minor/", + "relenv": "salt/py3/macos/minor/", + } + else: + parts = distro_slug.split("-") + name = parts[0] + version = parts[1] + if name in ("debian", "ubuntu"): + arch = "amd64" + elif name in ("centos", "centosstream", "amazonlinux"): + arch = "x86_64" + if len(parts) > 2: + arch = parts[2] + if name == "amazonlinux": + name = "amazon" + if "centos" in name: + name = "redhat" + if name == "windows": + prefixes = { + "classic": "windows/", + "tiamat": "salt/py3/windows/minor", + "relenv": "salt/py3/windows/minor", + } + else: + prefixes = { + "classic": f"py3/{name}/{version}/{arch}/", + "tiamat": f"salt/py3/{name}/{version}/{arch}/minor/", + "relenv": f"salt/py3/{name}/{version}/{arch}/minor/", + } + + s3 = boto3.client("s3") + paginator = s3.get_paginator("list_objects_v2") + matrix = [ + { + "test-chunk": "install", + "version": None, + } + ] + + for version, backend in adj_versions: + prefix = prefixes[backend] + # Using a paginator allows us to list recursively and avoid the item limit + # TODO: Swap this for the prod bucket before merge + page_iterator = paginator.paginate( + Bucket="salt-project-test-salt-artifacts-release", Prefix=prefix + ) + # Uses a jmespath expression to test if the wanted version is in any of the filenames + key_filter = f"Contents[?contains(Key, '{version}')][]" + if pkg_type == "MSI": + # TODO: Add this back when we add MSI upgrade and downgrade tests + # key_filter = f"Contents[?contains(Key, '{version}')] | [?ends_with(Key, '.msi')]" + continue + elif pkg_type == "NSIS": + key_filter = ( + f"Contents[?contains(Key, '{version}')] | [?ends_with(Key, '.exe')]" ) - if fips is True and distro_slug.startswith(("photonos-4", "photonos-5")): - # Repeat the last one, but with fips - _matrix.append({"fips": "fips", **_matrix[-1]}) + # objects = list(page_iterator.search(f"Contents[?contains(Key, '{key_filter}')][]")) + objects 
= page_iterator.search(key_filter) + # ctx.info(objects) + try: + first = next(objects) + ctx.info(f"Found {version} ({backend}) for {distro_slug}: {first['Key']}") + for session in ("upgrade", "downgrade"): + matrix.append( + { + "test-chunk": f"{session}-classic" + if backend == "classic" + else session, + "version": str(version), + } + ) + except StopIteration: + ctx.info(f"No {version} ({backend}) for {distro_slug} at {prefix}") ctx.info("Generated matrix:") ctx.print(_matrix, soft_wrap=True) From 3b3b9fbc49af1fd9b9302dd0de3041835d5ad906 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Mon, 25 Sep 2023 18:39:20 -0400 Subject: [PATCH 112/196] Adjust `tools ci pkg-matrix` to search in the right `aarch64` paths starting in `3007.0` --- tools/ci.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tools/ci.py b/tools/ci.py index 3a7cbfc61e3..bd7d94c0562 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -825,6 +825,9 @@ def pkg_matrix( for version, backend in adj_versions: prefix = prefixes[backend] + # TODO: Remove this after 3009.0 + if backend == "relenv" and version >= tools.utils.Version("3007.0"): + prefix.replace("/arm64/", "/aarch64/") # Using a paginator allows us to list recursively and avoid the item limit # TODO: Swap this for the prod bucket before merge page_iterator = paginator.paginate( From 398056dafba664edbb4c76a420d363b2bc2e9227 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Tue, 26 Sep 2023 12:20:10 -0400 Subject: [PATCH 113/196] Fix the logic around whether or not a version is available --- tools/ci.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/tools/ci.py b/tools/ci.py index bd7d94c0562..819f1d75598 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -843,12 +843,12 @@ def pkg_matrix( key_filter = ( f"Contents[?contains(Key, '{version}')] | [?ends_with(Key, '.exe')]" ) - # objects = list(page_iterator.search(f"Contents[?contains(Key, '{key_filter}')][]")) - objects = page_iterator.search(key_filter) - # ctx.info(objects) - 
try: - first = next(objects) - ctx.info(f"Found {version} ({backend}) for {distro_slug}: {first['Key']}") + objects = list(page_iterator.search(key_filter)) + # Testing using `any` because sometimes the paginator returns `[None]` + if any(objects): + ctx.info( + f"Found {version} ({backend}) for {distro_slug}: {objects[0]['Key']}" + ) for session in ("upgrade", "downgrade"): matrix.append( { @@ -858,7 +858,7 @@ def pkg_matrix( "version": str(version), } ) - except StopIteration: + else: ctx.info(f"No {version} ({backend}) for {distro_slug} at {prefix}") ctx.info("Generated matrix:") From 530a10849749810ad07d4ec0a5486139e8ae5600 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Tue, 26 Sep 2023 14:49:46 -0400 Subject: [PATCH 114/196] Handle `photonos` --- tools/ci.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/ci.py b/tools/ci.py index 819f1d75598..3892ab1e727 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -793,7 +793,7 @@ def pkg_matrix( version = parts[1] if name in ("debian", "ubuntu"): arch = "amd64" - elif name in ("centos", "centosstream", "amazonlinux"): + elif name in ("centos", "centosstream", "amazonlinux", "photonos"): arch = "x86_64" if len(parts) > 2: arch = parts[2] From b3df0c782a7dfdd5ad1f9100e2dfda95fd77b48b Mon Sep 17 00:00:00 2001 From: MKLeb Date: Tue, 26 Sep 2023 17:08:09 -0400 Subject: [PATCH 115/196] Run the `Generate Package Test Matrix` step on a self-hosted runner --- .github/workflows/test-packages-action-macos.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test-packages-action-macos.yml b/.github/workflows/test-packages-action-macos.yml index 378adf90d1c..208007cf304 100644 --- a/.github/workflows/test-packages-action-macos.yml +++ b/.github/workflows/test-packages-action-macos.yml @@ -70,7 +70,10 @@ jobs: generate-matrix: name: Generate Matrix - runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} + runs-on: + 
- self-hosted + - linux + - x86_64 outputs: pkg-matrix-include: ${{ steps.generate-pkg-matrix.outputs.matrix }} steps: From 4ed2c97224d9b6bc93784ce147c056080f4c8c95 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Mon, 16 Oct 2023 13:16:46 -0400 Subject: [PATCH 116/196] Generate the GH workflows --- .github/workflows/nightly.yml | 27 ++++++++++++++++++--------- .github/workflows/staging.yml | 27 ++++++++++++++++++--------- 2 files changed, 36 insertions(+), 18 deletions(-) diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index e4e4f890efc..530ca29d661 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -3128,7 +3128,8 @@ jobs: distro: amazon version: "2" arch: arm64 - - distro: amazon + - pkg-type: rpm + distro: amazon version: "2" arch: aarch64 - pkg-type: rpm @@ -3147,7 +3148,8 @@ jobs: distro: redhat version: "7" arch: arm64 - - distro: redhat + - pkg-type: rpm + distro: redhat version: "7" arch: aarch64 - pkg-type: rpm @@ -3158,7 +3160,8 @@ jobs: distro: redhat version: "8" arch: arm64 - - distro: redhat + - pkg-type: rpm + distro: redhat version: "8" arch: aarch64 - pkg-type: rpm @@ -3169,7 +3172,8 @@ jobs: distro: redhat version: "9" arch: arm64 - - distro: redhat + - pkg-type: rpm + distro: redhat version: "9" arch: aarch64 - pkg-type: rpm @@ -3180,7 +3184,8 @@ jobs: distro: fedora version: "36" arch: arm64 - - distro: fedora + - pkg-type: rpm + distro: fedora version: "36" arch: aarch64 - pkg-type: rpm @@ -3191,7 +3196,8 @@ jobs: distro: fedora version: "37" arch: arm64 - - distro: fedora + - pkg-type: rpm + distro: fedora version: "37" arch: aarch64 - pkg-type: rpm @@ -3202,7 +3208,8 @@ jobs: distro: fedora version: "38" arch: arm64 - - distro: fedora + - pkg-type: rpm + distro: fedora version: "38" arch: aarch64 - pkg-type: rpm @@ -3213,7 +3220,8 @@ jobs: distro: photon version: "3" arch: arm64 - - distro: photon + - pkg-type: rpm + distro: photon version: "3" arch: aarch64 - pkg-type: rpm @@ -3224,7 
+3232,8 @@ jobs: distro: photon version: "4" arch: arm64 - - distro: photon + - pkg-type: rpm + distro: photon version: "4" arch: aarch64 - pkg-type: rpm diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index c09bbbcd56a..91ec4dc11df 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -2943,7 +2943,8 @@ jobs: distro: amazon version: "2" arch: arm64 - - distro: amazon + - pkg-type: rpm + distro: amazon version: "2" arch: aarch64 - pkg-type: rpm @@ -2962,7 +2963,8 @@ jobs: distro: redhat version: "7" arch: arm64 - - distro: redhat + - pkg-type: rpm + distro: redhat version: "7" arch: aarch64 - pkg-type: rpm @@ -2973,7 +2975,8 @@ jobs: distro: redhat version: "8" arch: arm64 - - distro: redhat + - pkg-type: rpm + distro: redhat version: "8" arch: aarch64 - pkg-type: rpm @@ -2984,7 +2987,8 @@ jobs: distro: redhat version: "9" arch: arm64 - - distro: redhat + - pkg-type: rpm + distro: redhat version: "9" arch: aarch64 - pkg-type: rpm @@ -2995,7 +2999,8 @@ jobs: distro: fedora version: "36" arch: arm64 - - distro: fedora + - pkg-type: rpm + distro: fedora version: "36" arch: aarch64 - pkg-type: rpm @@ -3006,7 +3011,8 @@ jobs: distro: fedora version: "37" arch: arm64 - - distro: fedora + - pkg-type: rpm + distro: fedora version: "37" arch: aarch64 - pkg-type: rpm @@ -3017,7 +3023,8 @@ jobs: distro: fedora version: "38" arch: arm64 - - distro: fedora + - pkg-type: rpm + distro: fedora version: "38" arch: aarch64 - pkg-type: rpm @@ -3028,7 +3035,8 @@ jobs: distro: photon version: "3" arch: arm64 - - distro: photon + - pkg-type: rpm + distro: photon version: "3" arch: aarch64 - pkg-type: rpm @@ -3039,7 +3047,8 @@ jobs: distro: photon version: "4" arch: arm64 - - distro: photon + - pkg-type: rpm + distro: photon version: "4" arch: aarch64 - pkg-type: rpm From b014a0c969028f6c98d96f4a1aa1f1e2283e3e1d Mon Sep 17 00:00:00 2001 From: MKLeb Date: Mon, 16 Oct 2023 14:29:32 -0400 Subject: [PATCH 117/196] It's `matrix`, not 
`_matrix` --- tools/ci.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tools/ci.py b/tools/ci.py index 3892ab1e727..c280ba3a3c8 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -862,11 +862,11 @@ def pkg_matrix( ctx.info(f"No {version} ({backend}) for {distro_slug} at {prefix}") ctx.info("Generated matrix:") - ctx.print(_matrix, soft_wrap=True) + ctx.print(matrix, soft_wrap=True) if github_output is not None: with open(github_output, "a", encoding="utf-8") as wfh: - wfh.write(f"matrix={json.dumps(_matrix)}\n") + wfh.write(f"matrix={json.dumps(matrix)}\n") ctx.exit(0) From 5f31b3120e3c22ae328039ff444a79135b377954 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Wed, 18 Oct 2023 16:33:10 -0400 Subject: [PATCH 118/196] REVERT: Add temp 3007.0 release notes --- doc/topics/releases/3007.0.md | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 doc/topics/releases/3007.0.md diff --git a/doc/topics/releases/3007.0.md b/doc/topics/releases/3007.0.md new file mode 100644 index 00000000000..489a5b43d83 --- /dev/null +++ b/doc/topics/releases/3007.0.md @@ -0,0 +1,22 @@ +(release-3007.0)= +# Salt 3007.0 release notes + + + + + + + +## Changelog + +### Added +- These notes which will later disappear From 4d72d7c0523e0e3e5eecdb99af999c27a5837f24 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Wed, 18 Oct 2023 16:47:46 -0400 Subject: [PATCH 119/196] Revert "REVERT: Add temp 3007.0 release notes" This reverts commit 2c77a3788edac7f96ce0bf9aa07568d75d768b28. 
--- doc/topics/releases/3007.0.md | 22 ---------------------- 1 file changed, 22 deletions(-) delete mode 100644 doc/topics/releases/3007.0.md diff --git a/doc/topics/releases/3007.0.md b/doc/topics/releases/3007.0.md deleted file mode 100644 index 489a5b43d83..00000000000 --- a/doc/topics/releases/3007.0.md +++ /dev/null @@ -1,22 +0,0 @@ -(release-3007.0)= -# Salt 3007.0 release notes - - - - - - - -## Changelog - -### Added -- These notes which will later disappear From 9951e82121a17268f084893aa5dcab9a3dbb5463 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Thu, 19 Oct 2023 17:41:12 -0400 Subject: [PATCH 120/196] Adjust package download tests and also add a package test suite for an rpm arm distribution (centosstream-9-arm) --- .github/workflows/ci.yml | 23 +++++++++++++++++++ .github/workflows/nightly.yml | 23 +++++++++++++++++++ .github/workflows/scheduled.yml | 23 +++++++++++++++++++ .github/workflows/staging.yml | 23 +++++++++++++++++++ .../test-package-downloads-action.yml | 21 +++++++++++++++++ pkg/tests/support/helpers.py | 5 ++++ tools/pre_commit.py | 17 ++++++++++++++ 7 files changed, 135 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ca3f32c1086..3c93e9bc4a0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1444,6 +1444,28 @@ jobs: skip-junit-reports: ${{ github.event_name == 'pull_request' }} testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + centosstream-9-arm64-pkg-tests: + name: CentOS Stream 9 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - centosstream-9-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: centosstream-9-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + 
pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + debian-10-pkg-tests: name: Debian 10 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -2920,6 +2942,7 @@ jobs: - centos-7-pkg-tests - centosstream-8-pkg-tests - centosstream-9-pkg-tests + - centosstream-9-arm64-pkg-tests - debian-10-pkg-tests - debian-11-pkg-tests - debian-11-arm64-pkg-tests diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 530ca29d661..d888e64c5c1 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -1505,6 +1505,28 @@ jobs: skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + centosstream-9-arm64-pkg-tests: + name: CentOS Stream 9 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - centosstream-9-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: centosstream-9-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + debian-10-pkg-tests: name: Debian 10 Package Test if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -3777,6 +3799,7 @@ jobs: - centos-7-pkg-tests - centosstream-8-pkg-tests - centosstream-9-pkg-tests + - centosstream-9-arm64-pkg-tests - debian-10-pkg-tests - debian-11-pkg-tests - debian-11-arm64-pkg-tests diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index 6d43a7a5c8c..527d224cd74 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -1478,6 +1478,28 @@ jobs: skip-junit-reports: false testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + centosstream-9-arm64-pkg-tests: + name: CentOS Stream 9 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - centosstream-9-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: centosstream-9-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + debian-10-pkg-tests: name: Debian 10 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -2956,6 +2978,7 @@ jobs: - centos-7-pkg-tests - centosstream-8-pkg-tests - centosstream-9-pkg-tests + - centosstream-9-arm64-pkg-tests - debian-10-pkg-tests - debian-11-pkg-tests - debian-11-arm64-pkg-tests diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 91ec4dc11df..99a541e3e7b 100644 --- 
a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -1500,6 +1500,28 @@ jobs: skip-junit-reports: true testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + centosstream-9-arm64-pkg-tests: + name: CentOS Stream 9 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - centosstream-9-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: centosstream-9-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + debian-10-pkg-tests: name: Debian 10 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} @@ -3695,6 +3717,7 @@ jobs: - centos-7-pkg-tests - centosstream-8-pkg-tests - centosstream-9-pkg-tests + - centosstream-9-arm64-pkg-tests - debian-10-pkg-tests - debian-11-pkg-tests - debian-11-arm64-pkg-tests diff --git a/.github/workflows/test-package-downloads-action.yml b/.github/workflows/test-package-downloads-action.yml index b90e17f2d57..86bbb98ce0e 100644 --- a/.github/workflows/test-package-downloads-action.yml +++ b/.github/workflows/test-package-downloads-action.yml @@ -71,12 +71,18 @@ jobs: - distro-slug: almalinux-8-arm64 arch: aarch64 pkg-type: package + - distro-slug: almalinux-8-arm64 + arch: arm64 + pkg-type: package - distro-slug: almalinux-9 arch: x86_64 pkg-type: package - distro-slug: almalinux-9-arm64 arch: aarch64 pkg-type: package + - distro-slug: almalinux-9-arm64 + arch: arm64 
+ pkg-type: package - distro-slug: amazonlinux-2 arch: x86_64 pkg-type: package @@ -95,18 +101,27 @@ jobs: - distro-slug: centos-7-arm64 arch: aarch64 pkg-type: package + - distro-slug: centos-7-arm64 + arch: arm64 + pkg-type: package - distro-slug: centosstream-8 arch: x86_64 pkg-type: package - distro-slug: centosstream-8-arm64 arch: aarch64 pkg-type: package + - distro-slug: centosstream-8-arm64 + arch: arm64 + pkg-type: package - distro-slug: centosstream-9 arch: x86_64 pkg-type: package - distro-slug: centosstream-9-arm64 arch: aarch64 pkg-type: package + - distro-slug: centosstream-9-arm64 + arch: arm64 + pkg-type: package - distro-slug: debian-10 arch: x86_64 pkg-type: package @@ -128,12 +143,18 @@ jobs: - distro-slug: fedora-37-arm64 arch: aarch64 pkg-type: package + - distro-slug: fedora-37-arm64 + arch: arm64 + pkg-type: package - distro-slug: fedora-38 arch: x86_64 pkg-type: package - distro-slug: fedora-38-arm64 arch: aarch64 pkg-type: package + - distro-slug: fedora-38-arm64 + arch: arm64 + pkg-type: package - distro-slug: photonos-3 arch: x86_64 pkg-type: package diff --git a/pkg/tests/support/helpers.py b/pkg/tests/support/helpers.py index 45d0f91ce1a..61869a27593 100644 --- a/pkg/tests/support/helpers.py +++ b/pkg/tests/support/helpers.py @@ -517,6 +517,11 @@ class SaltPkgInstall: if platform.is_aarch64(): arch = "arm64" + # Starting with 3007.0, we prioritize the aarch64 repo paths for rpm-based distros + if packaging.version.parse( + self.prev_version + ) >= packaging.version.parse("3007.0"): + arch = "aarch64" else: arch = "x86_64" ret = self.proc.run( diff --git a/tools/pre_commit.py b/tools/pre_commit.py index 9819b0717c0..5d257623d96 100644 --- a/tools/pre_commit.py +++ b/tools/pre_commit.py @@ -146,6 +146,7 @@ def generate_workflows(ctx: Context): ("centos-7", "CentOS 7", "x86_64", "rpm", "no-fips"), ("centosstream-8", "CentOS Stream 8", "x86_64", "rpm", "no-fips"), ("centosstream-9", "CentOS Stream 9", "x86_64", "rpm", "no-fips"), + 
("centosstream-9-arm64", "CentOS Stream 9 Arm64", "aarch64", "rpm"), ("debian-10", "Debian 10", "x86_64", "deb", "no-fips"), ("debian-11", "Debian 11", "x86_64", "deb", "no-fips"), ("debian-11-arm64", "Debian 11 Arm64", "aarch64", "deb", "no-fips"), @@ -222,10 +223,26 @@ def generate_workflows(ctx: Context): "macos": [], "windows": [], } + rpm_slugs = [ + "almalinux", + "amazonlinux", + "centos", + "centosstream", + "fedora", + "photon", + ] for slug, display_name, arch in build_ci_deps_listing["linux"]: if slug in ("archlinux-lts", "opensuse-15"): continue test_salt_pkg_downloads_listing["linux"].append((slug, arch, "package")) + # Account for old arm64 repo paths + if arch == "aarch64": + for test_slug in rpm_slugs: + if slug.startswith(test_slug): + test_salt_pkg_downloads_listing["linux"].append( + (slug, "arm64", "package") + ) + break for slug, display_name, arch in build_ci_deps_listing["linux"][-2:]: if slug in ("archlinux-lts", "opensuse-15"): continue From c2caffd0a623856bda236ddea490536b8049214c Mon Sep 17 00:00:00 2001 From: MKLeb Date: Mon, 23 Oct 2023 11:55:38 -0400 Subject: [PATCH 121/196] Download the correct onedir --- .../templates/test-package-downloads-action.yml.jinja | 4 ++-- .github/workflows/test-package-downloads-action.yml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/templates/test-package-downloads-action.yml.jinja b/.github/workflows/templates/test-package-downloads-action.yml.jinja index 348b0d17227..fd88f122122 100644 --- a/.github/workflows/templates/test-package-downloads-action.yml.jinja +++ b/.github/workflows/templates/test-package-downloads-action.yml.jinja @@ -84,7 +84,7 @@ jobs: - name: Download Onedir Tarball as an Artifact uses: actions/download-artifact@v3 with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch == 'arm64' && 'aarch64' || 
matrix.arch }}.tar.xz path: artifacts/ - name: Decompress Onedir Tarball @@ -92,7 +92,7 @@ jobs: run: | python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" cd artifacts - tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz + tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}.tar.xz - name: Download cached nox.${{ matrix.distro-slug }}.tar.* for session ${{ inputs.nox-session }} uses: actions/cache@v3 diff --git a/.github/workflows/test-package-downloads-action.yml b/.github/workflows/test-package-downloads-action.yml index 86bbb98ce0e..b75588d93b8 100644 --- a/.github/workflows/test-package-downloads-action.yml +++ b/.github/workflows/test-package-downloads-action.yml @@ -205,7 +205,7 @@ jobs: - name: Download Onedir Tarball as an Artifact uses: actions/download-artifact@v3 with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}.tar.xz path: artifacts/ - name: Decompress Onedir Tarball @@ -213,7 +213,7 @@ jobs: run: | python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" cd artifacts - tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz + tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}.tar.xz - name: Download cached nox.${{ matrix.distro-slug }}.tar.* for session ${{ inputs.nox-session }} uses: actions/cache@v3 From 81bca8d4c300d649b7adc5332b277a8f2f5c2773 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Mon, 23 Oct 2023 14:34:12 -0400 Subject: [PATCH 122/196] Download the correct nox artifact for arm64 download tests --- .../workflows/templates/test-package-downloads-action.yml.jinja | 2 +- 
.github/workflows/test-package-downloads-action.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/templates/test-package-downloads-action.yml.jinja b/.github/workflows/templates/test-package-downloads-action.yml.jinja index fd88f122122..e187d4b08b6 100644 --- a/.github/workflows/templates/test-package-downloads-action.yml.jinja +++ b/.github/workflows/templates/test-package-downloads-action.yml.jinja @@ -98,7 +98,7 @@ jobs: uses: actions/cache@v3 with: path: nox.${{ matrix.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|testrun-deps|${{ matrix.arch }}|${{ matrix.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ + key: ${{ inputs.cache-prefix }}|testrun-deps|${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}|${{ matrix.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }} # If we get a cache miss here it means the dependencies step failed to save the cache diff --git a/.github/workflows/test-package-downloads-action.yml b/.github/workflows/test-package-downloads-action.yml index b75588d93b8..b3e19314b83 100644 --- a/.github/workflows/test-package-downloads-action.yml +++ b/.github/workflows/test-package-downloads-action.yml @@ -219,7 +219,7 @@ jobs: uses: actions/cache@v3 with: path: nox.${{ matrix.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|testrun-deps|${{ matrix.arch }}|${{ matrix.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ + key: ${{ inputs.cache-prefix }}|testrun-deps|${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}|${{ matrix.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') }} # If we get a cache miss here it means the dependencies step failed to save the cache From f1c9463b6dd552ace3fedf7778c5f0b6d28953bb Mon Sep 17 00:00:00 2001 From: 
MKLeb Date: Tue, 24 Oct 2023 16:23:22 -0400 Subject: [PATCH 123/196] Make some variables more clear --- tools/ci.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/tools/ci.py b/tools/ci.py index c280ba3a3c8..4569b855d72 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -770,14 +770,14 @@ def pkg_matrix( ) ctx.exit(1) - adj_versions = [] + adjusted_versions = [] for ver in testing_releases: if ver < tools.utils.Version("3006.0"): - adj_versions.append((ver, "classic")) - adj_versions.append((ver, "tiamat")) + adjusted_versions.append((ver, "classic")) + adjusted_versions.append((ver, "tiamat")) else: - adj_versions.append((ver, "relenv")) - ctx.info(f"Will look for the following versions: {adj_versions}") + adjusted_versions.append((ver, "relenv")) + ctx.info(f"Will look for the following versions: {adjusted_versions}") # Filter out the prefixes to look under if "macos-" in distro_slug: @@ -816,14 +816,14 @@ def pkg_matrix( s3 = boto3.client("s3") paginator = s3.get_paginator("list_objects_v2") - matrix = [ + _matrix = [ { "test-chunk": "install", "version": None, } ] - for version, backend in adj_versions: + for version, backend in adjusted_versions: prefix = prefixes[backend] # TODO: Remove this after 3009.0 if backend == "relenv" and version >= tools.utils.Version("3007.0"): @@ -850,7 +850,7 @@ def pkg_matrix( f"Found {version} ({backend}) for {distro_slug}: {objects[0]['Key']}" ) for session in ("upgrade", "downgrade"): - matrix.append( + _matrix.append( { "test-chunk": f"{session}-classic" if backend == "classic" @@ -862,11 +862,11 @@ def pkg_matrix( ctx.info(f"No {version} ({backend}) for {distro_slug} at {prefix}") ctx.info("Generated matrix:") - ctx.print(matrix, soft_wrap=True) + ctx.print(_matrix, soft_wrap=True) if github_output is not None: with open(github_output, "a", encoding="utf-8") as wfh: - wfh.write(f"matrix={json.dumps(matrix)}\n") + wfh.write(f"matrix={json.dumps(_matrix)}\n") ctx.exit(0) From 
03d3414123b54e16461c9f8c7cc55c6b28d9d052 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Tue, 24 Oct 2023 16:31:56 -0400 Subject: [PATCH 124/196] Generate workflows --- .github/workflows/test-package-downloads-action.yml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/.github/workflows/test-package-downloads-action.yml b/.github/workflows/test-package-downloads-action.yml index b3e19314b83..1372ace3634 100644 --- a/.github/workflows/test-package-downloads-action.yml +++ b/.github/workflows/test-package-downloads-action.yml @@ -161,18 +161,27 @@ jobs: - distro-slug: photonos-3-arm64 arch: aarch64 pkg-type: package + - distro-slug: photonos-3-arm64 + arch: arm64 + pkg-type: package - distro-slug: photonos-4 arch: x86_64 pkg-type: package - distro-slug: photonos-4-arm64 arch: aarch64 pkg-type: package + - distro-slug: photonos-4-arm64 + arch: arm64 + pkg-type: package - distro-slug: photonos-5 arch: x86_64 pkg-type: package - distro-slug: photonos-5-arm64 arch: aarch64 pkg-type: package + - distro-slug: photonos-5-arm64 + arch: arm64 + pkg-type: package - distro-slug: ubuntu-20.04 arch: x86_64 pkg-type: package From d15706871403eeb54b2f586945e4a7f31c9faab9 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Tue, 24 Oct 2023 16:32:48 -0400 Subject: [PATCH 125/196] Read releases from the prod bucket --- tools/ci.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tools/ci.py b/tools/ci.py index 4569b855d72..ed8893bfb27 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -829,9 +829,8 @@ def pkg_matrix( if backend == "relenv" and version >= tools.utils.Version("3007.0"): prefix.replace("/arm64/", "/aarch64/") # Using a paginator allows us to list recursively and avoid the item limit - # TODO: Swap this for the prod bucket before merge page_iterator = paginator.paginate( - Bucket="salt-project-test-salt-artifacts-release", Prefix=prefix + Bucket="salt-project-prod-salt-artifacts-release", Prefix=prefix ) # Uses a jmespath expression to test if the wanted version 
is in any of the filenames key_filter = f"Contents[?contains(Key, '{version}')][]" From 70ad2a4e4ee791b46021ba672fb2ee7b6aa3c0bf Mon Sep 17 00:00:00 2001 From: MKLeb Date: Wed, 25 Oct 2023 13:34:39 -0400 Subject: [PATCH 126/196] Search in the right photon paths --- tools/ci.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tools/ci.py b/tools/ci.py index ed8893bfb27..5b2a14c1170 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -801,6 +801,8 @@ def pkg_matrix( name = "amazon" if "centos" in name: name = "redhat" + if "photon" in name: + name = "photon" if name == "windows": prefixes = { "classic": "windows/", From e141799a9417b138305bc3796459c991041f3e1f Mon Sep 17 00:00:00 2001 From: Megan Wilhite Date: Mon, 23 Oct 2023 13:36:22 -0600 Subject: [PATCH 127/196] Fix photon upgrade tests --- pkg/tests/support/helpers.py | 47 +++++++++++++++++++++++++++--------- 1 file changed, 36 insertions(+), 11 deletions(-) diff --git a/pkg/tests/support/helpers.py b/pkg/tests/support/helpers.py index 61869a27593..75859d666f3 100644 --- a/pkg/tests/support/helpers.py +++ b/pkg/tests/support/helpers.py @@ -78,6 +78,12 @@ class SaltPkgInstall: distro_name: str = attr.ib(init=False) distro_version: str = attr.ib(init=False) + # Version information + prev_version: str = attr.ib() + use_prev_version: str = attr.ib() + artifact_version: str = attr.ib(init=False) + version: str = attr.ib(init=False) + # Package (and management) metadata pkg_mngr: str = attr.ib(init=False) rm_pkg: str = attr.ib(init=False) @@ -86,12 +92,6 @@ class SaltPkgInstall: file_ext: bool = attr.ib(default=None) relenv: bool = attr.ib(default=True) - # Version information - prev_version: str = attr.ib() - use_prev_version: str = attr.ib() - artifact_version: str = attr.ib(init=False) - version: str = attr.ib(init=False) - @proc.default def _default_proc(self): return Subprocess() @@ -106,11 +106,16 @@ class SaltPkgInstall: @distro_name.default def _default_distro_name(self): - if distro.name(): - return 
distro.name().split()[0].lower() + name = distro.name() + if name: + if "vmware" in name.lower(): + return name.split()[1].lower() + return name.split()[0].lower() @distro_version.default def _default_distro_version(self): + if self.distro_name == "photon": + return distro.version().split(".")[0] return distro.version().lower() @pkg_mngr.default @@ -141,8 +146,12 @@ class SaltPkgInstall: ] if self.distro_id in ("centos", "redhat", "amzn", "fedora", "photon"): salt_pkgs.append("salt") + dbg_pkg = "salt-debuginfo" elif self.distro_id in ("ubuntu", "debian"): salt_pkgs.append("salt-common") + dbg_pkg = "salt-dbg" + if packaging.version.parse(self.version) >= packaging.version.parse("3006.3"): + salt_pkgs.append(dbg_pkg) return salt_pkgs @install_dir.default @@ -439,9 +448,14 @@ class SaltPkgInstall: ] log.info("Installing packages:\n%s", pprint.pformat(self.pkgs)) args = extra_args + self.pkgs + upgrade_cmd = "upgrade" + if self.distro_id == "photon": + # tdnf does not detect nightly build versions to be higher version + # than release versions + upgrade_cmd = "install" ret = self.proc.run( self.pkg_mngr, - "upgrade", + upgrade_cmd, "-y", *args, _timeout=120, @@ -505,7 +519,14 @@ class SaltPkgInstall: if self.classic: root_url = "py3/" - if self.distro_name in ["redhat", "centos", "amazon", "fedora", "vmware"]: + if self.distro_name in [ + "redhat", + "centos", + "amazon", + "fedora", + "vmware", + "photon", + ]: # Removing EPEL repo files for fp in pathlib.Path("/etc", "yum.repos.d").glob("epel*"): fp.unlink() @@ -534,7 +555,11 @@ class SaltPkgInstall: f"https://repo.saltproject.io/{root_url}{distro_name}/{self.distro_version}/{arch}/{major_ver}.repo", f"/etc/yum.repos.d/salt-{distro_name}.repo", ) - ret = self.proc.run(self.pkg_mngr, "clean", "expire-cache") + if self.distro_name == "photon": + # yum version on photon doesn't support expire-cache + ret = self.proc.run(self.pkg_mngr, "clean", "all") + else: + ret = self.proc.run(self.pkg_mngr, "clean", 
"expire-cache") self._check_retcode(ret) cmd_action = "downgrade" if downgrade else "install" pkgs_to_install = self.salt_pkgs.copy() From c16434074fba449bda9a5c66fe0e6bfd5b72a1fe Mon Sep 17 00:00:00 2001 From: Megan Wilhite Date: Wed, 25 Oct 2023 13:35:32 -0600 Subject: [PATCH 128/196] Fix errors with debug pkg --- pkg/tests/support/helpers.py | 20 +++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/pkg/tests/support/helpers.py b/pkg/tests/support/helpers.py index 75859d666f3..b868e4d0790 100644 --- a/pkg/tests/support/helpers.py +++ b/pkg/tests/support/helpers.py @@ -87,6 +87,7 @@ class SaltPkgInstall: # Package (and management) metadata pkg_mngr: str = attr.ib(init=False) rm_pkg: str = attr.ib(init=False) + dbg_pkg: str = attr.ib(init=False) salt_pkgs: List[str] = attr.ib(init=False) pkgs: List[str] = attr.ib(factory=list) file_ext: bool = attr.ib(default=None) @@ -134,6 +135,15 @@ class SaltPkgInstall: elif self.distro_id in ("ubuntu", "debian"): return "purge" + @dbg_pkg.default + def _default_dbg_pkg(self): + dbg_pkg = None + if self.distro_id in ("centos", "redhat", "amzn", "fedora", "photon"): + dbg_pkg = "salt-debuginfo" + elif self.distro_id in ("ubuntu", "debian"): + dbg_pkg = "salt-dbg" + return dbg_pkg + @salt_pkgs.default def _default_salt_pkgs(self): salt_pkgs = [ @@ -146,12 +156,11 @@ class SaltPkgInstall: ] if self.distro_id in ("centos", "redhat", "amzn", "fedora", "photon"): salt_pkgs.append("salt") - dbg_pkg = "salt-debuginfo" elif self.distro_id in ("ubuntu", "debian"): salt_pkgs.append("salt-common") - dbg_pkg = "salt-dbg" if packaging.version.parse(self.version) >= packaging.version.parse("3006.3"): - salt_pkgs.append(dbg_pkg) + if self.dbg_pkg: + salt_pkgs.append(self.dbg_pkg) return salt_pkgs @install_dir.default @@ -573,6 +582,11 @@ class SaltPkgInstall: idx = list_ret.index("Available Packages") old_ver = list_ret[idx + 1].split()[1] pkgs_to_install = [f"{pkg}-{old_ver}" for pkg in pkgs_to_install] + if 
self.dbg_pkg: + # self.dbg_pkg does not exist on classic packages + dbg_exists = [x for x in pkgs_to_install if self.dbg_pkg in x] + if dbg_exists: + pkgs_to_install.remove(dbg_exists[0]) cmd_action = "install" ret = self.proc.run( self.pkg_mngr, From 7b24b91450637f748eaaac2178b857cd05b9edae Mon Sep 17 00:00:00 2001 From: Caleb Beard <53276404+MKLeb@users.noreply.github.com> Date: Wed, 8 Nov 2023 13:20:04 -0500 Subject: [PATCH 129/196] Update tools/ci.py Co-authored-by: Pedro Algarvio --- tools/ci.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/ci.py b/tools/ci.py index 5b2a14c1170..dadeda19876 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -832,7 +832,7 @@ def pkg_matrix( prefix.replace("/arm64/", "/aarch64/") # Using a paginator allows us to list recursively and avoid the item limit page_iterator = paginator.paginate( - Bucket="salt-project-prod-salt-artifacts-release", Prefix=prefix + Bucket=f"salt-project-{tools.utils.SPB_ENVIRONMENT}-salt-artifacts-release", Prefix=prefix ) # Uses a jmespath expression to test if the wanted version is in any of the filenames key_filter = f"Contents[?contains(Key, '{version}')][]" From 92e2d79fb6c382efcd71f310043b2aea7c8156af Mon Sep 17 00:00:00 2001 From: MKLeb Date: Wed, 8 Nov 2023 14:42:37 -0500 Subject: [PATCH 130/196] We are updating the paths for 3006.5 now --- pkg/tests/support/helpers.py | 4 ++-- tools/ci.py | 5 +++-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/pkg/tests/support/helpers.py b/pkg/tests/support/helpers.py index b868e4d0790..e835223cf18 100644 --- a/pkg/tests/support/helpers.py +++ b/pkg/tests/support/helpers.py @@ -547,10 +547,10 @@ class SaltPkgInstall: if platform.is_aarch64(): arch = "arm64" - # Starting with 3007.0, we prioritize the aarch64 repo paths for rpm-based distros + # Starting with 3006.5, we prioritize the aarch64 repo paths for rpm-based distros if packaging.version.parse( self.prev_version - ) >= 
packaging.version.parse("3006.5"): arch = "aarch64" else: arch = "x86_64" diff --git a/tools/ci.py b/tools/ci.py index dadeda19876..389cb6acc88 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -828,11 +828,12 @@ def pkg_matrix( for version, backend in adjusted_versions: prefix = prefixes[backend] # TODO: Remove this after 3009.0 - if backend == "relenv" and version >= tools.utils.Version("3007.0"): + if backend == "relenv" and version >= tools.utils.Version("3006.5"): prefix.replace("/arm64/", "/aarch64/") # Using a paginator allows us to list recursively and avoid the item limit page_iterator = paginator.paginate( - Bucket=f"salt-project-{tools.utils.SPB_ENVIRONMENT}-salt-artifacts-release", Prefix=prefix + Bucket=f"salt-project-{tools.utils.SPB_ENVIRONMENT}-salt-artifacts-release", + Prefix=prefix, ) # Uses a jmespath expression to test if the wanted version is in any of the filenames key_filter = f"Contents[?contains(Key, '{version}')][]" From 7d3391632da3a1418ad25b4adf8d5d2b863186f3 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Thu, 9 Nov 2023 12:29:21 -0500 Subject: [PATCH 131/196] Add arm64 paths for photon 5 --- .github/workflows/nightly.yml | 4 ++++ .github/workflows/staging.yml | 4 ++++ .github/workflows/templates/build-rpm-repo.yml.jinja | 1 + 3 files changed, 9 insertions(+) diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index d888e64c5c1..119749c32c4 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -3262,6 +3262,10 @@ jobs: distro: photon version: "5" arch: x86_64 + - pkg-type: rpm + distro: photon + version: "5" + arch: arm64 - pkg-type: rpm distro: photon version: "5" diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 99a541e3e7b..30602759695 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -3077,6 +3077,10 @@ jobs: distro: photon version: "5" arch: x86_64 + - pkg-type: rpm + distro: photon + version: "5" + arch: arm64 - pkg-type: rpm 
distro: photon version: "5" diff --git a/.github/workflows/templates/build-rpm-repo.yml.jinja b/.github/workflows/templates/build-rpm-repo.yml.jinja index 46c427c09b8..b572d2bb6f3 100644 --- a/.github/workflows/templates/build-rpm-repo.yml.jinja +++ b/.github/workflows/templates/build-rpm-repo.yml.jinja @@ -34,6 +34,7 @@ ("photon", "4", "arm64"), ("photon", "4", "aarch64"), ("photon", "5", "x86_64"), + ("photon", "5", "arm64"), ("photon", "5", "aarch64"), ) %> - pkg-type: rpm From ce1f7b78be325dab46c35b1aa14ac069d5712127 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Thu, 9 Nov 2023 12:36:29 -0500 Subject: [PATCH 132/196] Fix rc version comparing when aarch64 is in the repo paths --- pkg/tests/integration/test_version.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkg/tests/integration/test_version.py b/pkg/tests/integration/test_version.py index d559b060665..12bc5320fe8 100644 --- a/pkg/tests/integration/test_version.py +++ b/pkg/tests/integration/test_version.py @@ -111,14 +111,14 @@ def test_compare_pkg_versions_redhat_rc(version, install_salt): package of the same version. For example, v3004~rc1 should be less than v3004. 
""" - if install_salt.distro_id not in ("centos", "redhat", "amzn", "fedora"): + if install_salt.distro_id not in ("centos", "redhat", "amzn", "fedora", "photon"): pytest.skip("Only tests rpm packages") pkg = [x for x in install_salt.pkgs if "rpm" in x] if not pkg: pytest.skip("Not testing rpm packages") pkg = pkg[0].split("/")[-1] - if not re.search(r"rc[0-9]", pkg): + if "rc" not in ".".join(pkg.split(".")[:2]): pytest.skip("Not testing an RC package") assert "~" in pkg comp_pkg = pkg.split("~")[0] From 6b8d5939bf028e59ac3123cfdb59966b1a7dcf23 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Mon, 13 Nov 2023 17:30:00 -0500 Subject: [PATCH 133/196] Generate workflows --- .github/workflows/test-package-downloads-action.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/test-package-downloads-action.yml b/.github/workflows/test-package-downloads-action.yml index 1372ace3634..4ed42e2202a 100644 --- a/.github/workflows/test-package-downloads-action.yml +++ b/.github/workflows/test-package-downloads-action.yml @@ -89,12 +89,18 @@ jobs: - distro-slug: amazonlinux-2-arm64 arch: aarch64 pkg-type: package + - distro-slug: amazonlinux-2-arm64 + arch: arm64 + pkg-type: package - distro-slug: amazonlinux-2023 arch: x86_64 pkg-type: package - distro-slug: amazonlinux-2023-arm64 arch: aarch64 pkg-type: package + - distro-slug: amazonlinux-2023-arm64 + arch: arm64 + pkg-type: package - distro-slug: centos-7 arch: x86_64 pkg-type: package From 06498742e476d18f4275e82d514447e51cc27cd1 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Tue, 14 Nov 2023 12:09:29 -0500 Subject: [PATCH 134/196] Add arm64 paths for amazon2023 --- .github/workflows/nightly.yml | 4 ++++ .github/workflows/staging.yml | 4 ++++ .github/workflows/templates/build-rpm-repo.yml.jinja | 1 + 3 files changed, 9 insertions(+) diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 119749c32c4..c35b3126e37 100644 --- a/.github/workflows/nightly.yml +++ 
b/.github/workflows/nightly.yml @@ -3158,6 +3158,10 @@ jobs: distro: amazon version: "2023" arch: x86_64 + - pkg-type: rpm + distro: amazon + version: "2023" + arch: arm64 - pkg-type: rpm distro: amazon version: "2023" diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 30602759695..a2ab55dad87 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -2973,6 +2973,10 @@ jobs: distro: amazon version: "2023" arch: x86_64 + - pkg-type: rpm + distro: amazon + version: "2023" + arch: arm64 - pkg-type: rpm distro: amazon version: "2023" diff --git a/.github/workflows/templates/build-rpm-repo.yml.jinja b/.github/workflows/templates/build-rpm-repo.yml.jinja index b572d2bb6f3..7ed17a163db 100644 --- a/.github/workflows/templates/build-rpm-repo.yml.jinja +++ b/.github/workflows/templates/build-rpm-repo.yml.jinja @@ -8,6 +8,7 @@ ("amazon", "2", "arm64"), ("amazon", "2", "aarch64"), ("amazon", "2023", "x86_64"), + ("amazon", "2023", "arm64"), ("amazon", "2023", "aarch64"), ("redhat", "7", "x86_64"), ("redhat", "7", "arm64"), From 2f6cb0b229bef2a73103816bfa0222e4f5060f1a Mon Sep 17 00:00:00 2001 From: MKLeb Date: Wed, 15 Nov 2023 12:18:25 -0500 Subject: [PATCH 135/196] Add `no-fips` to centosstream-9-arm64 --- tools/pre_commit.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/tools/pre_commit.py b/tools/pre_commit.py index 5d257623d96..337c18ea012 100644 --- a/tools/pre_commit.py +++ b/tools/pre_commit.py @@ -146,7 +146,13 @@ def generate_workflows(ctx: Context): ("centos-7", "CentOS 7", "x86_64", "rpm", "no-fips"), ("centosstream-8", "CentOS Stream 8", "x86_64", "rpm", "no-fips"), ("centosstream-9", "CentOS Stream 9", "x86_64", "rpm", "no-fips"), - ("centosstream-9-arm64", "CentOS Stream 9 Arm64", "aarch64", "rpm"), + ( + "centosstream-9-arm64", + "CentOS Stream 9 Arm64", + "aarch64", + "rpm", + "no-fips", + ), ("debian-10", "Debian 10", "x86_64", "deb", "no-fips"), ("debian-11", "Debian 11", 
"x86_64", "deb", "no-fips"), ("debian-11-arm64", "Debian 11 Arm64", "aarch64", "deb", "no-fips"), From 658d57fbe8c2fd54298dc7bd244120b14c77a267 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Thu, 16 Nov 2023 13:33:15 -0500 Subject: [PATCH 136/196] Account for fips for photon 4 and 5 --- tools/ci.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/tools/ci.py b/tools/ci.py index 389cb6acc88..73ee34ed7be 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -852,14 +852,21 @@ def pkg_matrix( f"Found {version} ({backend}) for {distro_slug}: {objects[0]['Key']}" ) for session in ("upgrade", "downgrade"): + if backend == "classic": + session += "-classic" _matrix.append( { - "test-chunk": f"{session}-classic" - if backend == "classic" - else session, + "test-chunk": session, "version": str(version), } ) + if ( + backend == "relenv" + and fips is True + and distro_slug.startswith(("photonos-4", "photonos-5")) + ): + # Repeat the last one, but with fips + _matrix.append({"fips": "fips", **_matrix[-1]}) else: ctx.info(f"No {version} ({backend}) for {distro_slug} at {prefix}") From 9e1bdd1415d845c7736e4c807f87cb7fb8308e34 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Sat, 18 Nov 2023 19:52:25 +0000 Subject: [PATCH 137/196] Skip pyinstaller related tests Signed-off-by: Pedro Algarvio --- .../utils/pyinstaller/rthooks/test_salt_utils_vt_terminal.py | 4 ++++ .../functional/utils/pyinstaller/rthooks/test_subprocess.py | 4 ++++ 2 files changed, 8 insertions(+) diff --git a/tests/pytests/functional/utils/pyinstaller/rthooks/test_salt_utils_vt_terminal.py b/tests/pytests/functional/utils/pyinstaller/rthooks/test_salt_utils_vt_terminal.py index c45b5730a8e..95a351b4532 100644 --- a/tests/pytests/functional/utils/pyinstaller/rthooks/test_salt_utils_vt_terminal.py +++ b/tests/pytests/functional/utils/pyinstaller/rthooks/test_salt_utils_vt_terminal.py @@ -8,6 +8,10 @@ import salt.utils.pyinstaller.rthooks._overrides as overrides from tests.support import mock 
from tests.support.helpers import PatchedEnviron +pytestmark = [ + pytest.mark.skip(reason="PyInstaller is no longer used."), +] + @pytest.fixture(params=("LD_LIBRARY_PATH", "LIBPATH")) def envvar(request): diff --git a/tests/pytests/functional/utils/pyinstaller/rthooks/test_subprocess.py b/tests/pytests/functional/utils/pyinstaller/rthooks/test_subprocess.py index 836e392d016..ee6692bb009 100644 --- a/tests/pytests/functional/utils/pyinstaller/rthooks/test_subprocess.py +++ b/tests/pytests/functional/utils/pyinstaller/rthooks/test_subprocess.py @@ -9,6 +9,10 @@ import salt.utils.pyinstaller.rthooks._overrides as overrides from tests.support import mock from tests.support.helpers import PatchedEnviron +pytestmark = [ + pytest.mark.skip(reason="PyInstaller is no longer used."), +] + @pytest.fixture(params=("LD_LIBRARY_PATH", "LIBPATH")) def envvar(request): From 3b8337c371ae81b21131761e1dad6747132cb956 Mon Sep 17 00:00:00 2001 From: Shane Lee Date: Thu, 16 Nov 2023 09:31:29 -0700 Subject: [PATCH 138/196] Add some more tests for LGPO module Split out some of the tests into their own file --- .../modules/win_lgpo/test__policy_info.py | 48 ------- ...dv_audit_settings.py => test_adv_audit.py} | 54 +++++-- .../unit/modules/win_lgpo/test_netsh.py | 135 ++++++++++++++++++ .../unit/modules/win_lgpo/test_policy_info.py | 88 ++++++++++++ .../unit/modules/win_lgpo/test_reg_pol.py | 53 +++++++ .../unit/modules/win_lgpo/test_secedit.py | 83 +++++++++++ 6 files changed, 402 insertions(+), 59 deletions(-) rename tests/pytests/unit/modules/win_lgpo/{test_adv_audit_settings.py => test_adv_audit.py} (68%) create mode 100644 tests/pytests/unit/modules/win_lgpo/test_netsh.py create mode 100644 tests/pytests/unit/modules/win_lgpo/test_reg_pol.py create mode 100644 tests/pytests/unit/modules/win_lgpo/test_secedit.py diff --git a/tests/pytests/unit/modules/win_lgpo/test__policy_info.py b/tests/pytests/unit/modules/win_lgpo/test__policy_info.py index 5626d1d3f79..0b9e25ee4d5 100644 --- 
a/tests/pytests/unit/modules/win_lgpo/test__policy_info.py +++ b/tests/pytests/unit/modules/win_lgpo/test__policy_info.py @@ -5,7 +5,6 @@ import pytest import salt.modules.cmdmod import salt.modules.win_file import salt.modules.win_lgpo as win_lgpo -import salt.utils.win_lgpo_auditpol as ap from salt.exceptions import CommandExecutionError from tests.support.mock import patch @@ -395,53 +394,6 @@ def test__virtual__(pol_info): ) -def test_get_advaudit_defaults(): - patch_context = patch.dict(win_lgpo.__context__, {}) - patch_salt = patch.dict( - win_lgpo.__utils__, {"auditpol.get_auditpol_dump": ap.get_auditpol_dump} - ) - with patch_context, patch_salt: - assert "Machine Name" in win_lgpo._get_advaudit_defaults("fieldnames") - - audit_defaults = {"junk": "defaults"} - patch_context = patch.dict( - win_lgpo.__context__, {"lgpo.audit_defaults": audit_defaults} - ) - with patch_context, patch_salt: - assert win_lgpo._get_advaudit_defaults() == audit_defaults - - -def test_get_netsh_value(): - with patch.dict(win_lgpo.__context__, {"lgpo.netsh_data": {"domain": {}}}): - win_lgpo._set_netsh_value("domain", "state", "State", "NotConfigured") - with patch.dict(win_lgpo.__context__, {}): - assert win_lgpo._get_netsh_value("domain", "State") == "NotConfigured" - - context = { - "lgpo.netsh_data": { - "domain": { - "State": "ONContext", - "Inbound": "NotConfigured", - "Outbound": "NotConfigured", - "LocalFirewallRules": "NotConfigured", - }, - }, - } - with patch.dict(win_lgpo.__context__, context): - assert win_lgpo._get_netsh_value("domain", "State") == "ONContext" - - -def test_get_secedit_data(tmp_path): - with patch.dict(win_lgpo.__opts__, {"cachedir": str(tmp_path)}): - assert "[System Access]\r\n" in win_lgpo._get_secedit_data() - - -def test_get_secedit_value(tmp_path): - with patch.dict(win_lgpo.__opts__, {"cachedir": str(tmp_path)}): - assert win_lgpo._get_secedit_value("Unicode") == "yes" - assert win_lgpo._get_secedit_value("JunkKey") == "Not Defined" - - 
@pytest.mark.parametrize( "val, expected", ( diff --git a/tests/pytests/unit/modules/win_lgpo/test_adv_audit_settings.py b/tests/pytests/unit/modules/win_lgpo/test_adv_audit.py similarity index 68% rename from tests/pytests/unit/modules/win_lgpo/test_adv_audit_settings.py rename to tests/pytests/unit/modules/win_lgpo/test_adv_audit.py index c31641ec1d8..1f8e83eeab3 100644 --- a/tests/pytests/unit/modules/win_lgpo/test_adv_audit_settings.py +++ b/tests/pytests/unit/modules/win_lgpo/test_adv_audit.py @@ -4,6 +4,8 @@ import salt.modules.win_file as win_file import salt.modules.win_lgpo as win_lgpo import salt.utils.win_dacl as win_dacl import salt.utils.win_lgpo_auditpol as auditpol +from salt.exceptions import CommandExecutionError +from tests.support.mock import MagicMock, patch pytestmark = [ pytest.mark.windows_whitelisted, @@ -110,7 +112,16 @@ def set_policy(): ) -def _test_adv_auditing(setting, expected): +@pytest.mark.parametrize( + "setting, expected", + [ + ("No Auditing", "0"), + ("Success", "1"), + ("Failure", "2"), + ("Success and Failure", "3"), + ], +) +def test_get_value(setting, expected): """ Helper function to set an audit setting and assert that it was successful """ @@ -120,17 +131,38 @@ def _test_adv_auditing(setting, expected): assert result == expected -def test_no_auditing(disable_legacy_auditing, set_policy): - _test_adv_auditing("No Auditing", "0") +def test_get_defaults(): + patch_context = patch.dict(win_lgpo.__context__, {}) + patch_salt = patch.dict( + win_lgpo.__utils__, {"auditpol.get_auditpol_dump": auditpol.get_auditpol_dump} + ) + with patch_context, patch_salt: + assert "Machine Name" in win_lgpo._get_advaudit_defaults("fieldnames") + + audit_defaults = {"junk": "defaults"} + patch_context = patch.dict( + win_lgpo.__context__, {"lgpo.audit_defaults": audit_defaults} + ) + with patch_context, patch_salt: + assert win_lgpo._get_advaudit_defaults() == audit_defaults -def test_success(disable_legacy_auditing, clear_policy): - 
_test_adv_auditing("Success", "1") +def test_set_value_error(): + mock_set_file_data = MagicMock(return_value=False) + with patch.object(win_lgpo, "_set_advaudit_file_data", mock_set_file_data): + with pytest.raises(CommandExecutionError): + win_lgpo._set_advaudit_value("Audit User Account Management", "None") -def test_failure(disable_legacy_auditing, clear_policy): - _test_adv_auditing("Failure", "2") - - -def test_success_and_failure(disable_legacy_auditing, clear_policy): - _test_adv_auditing("Success and Failure", "3") +def test_set_value_log_messages(caplog): + mock_set_file_data = MagicMock(return_value=True) + mock_set_pol_data = MagicMock(return_value=False) + mock_context = {"lgpo.adv_audit_data": {"test_option": "test_value"}} + with patch.object( + win_lgpo, "_set_advaudit_file_data", mock_set_file_data + ), patch.object(win_lgpo, "_set_advaudit_pol_data", mock_set_pol_data), patch.dict( + win_lgpo.__context__, mock_context + ): + win_lgpo._set_advaudit_value("test_option", None) + assert "Failed to apply audit setting:" in caplog.text + assert "LGPO: Removing Advanced Audit data:" in caplog.text diff --git a/tests/pytests/unit/modules/win_lgpo/test_netsh.py b/tests/pytests/unit/modules/win_lgpo/test_netsh.py new file mode 100644 index 00000000000..f3b4aef63eb --- /dev/null +++ b/tests/pytests/unit/modules/win_lgpo/test_netsh.py @@ -0,0 +1,135 @@ +import pytest + +import salt.modules.win_lgpo as win_lgpo +from tests.support.mock import MagicMock, patch + +pytestmark = [ + pytest.mark.windows_whitelisted, + pytest.mark.skip_unless_on_windows, + pytest.mark.destructive_test, + pytest.mark.slow_test, +] + + +@pytest.fixture +def configure_loader_modules(): + return {win_lgpo: {}} + + +def test_get_netsh_value(): + with patch.dict(win_lgpo.__context__, {"lgpo.netsh_data": {"domain": {}}}): + win_lgpo._set_netsh_value("domain", "state", "State", "NotConfigured") + with patch.dict(win_lgpo.__context__, {}): + assert win_lgpo._get_netsh_value("domain", 
"State") == "NotConfigured" + + context = { + "lgpo.netsh_data": { + "domain": { + "State": "ONContext", + "Inbound": "NotConfigured", + "Outbound": "NotConfigured", + "LocalFirewallRules": "NotConfigured", + }, + }, + } + with patch.dict(win_lgpo.__context__, context): + assert win_lgpo._get_netsh_value("domain", "State") == "ONContext" + + +def test_set_value_error(): + with pytest.raises(ValueError): + win_lgpo._set_netsh_value("domain", "bad_section", "junk", "junk") + + +def test_set_value_firewall(): + mock_context = {"lgpo.netsh_data": {"domain": "junk"}} + with patch( + "salt.utils.win_lgpo_netsh.set_firewall_settings", MagicMock() + ) as mock, patch.dict(win_lgpo.__context__, mock_context): + win_lgpo._set_netsh_value( + profile="domain", + section="firewallpolicy", + option="Inbound", + value="spongebob", + ) + mock.assert_called_once_with( + profile="domain", + inbound="spongebob", + outbound=None, + store="lgpo", + ) + + +def test_set_value_settings(): + mock_context = {"lgpo.netsh_data": {"domain": "junk"}} + with patch( + "salt.utils.win_lgpo_netsh.set_settings", MagicMock() + ) as mock, patch.dict(win_lgpo.__context__, mock_context): + win_lgpo._set_netsh_value( + profile="domain", + section="settings", + option="spongebob", + value="squarepants", + ) + mock.assert_called_once_with( + profile="domain", + setting="spongebob", + value="squarepants", + store="lgpo", + ) + + +def test_set_value_state(): + mock_context = {"lgpo.netsh_data": {"domain": "junk"}} + with patch("salt.utils.win_lgpo_netsh.set_state", MagicMock()) as mock, patch.dict( + win_lgpo.__context__, mock_context + ): + win_lgpo._set_netsh_value( + profile="domain", + section="state", + option="junk", + value="spongebob", + ) + mock.assert_called_once_with( + profile="domain", + state="spongebob", + store="lgpo", + ) + + +def test_set_value_logging_filename(): + mock_context = {"lgpo.netsh_data": {"domain": "junk"}} + with patch( + "salt.utils.win_lgpo_netsh.set_logging_settings", 
MagicMock() + ) as mock, patch.dict(win_lgpo.__context__, mock_context): + win_lgpo._set_netsh_value( + profile="domain", + section="logging", + option="FileName", + value="Not configured", + ) + mock.assert_called_once_with( + profile="domain", + setting="FileName", + value="notconfigured", + store="lgpo", + ) + + +def test_set_value_logging_log(): + mock_context = {"lgpo.netsh_data": {"domain": "junk"}} + with patch( + "salt.utils.win_lgpo_netsh.set_logging_settings", MagicMock() + ) as mock, patch.dict(win_lgpo.__context__, mock_context): + win_lgpo._set_netsh_value( + profile="domain", + section="logging", + option="LogSpongebob", + value="Junk", + ) + mock.assert_called_once_with( + profile="domain", + setting="Spongebob", + value="Junk", + store="lgpo", + ) diff --git a/tests/pytests/unit/modules/win_lgpo/test_policy_info.py b/tests/pytests/unit/modules/win_lgpo/test_policy_info.py index b728ab3de89..d0ed3c911a3 100644 --- a/tests/pytests/unit/modules/win_lgpo/test_policy_info.py +++ b/tests/pytests/unit/modules/win_lgpo/test_policy_info.py @@ -5,6 +5,7 @@ import pytest import salt.modules.win_file as win_file import salt.modules.win_lgpo as win_lgpo +from tests.support.mock import MagicMock, patch pytestmark = [ pytest.mark.windows_whitelisted, @@ -42,6 +43,18 @@ def test_get_policy_name(): assert result == expected +def test_get_adml_display_name_bad_name(): + result = win_lgpo._getAdmlDisplayName("junk", "spongbob") + assert result is None + + +def test_get_adml_display_name_no_results(): + patch_xpath = patch.object(win_lgpo, "ADML_DISPLAY_NAME_XPATH", return_value=[]) + with patch_xpath: + result = win_lgpo._getAdmlDisplayName("junk", "$(spongbob.squarepants)") + assert result is None + + def test_get_policy_id(): result = win_lgpo.get_policy( policy_name="WfwPublicSettingsNotification", @@ -156,3 +169,78 @@ def test_get_policy_id_full_return_full_names_hierarchical(): } } assert result == expected + + +def test_transform_value_missing_type(): + policy = 
{"Transform": {"some_type": "junk"}} + result = win_lgpo._transform_value( + value="spongebob", + policy=policy, + transform_type="different_type", + ) + assert result == "spongebob" + + +def test_transform_value_registry(): + policy = {"Registry": {}} + result = win_lgpo._transform_value( + value="spongebob", + policy=policy, + transform_type="different_type", + ) + assert result == "spongebob" + + +def test_transform_value_registry_not_set(): + policy = {"Registry": {}} + result = win_lgpo._transform_value( + value="(value not set)", + policy=policy, + transform_type="different_type", + ) + assert result == "Not Defined" + + +def test_validate_setting_not_in_list(): + policy = {"Settings": ["junk"]} + result = win_lgpo._validateSetting(value="spongebob", policy=policy) + assert not result + + +def test_validate_setting_in_list(): + policy = {"Settings": ["spongebob"]} + result = win_lgpo._validateSetting(value="spongebob", policy=policy) + assert result + + +def test_validate_setting_not_list_or_dict(): + policy = {"Settings": "spongebob"} + result = win_lgpo._validateSetting(value="spongebob", policy=policy) + assert result + + +def test_add_account_rights_error(): + patch_w32sec = patch( + "win32security.LsaOpenPolicy", MagicMock(side_effect=Exception) + ) + with patch_w32sec: + assert win_lgpo._addAccountRights("spongebob", "junk") is False + + +def test_del_account_rights_error(): + patch_w32sec = patch( + "win32security.LsaOpenPolicy", MagicMock(side_effect=Exception) + ) + with patch_w32sec: + assert win_lgpo._delAccountRights("spongebob", "junk") is False + + +def test_validate_setting_no_function(): + policy = { + "Settings": { + "Function": "_in_range_inclusive", + "Args": {"min": 0, "max": 24}, + }, + } + result = win_lgpo._validateSetting(value="spongebob", policy=policy) + assert not result diff --git a/tests/pytests/unit/modules/win_lgpo/test_reg_pol.py b/tests/pytests/unit/modules/win_lgpo/test_reg_pol.py new file mode 100644 index 
00000000000..79c8a10393c --- /dev/null +++ b/tests/pytests/unit/modules/win_lgpo/test_reg_pol.py @@ -0,0 +1,53 @@ +""" +:codeauthor: Shane Lee +""" +import pytest + +import salt.modules.win_lgpo as win_lgpo + +pytestmark = [ + pytest.mark.windows_whitelisted, + pytest.mark.skip_unless_on_windows, + pytest.mark.slow_test, +] + + +@pytest.fixture +def reg_pol_dword(): + data = ( + b"PReg\x01\x00\x00\x00" # Header + b"[\x00" # Opening list of policies + b"S\x00o\x00m\x00e\x00\\\x00K\x00e\x00y\x00\x00\x00;\x00" # Key + b"V\x00a\x00l\x00u\x00e\x00N\x00a\x00m\x00e\x00\x00\x00;\x00" # Value + b"\x04\x00\x00\x00;\x00" # Reg DWord Type + b"\x04\x00\x00\x00;\x00" # Size + # b"\x01\x00\x00\x00" # Reg Dword Data + b"\x00\x00\x00\x00" # No Data + b"]\x00" # Closing list of policies + ) + yield data + + +def test_get_data_from_reg_pol_data(reg_pol_dword): + encoded_name = "ValueName".encode("utf-16-le") + encoded_null = chr(0).encode("utf-16-le") + encoded_semicolon = ";".encode("utf-16-le") + encoded_type = chr(4).encode("utf-16-le") + encoded_size = chr(4).encode("utf-16-le") + search_string = b"".join( + [ + encoded_semicolon, + encoded_name, + encoded_null, + encoded_semicolon, + encoded_type, + encoded_null, + encoded_semicolon, + encoded_size, + encoded_null, + ] + ) + result = win_lgpo._getDataFromRegPolData( + search_string, reg_pol_dword, return_value_name=True + ) + assert result == {"ValueName": 0} diff --git a/tests/pytests/unit/modules/win_lgpo/test_secedit.py b/tests/pytests/unit/modules/win_lgpo/test_secedit.py new file mode 100644 index 00000000000..47a39fb8250 --- /dev/null +++ b/tests/pytests/unit/modules/win_lgpo/test_secedit.py @@ -0,0 +1,83 @@ +import pytest + +import salt.modules.cmdmod as cmd +import salt.modules.win_file as win_file +import salt.modules.win_lgpo as win_lgpo +from tests.support.mock import MagicMock, patch + +pytestmark = [ + pytest.mark.windows_whitelisted, + pytest.mark.skip_unless_on_windows, + pytest.mark.destructive_test, + 
pytest.mark.slow_test, +] + + +@pytest.fixture +def configure_loader_modules(tmp_path): + cachedir = tmp_path / "__test_admx_policy_cache_dir" + cachedir.mkdir(parents=True, exist_ok=True) + return { + win_lgpo: { + "__salt__": { + "cmd.run": cmd.run, + "file.file_exists": win_file.file_exists, + "file.remove": win_file.remove, + }, + "__opts__": { + "cachedir": str(cachedir), + }, + }, + } + + +def test_load_secedit_data(): + result = win_lgpo._load_secedit_data() + result = [x.strip() for x in result] + assert "[Unicode]" in result + assert "[System Access]" in result + + +def test_get_secedit_data(): + with patch.dict(win_lgpo.__context__, {}): + result = win_lgpo._get_secedit_data() + result = [x.strip() for x in result] + assert "[Unicode]" in result + assert "[System Access]" in result + + +def test_get_secedit_data_existing_context(): + mock_context = {"lgpo.secedit_data": ["spongebob", "squarepants"]} + with patch.dict(win_lgpo.__context__, mock_context): + result = win_lgpo._get_secedit_data() + result = [x.strip() for x in result] + assert "spongebob" in result + assert "squarepants" in result + + +def test_get_secedit_value(): + result = win_lgpo._get_secedit_value("AuditDSAccess") + assert result == "0" + + +def test_get_secedit_value_not_defined(): + result = win_lgpo._get_secedit_value("Spongebob") + assert result == "Not Defined" + + +def test_write_secedit_data_import_fail(caplog): + patch_cmd_retcode = patch.dict( + win_lgpo.__salt__, {"cmd.retcode": MagicMock(return_value=1)} + ) + with patch_cmd_retcode: + assert win_lgpo._write_secedit_data("spongebob") is False + assert "Secedit failed to import template data" in caplog.text + + +def test_write_secedit_data_configure_fail(caplog): + patch_cmd_retcode = patch.dict( + win_lgpo.__salt__, {"cmd.retcode": MagicMock(side_effect=[0, 1])} + ) + with patch_cmd_retcode: + assert win_lgpo._write_secedit_data("spongebob") is False + assert "Secedit failed to apply security database" in caplog.text From 
5463132e9d10c0a2f1365722893d9869ff0c84b5 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 16 Nov 2023 21:45:51 +0000 Subject: [PATCH 139/196] Only generate the HMAC's for ``libssl.so.1.1`` and ``libcrypto.so.1.1`` if those files exist. Fixes #65581 Signed-off-by: Pedro Algarvio --- changelog/65581.fixed.md | 1 + pkg/rpm/salt.spec | 32 ++++++++++++++++++++++++-------- 2 files changed, 25 insertions(+), 8 deletions(-) create mode 100644 changelog/65581.fixed.md diff --git a/changelog/65581.fixed.md b/changelog/65581.fixed.md new file mode 100644 index 00000000000..3ac7427b698 --- /dev/null +++ b/changelog/65581.fixed.md @@ -0,0 +1 @@ +Only generate the HMAC's for ``libssl.so.1.1`` and ``libcrypto.so.1.1`` if those files exist. diff --git a/pkg/rpm/salt.spec b/pkg/rpm/salt.spec index 4659c9fd343..1e9c31f08e4 100644 --- a/pkg/rpm/salt.spec +++ b/pkg/rpm/salt.spec @@ -463,8 +463,12 @@ if [ $1 -lt 2 ]; then # ensure hmac are up to date, master or minion, rest install one or the other # key used is from openssl/crypto/fips/fips_standalone_hmac.c openssl 1.1.1k if [ $(cat /etc/os-release | grep VERSION_ID | cut -d '=' -f 2 | sed 's/\"//g' | cut -d '.' 
-f 1) = "8" ]; then - /bin/openssl sha256 -r -hmac orboDeJITITejsirpADONivirpUkvarP /opt/saltstack/salt/lib/libssl.so.1.1 | cut -d ' ' -f 1 > /opt/saltstack/salt/lib/.libssl.so.1.1.hmac || : - /bin/openssl sha256 -r -hmac orboDeJITITejsirpADONivirpUkvarP /opt/saltstack/salt/lib/libcrypto.so.1.1 | cut -d ' ' -f 1 > /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac || : + if [ -e /opt/saltstack/salt/lib/libssl.so.1.1 ]; then + /bin/openssl sha256 -r -hmac orboDeJITITejsirpADONivirpUkvarP /opt/saltstack/salt/lib/libssl.so.1.1 | cut -d ' ' -f 1 > /opt/saltstack/salt/lib/.libssl.so.1.1.hmac || : + fi + if [ -e /opt/saltstack/salt/lib/libcrypto.so.1.1 ]; then + /bin/openssl sha256 -r -hmac orboDeJITITejsirpADONivirpUkvarP /opt/saltstack/salt/lib/libcrypto.so.1.1 | cut -d ' ' -f 1 > /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac || : + fi fi fi @@ -482,8 +486,12 @@ if [ $1 -lt 2 ]; then # ensure hmac are up to date, master or minion, rest install one or the other # key used is from openssl/crypto/fips/fips_standalone_hmac.c openssl 1.1.1k if [ $(cat /etc/os-release | grep VERSION_ID | cut -d '=' -f 2 | sed 's/\"//g' | cut -d '.' 
-f 1) = "8" ]; then - /bin/openssl sha256 -r -hmac orboDeJITITejsirpADONivirpUkvarP /opt/saltstack/salt/lib/libssl.so.1.1 | cut -d ' ' -f 1 > /opt/saltstack/salt/lib/.libssl.so.1.1.hmac || : - /bin/openssl sha256 -r -hmac orboDeJITITejsirpADONivirpUkvarP /opt/saltstack/salt/lib/libcrypto.so.1.1 | cut -d ' ' -f 1 > /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac || : + if [ -e /opt/saltstack/salt/lib/libssl.so.1.1 ]; then + /bin/openssl sha256 -r -hmac orboDeJITITejsirpADONivirpUkvarP /opt/saltstack/salt/lib/libssl.so.1.1 | cut -d ' ' -f 1 > /opt/saltstack/salt/lib/.libssl.so.1.1.hmac || : + fi + if [ -e /opt/saltstack/salt/lib/libcrypto.so.1.1 ]; then + /bin/openssl sha256 -r -hmac orboDeJITITejsirpADONivirpUkvarP /opt/saltstack/salt/lib/libcrypto.so.1.1 | cut -d ' ' -f 1 > /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac || : + fi fi fi @@ -537,8 +545,12 @@ if [ $1 -eq 0 ]; then if [ $(cat /etc/os-release | grep VERSION_ID | cut -d '=' -f 2 | sed 's/\"//g' | cut -d '.' -f 1) = "8" ]; then if [ -z "$(rpm -qi salt-minion | grep Name | grep salt-minion)" ]; then # uninstall and no minion running - /bin/rm -f /opt/saltstack/salt/lib/.libssl.so.1.1.hmac || : - /bin/rm -f /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac || : + if [ -e /opt/saltstack/salt/lib/.libssl.so.1.1.hmac ]; then + /bin/rm -f /opt/saltstack/salt/lib/.libssl.so.1.1.hmac || : + fi + if [ -e /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac ]; then + /bin/rm -f /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac || : + fi fi fi fi @@ -552,8 +564,12 @@ if [ $1 -eq 0 ]; then if [ $(cat /etc/os-release | grep VERSION_ID | cut -d '=' -f 2 | sed 's/\"//g' | cut -d '.' 
-f 1) = "8" ]; then if [ -z "$(rpm -qi salt-master | grep Name | grep salt-master)" ]; then # uninstall and no master running - /bin/rm -f /opt/saltstack/salt/lib/.libssl.so.1.1.hmac || : - /bin/rm -f /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac || : + if [ -e /opt/saltstack/salt/lib/.libssl.so.1.1.hmac ]; then + /bin/rm -f /opt/saltstack/salt/lib/.libssl.so.1.1.hmac || : + fi + if [ -e /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac ]; then + /bin/rm -f /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac || : + fi fi fi fi From 43e17e62ac32b1edf6d98c226e9dc9903877fa64 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Sun, 19 Nov 2023 19:43:35 +0000 Subject: [PATCH 140/196] Remove the custom pyinstaller support code needed for Salt < 3006.x Signed-off-by: Pedro Algarvio --- salt/utils/pyinstaller/__init__.py | 21 --- salt/utils/pyinstaller/hook-salt.py | 146 ------------------ salt/utils/pyinstaller/rthooks.dat | 4 - salt/utils/pyinstaller/rthooks/__init__.py | 3 - salt/utils/pyinstaller/rthooks/_overrides.py | 84 ---------- .../rthooks/pyi_rth_salt.utils.vt.py | 13 -- .../pyinstaller/rthooks/pyi_rth_subprocess.py | 13 -- .../functional/utils/pyinstaller/__init__.py | 0 .../utils/pyinstaller/rthooks/__init__.py | 0 .../rthooks/test_salt_utils_vt_terminal.py | 146 ------------------ .../pyinstaller/rthooks/test_subprocess.py | 115 -------------- 11 files changed, 545 deletions(-) delete mode 100644 salt/utils/pyinstaller/__init__.py delete mode 100644 salt/utils/pyinstaller/hook-salt.py delete mode 100644 salt/utils/pyinstaller/rthooks.dat delete mode 100644 salt/utils/pyinstaller/rthooks/__init__.py delete mode 100644 salt/utils/pyinstaller/rthooks/_overrides.py delete mode 100644 salt/utils/pyinstaller/rthooks/pyi_rth_salt.utils.vt.py delete mode 100644 salt/utils/pyinstaller/rthooks/pyi_rth_subprocess.py delete mode 100644 tests/pytests/functional/utils/pyinstaller/__init__.py delete mode 100644 tests/pytests/functional/utils/pyinstaller/rthooks/__init__.py delete 
mode 100644 tests/pytests/functional/utils/pyinstaller/rthooks/test_salt_utils_vt_terminal.py delete mode 100644 tests/pytests/functional/utils/pyinstaller/rthooks/test_subprocess.py diff --git a/salt/utils/pyinstaller/__init__.py b/salt/utils/pyinstaller/__init__.py deleted file mode 100644 index eb8a6a85fb4..00000000000 --- a/salt/utils/pyinstaller/__init__.py +++ /dev/null @@ -1,21 +0,0 @@ -""" -This module exists to help PyInstaller bundle Salt -""" -import pathlib - -PYINSTALLER_UTILS_DIR_PATH = pathlib.Path(__file__).resolve().parent - - -def get_hook_dirs(): - """ - Return a list of paths that PyInstaller can search for hooks. - """ - hook_dirs = {PYINSTALLER_UTILS_DIR_PATH} - for path in PYINSTALLER_UTILS_DIR_PATH.iterdir(): - if not path.is_dir(): - continue - if "__pycache__" in path.parts: - continue - hook_dirs.add(path) - - return sorted(str(p) for p in hook_dirs) diff --git a/salt/utils/pyinstaller/hook-salt.py b/salt/utils/pyinstaller/hook-salt.py deleted file mode 100644 index cad74ffd98c..00000000000 --- a/salt/utils/pyinstaller/hook-salt.py +++ /dev/null @@ -1,146 +0,0 @@ -# pylint: disable=3rd-party-module-not-gated - -import logging -import pathlib -import sys - -from PyInstaller.utils import hooks - -log = logging.getLogger(__name__) - - -def _filter_stdlib_tests(name): - """ - Filter out non useful modules from the stdlib - """ - if ".test." in name: - return False - if ".tests." 
in name: - return False - if ".idle_test" in name: - return False - return True - - -def _python_stdlib_path(): - """ - Return the path to the standard library folder - """ - base_exec_prefix = pathlib.Path(sys.base_exec_prefix) - log.info("Grabbing 'base_exec_prefix' for platform: %s", sys.platform) - if not sys.platform.lower().startswith("win"): - return base_exec_prefix / "lib" / "python{}.{}".format(*sys.version_info) - return base_exec_prefix / "Lib" - - -def _collect_python_stdlib_hidden_imports(): - """ - Collect all of the standard library(most of it) as hidden imports. - """ - _hidden_imports = set() - - stdlib = _python_stdlib_path() - if not stdlib.exists(): - log.error("The path '%s' does not exist", stdlib) - return list(_hidden_imports) - - log.info( - "Collecting hidden imports from the python standard library at: %s", - stdlib, - ) - for path in stdlib.glob("*"): - if path.is_dir(): - if path.name in ( - "__pycache__", - "site-packages", - "test", - "turtledemo", - "ensurepip", - ): - continue - if path.joinpath("__init__.py").is_file(): - log.info("Collecting: %s", path.name) - try: - _module_hidden_imports = hooks.collect_submodules( - path.name, filter=_filter_stdlib_tests - ) - log.debug("Collected(%s): %s", path.name, _module_hidden_imports) - _hidden_imports.update(set(_module_hidden_imports)) - except Exception as exc: # pylint: disable=broad-except - log.error("Failed to collect %r: %s", path.name, exc) - continue - if path.suffix not in (".py", ".pyc", ".pyo"): - continue - _hidden_imports.add(path.stem) - log.info("Collected stdlib hidden imports: %s", sorted(_hidden_imports)) - return sorted(_hidden_imports) - - -def _collect_python_stdlib_dynamic_libraries(): - """ - Collect all of the standard library(most of it) dynamic libraries. 
- """ - _dynamic_libs = set() - - stdlib = _python_stdlib_path() - if not stdlib.exists(): - log.error("The path '%s' does not exist", stdlib) - return list(_dynamic_libs) - - log.info( - "Collecting dynamic libraries from the python standard library at: %s", - stdlib, - ) - for path in stdlib.glob("*"): - if not path.is_dir(): - continue - if path.name in ( - "__pycache__", - "site-packages", - "test", - "turtledemo", - "ensurepip", - ): - continue - if path.joinpath("__init__.py").is_file(): - log.info("Collecting: %s", path.name) - try: - _module_dynamic_libs = hooks.collect_dynamic_libs(path.name, path.name) - log.debug("Collected(%s): %s", path.name, _module_dynamic_libs) - _dynamic_libs.update(set(_module_dynamic_libs)) - except Exception as exc: # pylint: disable=broad-except - log.error("Failed to collect %r: %s", path.name, exc) - log.info("Collected stdlib dynamic libs: %s", sorted(_dynamic_libs)) - return sorted(_dynamic_libs) - - -def _filter_submodules(name): - # this should never happen, but serves as a place-holder for when/if we have to filter - if not name.startswith("salt"): - return False - return True - - -# Collect Salt datas, binaries(should be None) and hidden imports -SALT_DATAS, SALT_BINARIES, SALT_HIDDENIMPORTS = hooks.collect_all( - "salt", - include_py_files=True, - filter_submodules=_filter_submodules, -) - -# In case there's salt-extensions installed, collect their datas and hidden imports -SALT_EXTENSIONS_DATAS, SALT_EXTENSIONS_HIDDENIMPORTS = hooks.collect_entry_point( - "salt.loader" -) - - -# PyInstaller attributes -datas = sorted(set(SALT_DATAS + SALT_EXTENSIONS_DATAS)) -binaries = sorted(set(SALT_BINARIES)) -hiddenimports = sorted( - set( - SALT_HIDDENIMPORTS - + SALT_EXTENSIONS_HIDDENIMPORTS - + _collect_python_stdlib_hidden_imports() - ) -) diff --git a/salt/utils/pyinstaller/rthooks.dat b/salt/utils/pyinstaller/rthooks.dat deleted file mode 100644 index b54f09a1df4..00000000000 --- a/salt/utils/pyinstaller/rthooks.dat +++ 
/dev/null @@ -1,4 +0,0 @@ -{ - "subprocess": ["pyi_rth_subprocess.py"], - "salt.utils.vt": ["pyi_rth_salt.utils.vt.py"], -} diff --git a/salt/utils/pyinstaller/rthooks/__init__.py b/salt/utils/pyinstaller/rthooks/__init__.py deleted file mode 100644 index 00c319dfa30..00000000000 --- a/salt/utils/pyinstaller/rthooks/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -""" -This package contains support code to package Salt with PyInstaller. -""" diff --git a/salt/utils/pyinstaller/rthooks/_overrides.py b/salt/utils/pyinstaller/rthooks/_overrides.py deleted file mode 100644 index ad422aeb7ed..00000000000 --- a/salt/utils/pyinstaller/rthooks/_overrides.py +++ /dev/null @@ -1,84 +0,0 @@ -""" -This package contains the runtime hooks support code for when Salt is pacakged with PyInstaller. -""" -import io -import logging -import os -import subprocess -import sys - -import salt.utils.vt - -log = logging.getLogger(__name__) - - -def clean_pyinstaller_vars(environ): - """ - Restore or cleanup PyInstaller specific environent variable behavior. - """ - if environ is None: - environ = dict(os.environ) - # When Salt is bundled with tiamat, it MUST NOT contain LD_LIBRARY_PATH - # when shelling out, or, at least the value of LD_LIBRARY_PATH set by - # pyinstaller. - # See: - # https://pyinstaller.readthedocs.io/en/stable/runtime-information.html#ld-library-path-libpath-considerations - for varname in ("LD_LIBRARY_PATH", "LIBPATH"): - original_varname = "{}_ORIG".format(varname) - if varname in environ and environ[varname] == sys._MEIPASS: - # If we find the varname on the user provided environment we need to at least - # check if it's not the value set by PyInstaller, if it is, remove it. - log.debug( - "User provided environment variable %r with value %r which is " - "the value that PyInstaller set's. 
Removing it", - varname, - environ[varname], - ) - environ.pop(varname) - - if original_varname in environ and varname not in environ: - # We found the original variable set by PyInstaller, and we didn't find - # any user provided variable, let's rename it. - log.debug( - "The %r variable was found in the passed environment, renaming it to %r", - original_varname, - varname, - ) - environ[varname] = environ.pop(original_varname) - - if varname not in environ: - if original_varname in os.environ: - log.debug( - "Renaming environment variable %r to %r", original_varname, varname - ) - environ[varname] = os.environ[original_varname] - elif varname in os.environ: - # Override the system environ variable with an empty one - log.debug("Setting environment variable %r to an empty string", varname) - environ[varname] = "" - return environ - - -class PyinstallerPopen(subprocess.Popen): - def __init__(self, *args, **kwargs): - kwargs["env"] = clean_pyinstaller_vars(kwargs.pop("env", None)) - super().__init__(*args, **kwargs) - - # From https://github.com/pyinstaller/pyinstaller/blob/v5.1/PyInstaller/hooks/rthooks/pyi_rth_subprocess.py - # - # In windowed mode, force any unused pipes (stdin, stdout and stderr) to be DEVNULL instead of inheriting the - # invalid corresponding handles from this parent process. 
- if sys.platform == "win32" and not isinstance(sys.stdout, io.IOBase): - - def _get_handles(self, stdin, stdout, stderr): - stdin, stdout, stderr = ( - subprocess.DEVNULL if pipe is None else pipe - for pipe in (stdin, stdout, stderr) - ) - return super()._get_handles(stdin, stdout, stderr) - - -class PyinstallerTerminal(salt.utils.vt.Terminal): # pylint: disable=abstract-method - def __init__(self, *args, **kwargs): - kwargs["env"] = clean_pyinstaller_vars(kwargs.pop("env", None)) - super().__init__(*args, **kwargs) diff --git a/salt/utils/pyinstaller/rthooks/pyi_rth_salt.utils.vt.py b/salt/utils/pyinstaller/rthooks/pyi_rth_salt.utils.vt.py deleted file mode 100644 index f16a9d954e0..00000000000 --- a/salt/utils/pyinstaller/rthooks/pyi_rth_salt.utils.vt.py +++ /dev/null @@ -1,13 +0,0 @@ -""" -PyInstaller runtime hook to patch salt.utils.vt.Terminal -""" -import logging - -import salt.utils.vt -from salt.utils.pyinstaller.rthooks._overrides import PyinstallerTerminal - -log = logging.getLogger(__name__) -# Patch salt.utils.vt.Terminal when running within a pyinstalled bundled package -salt.utils.vt.Terminal = PyinstallerTerminal - -log.debug("Replaced 'salt.utils.vt.Terminal' with 'PyinstallerTerminal'") diff --git a/salt/utils/pyinstaller/rthooks/pyi_rth_subprocess.py b/salt/utils/pyinstaller/rthooks/pyi_rth_subprocess.py deleted file mode 100644 index a00ad7fc33b..00000000000 --- a/salt/utils/pyinstaller/rthooks/pyi_rth_subprocess.py +++ /dev/null @@ -1,13 +0,0 @@ -""" -PyInstaller runtime hook to patch subprocess.Popen -""" -import logging -import subprocess - -from salt.utils.pyinstaller.rthooks._overrides import PyinstallerPopen - -log = logging.getLogger(__name__) -# Patch subprocess.Popen when running within a pyinstalled bundled package -subprocess.Popen = PyinstallerPopen - -log.debug("Replaced 'subprocess.Popen' with 'PyinstallerTerminal'") diff --git a/tests/pytests/functional/utils/pyinstaller/__init__.py 
b/tests/pytests/functional/utils/pyinstaller/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/tests/pytests/functional/utils/pyinstaller/rthooks/__init__.py b/tests/pytests/functional/utils/pyinstaller/rthooks/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/tests/pytests/functional/utils/pyinstaller/rthooks/test_salt_utils_vt_terminal.py b/tests/pytests/functional/utils/pyinstaller/rthooks/test_salt_utils_vt_terminal.py deleted file mode 100644 index 95a351b4532..00000000000 --- a/tests/pytests/functional/utils/pyinstaller/rthooks/test_salt_utils_vt_terminal.py +++ /dev/null @@ -1,146 +0,0 @@ -import json -import os -import sys - -import pytest - -import salt.utils.pyinstaller.rthooks._overrides as overrides -from tests.support import mock -from tests.support.helpers import PatchedEnviron - -pytestmark = [ - pytest.mark.skip(reason="PyInstaller is no longer used."), -] - - -@pytest.fixture(params=("LD_LIBRARY_PATH", "LIBPATH")) -def envvar(request): - return request.param - - -@pytest.fixture -def meipass(envvar): - with mock.patch("salt.utils.pyinstaller.rthooks._overrides.sys") as patched_sys: - patched_sys._MEIPASS = "{}_VALUE".format(envvar) - assert overrides.sys._MEIPASS == "{}_VALUE".format(envvar) - yield "{}_VALUE".format(envvar) - assert not hasattr(sys, "_MEIPASS") - assert not hasattr(overrides.sys, "_MEIPASS") - - -def test_vt_terminal_environ_cleanup_original(envvar, meipass): - orig_envvar = "{}_ORIG".format(envvar) - with PatchedEnviron(**{orig_envvar: meipass}): - original_env = dict(os.environ) - assert orig_envvar in original_env - instance = overrides.PyinstallerTerminal( - [ - sys.executable, - "-c", - "import os, json; print(json.dumps(dict(os.environ)))", - ], - stream_stdout=False, - stream_stderr=False, - ) - buffer_o = buffer_e = "" - while instance.has_unread_data: - stdout, stderr = instance.recv() - if stdout: - buffer_o += stdout - if stderr: - buffer_e += stderr - 
instance.terminate() - - assert instance.exitstatus == 0 - returned_env = json.loads(buffer_o) - assert returned_env != original_env - assert envvar in returned_env - assert orig_envvar not in returned_env - assert returned_env[envvar] == meipass - - -def test_vt_terminal_environ_cleanup_original_passed_directly(envvar, meipass): - orig_envvar = "{}_ORIG".format(envvar) - env = { - orig_envvar: meipass, - } - original_env = dict(os.environ) - - instance = overrides.PyinstallerTerminal( - [sys.executable, "-c", "import os, json; print(json.dumps(dict(os.environ)))"], - env=env.copy(), - stream_stdout=False, - stream_stderr=False, - ) - buffer_o = buffer_e = "" - while instance.has_unread_data: - stdout, stderr = instance.recv() - if stdout: - buffer_o += stdout - if stderr: - buffer_e += stderr - instance.terminate() - - assert instance.exitstatus == 0 - returned_env = json.loads(buffer_o) - assert returned_env != original_env - assert envvar in returned_env - assert orig_envvar not in returned_env - assert returned_env[envvar] == meipass - - -def test_vt_terminal_environ_cleanup(envvar, meipass): - with PatchedEnviron(**{envvar: meipass}): - original_env = dict(os.environ) - assert envvar in original_env - instance = overrides.PyinstallerTerminal( - [ - sys.executable, - "-c", - "import os, json; print(json.dumps(dict(os.environ)))", - ], - stream_stdout=False, - stream_stderr=False, - ) - buffer_o = buffer_e = "" - while instance.has_unread_data: - stdout, stderr = instance.recv() - if stdout: - buffer_o += stdout - if stderr: - buffer_e += stderr - instance.terminate() - - assert instance.exitstatus == 0 - returned_env = json.loads(buffer_o) - assert returned_env != original_env - assert envvar in returned_env - assert returned_env[envvar] == "" - - -def test_vt_terminal_environ_cleanup_passed_directly_not_removed(envvar, meipass): - env = { - envvar: envvar, - } - original_env = dict(os.environ) - - instance = overrides.PyinstallerTerminal( - [sys.executable, 
"-c", "import os, json; print(json.dumps(dict(os.environ)))"], - env=env.copy(), - stream_stdout=False, - stream_stderr=False, - ) - buffer_o = buffer_e = "" - while instance.has_unread_data: - stdout, stderr = instance.recv() - if stdout: - buffer_o += stdout - if stderr: - buffer_e += stderr - instance.terminate() - - assert instance.exitstatus == 0 - returned_env = json.loads(buffer_o) - assert returned_env != original_env - assert envvar in returned_env - assert returned_env[envvar] == envvar diff --git a/tests/pytests/functional/utils/pyinstaller/rthooks/test_subprocess.py b/tests/pytests/functional/utils/pyinstaller/rthooks/test_subprocess.py deleted file mode 100644 index ee6692bb009..00000000000 --- a/tests/pytests/functional/utils/pyinstaller/rthooks/test_subprocess.py +++ /dev/null @@ -1,115 +0,0 @@ -import json -import os -import subprocess -import sys - -import pytest - -import salt.utils.pyinstaller.rthooks._overrides as overrides -from tests.support import mock -from tests.support.helpers import PatchedEnviron - -pytestmark = [ - pytest.mark.skip(reason="PyInstaller is no longer used."), -] - - -@pytest.fixture(params=("LD_LIBRARY_PATH", "LIBPATH")) -def envvar(request): - return request.param - - -@pytest.fixture -def meipass(envvar): - with mock.patch("salt.utils.pyinstaller.rthooks._overrides.sys") as patched_sys: - patched_sys._MEIPASS = "{}_VALUE".format(envvar) - assert overrides.sys._MEIPASS == "{}_VALUE".format(envvar) - yield "{}_VALUE".format(envvar) - assert not hasattr(sys, "_MEIPASS") - assert not hasattr(overrides.sys, "_MEIPASS") - - -def test_subprocess_popen_environ_cleanup_original(envvar, meipass): - orig_envvar = "{}_ORIG".format(envvar) - with PatchedEnviron(**{orig_envvar: meipass}): - original_env = dict(os.environ) - assert orig_envvar in original_env - instance = overrides.PyinstallerPopen( - [ - sys.executable, - "-c", - "import os, json; print(json.dumps(dict(os.environ)))", - ], - stdout=subprocess.PIPE, - 
universal_newlines=True, - ) - stdout, _ = instance.communicate() - assert instance.returncode == 0 - returned_env = json.loads(stdout) - assert returned_env != original_env - assert envvar in returned_env - assert orig_envvar not in returned_env - assert returned_env[envvar] == meipass - - -def test_subprocess_popen_environ_cleanup_original_passed_directly(envvar, meipass): - orig_envvar = "{}_ORIG".format(envvar) - env = { - orig_envvar: meipass, - } - original_env = dict(os.environ) - - instance = overrides.PyinstallerPopen( - [sys.executable, "-c", "import os, json; print(json.dumps(dict(os.environ)))"], - env=env.copy(), - stdout=subprocess.PIPE, - universal_newlines=True, - ) - stdout, _ = instance.communicate() - assert instance.returncode == 0 - returned_env = json.loads(stdout) - assert returned_env != original_env - assert envvar in returned_env - assert orig_envvar not in returned_env - assert returned_env[envvar] == meipass - - -def test_subprocess_popen_environ_cleanup(envvar, meipass): - with PatchedEnviron(**{envvar: meipass}): - original_env = dict(os.environ) - assert envvar in original_env - instance = overrides.PyinstallerPopen( - [ - sys.executable, - "-c", - "import os, json; print(json.dumps(dict(os.environ)))", - ], - stdout=subprocess.PIPE, - universal_newlines=True, - ) - stdout, _ = instance.communicate() - assert instance.returncode == 0 - returned_env = json.loads(stdout) - assert returned_env != original_env - assert envvar in returned_env - assert returned_env[envvar] == "" - - -def test_subprocess_popen_environ_cleanup_passed_directly_not_removed(envvar, meipass): - env = { - envvar: envvar, - } - original_env = dict(os.environ) - - instance = overrides.PyinstallerPopen( - [sys.executable, "-c", "import os, json; print(json.dumps(dict(os.environ)))"], - env=env.copy(), - stdout=subprocess.PIPE, - universal_newlines=True, - ) - stdout, _ = instance.communicate() - assert instance.returncode == 0 - returned_env = json.loads(stdout) - 
assert returned_env != original_env - assert envvar in returned_env - assert returned_env[envvar] == envvar From 374cf2432159b2cb343c197bcf1c51af3fc5f4bd Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 20 Nov 2023 13:15:04 +0000 Subject: [PATCH 141/196] Increase timeout on CLI call to allow the test to pass Signed-off-by: Pedro Algarvio --- pkg/tests/integration/test_version.py | 17 ++++++++++++++++- 1 file changed, 16 insertions(+), 1 deletion(-) diff --git a/pkg/tests/integration/test_version.py b/pkg/tests/integration/test_version.py index 12bc5320fe8..d905155d60f 100644 --- a/pkg/tests/integration/test_version.py +++ b/pkg/tests/integration/test_version.py @@ -44,7 +44,22 @@ def test_salt_versions_report_minion(salt_cli, salt_minion): """ Test running test.versions_report on minion """ - ret = salt_cli.run("test.versions_report", minion_tgt=salt_minion.id) + # Make sure the minion is running + assert salt_minion.is_running() + # Make sure we can ping the minion ... + ret = salt_cli.run( + "--timeout=240", "test.ping", minion_tgt=salt_minion.id, _timeout=240 + ) + assert ret.returncode == 0 + assert ret.data is True + ret = salt_cli.run( + "--hard-crash", + "--failhard", + "--timeout=240", + "test.versions_report", + minion_tgt=salt_minion.id, + _timeout=240, + ) ret.stdout.matcher.fnmatch_lines(["*Salt Version:*"]) From e19cf37a61670c22f9989a76e74d252424a01ffb Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 20 Nov 2023 13:19:28 +0000 Subject: [PATCH 142/196] Try a few times Signed-off-by: Pedro Algarvio --- pkg/tests/integration/test_pkg.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/pkg/tests/integration/test_pkg.py b/pkg/tests/integration/test_pkg.py index 6e90e0a9349..bb84e5b9e27 100644 --- a/pkg/tests/integration/test_pkg.py +++ b/pkg/tests/integration/test_pkg.py @@ -1,4 +1,5 @@ import sys +import time import pytest @@ -8,8 +9,16 @@ def pkg_name(salt_call_cli, grains): if 
sys.platform.startswith("win"): ret = salt_call_cli.run("--local", "winrepo.update_git_repos") assert ret.returncode == 0 - ret = salt_call_cli.run("--local", "pkg.refresh_db") - assert ret.returncode == 0 + attempts = 3 + while attempts: + attempts -= 1 + ret = salt_call_cli.run("--local", "pkg.refresh_db") + if ret.returncode: + time.sleep(5) + continue + break + else: + pytest.fail("Failed to run 'pkg.refresh_db' 3 times.") return "putty" elif grains["os_family"] == "RedHat": if grains["os"] == "VMware Photon OS": From 6450dde07cd91f8c17fe0022badd4277e69545c0 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 20 Nov 2023 20:28:29 +0000 Subject: [PATCH 143/196] Set `open_mode` to `True` for the package tests Signed-off-by: Pedro Algarvio --- pkg/tests/conftest.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/pkg/tests/conftest.py b/pkg/tests/conftest.py index 63610564cef..d550a118100 100644 --- a/pkg/tests/conftest.py +++ b/pkg/tests/conftest.py @@ -340,6 +340,7 @@ def salt_master(salt_factories, install_salt, state_tree, pillar_tree): "netapi_enable_clients": ["local"], "external_auth": {"auto": {"saltdev": [".*"]}}, "fips_mode": FIPS_TESTRUN, + "open_mode": True, } test_user = False master_config = install_salt.config_path / "master" @@ -400,7 +401,6 @@ def salt_master(salt_factories, install_salt, state_tree, pillar_tree): scripts_dir = salt_factories.root_dir / "Scripts" scripts_dir.mkdir(exist_ok=True) salt_factories.scripts_dir = scripts_dir - config_overrides["open_mode"] = True python_executable = install_salt.bin_dir / "Scripts" / "python.exe" if install_salt.classic: python_executable = install_salt.bin_dir / "python.exe" @@ -474,6 +474,7 @@ def salt_minion(salt_factories, salt_master, install_salt): "file_roots": salt_master.config["file_roots"].copy(), "pillar_roots": salt_master.config["pillar_roots"].copy(), "fips_mode": FIPS_TESTRUN, + "open_mode": True, } if platform.is_windows(): config_overrides[ From 
486b67f320c8a9954676488441136b0d3379b710 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Tue, 21 Nov 2023 08:32:59 +0000 Subject: [PATCH 144/196] Rerun test failures in package tests Signed-off-by: Pedro Algarvio --- noxfile.py | 69 ++++++++++++++++++++++++++++++++++++++++++++++-------- 1 file changed, 59 insertions(+), 10 deletions(-) diff --git a/noxfile.py b/noxfile.py index d53cd3dfef7..fddcf357f3e 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1879,10 +1879,6 @@ def ci_test_onedir_pkgs(session): chunk = session.posargs.pop(0) cmd_args = chunks[chunk] - junit_report_filename = f"test-results-{chunk}" - runtests_log_filename = f"runtests-{chunk}" - - pydir = _get_pydir(session) if IS_LINUX: # Fetch the toolchain @@ -1904,12 +1900,39 @@ def ci_test_onedir_pkgs(session): + [ "-c", str(REPO_ROOT / "pkg-tests-pytest.ini"), - f"--junitxml=artifacts/xml-unittests-output/{junit_report_filename}.xml", - f"--log-file=artifacts/logs/{runtests_log_filename}.log", + f"--junitxml=artifacts/xml-unittests-output/test-results-{chunk}.xml", + f"--log-file=artifacts/logs/runtests-{chunk}.log", ] + session.posargs ) - _pytest(session, coverage=False, cmd_args=pytest_args, env=env) + try: + _pytest(session, coverage=False, cmd_args=pytest_args, env=env) + except CommandFailed: + + # Don't print the system information, not the test selection on reruns + global PRINT_TEST_SELECTION + global PRINT_SYSTEM_INFO + PRINT_TEST_SELECTION = False + PRINT_SYSTEM_INFO = False + + pytest_args = ( + cmd_args[:] + + [ + "-c", + str(REPO_ROOT / "pkg-tests-pytest.ini"), + f"--junitxml=artifacts/xml-unittests-output/test-results-{chunk}-rerun.xml", + f"--log-file=artifacts/logs/runtests-{chunk}-rerun.log", + "--lf", + ] + + session.posargs + ) + _pytest( + session, + coverage=False, + cmd_args=pytest_args, + env=env, + on_rerun=True, + ) if chunk not in ("install", "download-pkgs"): cmd_args = chunks["install"] @@ -1919,8 +1942,8 @@ def ci_test_onedir_pkgs(session): "-c", str(REPO_ROOT / 
"pkg-tests-pytest.ini"), "--no-install", - f"--junitxml=artifacts/xml-unittests-output/{junit_report_filename}.xml", - f"--log-file=artifacts/logs/{runtests_log_filename}.log", + f"--junitxml=artifacts/xml-unittests-output/test-results-install.xml", + f"--log-file=artifacts/logs/runtests-install.log", ] + session.posargs ) @@ -1928,5 +1951,31 @@ def ci_test_onedir_pkgs(session): pytest_args.append("--use-prev-version") if chunk in ("upgrade-classic", "downgrade-classic"): pytest_args.append("--classic") - _pytest(session, coverage=False, cmd_args=pytest_args, env=env) + try: + _pytest(session, coverage=False, cmd_args=pytest_args, env=env) + except CommandFailed: + cmd_args = chunks["install"] + pytest_args = ( + cmd_args[:] + + [ + "-c", + str(REPO_ROOT / "pkg-tests-pytest.ini"), + "--no-install", + f"--junitxml=artifacts/xml-unittests-output/test-results-install-rerun.xml", + f"--log-file=artifacts/logs/runtests-install-rerun.log", + "--lf", + ] + + session.posargs + ) + if "downgrade" in chunk: + pytest_args.append("--use-prev-version") + if chunk in ("upgrade-classic", "downgrade-classic"): + pytest_args.append("--classic") + _pytest( + session, + coverage=False, + cmd_args=pytest_args, + env=env, + on_rerun=True, + ) sys.exit(0) From 9afc4ca7d32693b277266a5e8aed28fe018aee61 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Wed, 27 Sep 2023 17:14:42 -0400 Subject: [PATCH 145/196] Add test for `pillar_rend=True` --- .../utils/jinja/test_salt_cache_loader.py | 30 ++++++++++++++----- 1 file changed, 23 insertions(+), 7 deletions(-) diff --git a/tests/pytests/unit/utils/jinja/test_salt_cache_loader.py b/tests/pytests/unit/utils/jinja/test_salt_cache_loader.py index e0f5fa158ff..9f3eb63bf1a 100644 --- a/tests/pytests/unit/utils/jinja/test_salt_cache_loader.py +++ b/tests/pytests/unit/utils/jinja/test_salt_cache_loader.py @@ -2,7 +2,6 @@ Tests for salt.utils.jinja """ -import copy import os import pytest @@ -25,7 +24,7 @@ def minion_opts(tmp_path, minion_opts): 
"file_buffer_size": 1048576, "cachedir": str(tmp_path), "file_roots": {"test": [str(tmp_path / "files" / "test")]}, - "pillar_roots": {"test": [str(tmp_path / "files" / "test")]}, + "pillar_roots": {"test": [str(tmp_path / "pillar" / "test")]}, "extension_modules": os.path.join( os.path.dirname(os.path.abspath(__file__)), "extmods" ), @@ -108,7 +107,7 @@ def get_loader(mock_file_client, minion_opts): if opts is None: opts = minion_opts mock_file_client.opts = opts - loader = SaltCacheLoader(opts, saltenv, _file_client=mock_file_client) + loader = SaltCacheLoader(opts, saltenv, _file_client=mock_file_client, **kwargs) # Create a mock file client and attach it to the loader return loader @@ -128,10 +127,27 @@ def test_searchpath(minion_opts, get_loader, tmp_path): """ The searchpath is based on the cachedir option and the saltenv parameter """ - opts = copy.deepcopy(minion_opts) - opts.update({"cachedir": str(tmp_path)}) - loader = get_loader(opts=minion_opts, saltenv="test") - assert loader.searchpath == [str(tmp_path / "files" / "test")] + saltenv = "test" + loader = get_loader(opts=minion_opts, saltenv=saltenv) + assert loader.searchpath == minion_opts["file_roots"][saltenv] + + +def test_searchpath_pillar_rend(minion_opts, get_loader): + """ + The searchpath is based on the pillar_rend if it is True + """ + saltenv = "test" + loader = get_loader(opts=minion_opts, saltenv=saltenv, pillar_rend=True) + assert loader.searchpath == minion_opts["pillar_roots"][saltenv] + + +def test_searchpath_bad_pillar_rend(minion_opts, get_loader): + """ + The searchpath is based on the pillar_rend if it is True + """ + saltenv = "bad_env" + loader = get_loader(opts=minion_opts, saltenv=saltenv, pillar_rend=True) + assert loader.searchpath == [] def test_mockclient(minion_opts, template_dir, hello_simple, get_loader): From 018dbfacab5193fed0a5d288873e212e25c10f5a Mon Sep 17 00:00:00 2001 From: MKLeb Date: Fri, 29 Sep 2023 17:15:25 -0400 Subject: [PATCH 146/196] Add full coverage for 
`SaltCacheLoader` --- salt/utils/jinja.py | 3 +- .../utils/jinja/test_salt_cache_loader.py | 78 +++++++++++++++++-- 2 files changed, 71 insertions(+), 10 deletions(-) diff --git a/salt/utils/jinja.py b/salt/utils/jinja.py index d90957a0087..898c8d3fc0d 100644 --- a/salt/utils/jinja.py +++ b/salt/utils/jinja.py @@ -127,7 +127,7 @@ class SaltCacheLoader(BaseLoader): the importing file. """ - # FIXME: somewhere do seprataor replacement: '\\' => '/' + # FIXME: somewhere do separator replacement: '\\' => '/' _template = template if template.split("/", 1)[0] in ("..", "."): is_relative = True @@ -136,7 +136,6 @@ class SaltCacheLoader(BaseLoader): # checks for relative '..' paths that step-out of file_roots if is_relative: # Starts with a relative path indicator - if not environment or "tpldir" not in environment.globals: log.warning( 'Relative path "%s" cannot be resolved without an environment', diff --git a/tests/pytests/unit/utils/jinja/test_salt_cache_loader.py b/tests/pytests/unit/utils/jinja/test_salt_cache_loader.py index 9f3eb63bf1a..c4a34f5486b 100644 --- a/tests/pytests/unit/utils/jinja/test_salt_cache_loader.py +++ b/tests/pytests/unit/utils/jinja/test_salt_cache_loader.py @@ -5,7 +5,7 @@ Tests for salt.utils.jinja import os import pytest -from jinja2 import Environment, exceptions +from jinja2 import Environment, TemplateNotFound, exceptions # dateutils is needed so that the strftime jinja filter is loaded import salt.utils.dateutils # pylint: disable=unused-import @@ -14,7 +14,7 @@ import salt.utils.json # pylint: disable=unused-import import salt.utils.stringutils # pylint: disable=unused-import import salt.utils.yaml # pylint: disable=unused-import from salt.utils.jinja import SaltCacheLoader -from tests.support.mock import Mock, call, patch +from tests.support.mock import MagicMock, call, patch @pytest.fixture @@ -224,7 +224,7 @@ def test_cached_file_client(get_loader, minion_opts): """ Multiple instantiations of SaltCacheLoader use the cached file client 
""" - with patch("salt.channel.client.ReqChannel.factory", Mock()): + with patch("salt.channel.client.ReqChannel.factory", MagicMock()): loader_a = SaltCacheLoader(minion_opts) loader_b = SaltCacheLoader(minion_opts) assert loader_a._file_client is loader_b._file_client @@ -246,7 +246,7 @@ def test_cache_loader_passed_file_client(minion_opts, mock_file_client): file_client does not have a destroy method """ # Test SaltCacheLoader creating and destroying the file client created - file_client = Mock() + file_client = MagicMock() with patch("salt.fileclient.get_file_client", return_value=file_client): loader = SaltCacheLoader(minion_opts) assert loader._file_client is None @@ -256,9 +256,9 @@ def test_cache_loader_passed_file_client(minion_opts, mock_file_client): assert file_client.mock_calls == [call.destroy()] # Test SaltCacheLoader reusing the file client passed - file_client = Mock() + file_client = MagicMock() file_client.opts = {"file_roots": minion_opts["file_roots"]} - with patch("salt.fileclient.get_file_client", return_value=Mock()): + with patch("salt.fileclient.get_file_client", return_value=MagicMock()): loader = SaltCacheLoader(minion_opts, _file_client=file_client) assert loader._file_client is file_client with loader: @@ -270,9 +270,9 @@ def test_cache_loader_passed_file_client(minion_opts, mock_file_client): # passed because the "file_roots" option is different, and, as such, # the destroy method on the new file client is called, but not on the # file client passed in. 
- file_client = Mock() + file_client = MagicMock() file_client.opts = {"file_roots": ""} - new_file_client = Mock() + new_file_client = MagicMock() with patch("salt.fileclient.get_file_client", return_value=new_file_client): loader = SaltCacheLoader(minion_opts, _file_client=file_client) assert loader._file_client is file_client @@ -282,3 +282,65 @@ def test_cache_loader_passed_file_client(minion_opts, mock_file_client): assert loader._file_client is None assert file_client.mock_calls == [] assert new_file_client.mock_calls == [call.destroy()] + + +def test_check_cache_miss(get_loader, minion_opts, hello_simple): + saltenv = "test" + loader = get_loader(opts=minion_opts, saltenv=saltenv) + with patch.object(loader, "cached", []): + with patch.object(loader, "cache_file") as cache_mock: + loader.check_cache(str(hello_simple)) + cache_mock.assert_called_once() + + +def test_check_cache_hit(get_loader, minion_opts, hello_simple): + saltenv = "test" + loader = get_loader(opts=minion_opts, saltenv=saltenv) + with patch.object(loader, "cached", [str(hello_simple)]): + with patch.object(loader, "cache_file") as cache_mock: + loader.check_cache(str(hello_simple)) + cache_mock.assert_not_called() + + +def test_get_source_no_environment( + get_loader, minion_opts, relative_rhello, relative_dir +): + saltenv = "test" + loader = get_loader(opts=minion_opts, saltenv=saltenv) + with pytest.raises(TemplateNotFound): + loader.get_source(None, str(".." / relative_rhello.relative_to(relative_dir))) + + +def test_get_source_relative_no_tpldir( + get_loader, minion_opts, relative_rhello, relative_dir +): + saltenv = "test" + loader = get_loader(opts=minion_opts, saltenv=saltenv) + with pytest.raises(TemplateNotFound): + loader.get_source( + MagicMock(globals=[]), str(".." 
/ relative_rhello.relative_to(relative_dir)) + ) + + +def test_get_source_template_doesnt_exist(get_loader, minion_opts): + saltenv = "test" + fake_path = "fake_path" + loader = get_loader(opts=minion_opts, saltenv=saltenv) + with pytest.raises(TemplateNotFound): + loader.get_source(None, fake_path) + + +def test_get_source_template_removed(get_loader, minion_opts, hello_simple): + saltenv = "test" + loader = get_loader(opts=minion_opts, saltenv=saltenv) + contents, filepath, uptodate = loader.get_source(None, str(hello_simple)) + hello_simple.unlink() + assert uptodate() is False + + +def test_no_destroy_method_on_file_client(get_loader, minion_opts): + saltenv = "test" + loader = get_loader(opts=minion_opts, saltenv=saltenv) + loader._close_file_client = True + # This should fail silently, thus no error catching + loader.destroy() From 907e33436168bd48d6d3a3595c624498f150f153 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Fri, 29 Sep 2023 18:52:53 -0400 Subject: [PATCH 147/196] Add some more filter tests in `test_jinja_filters.py` --- .../modules/state/test_jinja_filters.py | 137 +++++++++++++++++- 1 file changed, 133 insertions(+), 4 deletions(-) diff --git a/tests/pytests/functional/modules/state/test_jinja_filters.py b/tests/pytests/functional/modules/state/test_jinja_filters.py index 59777cee196..99bae5f0517 100644 --- a/tests/pytests/functional/modules/state/test_jinja_filters.py +++ b/tests/pytests/functional/modules/state/test_jinja_filters.py @@ -499,6 +499,17 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="avg_not_list", + expected={"ret": 2.0}, + sls=""" + {% set result = 2 | avg() %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), Filter( name="difference", expected={"ret": [1, 3]}, @@ -653,6 +664,17 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="union_hashable", + expected={"ret": [1, 2, 3, 4, 6]}, + sls=""" + {% set result = (1, 2, 3, 4) | union((2, 4, 6)) | list %} + test: + 
module.run: + - name: test.echo + - text: {{ result }} + """, + ), Filter( name="unique", expected={"ret": ["a", "b", "c"]}, @@ -929,6 +951,109 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="raise", + expected={"ret": {"Question": "Quieres Café?"}}, + sls=""" + {{ raise('Custom Error') }} + """, + ), + Filter( + name="match", + expected={"ret": "match"}, + sls=""" + {% if 'a' is match('[a-b]') %} + {% set result = 'match' %} + {% else %} + {% set result = 'no_match' %} + {% endif %} + + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + Filter( + name="no_match", + expected={"ret": "no match"}, + sls=""" + {% if 'c' is match('[a-b]') %} + {% set result = 'match' %} + {% else %} + {% set result = 'no match' %} + {% endif %} + + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + Filter( + name="match_ignorecase", + expected={"ret": "match"}, + sls=""" + {% if 'A' is match('[a-b]', True) %} + {% set result = 'match' %} + {% else %} + {% set result = 'no_match' %} + {% endif %} + + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + # The muiltiline flag doesn't make sense for `match`, we should deprecate it + Filter( + name="match_multiline", + expected={"ret": "match"}, + sls=""" + {% set ml_string = 'this is a multiline\nstring' %} + {% if ml_string is match('.*\n^string', False, True) %} + {% set result = 'match' %} + {% else %} + {% set result = 'no_match' %} + {% endif %} + + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + Filter( + name="equalto", + expected={"ret": "equal"}, + sls=""" + {% if 1 is equalto(1) %} + {% set result = 'equal' %} + {% else %} + {% set result = 'not equal' %} + {% endif %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + Filter( + name="un_equalto", + expected={"ret": "not equal"}, + sls=""" + {% if 1 is equalto(2) %} + {% set result = 'equal' %} + {% else %} + {% set result = 
'not equal' %} + {% endif %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), ], ids=_filter_id, ) @@ -945,7 +1070,11 @@ def test_filter(state, state_tree, filter, grains): with filter(state_tree): ret = state.sls("filter") log.debug("state.sls returned: %s", ret) - assert not ret.failed - for state_result in ret: - assert state_result.result is True - filter.assert_result(state_result.changes) + if filter.name == "raise": + assert ret.failed + assert "TemplateError" in ret.errors[0] + else: + assert not ret.failed + for state_result in ret: + assert state_result.result is True + filter.assert_result(state_result.changes) From 83ab0d88078a6bc051fe58dae0f572136037d0ef Mon Sep 17 00:00:00 2001 From: MKLeb Date: Tue, 3 Oct 2023 19:36:10 -0400 Subject: [PATCH 148/196] Add full coverage for the jinja filters --- .../modules/state/test_jinja_filters.py | 166 +++++++++++++++++- 1 file changed, 165 insertions(+), 1 deletion(-) diff --git a/tests/pytests/functional/modules/state/test_jinja_filters.py b/tests/pytests/functional/modules/state/test_jinja_filters.py index 99bae5f0517..38135ac967b 100644 --- a/tests/pytests/functional/modules/state/test_jinja_filters.py +++ b/tests/pytests/functional/modules/state/test_jinja_filters.py @@ -521,6 +521,17 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="difference_hashable", + expected={"ret": [1, 3]}, + sls=""" + {% set result = (1, 2, 3, 4) | difference((2, 4, 6)) | list %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), Filter( name="intersect", expected={"ret": [2, 4]}, @@ -532,6 +543,17 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="intersect_hashable", + expected={"ret": [2, 4]}, + sls=""" + {% set result = (1, 2, 3, 4) | intersect((2, 4, 6)) | list %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), Filter( name="max", expected={"ret": 4}, @@ -580,6 +602,28 @@ def _filter_id(value): 
name="regex_match", expected={"ret": "('a', 'd')"}, sls=""" + {% set result = 'abcd' | regex_match('^(.*)bc(.*)$') %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + Filter( + name="regex_match_no_match", + expected={"ret": "None"}, + sls=""" + {% set result = 'abcd' | regex_match('^(.*)BC(.*)$') %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + Filter( + name="regex_match_ignorecase", + expected={"ret": "('a', 'd')"}, + sls=""" {% set result = 'abcd' | regex_match('^(.*)BC(.*)$', ignorecase=True) %} test: module.run: @@ -587,6 +631,17 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="regex_match_multiline", + expected={"ret": "('foo1',)"}, + sls=""" + {% set result = 'foo1\nfoo2\n' | regex_match('(foo.$)', multiline=True) %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), Filter( name="regex_replace", expected={"ret": "lets__replace__spaces"}, @@ -598,10 +653,65 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="regex_replace_no_match", + expected={"ret": "lets replace spaces"}, + sls=r""" + {% set result = 'lets replace spaces' | regex_replace('\s+$', '__') %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + Filter( + name="regex_replace_ignorecase", + expected={"ret": "barbar"}, + sls=r""" + {% set result = 'FOO1foo2' | regex_replace('foo.', 'bar', ignorecase=True) %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + Filter( + name="regex_replace_multiline", + expected={"ret": "bar bar "}, + sls=r""" + {% set result = 'FOO1\nfoo2\n' | regex_replace('^foo.$', 'bar', ignorecase=True, multiline=True) %} + test: + module.run: + - name: test.echo + - text: '{{ result }}' + """, + ), Filter( name="regex_search", expected={"ret": "('a', 'd')"}, sls=""" + {% set result = 'abcd' | regex_search('^(.*)bc(.*)$') %} + test: + module.run: + - name: test.echo + - text: {{ result }} + 
""", + ), + Filter( + name="regex_search_no_match", + expected={"ret": "None"}, + sls=""" + {% set result = 'abcd' | regex_search('^(.*)BC(.*)$') %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + Filter( + name="regex_search_ignorecase", + expected={"ret": "('a', 'd')"}, + sls=""" {% set result = 'abcd' | regex_search('^(.*)BC(.*)$', ignorecase=True) %} test: module.run: @@ -609,6 +719,17 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="regex_search_multiline", + expected={"ret": "('foo1',)"}, + sls=""" + {% set result = 'foo1\nfoo2\n' | regex_search('(foo.$)', multiline=True) %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), Filter( name="sequence", expected={"ret": ["Salt Rocks!"]}, @@ -642,6 +763,17 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="symmetric_difference_hashable", + expected={"ret": [1, 3, 6]}, + sls=""" + {% set result = (1, 2, 3, 4) | symmetric_difference((2, 4, 6)) | list %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), Filter( name="to_bool", expected={"ret": True}, @@ -653,6 +785,39 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="to_bool_none", + expected={"ret": "False"}, + sls=""" + {% set result = 'None' | to_bool() %} + test: + module.run: + - name: test.echo + - text: '{{ result }}' + """, + ), + Filter( + name="to_bool_given_bool", + expected={"ret": "True"}, + sls=""" + {% set result = true | to_bool() %} + test: + module.run: + - name: test.echo + - text: '{{ result }}' + """, + ), + Filter( + name="to_bool_not_hashable", + expected={"ret": "True"}, + sls=""" + {% set result = ['hello', 'world'] | to_bool() %} + test: + module.run: + - name: test.echo + - text: '{{ result }}' + """, + ), Filter( name="union", expected={"ret": [1, 2, 3, 4, 6]}, @@ -1006,7 +1171,6 @@ def _filter_id(value): - text: {{ result }} """, ), - # The muiltiline flag doesn't make sense for `match`, 
we should deprecate it Filter( name="match_multiline", expected={"ret": "match"}, From 55b1ff7b64bacb51e2c6e28afd1a38d5a182bbc2 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Wed, 4 Oct 2023 12:43:25 -0400 Subject: [PATCH 149/196] Migrate `tests/integration/renderers/test_jinja.py` to pytest --- tests/integration/renderers/test_jinja.py | 36 ------------------- .../integration/renderers/test_jinja.py | 36 +++++++++++++++++++ 2 files changed, 36 insertions(+), 36 deletions(-) delete mode 100644 tests/integration/renderers/test_jinja.py create mode 100644 tests/pytests/integration/renderers/test_jinja.py diff --git a/tests/integration/renderers/test_jinja.py b/tests/integration/renderers/test_jinja.py deleted file mode 100644 index f0fcd28ff9d..00000000000 --- a/tests/integration/renderers/test_jinja.py +++ /dev/null @@ -1,36 +0,0 @@ -import os - -import pytest - -import salt.utils.files -from tests.support.case import ModuleCase, ShellCase -from tests.support.helpers import with_tempdir - - -class JinjaRendererTest(ModuleCase): - @with_tempdir() - @pytest.mark.slow_test - def test_issue_54765(self, tmpdir): - file_path = os.path.join(tmpdir, "issue-54765") - ret = self.run_function( - "state.sls", mods="issue-54765", pillar={"file_path": file_path} - ) - key = "file_|-issue-54765_|-{}_|-managed".format(file_path) - assert key in ret - assert ret[key]["result"] is True - with salt.utils.files.fopen(file_path, "r") as fp: - assert fp.read().strip() == "bar" - - -class JinjaRenderCallTest(ShellCase): - @with_tempdir() - @pytest.mark.slow_test - def test_issue_54765(self, tmpdir): - file_path = os.path.join(tmpdir, "issue-54765") - pillar_str = '\'{{"file_path": "{}"}}\''.format(file_path) - ret = self.run_call( - "state.apply issue-54765 pillar={}".format(pillar_str), local=True - ) - assert " Result: True" in ret - with salt.utils.files.fopen(file_path, "r") as fp: - assert fp.read().strip() == "bar" diff --git a/tests/pytests/integration/renderers/test_jinja.py 
b/tests/pytests/integration/renderers/test_jinja.py new file mode 100644 index 00000000000..1a902e2047e --- /dev/null +++ b/tests/pytests/integration/renderers/test_jinja.py @@ -0,0 +1,36 @@ +import pytest + +import salt.utils.files + +pytestmark = [ + pytest.mark.slow_test, +] + + +def test_issue_54765_salt(tmp_path, salt_cli, salt_minion): + file_path = str(tmp_path / "issue-54765") + ret = salt_cli.run( + "state.sls", + mods="issue-54765", + pillar={"file_path": file_path}, + minion_tgt=salt_minion.id, + ).data + key = "file_|-issue-54765_|-{}_|-managed".format(file_path) + assert key in ret + assert ret[key]["result"] is True + with salt.utils.files.fopen(file_path, "r") as fp: + assert fp.read().strip() == "bar" + + +def test_issue_54765_call(tmp_path, salt_call_cli): + file_path = str(tmp_path / "issue-54765") + ret = salt_call_cli.run( + "--local", + "state.apply", + "issue-54765", + pillar=f"{{'file_path': '{file_path}'}}", + ) + key = "file_|-issue-54765_|-{}_|-managed".format(file_path) + assert ret.data[key]["result"] is True + with salt.utils.files.fopen(file_path, "r") as fp: + assert fp.read().strip() == "bar" From 4c7f477d804f42138540692c21c8e64de9f6c0f2 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Wed, 4 Oct 2023 13:07:03 -0400 Subject: [PATCH 150/196] Migrate `tests/integration/modules/test_jinja.py` to pytest --- tests/integration/modules/test_jinja.py | 76 ------------------- .../pytests/integration/modules/test_jinja.py | 64 ++++++++++++++++ 2 files changed, 64 insertions(+), 76 deletions(-) delete mode 100644 tests/integration/modules/test_jinja.py create mode 100644 tests/pytests/integration/modules/test_jinja.py diff --git a/tests/integration/modules/test_jinja.py b/tests/integration/modules/test_jinja.py deleted file mode 100644 index 70b45bf0f23..00000000000 --- a/tests/integration/modules/test_jinja.py +++ /dev/null @@ -1,76 +0,0 @@ -""" -Test the jinja module -""" - -import os - -import salt.utils.files -import salt.utils.json -import 
salt.utils.yaml -from tests.support.case import ModuleCase -from tests.support.helpers import requires_system_grains -from tests.support.runtests import RUNTIME_VARS - - -class TestModulesJinja(ModuleCase): - """ - Test the jinja map module - """ - - def _path(self, name, absolute=False): - path = os.path.join("modules", "jinja", name) - if absolute: - return os.path.join(RUNTIME_VARS.BASE_FILES, path) - else: - return path - - def test_import_json(self): - json_file = "osarchmap.json" - ret = self.run_function("jinja.import_json", [self._path(json_file)]) - with salt.utils.files.fopen(self._path(json_file, absolute=True)) as fh_: - self.assertDictEqual(salt.utils.json.load(fh_), ret) - - def test_import_yaml(self): - yaml_file = "defaults.yaml" - ret = self.run_function("jinja.import_yaml", [self._path(yaml_file)]) - with salt.utils.files.fopen(self._path(yaml_file, absolute=True)) as fh_: - self.assertDictEqual(salt.utils.yaml.safe_load(fh_), ret) - - @requires_system_grains - def test_load_map(self, grains): - ret = self.run_function("jinja.load_map", [self._path("map.jinja"), "template"]) - - assert isinstance( - ret, dict - ), "failed to return dictionary from jinja.load_map: {}".format(ret) - - with salt.utils.files.fopen(self._path("defaults.yaml", absolute=True)) as fh_: - defaults = salt.utils.yaml.safe_load(fh_) - with salt.utils.files.fopen(self._path("osarchmap.json", absolute=True)) as fh_: - osarchmap = salt.utils.json.load(fh_) - with salt.utils.files.fopen( - self._path("osfamilymap.yaml", absolute=True) - ) as fh_: - osfamilymap = salt.utils.yaml.safe_load(fh_) - with salt.utils.files.fopen(self._path("osmap.yaml", absolute=True)) as fh_: - osmap = salt.utils.yaml.safe_load(fh_) - with salt.utils.files.fopen( - self._path("osfingermap.yaml", absolute=True) - ) as fh_: - osfingermap = salt.utils.yaml.safe_load(fh_) - - self.assertEqual( - ret.get("arch"), osarchmap.get(grains["osarch"], {}).get("arch") - ) - self.assertEqual( - ret.get("config"), - 
osfingermap.get(grains["osfinger"], {}).get( - "config", - osmap.get(grains["os"], {}).get( - "config", - osfamilymap.get(grains["os_family"], {}).get( - "config", defaults.get("template").get("config") - ), - ), - ), - ) diff --git a/tests/pytests/integration/modules/test_jinja.py b/tests/pytests/integration/modules/test_jinja.py new file mode 100644 index 00000000000..0ae98dbf7dc --- /dev/null +++ b/tests/pytests/integration/modules/test_jinja.py @@ -0,0 +1,64 @@ +""" +Test the jinja module +""" + +import os + +import salt.utils.files +import salt.utils.json +import salt.utils.yaml +from tests.support.runtests import RUNTIME_VARS + + +def _path(name, absolute=False): + path = os.path.join("modules", "jinja", name) + if absolute: + return os.path.join(RUNTIME_VARS.BASE_FILES, path) + else: + return path + + +def test_import_json(salt_cli, salt_minion): + json_file = "osarchmap.json" + ret = salt_cli.run("jinja.import_json", _path(json_file), minion_tgt=salt_minion.id) + with salt.utils.files.fopen(_path(json_file, absolute=True)) as fh_: + assert salt.utils.json.load(fh_) == ret.data + + +def test_import_yaml(salt_cli, salt_minion): + yaml_file = "defaults.yaml" + ret = salt_cli.run("jinja.import_yaml", _path(yaml_file), minion_tgt=salt_minion.id) + with salt.utils.files.fopen(_path(yaml_file, absolute=True)) as fh_: + assert salt.utils.yaml.safe_load(fh_) == ret.data + + +def test_load_map(grains, salt_cli, salt_minion): + ret = salt_cli.run( + "jinja.load_map", _path("map.jinja"), "template", minion_tgt=salt_minion.id + ) + + assert isinstance( + ret.data, dict + ), "failed to return dictionary from jinja.load_map: {}".format(ret) + + with salt.utils.files.fopen(_path("defaults.yaml", absolute=True)) as fh_: + defaults = salt.utils.yaml.safe_load(fh_) + with salt.utils.files.fopen(_path("osarchmap.json", absolute=True)) as fh_: + osarchmap = salt.utils.json.load(fh_) + with salt.utils.files.fopen(_path("osfamilymap.yaml", absolute=True)) as fh_: + osfamilymap = 
salt.utils.yaml.safe_load(fh_) + with salt.utils.files.fopen(_path("osmap.yaml", absolute=True)) as fh_: + osmap = salt.utils.yaml.safe_load(fh_) + with salt.utils.files.fopen(_path("osfingermap.yaml", absolute=True)) as fh_: + osfingermap = salt.utils.yaml.safe_load(fh_) + + assert ret.data.get("arch") == osarchmap.get(grains["osarch"], {}).get("arch") + assert ret.data.get("config") == osfingermap.get(grains["osfinger"], {}).get( + "config", + osmap.get(grains["os"], {}).get( + "config", + osfamilymap.get(grains["os_family"], {}).get( + "config", defaults.get("template").get("config") + ), + ), + ) From 7370733bae8ccd47440bd08de214b8e9756fdc28 Mon Sep 17 00:00:00 2001 From: MKLeb Date: Mon, 16 Oct 2023 13:34:41 -0400 Subject: [PATCH 151/196] Fix windows tests --- tests/pytests/unit/utils/jinja/test_salt_cache_loader.py | 2 +- tools/testsuite/download.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/pytests/unit/utils/jinja/test_salt_cache_loader.py b/tests/pytests/unit/utils/jinja/test_salt_cache_loader.py index c4a34f5486b..be68660bccf 100644 --- a/tests/pytests/unit/utils/jinja/test_salt_cache_loader.py +++ b/tests/pytests/unit/utils/jinja/test_salt_cache_loader.py @@ -318,7 +318,7 @@ def test_get_source_relative_no_tpldir( loader = get_loader(opts=minion_opts, saltenv=saltenv) with pytest.raises(TemplateNotFound): loader.get_source( - MagicMock(globals=[]), str(".." / relative_rhello.relative_to(relative_dir)) + MagicMock(globals={}), str(".." / relative_rhello.relative_to(relative_dir)) ) diff --git a/tools/testsuite/download.py b/tools/testsuite/download.py index cd6d51aa5fc..edd7652125b 100644 --- a/tools/testsuite/download.py +++ b/tools/testsuite/download.py @@ -190,7 +190,7 @@ def download_artifact( repository: str = "saltstack/salt", ): """ - Download CI built packages artifacts. + Download CI artifacts. 
""" if TYPE_CHECKING: assert artifact_name is not None From 92a9707420c7be52016370126251475bf5c0f322 Mon Sep 17 00:00:00 2001 From: Shane Lee Date: Fri, 17 Nov 2023 12:12:23 -0700 Subject: [PATCH 152/196] Put cleanup in a try/except block If it fails to cleanup the PAexec binaries, it should still continue --- changelog/65584.fixed.md | 2 ++ salt/utils/cloud.py | 9 +++++-- tests/pytests/unit/utils/test_cloud.py | 33 ++++++++++++++++++++++++-- 3 files changed, 40 insertions(+), 4 deletions(-) create mode 100644 changelog/65584.fixed.md diff --git a/changelog/65584.fixed.md b/changelog/65584.fixed.md new file mode 100644 index 00000000000..1da48b32bb0 --- /dev/null +++ b/changelog/65584.fixed.md @@ -0,0 +1,2 @@ +Fixed an issue where Salt Cloud would fail if it could not delete lingering +PAexec binaries diff --git a/salt/utils/cloud.py b/salt/utils/cloud.py index a0843130593..3e026a0bb57 100644 --- a/salt/utils/cloud.py +++ b/salt/utils/cloud.py @@ -63,7 +63,7 @@ try: from pypsexec.client import Client as PsExecClient from pypsexec.exceptions import SCMRException from pypsexec.scmr import Service as ScmrService - from smbprotocol.exceptions import SMBResponseException + from smbprotocol.exceptions import CannotDelete, SMBResponseException from smbprotocol.tree import TreeConnect logging.getLogger("smbprotocol").setLevel(logging.WARNING) @@ -910,7 +910,12 @@ class Client: return self._client.connect() def disconnect(self): - self._client.cleanup() # This removes the lingering PAExec binary + try: + # This removes any lingering PAExec binaries + self._client.cleanup() + except CannotDelete as exc: + # We shouldn't hard crash here, so just log the error + log.debug("Exception cleaning up PAexec: %r", exc) return self._client.disconnect() def create_service(self): diff --git a/tests/pytests/unit/utils/test_cloud.py b/tests/pytests/unit/utils/test_cloud.py index ea55af44ef5..0bfe6d28ce6 100644 --- a/tests/pytests/unit/utils/test_cloud.py +++ 
b/tests/pytests/unit/utils/test_cloud.py @@ -13,6 +13,13 @@ import tempfile import pytest +try: + from smbprotocol.exceptions import CannotDelete + + HAS_PSEXEC = True +except ImportError: + HAS_PSEXEC = False + import salt.utils.cloud as cloud from salt.exceptions import SaltCloudException from salt.utils.cloud import __ssh_gateway_arguments as ssh_gateway_arguments @@ -208,7 +215,8 @@ def test_deploy_windows_custom_port(): mock.assert_called_once_with("test", "Administrator", None, 1234) -def test_run_psexec_command_cleanup_lingering_paexec(): +@pytest.mark.skipif(not HAS_PSEXEC, reason="Missing SMB Protocol Library") +def test_run_psexec_command_cleanup_lingering_paexec(caplog): pytest.importorskip("pypsexec.client", reason="Requires PyPsExec") mock_psexec = patch("salt.utils.cloud.PsExecClient", autospec=True) mock_scmr = patch("salt.utils.cloud.ScmrService", autospec=True) @@ -232,11 +240,32 @@ def test_run_psexec_command_cleanup_lingering_paexec(): ) mock_client.return_value.cleanup.assert_called_once() + # Testing handling an error when it can't delete the PAexec binary + with mock_scmr, mock_rm_svc, mock_psexec as mock_client: + mock_client.return_value.session = MagicMock(username="Gary") + mock_client.return_value.connection = MagicMock(server_name="Krabbs") + mock_client.return_value.run_executable.return_value = ( + "Sandy", + "MermaidMan", + "BarnicleBoy", + ) + mock_client.return_value.cleanup = MagicMock(side_effect=CannotDelete()) + + cloud.run_psexec_command( + "spongebob", + "squarepants", + "patrick", + "squidward", + "plankton", + ) + assert "Exception cleaning up PAexec:" in caplog.text + mock_client.return_value.disconnect.assert_called_once() + @pytest.mark.skip_unless_on_windows(reason="Only applicable for Windows.") def test_deploy_windows_programdata(): """ - Test deploy_windows with a custom port + Test deploy_windows to ProgramData """ mock_true = MagicMock(return_value=True) mock_tuple = MagicMock(return_value=(0, 0, 0)) From 
1c715ecf40359687fde0e19c66a9dd83c1695447 Mon Sep 17 00:00:00 2001 From: Sander Cornelissen <5145555+sanderc85@users.noreply.github.com> Date: Thu, 26 Oct 2023 09:43:46 +0200 Subject: [PATCH 153/196] Fix for pip state when user doesn't exist (cherry picked from commit ee3d8924ac4848bc6085a3767245bd30d5f20e0e) --- salt/states/pip_state.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/salt/states/pip_state.py b/salt/states/pip_state.py index 39c13acb786..9430ea457ce 100644 --- a/salt/states/pip_state.py +++ b/salt/states/pip_state.py @@ -820,6 +820,13 @@ def installed( ret["comment"] = "\n".join(comments) return ret + # If the user does not exist, stop here with error: + if user and "user.info" in __salt__ and not __salt__["user.info"](user): + # The user does not exists, exit with result set to False + ret["result"] = False + ret["comment"] = f"User {user} does not exist" + return ret + # If a requirements file is specified, only install the contents of the # requirements file. Similarly, using the --editable flag with pip should # also ignore the "name" and "pkgs" parameters. 
From 018f3260ffd4e935bebb830bc524ef10142c5106 Mon Sep 17 00:00:00 2001 From: Sander Cornelissen <5145555+sanderc85@users.noreply.github.com> Date: Thu, 26 Oct 2023 09:44:03 +0200 Subject: [PATCH 154/196] Add test for fix when user does not exists on pip (cherry picked from commit f36b821e1af3efe65d14f97a1e4e0bdd99c477a8) --- tests/unit/states/test_pip_state.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/tests/unit/states/test_pip_state.py b/tests/unit/states/test_pip_state.py index 5e4b6e0af14..98942d58b95 100644 --- a/tests/unit/states/test_pip_state.py +++ b/tests/unit/states/test_pip_state.py @@ -379,6 +379,24 @@ class PipStateTest(TestCase, SaltReturnAssertsMixin, LoaderModuleMockMixin): self.assertSaltTrueReturn({"test": ret}) self.assertInSaltComment("successfully installed", {"test": ret}) + def test_install_with_specified_user(self): + """ + Check that if `user` parameter is set and the user does not exists + it will fail with an error, see #65458 + """ + user_info = MagicMock(return_value={}) + pip_version = MagicMock(return_value="10.0.1") + with patch.dict( + pip_state.__salt__, + { + "user.info": user_info, + "pip.version": pip_version, + }, + ): + ret = pip_state.installed("mypkg", user="fred") + self.assertSaltFalseReturn({"test": ret}) + self.assertInSaltComment("User fred does not exist", {"test": ret}) + class PipStateUtilsTest(TestCase): def test_has_internal_exceptions_mod_function(self): From 2ebdfa4b5a2cd53a9c4c9511e23cb9ed2beb503b Mon Sep 17 00:00:00 2001 From: Sander Cornelissen <5145555+sanderc85@users.noreply.github.com> Date: Fri, 27 Oct 2023 15:03:58 +0200 Subject: [PATCH 155/196] Add changelog for #65458 (cherry picked from commit 15cef7dbab279abe2f532b278baee2654fef67f8) --- changelog/65458.fixed.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog/65458.fixed.md diff --git a/changelog/65458.fixed.md b/changelog/65458.fixed.md new file mode 100644 index 00000000000..61cc57df9ca --- /dev/null +++ 
b/changelog/65458.fixed.md @@ -0,0 +1 @@ +pip.installed state will now properly fail when a specified user does not exists From 86fab4e35839c65bb6d36416df73384584054db8 Mon Sep 17 00:00:00 2001 From: Sander Cornelissen <5145555+sanderc85@users.noreply.github.com> Date: Fri, 27 Oct 2023 13:36:41 +0200 Subject: [PATCH 156/196] Fix pylint issues in unit test for pip state (cherry picked from commit a686ce00b240d97410fed9f0d76e74c218da116c) --- tests/unit/states/test_pip_state.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/unit/states/test_pip_state.py b/tests/unit/states/test_pip_state.py index 98942d58b95..dbac7b926e6 100644 --- a/tests/unit/states/test_pip_state.py +++ b/tests/unit/states/test_pip_state.py @@ -432,7 +432,7 @@ class PipStateInstallationErrorTest(TestCase): extra_requirements = [] for name, version in salt.version.dependency_information(): if name in ["PyYAML", "packaging", "looseversion"]: - extra_requirements.append("{}=={}".format(name, version)) + extra_requirements.append(f"{name}=={version}") failures = {} pip_version_requirements = [ # Latest pip 18 @@ -471,7 +471,7 @@ class PipStateInstallationErrorTest(TestCase): with VirtualEnv() as venv: venv.install(*extra_requirements) if requirement: - venv.install("pip{}".format(requirement)) + venv.install(f"pip{requirement}") try: subprocess.check_output([venv.venv_python, "-c", code]) except subprocess.CalledProcessError as exc: From e84c0473293da7862fa2f015fe695ea49919de96 Mon Sep 17 00:00:00 2001 From: "Ryan Addessi (raddessi)" Date: Fri, 16 Jun 2023 23:01:35 -0600 Subject: [PATCH 157/196] fix: file.directory state children_only kwarg did not work --- salt/states/file.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/salt/states/file.py b/salt/states/file.py index 9fce51867b9..c78c5c24ab5 100644 --- a/salt/states/file.py +++ b/salt/states/file.py @@ -721,6 +721,7 @@ def _check_directory( exclude_pat=None, max_depth=None, 
follow_symlinks=False, + children_only=False, ): """ Check what changes need to be made on a directory @@ -792,10 +793,12 @@ def _check_directory( ) if fchange: changes[path] = fchange - # Recurse skips root (we always do dirs, not root), so always check root: - fchange = _check_dir_meta(name, user, group, dir_mode, follow_symlinks) - if fchange: - changes[name] = fchange + # Recurse skips root (we always do dirs, not root), so check root unless + # children_only is specified: + if not children_only: + fchange = _check_dir_meta(name, user, group, dir_mode, follow_symlinks) + if fchange: + changes[name] = fchange if clean: keep = _gen_keep_files(name, require, walk_d) @@ -3954,6 +3957,7 @@ def directory( exclude_pat, max_depth, follow_symlinks, + children_only, ) if tchanges: From 1913f5dda800fad1da8a01987b9b342f99ba4e42 Mon Sep 17 00:00:00 2001 From: "Ryan Addessi (raddessi)" Date: Mon, 19 Jun 2023 18:08:47 -0600 Subject: [PATCH 158/196] add a test --- .../functional/states/file/test_directory.py | 28 +++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/tests/pytests/functional/states/file/test_directory.py b/tests/pytests/functional/states/file/test_directory.py index bb56f5416f2..d5d6fa731ca 100644 --- a/tests/pytests/functional/states/file/test_directory.py +++ b/tests/pytests/functional/states/file/test_directory.py @@ -113,6 +113,34 @@ def test_directory_max_depth(file, tmp_path): assert _mode == _get_oct_mode(untouched_dir) +def test_directory_children_only(file, tmp_path): + """ + file.directory with children_only=True + """ + name = tmp_path / "directory_children_only_dir" + name.mkdir(0o0700) + + strayfile = name / "strayfile" + strayfile.touch() + os.chmod(strayfile, 0o700) + + straydir = name / "straydir" + straydir.mkdir(0o0700) + + # none of the children nor parent are currently set to the correct mode + ret = file.directory( + name=str(name), + file_mode="0644", + dir_mode="0755", + recurse=["mode"], + children_only=True, + ) + assert 
ret.result is True + assert name.stat().st_mode is 0o0700 + assert strayfile.stat().st_mode is 0o0644 + assert straydir.stat().st_mode is 0o0755 + + def test_directory_clean(file, tmp_path): """ file.directory with clean=True From e0a91fc67c5d7287ccb3e38e6ac00aab2fbdd6cf Mon Sep 17 00:00:00 2001 From: "Ryan Addessi (raddessi)" Date: Mon, 19 Jun 2023 18:13:23 -0600 Subject: [PATCH 159/196] added changelod --- changelog/64497.fixed.md | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 changelog/64497.fixed.md diff --git a/changelog/64497.fixed.md b/changelog/64497.fixed.md new file mode 100644 index 00000000000..4dacc84e5d6 --- /dev/null +++ b/changelog/64497.fixed.md @@ -0,0 +1,2 @@ +Fixed an issue in the ``file.directory`` state where the ``children_only`` keyword +argument was not being respected. \ No newline at end of file From 7d0707604008907907773b76f632796430d4ade5 Mon Sep 17 00:00:00 2001 From: "Ryan Addessi (raddessi)" Date: Mon, 19 Jun 2023 18:25:25 -0600 Subject: [PATCH 160/196] precommit --- changelog/64497.fixed.md | 2 +- tests/pytests/functional/states/file/test_directory.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/changelog/64497.fixed.md b/changelog/64497.fixed.md index 4dacc84e5d6..2d90737562d 100644 --- a/changelog/64497.fixed.md +++ b/changelog/64497.fixed.md @@ -1,2 +1,2 @@ Fixed an issue in the ``file.directory`` state where the ``children_only`` keyword -argument was not being respected. \ No newline at end of file +argument was not being respected. 
diff --git a/tests/pytests/functional/states/file/test_directory.py b/tests/pytests/functional/states/file/test_directory.py index d5d6fa731ca..2881b76cafb 100644 --- a/tests/pytests/functional/states/file/test_directory.py +++ b/tests/pytests/functional/states/file/test_directory.py @@ -136,9 +136,9 @@ def test_directory_children_only(file, tmp_path): children_only=True, ) assert ret.result is True - assert name.stat().st_mode is 0o0700 - assert strayfile.stat().st_mode is 0o0644 - assert straydir.stat().st_mode is 0o0755 + assert name.stat().st_mode == 0o0700 + assert strayfile.stat().st_mode == 0o0644 + assert straydir.stat().st_mode == 0o0755 def test_directory_clean(file, tmp_path): From 3069df132b7572af114c03c1234387b5ef6629e9 Mon Sep 17 00:00:00 2001 From: Ryan Addessi Date: Tue, 26 Sep 2023 16:33:09 -0600 Subject: [PATCH 161/196] & 0o7777 --- tests/pytests/functional/states/file/test_directory.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/pytests/functional/states/file/test_directory.py b/tests/pytests/functional/states/file/test_directory.py index 2881b76cafb..bc76f34fdad 100644 --- a/tests/pytests/functional/states/file/test_directory.py +++ b/tests/pytests/functional/states/file/test_directory.py @@ -136,9 +136,9 @@ def test_directory_children_only(file, tmp_path): children_only=True, ) assert ret.result is True - assert name.stat().st_mode == 0o0700 - assert strayfile.stat().st_mode == 0o0644 - assert straydir.stat().st_mode == 0o0755 + assert name.stat().st_mode & 0o7777 == 0o0700 + assert strayfile.stat().st_mode & 0o7777 == 0o0644 + assert straydir.stat().st_mode & 0o7777 == 0o0755 def test_directory_clean(file, tmp_path): From 5eb5d57ea0ecc1709927c33b570f79f58975daee Mon Sep 17 00:00:00 2001 From: Ryan Addessi Date: Tue, 26 Sep 2023 23:09:35 -0600 Subject: [PATCH 162/196] trying another method --- tests/pytests/functional/states/file/test_directory.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff 
--git a/tests/pytests/functional/states/file/test_directory.py b/tests/pytests/functional/states/file/test_directory.py index bc76f34fdad..2fb5666199a 100644 --- a/tests/pytests/functional/states/file/test_directory.py +++ b/tests/pytests/functional/states/file/test_directory.py @@ -136,9 +136,9 @@ def test_directory_children_only(file, tmp_path): children_only=True, ) assert ret.result is True - assert name.stat().st_mode & 0o7777 == 0o0700 - assert strayfile.stat().st_mode & 0o7777 == 0o0644 - assert straydir.stat().st_mode & 0o7777 == 0o0755 + assert oct(name.stat().st_mode)[-3:] == "700" + assert oct(strayfile.stat().st_mode)[-3:] == "644" + assert oct(straydir.stat().st_mode)[-3:] == "755" def test_directory_clean(file, tmp_path): From 7bc016f9fdad82a64bb4907beead1563f1b6ea81 Mon Sep 17 00:00:00 2001 From: Ryan Addessi Date: Wed, 27 Sep 2023 00:07:25 -0600 Subject: [PATCH 163/196] skip on windows --- .../functional/states/file/test_directory.py | 21 ++++++++++++++++--- 1 file changed, 18 insertions(+), 3 deletions(-) diff --git a/tests/pytests/functional/states/file/test_directory.py b/tests/pytests/functional/states/file/test_directory.py index 2fb5666199a..b6752125ddd 100644 --- a/tests/pytests/functional/states/file/test_directory.py +++ b/tests/pytests/functional/states/file/test_directory.py @@ -117,6 +117,9 @@ def test_directory_children_only(file, tmp_path): """ file.directory with children_only=True """ + if IS_WINDOWS: + pytest.skip("Skipped on windows") + name = tmp_path / "directory_children_only_dir" name.mkdir(0o0700) @@ -136,9 +139,21 @@ def test_directory_children_only(file, tmp_path): children_only=True, ) assert ret.result is True - assert oct(name.stat().st_mode)[-3:] == "700" - assert oct(strayfile.stat().st_mode)[-3:] == "644" - assert oct(straydir.stat().st_mode)[-3:] == "755" + + # Assert parent directory's mode remains unchanged + assert oct(name.stat().st_mode)[-3:] == "700", ( + f"Expected mode 700 for {name}, got 
{oct(name.stat().st_mode)[-3:]}" + ) + + # Assert child file's mode is changed + assert oct(strayfile.stat().st_mode)[-3:] == "644", ( + f"Expected mode 644 for {strayfile}, got {oct(strayfile.stat().st_mode)[-3:]}" + ) + + # Assert child directory's mode is changed + assert oct(straydir.stat().st_mode)[-3:] == "755", ( + f"Expected mode 755 for {straydir}, got {oct(straydir.stat().st_mode)[-3:]}" + ) def test_directory_clean(file, tmp_path): From 12ff4bf963869bd018078a0a0d2afc031fed83a9 Mon Sep 17 00:00:00 2001 From: Ryan Addessi Date: Wed, 27 Sep 2023 00:15:28 -0600 Subject: [PATCH 164/196] black --- .../functional/states/file/test_directory.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/tests/pytests/functional/states/file/test_directory.py b/tests/pytests/functional/states/file/test_directory.py index b6752125ddd..31c88aced4a 100644 --- a/tests/pytests/functional/states/file/test_directory.py +++ b/tests/pytests/functional/states/file/test_directory.py @@ -141,19 +141,19 @@ def test_directory_children_only(file, tmp_path): assert ret.result is True # Assert parent directory's mode remains unchanged - assert oct(name.stat().st_mode)[-3:] == "700", ( - f"Expected mode 700 for {name}, got {oct(name.stat().st_mode)[-3:]}" - ) + assert ( + oct(name.stat().st_mode)[-3:] == "700" + ), f"Expected mode 700 for {name}, got {oct(name.stat().st_mode)[-3:]}" # Assert child file's mode is changed - assert oct(strayfile.stat().st_mode)[-3:] == "644", ( - f"Expected mode 644 for {strayfile}, got {oct(strayfile.stat().st_mode)[-3:]}" - ) + assert ( + oct(strayfile.stat().st_mode)[-3:] == "644" + ), f"Expected mode 644 for {strayfile}, got {oct(strayfile.stat().st_mode)[-3:]}" # Assert child directory's mode is changed - assert oct(straydir.stat().st_mode)[-3:] == "755", ( - f"Expected mode 755 for {straydir}, got {oct(straydir.stat().st_mode)[-3:]}" - ) + assert ( + oct(straydir.stat().st_mode)[-3:] == "755" + ), f"Expected mode 755 for 
{straydir}, got {oct(straydir.stat().st_mode)[-3:]}" def test_directory_clean(file, tmp_path): From b06444317f3b0d8908f302af2b8fc0d8c3d00cfa Mon Sep 17 00:00:00 2001 From: Ryan Addessi Date: Tue, 21 Nov 2023 12:39:37 -0700 Subject: [PATCH 165/196] Update tests/pytests/functional/states/file/test_directory.py lgtm, ty Co-authored-by: Pedro Algarvio --- tests/pytests/functional/states/file/test_directory.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/pytests/functional/states/file/test_directory.py b/tests/pytests/functional/states/file/test_directory.py index 31c88aced4a..82a3f7f154c 100644 --- a/tests/pytests/functional/states/file/test_directory.py +++ b/tests/pytests/functional/states/file/test_directory.py @@ -113,12 +113,11 @@ def test_directory_max_depth(file, tmp_path): assert _mode == _get_oct_mode(untouched_dir) +@pytest.mark.skip_on_windows def test_directory_children_only(file, tmp_path): """ file.directory with children_only=True """ - if IS_WINDOWS: - pytest.skip("Skipped on windows") name = tmp_path / "directory_children_only_dir" name.mkdir(0o0700) From dcc9976d9b9fd7582394593adb6dc70647995679 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Tue, 14 Nov 2023 17:07:58 -0700 Subject: [PATCH 166/196] Warn on un-closed tranport clients --- salt/transport/base.py | 29 +++++++++++++++++++++++++---- salt/transport/tcp.py | 12 ++++++------ salt/transport/zeromq.py | 18 ++++++++++-------- 3 files changed, 41 insertions(+), 18 deletions(-) diff --git a/salt/transport/base.py b/salt/transport/base.py index 014a9731d59..30c57fb9f97 100644 --- a/salt/transport/base.py +++ b/salt/transport/base.py @@ -1,3 +1,6 @@ +import traceback +import warnings + import salt.ext.tornado.gen TRANSPORTS = ( @@ -94,14 +97,32 @@ def publish_client(opts, io_loop): raise Exception("Transport type not found: {}".format(ttype)) -class RequestClient: +class Transport: + def __init__(self, *args, **kwargs): + self._trace = "\n".join(traceback.format_stack()[:-1]) + if not hasattr(self, "_closing"): + self._closing = False + + # pylint: disable=W1701 + def __del__(self): + if not self._closing: + warnings.warn( + f"Unclosed transport {self!r} \n{self._trace}", + ResourceWarning, + source=self, + ) + + # pylint: enable=W1701 + + +class RequestClient(Transport): """ The RequestClient transport is used to make requests and get corresponding replies from the RequestServer. """ def __init__(self, opts, io_loop, **kwargs): - pass + super().__init__() @salt.ext.tornado.gen.coroutine def send(self, load, timeout=60): @@ -197,13 +218,13 @@ class DaemonizedPublishServer(PublishServer): raise NotImplementedError -class PublishClient: +class PublishClient(Transport): """ The PublishClient receives messages from the PublishServer and runs a callback. 
""" def __init__(self, opts, io_loop, **kwargs): - pass + super().__init__() def on_recv(self, callback): """ diff --git a/salt/transport/tcp.py b/salt/transport/tcp.py index 81454d0eab5..94912c89497 100644 --- a/salt/transport/tcp.py +++ b/salt/transport/tcp.py @@ -213,6 +213,7 @@ class TCPPubClient(salt.transport.base.PublishClient): ttype = "tcp" def __init__(self, opts, io_loop, **kwargs): # pylint: disable=W0231 + super().__init__(opts, io_loop, **kwargs) self.opts = opts self.io_loop = io_loop self.message_client = None @@ -228,12 +229,6 @@ class TCPPubClient(salt.transport.base.PublishClient): self.message_client.close() self.message_client = None - # pylint: disable=W1701 - def __del__(self): - self.close() - - # pylint: enable=W1701 - @salt.ext.tornado.gen.coroutine def connect(self, publish_port, connect_callback=None, disconnect_callback=None): self.publish_port = publish_port @@ -1038,6 +1033,7 @@ class TCPReqClient(salt.transport.base.RequestClient): ttype = "tcp" def __init__(self, opts, io_loop, **kwargs): # pylint: disable=W0231 + super().__init__(opts, io_loop, **kwargs) self.opts = opts self.io_loop = io_loop parse = urllib.parse.urlparse(self.opts["master_uri"]) @@ -1054,6 +1050,7 @@ class TCPReqClient(salt.transport.base.RequestClient): source_ip=opts.get("source_ip"), source_port=opts.get("source_ret_port"), ) + self._closing = False @salt.ext.tornado.gen.coroutine def connect(self): @@ -1065,4 +1062,7 @@ class TCPReqClient(salt.transport.base.RequestClient): raise salt.ext.tornado.gen.Return(ret) def close(self): + if self._closing: + return + self._closing = True self.message_client.close() diff --git a/salt/transport/zeromq.py b/salt/transport/zeromq.py index 54b8bf47ba7..12454216c24 100644 --- a/salt/transport/zeromq.py +++ b/salt/transport/zeromq.py @@ -529,14 +529,8 @@ class AsyncReqMessageClient: # wire up sockets self._init_socket() - # TODO: timeout all in-flight sessions, or error def close(self): - try: - if self._closing: - return - 
except AttributeError: - # We must have been called from __del__ - # The python interpreter has nuked most attributes already + if self._closing: return else: self._closing = True @@ -661,7 +655,10 @@ class ZeroMQSocketMonitor: def stop(self): if self._socket is None: return - self._socket.disable_monitor() + try: + self._socket.disable_monitor() + except zmq.Error: + pass self._socket = None self._monitor_socket = None if self._monitor_stream is not None: @@ -880,6 +877,7 @@ class RequestClient(salt.transport.base.RequestClient): ttype = "zeromq" def __init__(self, opts, io_loop): # pylint: disable=W0231 + super().__init__(opts, io_loop) self.opts = opts master_uri = self.get_master_uri(opts) self.message_client = AsyncReqMessageClient( @@ -887,6 +885,7 @@ class RequestClient(salt.transport.base.RequestClient): master_uri, io_loop=io_loop, ) + self._closing = False def connect(self): self.message_client.connect() @@ -898,6 +897,9 @@ class RequestClient(salt.transport.base.RequestClient): raise salt.ext.tornado.gen.Return(ret) def close(self): + if self._closing: + return + self._closing = True self.message_client.close() @staticmethod From d85644015cf1a461b3e77904ac617e64ca5ec5c1 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Tue, 14 Nov 2023 18:44:59 -0700 Subject: [PATCH 167/196] Only warn when connect was called --- salt/transport/base.py | 30 +++++++++++++++++---- salt/transport/tcp.py | 2 ++ salt/transport/zeromq.py | 9 +++++-- tests/pytests/unit/transport/test_base.py | 21 +++++++++++++++ tests/pytests/unit/transport/test_zeromq.py | 28 +++++++++++++++++++ 5 files changed, 83 insertions(+), 7 deletions(-) create mode 100644 tests/pytests/unit/transport/test_base.py diff --git a/salt/transport/base.py b/salt/transport/base.py index 30c57fb9f97..6fa6a5fee5d 100644 --- a/salt/transport/base.py +++ b/salt/transport/base.py @@ -97,18 +97,38 @@ def publish_client(opts, io_loop): raise Exception("Transport type not found: {}".format(ttype)) +class TransportWarning(Warning): + """ + Transport warning. + """ + + class Transport: def __init__(self, *args, **kwargs): self._trace = "\n".join(traceback.format_stack()[:-1]) if not hasattr(self, "_closing"): self._closing = False + if not hasattr(self, "_connect_called"): + self._connect_called = False + + def connect(self, *args, **kwargs): + self._connect_called = True # pylint: disable=W1701 def __del__(self): - if not self._closing: + """ + Warn the user if the transport's close method was never called. + + If the _closing attribute is missing we won't raise a warning. This + prevents issues when class's dunder init method is called with improper + arguments, and is later getting garbage collected. Users of this class + should take care to call super() and validate the functionality with a + test. + """ + if getattr(self, "_connect_called") and not getattr(self, "_closing", True): warnings.warn( - f"Unclosed transport {self!r} \n{self._trace}", - ResourceWarning, + f"Unclosed transport! 
{self!r} \n{self._trace}", + TransportWarning, source=self, ) @@ -137,7 +157,7 @@ class RequestClient(Transport): """ raise NotImplementedError - def connect(self): + def connect(self): # pylint: disable=W0221 """ Connect to the server / broker. """ @@ -233,7 +253,7 @@ class PublishClient(Transport): raise NotImplementedError @salt.ext.tornado.gen.coroutine - def connect(self, publish_port, connect_callback=None, disconnect_callback=None): + def connect(self, publish_port, connect_callback=None, disconnect_callback=None): # pylint: disable=W0221 """ Create a network connection to the the PublishServer or broker. """ diff --git a/salt/transport/tcp.py b/salt/transport/tcp.py index 94912c89497..2c3b5644fe6 100644 --- a/salt/transport/tcp.py +++ b/salt/transport/tcp.py @@ -231,6 +231,7 @@ class TCPPubClient(salt.transport.base.PublishClient): @salt.ext.tornado.gen.coroutine def connect(self, publish_port, connect_callback=None, disconnect_callback=None): + self._connect_called = True self.publish_port = publish_port self.message_client = MessageClient( self.opts, @@ -1054,6 +1055,7 @@ class TCPReqClient(salt.transport.base.RequestClient): @salt.ext.tornado.gen.coroutine def connect(self): + self._connect_called = True yield self.message_client.connect() @salt.ext.tornado.gen.coroutine diff --git a/salt/transport/zeromq.py b/salt/transport/zeromq.py index 12454216c24..e166d346926 100644 --- a/salt/transport/zeromq.py +++ b/salt/transport/zeromq.py @@ -207,6 +207,7 @@ class PublishClient(salt.transport.base.PublishClient): # TODO: this is the time to see if we are connected, maybe use the req channel to guess? 
@salt.ext.tornado.gen.coroutine def connect(self, publish_port, connect_callback=None, disconnect_callback=None): + self._connect_called = True self.publish_port = publish_port log.debug( "Connecting the Minion to the Master publish port, using the URI: %s", @@ -214,7 +215,8 @@ class PublishClient(salt.transport.base.PublishClient): ) log.debug("%r connecting to %s", self, self.master_pub) self._socket.connect(self.master_pub) - connect_callback(True) + if connect_callback is not None: + connect_callback(True) @property def master_pub(self): @@ -886,13 +888,16 @@ class RequestClient(salt.transport.base.RequestClient): io_loop=io_loop, ) self._closing = False + self._connect_called = False + @salt.ext.tornado.gen.coroutine def connect(self): + self._connect_called = True self.message_client.connect() @salt.ext.tornado.gen.coroutine def send(self, load, timeout=60): - self.connect() + yield self.connect() ret = yield self.message_client.send(load, timeout=timeout) raise salt.ext.tornado.gen.Return(ret) diff --git a/tests/pytests/unit/transport/test_base.py b/tests/pytests/unit/transport/test_base.py new file mode 100644 index 00000000000..da5a6fa2615 --- /dev/null +++ b/tests/pytests/unit/transport/test_base.py @@ -0,0 +1,21 @@ +""" +Unit tests for salt.transport.base. 
+""" +import pytest + +import salt.transport.base + +pytestmark = [ + pytest.mark.core_test, +] + + +def test_unclosed_warning(): + + transport = salt.transport.base.Transport() + assert transport._closing is False + assert transport._connect_called is False + transport.connect() + assert transport._connect_called is True + with pytest.warns(salt.transport.base.TransportWarning): + del transport diff --git a/tests/pytests/unit/transport/test_zeromq.py b/tests/pytests/unit/transport/test_zeromq.py index 2bad5f9ae5f..61f4aaf3f84 100644 --- a/tests/pytests/unit/transport/test_zeromq.py +++ b/tests/pytests/unit/transport/test_zeromq.py @@ -1498,3 +1498,31 @@ def test_pub_client_init(minion_opts, io_loop): client = salt.transport.zeromq.PublishClient(minion_opts, io_loop) client.send(b"asf") client.close() + + +async def test_unclosed_request_client(minion_opts, io_loop): + minion_opts["master_uri"] = "tcp://127.0.0.1:4506" + client = salt.transport.zeromq.RequestClient(minion_opts, io_loop) + await client.connect() + try: + assert client._closing is False + with pytest.warns(salt.transport.base.TransportWarning): + client.__del__() + finally: + client.close() + + +async def test_unclosed_publish_client(minion_opts, io_loop): + minion_opts["id"] = "minion" + minion_opts["__role"] = "minion" + minion_opts["master_ip"] = "127.0.0.1" + minion_opts["zmq_filtering"] = True + minion_opts["zmq_monitor"] = True + client = salt.transport.zeromq.PublishClient(minion_opts, io_loop) + await client.connect(2121) + try: + assert client._closing is False + with pytest.warns(salt.transport.base.TransportWarning): + client.__del__() + finally: + client.close() From b6acb1bc3e4a6ddbfb181c8acabf0aaff6a420b8 Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Mon, 20 Nov 2023 14:14:55 -0700 Subject: [PATCH 168/196] Add changelog for un-closed transport warnings --- changelog/65554.fixed.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog/65554.fixed.md diff --git a/changelog/65554.fixed.md b/changelog/65554.fixed.md new file mode 100644 index 00000000000..6d1598217e3 --- /dev/null +++ b/changelog/65554.fixed.md @@ -0,0 +1 @@ +Warn when an un-closed transport client is being garbage collected. From 09b869dd112f42b5639b824dd0c408f2fa3089c7 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Tue, 21 Nov 2023 12:38:07 +0000 Subject: [PATCH 169/196] Address formatting and lint issue Signed-off-by: Pedro Algarvio --- salt/transport/base.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/salt/transport/base.py b/salt/transport/base.py index 6fa6a5fee5d..2e4f68e4cc0 100644 --- a/salt/transport/base.py +++ b/salt/transport/base.py @@ -253,7 +253,9 @@ class PublishClient(Transport): raise NotImplementedError @salt.ext.tornado.gen.coroutine - def connect(self, publish_port, connect_callback=None, disconnect_callback=None): # pylint: disable=W0221 + def connect( # pylint: disable=arguments-differ + self, publish_port, connect_callback=None, disconnect_callback=None + ): """ Create a network connection to the the PublishServer or broker. """ From af12352cba4a4ecd3859addbe21ff7169546fc9c Mon Sep 17 00:00:00 2001 From: "Daniel A. 
Wozniak" Date: Tue, 21 Nov 2023 14:34:46 -0700 Subject: [PATCH 170/196] Close pub channel returned by eval_master coroutine --- salt/minion.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/salt/minion.py b/salt/minion.py index 08204be815b..29afda23504 100644 --- a/salt/minion.py +++ b/salt/minion.py @@ -953,7 +953,18 @@ class SMinion(MinionBase): "use_master_when_local", False ): io_loop = salt.ext.tornado.ioloop.IOLoop.current() - io_loop.run_sync(lambda: self.eval_master(self.opts, failed=True)) + + @salt.ext.tornado.gen.coroutine + def eval_master(): + """ + Wrap eval master in order to close the returned publish channel. + """ + master, pub_channel = yield self.eval_master(self.opts, failed=True) + pub_channel.close() + + io_loop.run_sync( + lambda: eval_master() # pylint: disable=unnecessary-lambda + ) self.gen_modules(initial_load=True, context=context) # If configured, cache pillar data on the minion From 5f5651f454e9aa604c50c7ac36a08bb430721262 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 22 Nov 2023 16:55:48 +0000 Subject: [PATCH 171/196] Upgrade to `aiohttp>=3.8.6` due to https://github.com/advisories/GHSA-gfw2-4jvh-wgfg Signed-off-by: Pedro Algarvio --- requirements/static/ci/py3.10/cloud.txt | 3 +-- requirements/static/ci/py3.10/darwin.txt | 3 +-- requirements/static/ci/py3.10/freebsd.txt | 3 +-- requirements/static/ci/py3.10/lint.txt | 3 +-- requirements/static/ci/py3.10/linux.txt | 3 +-- requirements/static/ci/py3.10/windows.txt | 3 +-- requirements/static/ci/py3.7/cloud.txt | 2 +- requirements/static/ci/py3.7/freebsd.txt | 2 +- requirements/static/ci/py3.7/lint.txt | 2 +- requirements/static/ci/py3.7/linux.txt | 2 +- requirements/static/ci/py3.7/windows.txt | 2 +- requirements/static/ci/py3.8/cloud.txt | 3 +-- requirements/static/ci/py3.8/freebsd.txt | 3 +-- requirements/static/ci/py3.8/lint.txt | 3 +-- requirements/static/ci/py3.8/linux.txt | 3 +-- requirements/static/ci/py3.8/windows.txt | 3 +-- 
requirements/static/ci/py3.9/cloud.txt | 3 +-- requirements/static/ci/py3.9/darwin.txt | 3 +-- requirements/static/ci/py3.9/freebsd.txt | 3 +-- requirements/static/ci/py3.9/lint.txt | 3 +-- requirements/static/ci/py3.9/linux.txt | 3 +-- requirements/static/ci/py3.9/windows.txt | 3 +-- 22 files changed, 22 insertions(+), 39 deletions(-) diff --git a/requirements/static/ci/py3.10/cloud.txt b/requirements/static/ci/py3.10/cloud.txt index 3e1dd36839f..55c1479cf3f 100644 --- a/requirements/static/ci/py3.10/cloud.txt +++ b/requirements/static/ci/py3.10/cloud.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.10/cloud.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/cloud.in requirements/static/ci/common.in requirements/static/pkg/linux.in requirements/zeromq.txt # -aiohttp==3.8.5 +aiohttp==3.9.0 # via # -c requirements/static/ci/py3.10/linux.txt # etcd3-py @@ -91,7 +91,6 @@ charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt # -c requirements/static/ci/py3.10/linux.txt - # aiohttp # requests cheetah3==3.2.6.post2 # via diff --git a/requirements/static/ci/py3.10/darwin.txt b/requirements/static/ci/py3.10/darwin.txt index c04956bc6f9..5e0b7277879 100644 --- a/requirements/static/ci/py3.10/darwin.txt +++ b/requirements/static/ci/py3.10/darwin.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.10/darwin.txt requirements/darwin.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/darwin.in requirements/static/pkg/darwin.in # -aiohttp==3.8.5 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.2.0 # via aiohttp @@ -68,7 +68,6 @@ cffi==1.14.6 charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.10/darwin.txt - # aiohttp # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.10/freebsd.txt b/requirements/static/ci/py3.10/freebsd.txt 
index 8ddf4a5876e..2caa3f55787 100644 --- a/requirements/static/ci/py3.10/freebsd.txt +++ b/requirements/static/ci/py3.10/freebsd.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.10/freebsd.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/freebsd.in requirements/static/pkg/freebsd.in requirements/zeromq.txt # -aiohttp==3.8.5 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.2.0 # via aiohttp @@ -65,7 +65,6 @@ cffi==1.14.6 charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.10/freebsd.txt - # aiohttp # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.10/lint.txt b/requirements/static/ci/py3.10/lint.txt index b337952b9cf..535dd16d192 100644 --- a/requirements/static/ci/py3.10/lint.txt +++ b/requirements/static/ci/py3.10/lint.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.10/lint.txt requirements/base.txt requirements/static/ci/common.in requirements/static/ci/lint.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt # -aiohttp==3.8.5 +aiohttp==3.9.0 # via # -c requirements/static/ci/py3.10/linux.txt # etcd3-py @@ -102,7 +102,6 @@ charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt # -c requirements/static/ci/py3.10/linux.txt - # aiohttp # requests cheetah3==3.2.6.post2 # via diff --git a/requirements/static/ci/py3.10/linux.txt b/requirements/static/ci/py3.10/linux.txt index 2d232d5ff55..8b70902a83d 100644 --- a/requirements/static/ci/py3.10/linux.txt +++ b/requirements/static/ci/py3.10/linux.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.10/linux.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt # 
-aiohttp==3.8.5 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.2.0 # via aiohttp @@ -74,7 +74,6 @@ cffi==1.14.6 charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt - # aiohttp # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.10/windows.txt b/requirements/static/ci/py3.10/windows.txt index 7a7063de6a2..65f3feaa099 100644 --- a/requirements/static/ci/py3.10/windows.txt +++ b/requirements/static/ci/py3.10/windows.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.10/windows.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/windows.in requirements/static/pkg/windows.in requirements/windows.txt # -aiohttp==3.8.5 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.3.1 # via aiohttp @@ -55,7 +55,6 @@ cffi==1.14.6 charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.10/windows.txt - # aiohttp # requests cheetah3==3.2.6.post1 # via -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.7/cloud.txt b/requirements/static/ci/py3.7/cloud.txt index 1c279ef2051..abc60cb0cd3 100644 --- a/requirements/static/ci/py3.7/cloud.txt +++ b/requirements/static/ci/py3.7/cloud.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.7/cloud.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/cloud.in requirements/static/ci/common.in requirements/static/pkg/linux.in requirements/zeromq.txt # -aiohttp==3.8.5 +aiohttp==3.8.6 # via # -c requirements/static/ci/py3.7/linux.txt # etcd3-py diff --git a/requirements/static/ci/py3.7/freebsd.txt b/requirements/static/ci/py3.7/freebsd.txt index 92532a8783d..691ca070cd1 100644 --- a/requirements/static/ci/py3.7/freebsd.txt +++ b/requirements/static/ci/py3.7/freebsd.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.7/freebsd.txt 
requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/freebsd.in requirements/static/pkg/freebsd.in requirements/zeromq.txt # -aiohttp==3.8.5 +aiohttp==3.8.6 # via etcd3-py aiosignal==1.2.0 # via aiohttp diff --git a/requirements/static/ci/py3.7/lint.txt b/requirements/static/ci/py3.7/lint.txt index 489c4cd9403..0d65dc1135b 100644 --- a/requirements/static/ci/py3.7/lint.txt +++ b/requirements/static/ci/py3.7/lint.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.7/lint.txt requirements/base.txt requirements/static/ci/common.in requirements/static/ci/lint.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt # -aiohttp==3.8.5 +aiohttp==3.8.6 # via # -c requirements/static/ci/py3.7/linux.txt # etcd3-py diff --git a/requirements/static/ci/py3.7/linux.txt b/requirements/static/ci/py3.7/linux.txt index ab9bc7f22aa..fa6e4a13411 100644 --- a/requirements/static/ci/py3.7/linux.txt +++ b/requirements/static/ci/py3.7/linux.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.7/linux.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt # -aiohttp==3.8.5 +aiohttp==3.8.6 # via etcd3-py aiosignal==1.2.0 # via aiohttp diff --git a/requirements/static/ci/py3.7/windows.txt b/requirements/static/ci/py3.7/windows.txt index e93c1ac92ea..1c42e998471 100644 --- a/requirements/static/ci/py3.7/windows.txt +++ b/requirements/static/ci/py3.7/windows.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.7/windows.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/windows.in requirements/static/pkg/windows.in requirements/windows.txt # -aiohttp==3.8.5 +aiohttp==3.8.6 # via etcd3-py aiosignal==1.2.0 # via aiohttp diff --git 
a/requirements/static/ci/py3.8/cloud.txt b/requirements/static/ci/py3.8/cloud.txt index db414c83501..0234878abc3 100644 --- a/requirements/static/ci/py3.8/cloud.txt +++ b/requirements/static/ci/py3.8/cloud.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.8/cloud.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/cloud.in requirements/static/ci/common.in requirements/static/pkg/linux.in requirements/zeromq.txt # -aiohttp==3.8.5 +aiohttp==3.9.0 # via # -c requirements/static/ci/py3.8/linux.txt # etcd3-py @@ -92,7 +92,6 @@ charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.8/linux.txt # -c requirements/static/ci/py3.8/linux.txt - # aiohttp # requests cheetah3==3.2.6.post2 # via diff --git a/requirements/static/ci/py3.8/freebsd.txt b/requirements/static/ci/py3.8/freebsd.txt index 9e5db0f3934..fdd96b6f0ca 100644 --- a/requirements/static/ci/py3.8/freebsd.txt +++ b/requirements/static/ci/py3.8/freebsd.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.8/freebsd.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/freebsd.in requirements/static/pkg/freebsd.in requirements/zeromq.txt # -aiohttp==3.8.5 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.2.0 # via aiohttp @@ -66,7 +66,6 @@ cffi==1.14.6 charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.8/freebsd.txt - # aiohttp # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.8/lint.txt b/requirements/static/ci/py3.8/lint.txt index 39ab2aa8f2c..94558d08bd2 100644 --- a/requirements/static/ci/py3.8/lint.txt +++ b/requirements/static/ci/py3.8/lint.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.8/lint.txt requirements/base.txt requirements/static/ci/common.in requirements/static/ci/lint.in 
requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt # -aiohttp==3.8.5 +aiohttp==3.9.0 # via # -c requirements/static/ci/py3.8/linux.txt # etcd3-py @@ -107,7 +107,6 @@ charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.8/linux.txt # -c requirements/static/ci/py3.8/linux.txt - # aiohttp # requests cheetah3==3.2.6.post2 # via diff --git a/requirements/static/ci/py3.8/linux.txt b/requirements/static/ci/py3.8/linux.txt index efd0225b11f..304a5afb7b8 100644 --- a/requirements/static/ci/py3.8/linux.txt +++ b/requirements/static/ci/py3.8/linux.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.8/linux.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt # -aiohttp==3.8.5 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.2.0 # via aiohttp @@ -77,7 +77,6 @@ cffi==1.14.6 charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.8/linux.txt - # aiohttp # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.8/windows.txt b/requirements/static/ci/py3.8/windows.txt index 9298ebfdbbe..9bbaf88cdbe 100644 --- a/requirements/static/ci/py3.8/windows.txt +++ b/requirements/static/ci/py3.8/windows.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.8/windows.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/windows.in requirements/static/pkg/windows.in requirements/windows.txt # -aiohttp==3.8.5 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.2.0 # via aiohttp @@ -57,7 +57,6 @@ cffi==1.14.6 charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.8/windows.txt - # aiohttp # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.9/cloud.txt 
b/requirements/static/ci/py3.9/cloud.txt index 583798b82de..138ed879cb1 100644 --- a/requirements/static/ci/py3.9/cloud.txt +++ b/requirements/static/ci/py3.9/cloud.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.9/cloud.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/cloud.in requirements/static/ci/common.in requirements/static/pkg/linux.in requirements/zeromq.txt # -aiohttp==3.8.5 +aiohttp==3.9.0 # via # -c requirements/static/ci/py3.9/linux.txt # etcd3-py @@ -92,7 +92,6 @@ charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.9/linux.txt # -c requirements/static/ci/py3.9/linux.txt - # aiohttp # requests cheetah3==3.2.6.post2 # via diff --git a/requirements/static/ci/py3.9/darwin.txt b/requirements/static/ci/py3.9/darwin.txt index fc08c0ea394..0cd59678c16 100644 --- a/requirements/static/ci/py3.9/darwin.txt +++ b/requirements/static/ci/py3.9/darwin.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.9/darwin.txt requirements/darwin.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/darwin.in requirements/static/pkg/darwin.in # -aiohttp==3.8.5 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.2.0 # via aiohttp @@ -69,7 +69,6 @@ cffi==1.14.6 charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.9/darwin.txt - # aiohttp # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.9/freebsd.txt b/requirements/static/ci/py3.9/freebsd.txt index a961aa757fa..822fbfcfbe9 100644 --- a/requirements/static/ci/py3.9/freebsd.txt +++ b/requirements/static/ci/py3.9/freebsd.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.9/freebsd.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/freebsd.in requirements/static/pkg/freebsd.in 
requirements/zeromq.txt # -aiohttp==3.8.5 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.2.0 # via aiohttp @@ -66,7 +66,6 @@ cffi==1.14.6 charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.9/freebsd.txt - # aiohttp # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.9/lint.txt b/requirements/static/ci/py3.9/lint.txt index 2bf6e17fcb5..4984aa2b7c1 100644 --- a/requirements/static/ci/py3.9/lint.txt +++ b/requirements/static/ci/py3.9/lint.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.9/lint.txt requirements/base.txt requirements/static/ci/common.in requirements/static/ci/lint.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt # -aiohttp==3.8.5 +aiohttp==3.9.0 # via # -c requirements/static/ci/py3.9/linux.txt # etcd3-py @@ -103,7 +103,6 @@ charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.9/linux.txt # -c requirements/static/ci/py3.9/linux.txt - # aiohttp # requests cheetah3==3.2.6.post2 # via diff --git a/requirements/static/ci/py3.9/linux.txt b/requirements/static/ci/py3.9/linux.txt index 9c03f06b19e..8b0445ed84f 100644 --- a/requirements/static/ci/py3.9/linux.txt +++ b/requirements/static/ci/py3.9/linux.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.9/linux.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt # -aiohttp==3.8.5 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.2.0 # via aiohttp @@ -75,7 +75,6 @@ cffi==1.14.6 charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.9/linux.txt - # aiohttp # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in diff --git a/requirements/static/ci/py3.9/windows.txt b/requirements/static/ci/py3.9/windows.txt index 6ab8afb9693..7da6d02dcc9 
100644 --- a/requirements/static/ci/py3.9/windows.txt +++ b/requirements/static/ci/py3.9/windows.txt @@ -4,7 +4,7 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.9/windows.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/windows.in requirements/static/pkg/windows.in requirements/windows.txt # -aiohttp==3.8.5 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.2.0 # via aiohttp @@ -57,7 +57,6 @@ cffi==1.14.6 charset-normalizer==3.2.0 # via # -c requirements/static/ci/../pkg/py3.9/windows.txt - # aiohttp # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in From 74aced4867bb6efc7e42007b61af33c023b69d28 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Fri, 26 May 2023 07:02:19 +0100 Subject: [PATCH 172/196] Concentrate pre-commit related tools commands under a parent `pre-commit` command Signed-off-by: Pedro Algarvio --- .pre-commit-config.yaml | 3 + tools/__init__.py | 4 +- tools/changelog.py | 111 ------------- tools/precommit/__init__.py | 9 ++ tools/precommit/changelog.py | 146 ++++++++++++++++++ .../{pre_commit.py => precommit/workflows.py} | 8 +- 6 files changed, 167 insertions(+), 114 deletions(-) create mode 100644 tools/precommit/__init__.py create mode 100644 tools/precommit/changelog.py rename tools/{pre_commit.py => precommit/workflows.py} (98%) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index f6aa4fdba1a..e7979d8fc59 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -52,6 +52,7 @@ repos: alias: check-changelog-entries name: Check Changelog Entries args: + - pre-commit - changelog - pre-commit-checks additional_dependencies: @@ -67,6 +68,7 @@ repos: pass_filenames: false args: - pre-commit + - workflows - generate-workflows additional_dependencies: - boto3==1.21.46 @@ -82,6 +84,7 @@ repos: - yaml args: - pre-commit + - workflows - actionlint additional_dependencies: - boto3==1.21.46 diff --git a/tools/__init__.py b/tools/__init__.py 
index 01f3e188441..db61bd0ba16 100644 --- a/tools/__init__.py +++ b/tools/__init__.py @@ -10,7 +10,9 @@ ptscripts.register_tools_module("tools.pkg.repo") ptscripts.register_tools_module("tools.pkg.build") ptscripts.register_tools_module("tools.pkg.repo.create") ptscripts.register_tools_module("tools.pkg.repo.publish") -ptscripts.register_tools_module("tools.pre_commit") +ptscripts.register_tools_module("tools.precommit") +ptscripts.register_tools_module("tools.precommit.changelog") +ptscripts.register_tools_module("tools.precommit.workflows") ptscripts.register_tools_module("tools.release") ptscripts.register_tools_module("tools.testsuite") ptscripts.register_tools_module("tools.testsuite.download") diff --git a/tools/changelog.py b/tools/changelog.py index d4d8b662829..12bbba22d3c 100644 --- a/tools/changelog.py +++ b/tools/changelog.py @@ -8,7 +8,6 @@ import datetime import logging import os import pathlib -import re import sys import textwrap @@ -17,19 +16,6 @@ from ptscripts import Context, command_group from tools.utils import REPO_ROOT, Version -CHANGELOG_LIKE_RE = re.compile(r"([\d]+)\.([a-z]+)$") -CHANGELOG_TYPES = ( - "removed", - "deprecated", - "changed", - "fixed", - "added", - "security", -) -CHANGELOG_ENTRY_RE = re.compile( - r"([\d]+|(CVE|cve)-[\d]{{4}}-[\d]+)\.({})(\.md)?$".format("|".join(CHANGELOG_TYPES)) -) - log = logging.getLogger(__name__) # Define the command group @@ -50,103 +36,6 @@ changelog = command_group( ) -@changelog.command( - name="pre-commit-checks", - arguments={ - "files": { - "nargs": "*", - } - }, -) -def check_changelog_entries(ctx: Context, files: list[pathlib.Path]): - """ - Run pre-commit checks on changelog snippets. 
- """ - docs_path = REPO_ROOT / "doc" - tests_integration_files_path = REPO_ROOT / "tests" / "integration" / "files" - changelog_entries_path = REPO_ROOT / "changelog" - exitcode = 0 - for entry in files: - path = pathlib.Path(entry).resolve() - # Is it under changelog/ - try: - path.relative_to(changelog_entries_path) - if path.name in (".keep", ".template.jinja"): - # This is the file we use so git doesn't delete the changelog/ directory - continue - # Is it named properly - if not CHANGELOG_ENTRY_RE.match(path.name): - ctx.error( - "The changelog entry '{}' should have one of the following extensions: {}.".format( - path.relative_to(REPO_ROOT), - ", ".join(f"{ext}.md" for ext in CHANGELOG_TYPES), - ), - ) - exitcode = 1 - continue - if path.suffix != ".md": - ctx.error( - f"Please rename '{path.relative_to(REPO_ROOT)}' to " - f"'{path.relative_to(REPO_ROOT)}.md'" - ) - exitcode = 1 - continue - except ValueError: - # No, carry on - pass - # Does it look like a changelog entry - if CHANGELOG_LIKE_RE.match(path.name) and not CHANGELOG_ENTRY_RE.match( - path.name - ): - try: - # Is this under doc/ - path.relative_to(docs_path) - # Yes, carry on - continue - except ValueError: - # No, resume the check - pass - try: - # Is this under tests/integration/files - path.relative_to(tests_integration_files_path) - # Yes, carry on - continue - except ValueError: - # No, resume the check - pass - ctx.error( - "The changelog entry '{}' should have one of the following extensions: {}.".format( - path.relative_to(REPO_ROOT), - ", ".join(f"{ext}.md" for ext in CHANGELOG_TYPES), - ) - ) - exitcode = 1 - continue - # Is it a changelog entry - if not CHANGELOG_ENTRY_RE.match(path.name): - # No? Carry on - continue - # Is the changelog entry in the right path? 
- try: - path.relative_to(changelog_entries_path) - except ValueError: - exitcode = 1 - ctx.error( - "The changelog entry '{}' should be placed under '{}/', not '{}'".format( - path.name, - changelog_entries_path.relative_to(REPO_ROOT), - path.relative_to(REPO_ROOT).parent, - ) - ) - if path.suffix != ".md": - ctx.error( - f"Please rename '{path.relative_to(REPO_ROOT)}' to " - f"'{path.relative_to(REPO_ROOT)}.md'" - ) - exitcode = 1 - ctx.exit(exitcode) - - def _get_changelog_contents(ctx: Context, version: Version): """ Return the full changelog generated by towncrier. diff --git a/tools/precommit/__init__.py b/tools/precommit/__init__.py new file mode 100644 index 00000000000..57d9d1ae62a --- /dev/null +++ b/tools/precommit/__init__.py @@ -0,0 +1,9 @@ +""" +These commands, and sub-commands, are used by pre-commit. +""" +from ptscripts import command_group + +# Define the command group +cgroup = command_group( + name="pre-commit", help="Pre-Commit Related Commands", description=__doc__ +) diff --git a/tools/precommit/changelog.py b/tools/precommit/changelog.py new file mode 100644 index 00000000000..5e108af5f11 --- /dev/null +++ b/tools/precommit/changelog.py @@ -0,0 +1,146 @@ +""" +These commands are used to validate changelog entries +""" +# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated +from __future__ import annotations + +import logging +import pathlib +import re +import sys + +from ptscripts import Context, command_group + +import tools.utils + +log = logging.getLogger(__name__) + +CHANGELOG_LIKE_RE = re.compile(r"([\d]+)\.([a-z]+)$") +CHANGELOG_TYPES = ( + "removed", + "deprecated", + "changed", + "fixed", + "added", + "security", +) +CHANGELOG_ENTRY_RE = re.compile( + r"([\d]+|(CVE|cve)-[\d]{{4}}-[\d]+)\.({})(\.md)?$".format("|".join(CHANGELOG_TYPES)) +) + +# Define the command group +changelog = command_group( + name="changelog", + help="Changelog tools", + description=__doc__, + venv_config={ + "requirements_files": [ + 
tools.utils.REPO_ROOT + / "requirements" + / "static" + / "ci" + / "py{}.{}".format(*sys.version_info) + / "changelog.txt" + ], + }, + parent="pre-commit", +) + + +@changelog.command( + name="pre-commit-checks", + arguments={ + "files": { + "nargs": "*", + } + }, +) +def check_changelog_entries(ctx: Context, files: list[pathlib.Path]): + """ + Run pre-commit checks on changelog snippets. + """ + docs_path = tools.utils.REPO_ROOT / "doc" + tests_integration_files_path = ( + tools.utils.REPO_ROOT / "tests" / "integration" / "files" + ) + changelog_entries_path = tools.utils.REPO_ROOT / "changelog" + exitcode = 0 + for entry in files: + path = pathlib.Path(entry).resolve() + # Is it under changelog/ + try: + path.relative_to(changelog_entries_path) + if path.name in (".keep", ".template.jinja"): + # This is the file we use so git doesn't delete the changelog/ directory + continue + # Is it named properly + if not CHANGELOG_ENTRY_RE.match(path.name): + ctx.error( + "The changelog entry '{}' should have one of the following extensions: {}.".format( + path.relative_to(tools.utils.REPO_ROOT), + ", ".join(f"{ext}.md" for ext in CHANGELOG_TYPES), + ), + ) + exitcode = 1 + continue + if path.suffix != ".md": + ctx.error( + f"Please rename '{path.relative_to(tools.utils.REPO_ROOT)}' to " + f"'{path.relative_to(tools.utils.REPO_ROOT)}.md'" + ) + exitcode = 1 + continue + except ValueError: + # No, carry on + pass + # Does it look like a changelog entry + if CHANGELOG_LIKE_RE.match(path.name) and not CHANGELOG_ENTRY_RE.match( + path.name + ): + try: + # Is this under doc/ + path.relative_to(docs_path) + # Yes, carry on + continue + except ValueError: + # No, resume the check + pass + try: + # Is this under tests/integration/files + path.relative_to(tests_integration_files_path) + # Yes, carry on + continue + except ValueError: + # No, resume the check + pass + ctx.error( + "The changelog entry '{}' should have one of the following extensions: {}.".format( + 
path.relative_to(tools.utils.REPO_ROOT), + ", ".join(f"{ext}.md" for ext in CHANGELOG_TYPES), + ) + ) + exitcode = 1 + continue + # Is it a changelog entry + if not CHANGELOG_ENTRY_RE.match(path.name): + # No? Carry on + continue + # Is the changelog entry in the right path? + try: + path.relative_to(changelog_entries_path) + except ValueError: + exitcode = 1 + ctx.error( + "The changelog entry '{}' should be placed under '{}/', not '{}'".format( + path.name, + changelog_entries_path.relative_to(tools.utils.REPO_ROOT), + path.relative_to(tools.utils.REPO_ROOT).parent, + ) + ) + if path.suffix != ".md": + ctx.error( + f"Please rename '{path.relative_to(tools.utils.REPO_ROOT)}' to " + f"'{path.relative_to(tools.utils.REPO_ROOT)}.md'" + ) + exitcode = 1 + ctx.exit(exitcode) diff --git a/tools/pre_commit.py b/tools/precommit/workflows.py similarity index 98% rename from tools/pre_commit.py rename to tools/precommit/workflows.py index 337c18ea012..855a5e07987 100644 --- a/tools/pre_commit.py +++ b/tools/precommit/workflows.py @@ -1,5 +1,5 @@ """ -These commands are used by pre-commit. +These commands are used for our GitHub Actions workflows. 
""" # pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated from __future__ import annotations @@ -19,9 +19,13 @@ log = logging.getLogger(__name__) WORKFLOWS = tools.utils.REPO_ROOT / ".github" / "workflows" TEMPLATES = WORKFLOWS / "templates" + # Define the command group cgroup = command_group( - name="pre-commit", help="Pre-Commit Related Commands", description=__doc__ + name="workflows", + help="Pre-Commit GH Actions Workflows Related Commands", + description=__doc__, + parent="pre-commit", ) From 185a352d0090a9780d8cb122fe72c7e86f752220 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 15 Nov 2023 12:19:08 +0000 Subject: [PATCH 173/196] Update the tools requirements Signed-off-by: Pedro Algarvio --- .github/workflows/ci.yml | 56 +++++++++------ .github/workflows/nightly.yml | 56 +++++++++------ .github/workflows/scheduled.yml | 56 +++++++++------ .github/workflows/staging.yml | 51 ++++++++------ .github/workflows/templates/ci.yml.jinja | 48 +++++++------ .github/workflows/templates/layout.yml.jinja | 12 ++++ .gitignore | 2 + .pre-commit-config.yaml | 25 ++++--- .../static/ci/py3.10/tools-virustotal.txt | 28 ++++++++ requirements/static/ci/py3.10/tools.txt | 70 ++++++++++++------- .../static/ci/py3.9/tools-virustotal.txt | 28 ++++++++ requirements/static/ci/py3.9/tools.txt | 70 ++++++++++++------- requirements/static/ci/tools-virustotal.in | 3 + requirements/static/ci/tools.in | 5 +- tools/__init__.py | 31 +++++++- tools/pkg/__init__.py | 2 +- tools/pkg/repo/__init__.py | 15 +--- tools/pkg/repo/create.py | 15 +--- tools/pkg/repo/publish.py | 15 +--- tools/release.py | 15 +--- tools/utils/__init__.py | 9 ++- tools/vm.py | 35 ++++------ 22 files changed, 403 insertions(+), 244 deletions(-) create mode 100644 requirements/static/ci/py3.10/tools-virustotal.txt create mode 100644 requirements/static/ci/py3.9/tools-virustotal.txt create mode 100644 requirements/static/ci/tools-virustotal.in diff --git a/.github/workflows/ci.yml 
b/.github/workflows/ci.yml index 3c93e9bc4a0..d54ce0bbfff 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -134,6 +134,18 @@ jobs: with: python-version: "3.10" + - name: Get Python Version + id: get-python-version + uses: ./.github/actions/get-python-version + with: + python-binary: python3 + + - name: Restore Cached Python Tools Virtualenvs + uses: actions/cache@v3 + with: + path: .tools-venvs + key: ${{ env.CACHE_SEED }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} + - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts @@ -269,29 +281,29 @@ jobs: steps: - uses: actions/checkout@v4 + - name: Set up Python 3.10 + uses: actions/setup-python@v4 + with: + python-version: "3.10" + - name: Get Python Version id: get-python-version uses: ./.github/actions/get-python-version with: python-binary: python3 + - name: Restore Python Tools Virtualenvs Cache + uses: actions/cache@v3 + with: + path: .tools-venvs + key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|changelog + restore-keys: | + ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|changelog + ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} + - name: Setup Python Tools Scripts - id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts - - name: Cache Python Tools Docs Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/docs - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job 
}}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|docs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/docs.txt') }} - - - name: Cache Python Tools Changelog Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/changelog - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|changelog|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/changelog.txt') }} - - - name: Setup Salt Version id: setup-salt-version uses: ./.github/actions/setup-salt-version @@ -412,8 +424,16 @@ jobs: with: python-binary: python3 + - name: Restore Python Tools Virtualenvs Cache + uses: actions/cache@v3 + with: + path: .tools-venvs + key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|build + restore-keys: | + ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|build + ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} + - name: Setup Python Tools Scripts - id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts - name: Setup Salt Version @@ -422,12 +442,6 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - - name: Cache Python Tools Build Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/build - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|build|${{ steps.get-python-version.outputs.version }}|${{ 
hashFiles('requirements/**/build.txt') }} - - name: Build Source Tarball uses: ./.github/actions/build-source-tarball with: diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index c35b3126e37..89119b5c61a 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -178,6 +178,18 @@ jobs: with: python-version: "3.10" + - name: Get Python Version + id: get-python-version + uses: ./.github/actions/get-python-version + with: + python-binary: python3 + + - name: Restore Cached Python Tools Virtualenvs + uses: actions/cache@v3 + with: + path: .tools-venvs + key: ${{ env.CACHE_SEED }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} + - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts @@ -313,29 +325,29 @@ jobs: steps: - uses: actions/checkout@v4 + - name: Set up Python 3.10 + uses: actions/setup-python@v4 + with: + python-version: "3.10" + - name: Get Python Version id: get-python-version uses: ./.github/actions/get-python-version with: python-binary: python3 + - name: Restore Python Tools Virtualenvs Cache + uses: actions/cache@v3 + with: + path: .tools-venvs + key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|changelog + restore-keys: | + ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|changelog + ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} + - name: Setup Python Tools Scripts - id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts - - name: Cache 
Python Tools Docs Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/docs - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|docs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/docs.txt') }} - - - name: Cache Python Tools Changelog Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/changelog - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|changelog|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/changelog.txt') }} - - - name: Setup Salt Version id: setup-salt-version uses: ./.github/actions/setup-salt-version @@ -461,8 +473,16 @@ jobs: with: python-binary: python3 + - name: Restore Python Tools Virtualenvs Cache + uses: actions/cache@v3 + with: + path: .tools-venvs + key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|build + restore-keys: | + ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|build + ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} + - name: Setup Python Tools Scripts - id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts - name: Setup Salt Version @@ -471,12 +491,6 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - - name: Cache Python Tools Build Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/build - key: ${{ needs.prepare-workflow.outputs.cache-seed 
}}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|build|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/build.txt') }} - - name: Build Source Tarball uses: ./.github/actions/build-source-tarball with: diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index 527d224cd74..3eb379f7772 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -168,6 +168,18 @@ jobs: with: python-version: "3.10" + - name: Get Python Version + id: get-python-version + uses: ./.github/actions/get-python-version + with: + python-binary: python3 + + - name: Restore Cached Python Tools Virtualenvs + uses: actions/cache@v3 + with: + path: .tools-venvs + key: ${{ env.CACHE_SEED }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} + - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts @@ -303,29 +315,29 @@ jobs: steps: - uses: actions/checkout@v4 + - name: Set up Python 3.10 + uses: actions/setup-python@v4 + with: + python-version: "3.10" + - name: Get Python Version id: get-python-version uses: ./.github/actions/get-python-version with: python-binary: python3 + - name: Restore Python Tools Virtualenvs Cache + uses: actions/cache@v3 + with: + path: .tools-venvs + key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|changelog + restore-keys: | + ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|changelog + ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ 
hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} + - name: Setup Python Tools Scripts - id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts - - name: Cache Python Tools Docs Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/docs - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|docs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/docs.txt') }} - - - name: Cache Python Tools Changelog Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/changelog - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|changelog|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/changelog.txt') }} - - - name: Setup Salt Version id: setup-salt-version uses: ./.github/actions/setup-salt-version @@ -446,8 +458,16 @@ jobs: with: python-binary: python3 + - name: Restore Python Tools Virtualenvs Cache + uses: actions/cache@v3 + with: + path: .tools-venvs + key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|build + restore-keys: | + ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|build + ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} + - name: Setup Python Tools Scripts - id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts - name: Setup Salt Version @@ -456,12 +476,6 @@ jobs: with: salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" - - name: Cache Python Tools Build Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/build - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|build|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/build.txt') }} - - name: Build Source Tarball uses: ./.github/actions/build-source-tarball with: diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index a2ab55dad87..4f7291a334c 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -164,6 +164,18 @@ jobs: with: python-version: "3.10" + - name: Get Python Version + id: get-python-version + uses: ./.github/actions/get-python-version + with: + python-binary: python3 + + - name: Restore Cached Python Tools Virtualenvs + uses: actions/cache@v3 + with: + path: .tools-venvs + key: ${{ env.CACHE_SEED }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} + - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts @@ -318,23 +330,18 @@ jobs: with: python-binary: python3 + - name: Restore Python Tools Virtualenvs Cache + uses: actions/cache@v3 + with: + path: .tools-venvs + key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|changelog + restore-keys: | + ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|changelog + ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 
'tools/**/*.py') }} + - name: Setup Python Tools Scripts - id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts - - name: Cache Python Tools Docs Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/docs - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|docs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/docs.txt') }} - - - name: Cache Python Tools Changelog Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/changelog - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|changelog|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/changelog.txt') }} - - - name: Setup Salt Version id: setup-salt-version uses: ./.github/actions/setup-salt-version @@ -456,8 +463,16 @@ jobs: with: python-binary: python3 + - name: Restore Python Tools Virtualenvs Cache + uses: actions/cache@v3 + with: + path: .tools-venvs + key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|build + restore-keys: | + ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|build + ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} + - name: Setup Python Tools Scripts - id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts - name: Setup Salt Version @@ -466,12 +481,6 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - - 
name: Cache Python Tools Build Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/build - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|build|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/build.txt') }} - - name: Build Source Tarball uses: ./.github/actions/build-source-tarball with: diff --git a/.github/workflows/templates/ci.yml.jinja b/.github/workflows/templates/ci.yml.jinja index 3ae87056381..ff9f773898c 100644 --- a/.github/workflows/templates/ci.yml.jinja +++ b/.github/workflows/templates/ci.yml.jinja @@ -59,29 +59,33 @@ steps: - uses: actions/checkout@v4 + <%- if not prepare_actual_release %> + + - name: Set up Python 3.10 + uses: actions/setup-python@v4 + with: + python-version: "3.10" + + <%- endif %> + - name: Get Python Version id: get-python-version uses: ./.github/actions/get-python-version with: python-binary: python3 + - name: Restore Python Tools Virtualenvs Cache + uses: actions/cache@v3 + with: + path: .tools-venvs + key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|changelog + restore-keys: | + ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|changelog + ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} + - name: Setup Python Tools Scripts - id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts - - name: Cache Python Tools Docs Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/docs - key: ${{ needs.prepare-workflow.outputs.cache-seed 
}}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|docs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/docs.txt') }} - - - name: Cache Python Tools Changelog Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/changelog - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|changelog|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/changelog.txt') }} - - - name: Setup Salt Version id: setup-salt-version uses: ./.github/actions/setup-salt-version @@ -228,8 +232,16 @@ with: python-binary: python3 + - name: Restore Python Tools Virtualenvs Cache + uses: actions/cache@v3 + with: + path: .tools-venvs + key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|build + restore-keys: | + ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|build + ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} + - name: Setup Python Tools Scripts - id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts - name: Setup Salt Version @@ -238,12 +250,6 @@ with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - - name: Cache Python Tools Build Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/build - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|build|${{ 
steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/build.txt') }} - - name: Build Source Tarball uses: ./.github/actions/build-source-tarball with: diff --git a/.github/workflows/templates/layout.yml.jinja b/.github/workflows/templates/layout.yml.jinja index bf98f9c2277..21c46817ad7 100644 --- a/.github/workflows/templates/layout.yml.jinja +++ b/.github/workflows/templates/layout.yml.jinja @@ -182,6 +182,18 @@ jobs: with: python-version: "3.10" + - name: Get Python Version + id: get-python-version + uses: ./.github/actions/get-python-version + with: + python-binary: python3 + + - name: Restore Cached Python Tools Virtualenvs + uses: actions/cache@v3 + with: + path: .tools-venvs + key: ${{ env.CACHE_SEED }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} + - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts diff --git a/.gitignore b/.gitignore index c933bbf79d2..f4f1babbb7c 100644 --- a/.gitignore +++ b/.gitignore @@ -145,3 +145,5 @@ nox.*.tar.xz /pkg/debian/salt-ssh /pkg/debian/salt-syndic /pkg/debian/debhelper-build-stamp + +.tools-venvs diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e7979d8fc59..aee06411e53 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -46,7 +46,7 @@ repos: )$ - repo: https://github.com/s0undt3ch/python-tools-scripts - rev: "0.15.0" + rev: "0.18.3" hooks: - id: tools alias: check-changelog-entries @@ -1078,16 +1078,24 @@ repos: - requirements/static/ci/tools.in - id: pip-tools-compile - alias: compile-ci-tools-3.11-requirements - name: Linux CI Py3.11 Tools Requirements - files: ^requirements/static/ci/(tools\.in|py3.11/(tools|linux)\.txt)$ + alias: compile-ci-tools-virustotal-3.9-requirements + name: Linux CI Py3.9 Tools virustotal Requirements + files: ^requirements/static/ci/(tools(-virustotal)?\.in|py3.9/(tools(-virustotal)?|linux)\.txt)$ 
pass_filenames: false args: - -v - - --build-isolation - - --py-version=3.11 - - --no-emit-index-url - - requirements/static/ci/tools.in + - --py-version=3.9 + - requirements/static/ci/tools-virustotal.in + + - id: pip-tools-compile + alias: compile-ci-tools-virustotal-3.10-requirements + name: Linux CI Py3.10 Tools virustotal Requirements + files: ^requirements/static/ci/(tools(-virustotal)?\.in|py3.10/(tools(-virustotal)?|linux)\.txt)$ + pass_filenames: false + args: + - -v + - --py-version=3.10 + - requirements/static/ci/tools-virustotal.in # <---- Tools ----------------------------------------------------------------------------------------------------- # ----- Code Formatting -------------------------------------------------------------------------------------------> @@ -1319,6 +1327,7 @@ repos: - types-attrs - types-pyyaml - types-requests + - python-tools-scripts>=0.18.3 - repo: https://github.com/saltstack/mirrors-nox rev: v2021.6.12 diff --git a/requirements/static/ci/py3.10/tools-virustotal.txt b/requirements/static/ci/py3.10/tools-virustotal.txt new file mode 100644 index 00000000000..11aa11ca27b --- /dev/null +++ b/requirements/static/ci/py3.10/tools-virustotal.txt @@ -0,0 +1,28 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --output-file=requirements/static/ci/py3.10/tools-virustotal.txt requirements/static/ci/tools-virustotal.in +# +certifi==2023.7.22 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # requests +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # requests +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # requests +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # virustotal3 +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # requests +virustotal3==1.0.8 + # via -r requirements/static/ci/tools-virustotal.in diff --git 
a/requirements/static/ci/py3.10/tools.txt b/requirements/static/ci/py3.10/tools.txt index 3cd670b6dfe..69f0c3896c0 100644 --- a/requirements/static/ci/py3.10/tools.txt +++ b/requirements/static/ci/py3.10/tools.txt @@ -4,57 +4,77 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.10/tools.txt requirements/static/ci/tools.in # -attrs==22.1.0 +attrs==20.3.0 # via # -r requirements/static/ci/tools.in # python-tools-scripts -boto3==1.21.46 +boto3==1.26.147 # via -r requirements/static/ci/tools.in -botocore==1.24.46 +botocore==1.29.147 # via # boto3 # s3transfer -certifi==2023.07.22 - # via requests -charset-normalizer==3.0.1 - # via requests -commonmark==0.9.1 - # via rich +certifi==2023.7.22 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # requests +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # requests idna==3.2 - # via requests + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # requests jinja2==3.1.2 - # via -r requirements/static/ci/tools.in + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -r requirements/static/ci/tools.in jmespath==1.0.1 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt # boto3 # botocore +markdown-it-py==2.2.0 + # via rich markupsafe==2.1.2 - # via jinja2 -packaging==23.0 - # via -r requirements/static/ci/tools.in + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # jinja2 +mdurl==0.1.2 + # via markdown-it-py +packaging==22.0 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -r requirements/static/ci/tools.in pygments==2.13.0 # via rich -python-dateutil==2.8.2 - # via botocore -python-tools-scripts==0.18.1 +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # botocore +python-tools-scripts==0.18.3 # via -r requirements/static/ci/tools.in pyyaml==6.0.1 - # via -r requirements/static/ci/tools.in + # via + # -c 
requirements/static/ci/../pkg/py3.10/linux.txt + # -r requirements/static/ci/tools.in requests==2.31.0 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt # python-tools-scripts - # virustotal3 -rich==12.5.1 +rich==13.3.5 # via python-tools-scripts -s3transfer==0.5.2 +s3transfer==0.6.1 # via boto3 six==1.16.0 - # via python-dateutil -typing-extensions==4.4.0 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # python-dateutil +typing-extensions==4.2.0 # via python-tools-scripts urllib3==1.26.18 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt # botocore # requests -virustotal3==1.0.8 - # via -r requirements/static/ci/tools.in diff --git a/requirements/static/ci/py3.9/tools-virustotal.txt b/requirements/static/ci/py3.9/tools-virustotal.txt new file mode 100644 index 00000000000..6972dd80375 --- /dev/null +++ b/requirements/static/ci/py3.9/tools-virustotal.txt @@ -0,0 +1,28 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --output-file=requirements/static/ci/py3.9/tools-virustotal.txt requirements/static/ci/tools-virustotal.in +# +certifi==2023.7.22 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # requests +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # requests +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # requests +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # virustotal3 +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # requests +virustotal3==1.0.8 + # via -r requirements/static/ci/tools-virustotal.in diff --git a/requirements/static/ci/py3.9/tools.txt b/requirements/static/ci/py3.9/tools.txt index a5ae88526d4..018373ce635 100644 --- a/requirements/static/ci/py3.9/tools.txt +++ b/requirements/static/ci/py3.9/tools.txt @@ -4,57 +4,77 @@ # # pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.9/tools.txt 
requirements/static/ci/tools.in # -attrs==22.1.0 +attrs==20.3.0 # via # -r requirements/static/ci/tools.in # python-tools-scripts -boto3==1.21.46 +boto3==1.26.147 # via -r requirements/static/ci/tools.in -botocore==1.24.46 +botocore==1.29.147 # via # boto3 # s3transfer -certifi==2023.07.22 - # via requests -charset-normalizer==3.0.1 - # via requests -commonmark==0.9.1 - # via rich +certifi==2023.7.22 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # requests +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # requests idna==3.2 - # via requests + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # requests jinja2==3.1.2 - # via -r requirements/static/ci/tools.in + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -r requirements/static/ci/tools.in jmespath==1.0.1 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt # boto3 # botocore +markdown-it-py==2.2.0 + # via rich markupsafe==2.1.2 - # via jinja2 -packaging==23.0 - # via -r requirements/static/ci/tools.in + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # jinja2 +mdurl==0.1.2 + # via markdown-it-py +packaging==22.0 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -r requirements/static/ci/tools.in pygments==2.13.0 # via rich -python-dateutil==2.8.2 - # via botocore -python-tools-scripts==0.18.1 +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # botocore +python-tools-scripts==0.18.3 # via -r requirements/static/ci/tools.in pyyaml==6.0.1 - # via -r requirements/static/ci/tools.in + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -r requirements/static/ci/tools.in requests==2.31.0 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt # python-tools-scripts - # virustotal3 -rich==12.5.1 +rich==13.3.5 # via python-tools-scripts -s3transfer==0.5.2 +s3transfer==0.6.1 # via boto3 six==1.16.0 - # via python-dateutil -typing-extensions==4.4.0 + # 
via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # python-dateutil +typing-extensions==4.2.0 # via python-tools-scripts urllib3==1.26.18 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt # botocore # requests -virustotal3==1.0.8 - # via -r requirements/static/ci/tools.in diff --git a/requirements/static/ci/tools-virustotal.in b/requirements/static/ci/tools-virustotal.in new file mode 100644 index 00000000000..b7d1a356f4e --- /dev/null +++ b/requirements/static/ci/tools-virustotal.in @@ -0,0 +1,3 @@ +--constraint=../pkg/py{py_version}/{platform}.txt + +virustotal3 diff --git a/requirements/static/ci/tools.in b/requirements/static/ci/tools.in index 9066c498fcc..143cab05113 100644 --- a/requirements/static/ci/tools.in +++ b/requirements/static/ci/tools.in @@ -1,7 +1,8 @@ -python-tools-scripts >= 0.18.1 +--constraint=../pkg/py{py_version}/{platform}.txt + attrs +python-tools-scripts >= 0.18.3 boto3 pyyaml jinja2 packaging -virustotal3 diff --git a/tools/__init__.py b/tools/__init__.py index db61bd0ba16..22be82c40de 100644 --- a/tools/__init__.py +++ b/tools/__init__.py @@ -1,7 +1,36 @@ import logging +import pathlib +import sys import ptscripts +from ptscripts.parser import DefaultRequirementsConfig +from ptscripts.virtualenv import VirtualEnvConfig +REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent +REQUIREMENTS_FILES_PATH = REPO_ROOT / "requirements" +STATIC_REQUIREMENTS_PATH = REQUIREMENTS_FILES_PATH / "static" +CI_REQUIREMENTS_FILES_PATH = ( + STATIC_REQUIREMENTS_PATH / "ci" / "py{}.{}".format(*sys.version_info) +) +PKG_REQUIREMENTS_FILES_PATH = ( + STATIC_REQUIREMENTS_PATH / "pkg" / "py{}.{}".format(*sys.version_info) +) +DEFAULT_REQS_CONFIG = DefaultRequirementsConfig( + pip_args=[ + f"--constraint={PKG_REQUIREMENTS_FILES_PATH / 'linux.txt'}", + ], + requirements_files=[ + REQUIREMENTS_FILES_PATH / "base.txt", + CI_REQUIREMENTS_FILES_PATH / "tools.txt", + ], +) +RELEASE_VENV_CONFIG = VirtualEnvConfig( + requirements_files=[ + 
CI_REQUIREMENTS_FILES_PATH / "tools-virustotal.txt", + ], + add_as_extra_site_packages=True, +) +ptscripts.set_default_requirements_config(DEFAULT_REQS_CONFIG) ptscripts.register_tools_module("tools.changelog") ptscripts.register_tools_module("tools.ci") ptscripts.register_tools_module("tools.docs") @@ -13,9 +42,9 @@ ptscripts.register_tools_module("tools.pkg.repo.publish") ptscripts.register_tools_module("tools.precommit") ptscripts.register_tools_module("tools.precommit.changelog") ptscripts.register_tools_module("tools.precommit.workflows") -ptscripts.register_tools_module("tools.release") ptscripts.register_tools_module("tools.testsuite") ptscripts.register_tools_module("tools.testsuite.download") +ptscripts.register_tools_module("tools.release", venv_config=RELEASE_VENV_CONFIG) ptscripts.register_tools_module("tools.vm") for name in ("boto3", "botocore", "urllib3"): diff --git a/tools/pkg/__init__.py b/tools/pkg/__init__.py index 5b97eaa6778..05612996655 100644 --- a/tools/pkg/__init__.py +++ b/tools/pkg/__init__.py @@ -154,7 +154,7 @@ def set_salt_version( ret = venv.run_code(code, capture=True, check=False) if ret.returncode: ctx.error(ret.stderr.decode()) - ctx.exit(ctx.returncode) + ctx.exit(ret.returncode) salt_version = ret.stdout.strip().decode() if not tools.utils.REPO_ROOT.joinpath("salt").is_dir(): diff --git a/tools/pkg/repo/__init__.py b/tools/pkg/repo/__init__.py index 5599bfd5722..e48671051f2 100644 --- a/tools/pkg/repo/__init__.py +++ b/tools/pkg/repo/__init__.py @@ -8,27 +8,16 @@ import logging import os import pathlib import shutil -import sys from typing import TYPE_CHECKING +import boto3 +from botocore.exceptions import ClientError from ptscripts import Context, command_group import tools.pkg import tools.utils from tools.utils import Version, get_salt_releases -try: - import boto3 - from botocore.exceptions import ClientError -except ImportError: - print( - "\nPlease run 'python -m pip install -r " - 
"requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info), - file=sys.stderr, - flush=True, - ) - raise - log = logging.getLogger(__name__) # Define the command group diff --git a/tools/pkg/repo/create.py b/tools/pkg/repo/create.py index 8dfbf9dc459..a665340098c 100644 --- a/tools/pkg/repo/create.py +++ b/tools/pkg/repo/create.py @@ -10,11 +10,11 @@ import logging import os import pathlib import shutil -import sys import textwrap from datetime import datetime from typing import TYPE_CHECKING +import boto3 from ptscripts import Context, command_group import tools.pkg @@ -26,17 +26,6 @@ from tools.utils.repo import ( get_repo_json_file_contents, ) -try: - import boto3 -except ImportError: - print( - "\nPlease run 'python -m pip install -r " - "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info), - file=sys.stderr, - flush=True, - ) - raise - log = logging.getLogger(__name__) create = command_group( @@ -157,7 +146,7 @@ def debian( distro_details = _deb_distro_info[distro][distro_version] ctx.info("Distribution Details:") - ctx.info(distro_details) + ctx.print(distro_details, soft_wrap=True) if TYPE_CHECKING: assert isinstance(distro_details["label"], str) assert isinstance(distro_details["codename"], str) diff --git a/tools/pkg/repo/publish.py b/tools/pkg/repo/publish.py index 3ad0ec9e428..2a743ac046b 100644 --- a/tools/pkg/repo/publish.py +++ b/tools/pkg/repo/publish.py @@ -10,12 +10,13 @@ import logging import os import pathlib import re -import sys import tempfile import textwrap from typing import TYPE_CHECKING, Any +import boto3 import packaging.version +from botocore.exceptions import ClientError from ptscripts import Context, command_group import tools.pkg @@ -24,18 +25,6 @@ import tools.utils.repo from tools.utils import Version, get_salt_releases, parse_versions from tools.utils.repo import create_full_repo_path, get_repo_json_file_contents -try: - import boto3 - from botocore.exceptions import ClientError -except ImportError: 
- print( - "\nPlease run 'python -m pip install -r " - "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info), - file=sys.stderr, - flush=True, - ) - raise - log = logging.getLogger(__name__) publish = command_group( diff --git a/tools/release.py b/tools/release.py index f78e93c07ec..cc17938d453 100644 --- a/tools/release.py +++ b/tools/release.py @@ -8,28 +8,17 @@ import json import logging import os import pathlib -import sys import tempfile import time +import boto3 import virustotal3.core +from botocore.exceptions import ClientError from ptscripts import Context, command_group import tools.utils import tools.utils.repo -try: - import boto3 - from botocore.exceptions import ClientError -except ImportError: - print( - "\nPlease run 'python -m pip install -r " - "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info), - file=sys.stderr, - flush=True, - ) - raise - log = logging.getLogger(__name__) # Define the command group diff --git a/tools/utils/__init__.py b/tools/utils/__init__.py index b5dda0ddcb3..bebc9c98eb3 100644 --- a/tools/utils/__init__.py +++ b/tools/utils/__init__.py @@ -14,7 +14,9 @@ from datetime import datetime from enum import IntEnum from typing import Any +import boto3 import packaging.version +from botocore.exceptions import ClientError from ptscripts import Context from rich.progress import ( BarColumn, @@ -217,7 +219,7 @@ def download_file( ctx: Context, url: str, dest: pathlib.Path, - auth: str | None = None, + auth: tuple[str, str] | None = None, headers: dict[str, str] | None = None, ) -> pathlib.Path: ctx.info(f"Downloading {dest.name!r} @ {url} ...") @@ -235,7 +237,7 @@ def download_file( return dest wget = shutil.which("wget") if wget is not None: - with ctx.cwd(dest.parent): + with ctx.chdir(dest.parent): command = [wget, "--no-verbose"] if headers: for key, value in headers.items(): @@ -248,7 +250,8 @@ def download_file( return dest # NOTE the stream=True parameter below with ctx.web as web: - 
web.headers.update(headers) + if headers: + web.headers.update(headers) with web.get(url, stream=True, auth=auth) as r: r.raise_for_status() with dest.open("wb") as f: diff --git a/tools/vm.py b/tools/vm.py index ca3717aa909..a8fa51ea748 100644 --- a/tools/vm.py +++ b/tools/vm.py @@ -21,33 +21,22 @@ from datetime import datetime from functools import lru_cache from typing import TYPE_CHECKING, cast +import attr +import boto3 +from botocore.exceptions import ClientError from ptscripts import Context, command_group from requests.exceptions import ConnectTimeout +from rich.progress import ( + BarColumn, + Column, + Progress, + TaskProgressColumn, + TextColumn, + TimeRemainingColumn, +) import tools.utils -try: - import attr - import boto3 - from botocore.exceptions import ClientError - from rich.progress import ( - BarColumn, - Column, - Progress, - TaskProgressColumn, - TextColumn, - TimeRemainingColumn, - ) -except ImportError: - print( - "\nPlease run 'python -m pip install -r " - "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info), - file=sys.stderr, - flush=True, - ) - raise - - if TYPE_CHECKING: # pylint: disable=no-name-in-module from boto3.resources.factory.ec2 import Instance @@ -1313,6 +1302,8 @@ class VM: "--exclude", ".nox/", "--exclude", + ".tools-venvs/", + "--exclude", ".pytest_cache/", "--exclude", f"{STATE_DIR.relative_to(tools.utils.REPO_ROOT)}{os.path.sep}", From 54ae2e5e84b97d1bb02e5f912131033b59b49582 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 16 Nov 2023 18:05:58 +0000 Subject: [PATCH 174/196] The `setup-python-tools-scripts` actions now takes care of all the caching Signed-off-by: Pedro Algarvio --- .github/actions/get-python-version/action.yml | 14 ++++ .../setup-python-tools-scripts/action.yml | 17 ++++ .github/workflows/build-deb-packages.yml | 5 ++ .github/workflows/build-docs.yml | 2 + .github/workflows/build-macos-packages.yml | 6 ++ .github/workflows/build-rpm-packages.yml | 6 ++ 
.github/workflows/build-windows-packages.yml | 6 ++ .github/workflows/ci.yml | 61 +++++---------- .github/workflows/nightly.yml | 77 ++++++++---------- .../workflows/release-upload-virustotal.yml | 2 + .github/workflows/release.yml | 12 +++ .github/workflows/scheduled.yml | 61 +++++---------- .github/workflows/staging.yml | 78 +++++++++---------- .../templates/build-deb-repo.yml.jinja | 2 + .../templates/build-macos-repo.yml.jinja | 2 + .../templates/build-onedir-repo.yml.jinja | 2 + .../templates/build-packages.yml.jinja | 1 + .../templates/build-rpm-repo.yml.jinja | 2 + .../templates/build-src-repo.yml.jinja | 4 +- .../templates/build-windows-repo.yml.jinja | 2 + .github/workflows/templates/ci.yml.jinja | 39 +++------- .github/workflows/templates/layout.yml.jinja | 14 +--- .github/workflows/templates/nightly.yml.jinja | 2 + .github/workflows/templates/release.yml.jinja | 14 ++++ .github/workflows/templates/staging.yml.jinja | 4 + .github/workflows/test-action-macos.yml | 2 + .github/workflows/test-action.yml | 4 + .../workflows/test-packages-action-macos.yml | 2 + .github/workflows/test-packages-action.yml | 4 + 29 files changed, 234 insertions(+), 213 deletions(-) diff --git a/.github/actions/get-python-version/action.yml b/.github/actions/get-python-version/action.yml index e64d285bca5..f2b045f7ca7 100644 --- a/.github/actions/get-python-version/action.yml +++ b/.github/actions/get-python-version/action.yml @@ -13,6 +13,8 @@ outputs: value: ${{ steps.get-python-version.outputs.version }} full-version: value: ${{ steps.get-python-version.outputs.full-version }} + version-sha256sum: + value: ${{ steps.get-python-version.outputs.version-sha256sum }} runs: @@ -20,12 +22,24 @@ runs: steps: + - name: Install System Packages + if: ${{ runner.os == 'macOS' }} + shell: bash + run: | + brew install coreutils + - name: Get Python Version id: get-python-version shell: bash run: | + echo "Python Binary: ${{ inputs.python-binary }}" echo "binary=${{ inputs.python-binary }}" 
>> "$GITHUB_OUTPUT" PY_VERSION=$(${{ inputs.python-binary }} -c "import sys; sys.stdout.write('{}.{}'.format(*sys.version_info))") + echo "PY_VERSION=$PY_VERSION" echo "version=$PY_VERSION" >> "$GITHUB_OUTPUT" PY_FULL_VERSION=$(${{ inputs.python-binary }} -c "import sys; sys.stdout.write('{}.{}.{}'.format(*sys.version_info))") + echo "PY_FULL_VERSION=$PY_FULL_VERSION" echo "full-version=$PY_FULL_VERSION" >> "$GITHUB_OUTPUT" + VERSION_SHA256SUM=$(${{ inputs.python-binary }} --version --version | sha256sum | cut -d ' ' -f 1) + echo "VERSION_SHA256SUM=$VERSION_SHA256SUM" + echo "version-sha256sum=$VERSION_SHA256SUM" >> "$GITHUB_OUTPUT" diff --git a/.github/actions/setup-python-tools-scripts/action.yml b/.github/actions/setup-python-tools-scripts/action.yml index 72bcf3b1d37..85123e98fe5 100644 --- a/.github/actions/setup-python-tools-scripts/action.yml +++ b/.github/actions/setup-python-tools-scripts/action.yml @@ -3,6 +3,14 @@ name: setup-python-tools-scripts description: Setup 'python-tools-scripts' inputs: + cache-prefix: + required: true + type: string + description: Seed used to invalidate caches + cache-suffix: + required: false + type: string + description: Seed used to invalidate caches cwd: type: string description: The directory the salt checkout is located in @@ -29,6 +37,15 @@ runs: with: python-binary: python3 + - name: Restore Python Tools Virtualenvs Cache + uses: actions/cache@v3 + with: + path: ${{ inputs.cwd }}/.tools-venvs + key: ${{ inputs.cache-prefix }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version-sha256sum }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}${{ inputs.cache-suffix && format('|{0}', inputs.cache-suffix) || '' }} + restore-keys: | + ${{ inputs.cache-prefix }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version-sha256sum }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}${{ inputs.cache-suffix && format('|{0}', inputs.cache-suffix) || '' }} + ${{ 
inputs.cache-prefix }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version-sha256sum }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} + - name: Install 'python-tools-scripts' shell: bash working-directory: ${{ inputs.cwd }} diff --git a/.github/workflows/build-deb-packages.yml b/.github/workflows/build-deb-packages.yml index 31cc710ed3f..4d7bbdcc824 100644 --- a/.github/workflows/build-deb-packages.yml +++ b/.github/workflows/build-deb-packages.yml @@ -20,6 +20,10 @@ on: required: true type: string description: The backend to build the packages with + cache-prefix: + required: true + type: string + description: Seed used to invalidate caches env: COLUMNS: 190 @@ -75,6 +79,7 @@ jobs: uses: ./.github/actions/setup-python-tools-scripts with: cwd: pkgs/checkout/ + cache-prefix: ${{ inputs.cache-prefix }} - name: Setup Salt Version id: setup-salt-version diff --git a/.github/workflows/build-docs.yml b/.github/workflows/build-docs.yml index eef8243169f..fea955d9d66 100644 --- a/.github/workflows/build-docs.yml +++ b/.github/workflows/build-docs.yml @@ -56,6 +56,8 @@ jobs: - name: Setup Python Tools Scripts id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-seed }} - name: Configure Git if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} diff --git a/.github/workflows/build-macos-packages.yml b/.github/workflows/build-macos-packages.yml index 440aefba715..67044951b5a 100644 --- a/.github/workflows/build-macos-packages.yml +++ b/.github/workflows/build-macos-packages.yml @@ -28,6 +28,10 @@ on: required: true type: string description: The backend to build the packages with + cache-prefix: + required: true + type: string + description: Seed used to invalidate caches env: COLUMNS: 190 @@ -81,6 +85,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} - name: Setup Salt 
Version id: setup-salt-version diff --git a/.github/workflows/build-rpm-packages.yml b/.github/workflows/build-rpm-packages.yml index dfd62c10e8e..1b2103700c9 100644 --- a/.github/workflows/build-rpm-packages.yml +++ b/.github/workflows/build-rpm-packages.yml @@ -20,6 +20,10 @@ on: required: true type: string description: The backend to build the packages with + cache-prefix: + required: true + type: string + description: Seed used to invalidate caches env: COLUMNS: 190 @@ -64,6 +68,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} - name: Setup Salt Version id: setup-salt-version diff --git a/.github/workflows/build-windows-packages.yml b/.github/workflows/build-windows-packages.yml index 821d33c60d4..d8c28b96f45 100644 --- a/.github/workflows/build-windows-packages.yml +++ b/.github/workflows/build-windows-packages.yml @@ -28,6 +28,10 @@ on: required: true type: string description: The backend to build the packages with + cache-prefix: + required: true + type: string + description: Seed used to invalidate caches env: COLUMNS: 190 @@ -92,6 +96,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} - name: Setup Salt Version id: setup-salt-version diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index d54ce0bbfff..478f78ae0ad 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -134,20 +134,10 @@ jobs: with: python-version: "3.10" - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version - with: - python-binary: python3 - - - name: Restore Cached Python Tools Virtualenvs - uses: actions/cache@v3 - with: - path: .tools-venvs - key: ${{ env.CACHE_SEED }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} - - name: 
Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ env.CACHE_SEED }} - name: Pretty Print The GH Actions Event run: @@ -286,23 +276,11 @@ jobs: with: python-version: "3.10" - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version - with: - python-binary: python3 - - - name: Restore Python Tools Virtualenvs Cache - uses: actions/cache@v3 - with: - path: .tools-venvs - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|changelog - restore-keys: | - ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|changelog - ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} - - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + cache-suffix: changelog - name: Setup Salt Version id: setup-salt-version @@ -418,23 +396,11 @@ jobs: with: python-version: "3.10" - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version - with: - python-binary: python3 - - - name: Restore Python Tools Virtualenvs Cache - uses: actions/cache@v3 - with: - path: .tools-venvs - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|build - restore-keys: | - ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ 
hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|build - ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} - - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + cache-suffix: build - name: Setup Salt Version id: setup-salt-version @@ -546,6 +512,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -559,6 +526,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -572,6 +540,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -585,6 +554,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -598,6 +568,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -611,6 +582,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -624,6 +596,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -637,6 +610,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -2765,6 +2739,9 @@ jobs: - name: Setup Python Tools Scripts id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + cache-suffix: coverage - name: Install Nox run: | diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 89119b5c61a..54ed810e08f 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -178,20 +178,10 @@ jobs: with: python-version: "3.10" - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version - with: - python-binary: python3 - - - name: Restore Cached Python Tools Virtualenvs - uses: actions/cache@v3 - with: - path: .tools-venvs - key: ${{ env.CACHE_SEED }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} - - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ env.CACHE_SEED }} - name: Pretty Print The GH Actions Event run: @@ -330,23 +320,11 @@ jobs: with: python-version: "3.10" - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version - 
with: - python-binary: python3 - - - name: Restore Python Tools Virtualenvs Cache - uses: actions/cache@v3 - with: - path: .tools-venvs - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|changelog - restore-keys: | - ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|changelog - ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} - - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + cache-suffix: changelog - name: Setup Salt Version id: setup-salt-version @@ -467,23 +445,11 @@ jobs: with: python-version: "3.10" - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version - with: - python-binary: python3 - - - name: Restore Python Tools Virtualenvs Cache - uses: actions/cache@v3 - with: - path: .tools-venvs - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|build - restore-keys: | - ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|build - ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} - - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: 
+ cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + cache-suffix: build - name: Setup Salt Version id: setup-salt-version @@ -595,6 +561,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -608,6 +575,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -621,6 +589,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -634,6 +603,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -647,6 +617,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -663,6 +634,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -679,6 +651,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -695,6 +668,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -2826,6 +2800,9 @@ jobs: - name: Setup Python Tools Scripts id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + cache-suffix: coverage - name: Install Nox run: | @@ -2937,7 +2914,9 @@ jobs: - uses: actions/checkout@v4 - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -3080,6 +3059,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -3299,6 +3280,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -3385,6 +3368,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -3485,6 +3470,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ 
-3569,6 +3556,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -3765,6 +3754,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Download Repository Artifact uses: actions/download-artifact@v3 diff --git a/.github/workflows/release-upload-virustotal.yml b/.github/workflows/release-upload-virustotal.yml index d47d6ce6a6f..da13d83ca80 100644 --- a/.github/workflows/release-upload-virustotal.yml +++ b/.github/workflows/release-upload-virustotal.yml @@ -43,6 +43,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: virus-total - name: Upload to VirusTotal env: diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 6b7b0037a8d..7d2d473ddaa 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -70,6 +70,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ env.CACHE_SEED }} - name: Pretty Print The GH Actions Event run: @@ -142,6 +144,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -808,6 +812,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Backup Previous Releases id: backup @@ -838,6 +844,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - 
name: Publish Release Repository env: @@ -921,6 +929,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Setup GnuPG run: | @@ -1024,6 +1034,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Setup GnuPG run: | diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index 3eb379f7772..9650cf46f96 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -168,20 +168,10 @@ jobs: with: python-version: "3.10" - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version - with: - python-binary: python3 - - - name: Restore Cached Python Tools Virtualenvs - uses: actions/cache@v3 - with: - path: .tools-venvs - key: ${{ env.CACHE_SEED }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} - - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ env.CACHE_SEED }} - name: Pretty Print The GH Actions Event run: @@ -320,23 +310,11 @@ jobs: with: python-version: "3.10" - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version - with: - python-binary: python3 - - - name: Restore Python Tools Virtualenvs Cache - uses: actions/cache@v3 - with: - path: .tools-venvs - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|changelog - restore-keys: | - ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 
'tools/**/*.py') }}|changelog - ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} - - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + cache-suffix: changelog - name: Setup Salt Version id: setup-salt-version @@ -452,23 +430,11 @@ jobs: with: python-version: "3.10" - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version - with: - python-binary: python3 - - - name: Restore Python Tools Virtualenvs Cache - uses: actions/cache@v3 - with: - path: .tools-venvs - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|build - restore-keys: | - ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|build - ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} - - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + cache-suffix: build - name: Setup Salt Version id: setup-salt-version @@ -580,6 +546,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -593,6 +560,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -606,6 +574,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -619,6 +588,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -632,6 +602,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -645,6 +616,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -658,6 +630,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -671,6 +644,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -2799,6 +2773,9 @@ jobs: - name: Setup Python Tools Scripts id: python-tools-scripts uses: 
./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + cache-suffix: coverage - name: Install Nox run: | diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 4f7291a334c..7ce8aa13cfc 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -164,20 +164,10 @@ jobs: with: python-version: "3.10" - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version - with: - python-binary: python3 - - - name: Restore Cached Python Tools Virtualenvs - uses: actions/cache@v3 - with: - path: .tools-venvs - key: ${{ env.CACHE_SEED }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} - - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ env.CACHE_SEED }} - name: Pretty Print The GH Actions Event run: @@ -324,23 +314,11 @@ jobs: steps: - uses: actions/checkout@v4 - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version - with: - python-binary: python3 - - - name: Restore Python Tools Virtualenvs Cache - uses: actions/cache@v3 - with: - path: .tools-venvs - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|changelog - restore-keys: | - ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|changelog - ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} - - name: Setup Python Tools Scripts uses: 
./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + cache-suffix: changelog - name: Setup Salt Version id: setup-salt-version @@ -457,23 +435,11 @@ jobs: with: python-version: "3.10" - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version - with: - python-binary: python3 - - - name: Restore Python Tools Virtualenvs Cache - uses: actions/cache@v3 - with: - path: .tools-venvs - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|build - restore-keys: | - ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|build - ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} - - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + cache-suffix: build - name: Setup Salt Version id: setup-salt-version @@ -585,6 +551,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -598,6 +565,7 @@ jobs: uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -611,6 +579,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -624,6 +593,7 @@ jobs: uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -637,6 +607,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -653,6 +624,7 @@ jobs: uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -669,6 +641,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "onedir" @@ -685,6 +658,7 @@ jobs: uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} relenv-version: "0.14.2" python-version: "3.10.13" source: "src" @@ -2747,7 +2721,9 @@ jobs: - uses: actions/checkout@v4 - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -2890,6 +2866,8 @@ jobs: - name: Setup Python Tools Scripts uses: 
./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -3109,6 +3087,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -3197,6 +3177,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -3297,6 +3279,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -3381,6 +3365,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -3506,6 +3492,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Download Repository Artifact uses: actions/download-artifact@v3 @@ -3551,6 +3539,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Download Release Patch uses: actions/download-artifact@v3 @@ -3768,6 +3758,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Setup GnuPG run: | diff --git a/.github/workflows/templates/build-deb-repo.yml.jinja 
b/.github/workflows/templates/build-deb-repo.yml.jinja index 165c60c02e9..8d9c054405f 100644 --- a/.github/workflows/templates/build-deb-repo.yml.jinja +++ b/.github/workflows/templates/build-deb-repo.yml.jinja @@ -31,6 +31,8 @@ - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | diff --git a/.github/workflows/templates/build-macos-repo.yml.jinja b/.github/workflows/templates/build-macos-repo.yml.jinja index 5f9b14a9904..916686f5968 100644 --- a/.github/workflows/templates/build-macos-repo.yml.jinja +++ b/.github/workflows/templates/build-macos-repo.yml.jinja @@ -10,6 +10,8 @@ - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | diff --git a/.github/workflows/templates/build-onedir-repo.yml.jinja b/.github/workflows/templates/build-onedir-repo.yml.jinja index 70deec70b7d..9b1daf3ce7e 100644 --- a/.github/workflows/templates/build-onedir-repo.yml.jinja +++ b/.github/workflows/templates/build-onedir-repo.yml.jinja @@ -10,6 +10,8 @@ - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | diff --git a/.github/workflows/templates/build-packages.yml.jinja b/.github/workflows/templates/build-packages.yml.jinja index 37cae00b1e8..b5086a75e58 100644 --- a/.github/workflows/templates/build-packages.yml.jinja +++ b/.github/workflows/templates/build-packages.yml.jinja @@ -20,6 +20,7 @@ uses: ./.github/workflows/build-<{ pkg_type }>-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} 
relenv-version: "<{ relenv_version }>" python-version: "<{ python_version }>" source: "<{ backend }>" diff --git a/.github/workflows/templates/build-rpm-repo.yml.jinja b/.github/workflows/templates/build-rpm-repo.yml.jinja index 7ed17a163db..7e99a968696 100644 --- a/.github/workflows/templates/build-rpm-repo.yml.jinja +++ b/.github/workflows/templates/build-rpm-repo.yml.jinja @@ -54,6 +54,8 @@ - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | diff --git a/.github/workflows/templates/build-src-repo.yml.jinja b/.github/workflows/templates/build-src-repo.yml.jinja index f0c1a82b7c3..06f1745c8ca 100644 --- a/.github/workflows/templates/build-src-repo.yml.jinja +++ b/.github/workflows/templates/build-src-repo.yml.jinja @@ -9,7 +9,9 @@ - uses: actions/checkout@v4 - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | diff --git a/.github/workflows/templates/build-windows-repo.yml.jinja b/.github/workflows/templates/build-windows-repo.yml.jinja index dc96a8a2e31..a86daf7f58b 100644 --- a/.github/workflows/templates/build-windows-repo.yml.jinja +++ b/.github/workflows/templates/build-windows-repo.yml.jinja @@ -10,6 +10,8 @@ - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | diff --git a/.github/workflows/templates/ci.yml.jinja b/.github/workflows/templates/ci.yml.jinja index ff9f773898c..79b322cc812 100644 --- a/.github/workflows/templates/ci.yml.jinja +++ b/.github/workflows/templates/ci.yml.jinja @@ -68,23 +68,11 @@ 
<%- endif %> - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version - with: - python-binary: python3 - - - name: Restore Python Tools Virtualenvs Cache - uses: actions/cache@v3 - with: - path: .tools-venvs - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|changelog - restore-keys: | - ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|changelog - ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} - - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + cache-suffix: changelog - name: Setup Salt Version id: setup-salt-version @@ -226,23 +214,11 @@ with: python-version: "3.10" - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version - with: - python-binary: python3 - - - name: Restore Python Tools Virtualenvs Cache - uses: actions/cache@v3 - with: - path: .tools-venvs - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|build - restore-keys: | - ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}|build - ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 
'tools/**/*.py') }} - - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + cache-suffix: build - name: Setup Salt Version id: setup-salt-version @@ -352,6 +328,9 @@ - name: Setup Python Tools Scripts id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + cache-suffix: coverage - name: Install Nox run: | diff --git a/.github/workflows/templates/layout.yml.jinja b/.github/workflows/templates/layout.yml.jinja index 21c46817ad7..e16b70d4bd3 100644 --- a/.github/workflows/templates/layout.yml.jinja +++ b/.github/workflows/templates/layout.yml.jinja @@ -182,20 +182,10 @@ jobs: with: python-version: "3.10" - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version - with: - python-binary: python3 - - - name: Restore Cached Python Tools Virtualenvs - uses: actions/cache@v3 - with: - path: .tools-venvs - key: ${{ env.CACHE_SEED }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} - - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ env.CACHE_SEED }} - name: Pretty Print The GH Actions Event run: diff --git a/.github/workflows/templates/nightly.yml.jinja b/.github/workflows/templates/nightly.yml.jinja index e4f6bb8439e..e4350f44a36 100644 --- a/.github/workflows/templates/nightly.yml.jinja +++ b/.github/workflows/templates/nightly.yml.jinja @@ -167,6 +167,8 @@ concurrency: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Download Repository Artifact uses: actions/download-artifact@v3 diff --git a/.github/workflows/templates/release.yml.jinja 
b/.github/workflows/templates/release.yml.jinja index 7c5c28af059..ae1216ccbf0 100644 --- a/.github/workflows/templates/release.yml.jinja +++ b/.github/workflows/templates/release.yml.jinja @@ -98,6 +98,8 @@ permissions: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ env.CACHE_SEED }} - name: Pretty Print The GH Actions Event run: @@ -176,6 +178,8 @@ permissions: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -220,6 +224,8 @@ permissions: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Backup Previous Releases id: backup @@ -251,6 +257,8 @@ permissions: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Publish Release Repository env: @@ -287,6 +295,8 @@ permissions: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Setup GnuPG run: | @@ -403,6 +413,8 @@ permissions: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Restore Release Bucket run: | @@ -427,6 +439,8 @@ permissions: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Setup GnuPG run: | diff --git a/.github/workflows/templates/staging.yml.jinja b/.github/workflows/templates/staging.yml.jinja index 8e3a0c98d9f..c84ade07636 100644 --- a/.github/workflows/templates/staging.yml.jinja +++ 
b/.github/workflows/templates/staging.yml.jinja @@ -104,6 +104,8 @@ concurrency: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Download Release Patch uses: actions/download-artifact@v3 @@ -187,6 +189,8 @@ concurrency: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Setup GnuPG run: | diff --git a/.github/workflows/test-action-macos.yml b/.github/workflows/test-action-macos.yml index 383bc3efe44..b7cc93d5e8c 100644 --- a/.github/workflows/test-action-macos.yml +++ b/.github/workflows/test-action-macos.yml @@ -91,6 +91,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} - name: Generate Test Matrix id: generate-matrix diff --git a/.github/workflows/test-action.yml b/.github/workflows/test-action.yml index 706f4a0d6b5..ce5ac179a7d 100644 --- a/.github/workflows/test-action.yml +++ b/.github/workflows/test-action.yml @@ -101,6 +101,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} - name: Generate Test Matrix id: generate-matrix @@ -169,6 +171,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} - name: Download testrun-changed-files.txt if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }} diff --git a/.github/workflows/test-packages-action-macos.yml b/.github/workflows/test-packages-action-macos.yml index 208007cf304..7c2dbbec79e 100644 --- a/.github/workflows/test-packages-action-macos.yml +++ b/.github/workflows/test-packages-action-macos.yml @@ -88,6 +88,8 @@ jobs: - name: Setup Python Tools Scripts uses: 
./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} - name: Generate Package Test Matrix id: generate-pkg-matrix diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index 0f80439d36d..b7d39a533f2 100644 --- a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -95,6 +95,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} - name: Generate Package Test Matrix id: generate-pkg-matrix @@ -162,6 +164,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} - name: Get Salt Project GitHub Actions Bot Environment run: | From fce51983b3c1ce1d0bb214a9d18f25bacd5b1afd Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 14 Sep 2023 17:51:40 +0100 Subject: [PATCH 175/196] Migrated some `invoke` tasks to `python-tools-scripts` * `tasks/docs.py` -> `tools/precommit/docs.py` * `tasks/docstrings.py` -> `tools/precommit/docstrings.py` Refs #64374 Signed-off-by: Pedro Algarvio --- .pre-commit-config.yaml | 113 +++++------- changelog/64374.fixed.md | 4 + setup.cfg | 14 +- tools/__init__.py | 4 +- {tasks => tools/precommit}/docs.py | 189 +++++++++---------- {tasks => tools/precommit}/docstrings.py | 219 +++++++++++------------ 6 files changed, 259 insertions(+), 284 deletions(-) create mode 100644 changelog/64374.fixed.md rename {tasks => tools/precommit}/docs.py (71%) rename {tasks => tools/precommit}/docstrings.py (87%) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index aee06411e53..038c8c1344d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -55,12 +55,7 @@ repos: - pre-commit - changelog - pre-commit-checks - additional_dependencies: - - boto3==1.21.46 - - pyyaml==6.0.1 - - jinja2==3.1.2 - - packaging==23.0 - - 
virustotal3==1.0.8 + - id: tools alias: generate-workflows name: Generate GitHub Workflow Templates @@ -70,12 +65,7 @@ repos: - pre-commit - workflows - generate-workflows - additional_dependencies: - - boto3==1.21.46 - - pyyaml==6.0.1 - - jinja2==3.1.2 - - packaging==23.0 - - virustotal3==1.0.8 + - id: tools alias: actionlint name: Lint GitHub Actions Workflows @@ -86,18 +76,51 @@ repos: - pre-commit - workflows - actionlint - additional_dependencies: - - boto3==1.21.46 - - pyyaml==6.0.1 - - jinja2==3.1.2 - - packaging==23.0 - - virustotal3==1.0.8 + + - id: tools + alias: check-docs + name: Check Docs + files: ^(salt/.*\.py|doc/ref/.*\.rst)$ + args: + - pre-commit + - docs + - check + + - id: tools + alias: check-docstrings + name: Check docstrings + files: salt/.*\.py$ + exclude: > + (?x)^( + templates/.*| + salt/ext/.*| + )$ + args: + - pre-commit + - docstrings + - check + + - id: tools + alias: check-known-missing-docstrings + name: Check Known Missing Docstrings + stages: [manual] + files: salt/.*\.py$ + exclude: > + (?x)^( + templates/.*| + salt/ext/.*| + )$ + args: + - pre-commit + - docstrings + - check + + # ----- Packaging Requirements ------------------------------------------------------------------------------------> - repo: https://github.com/saltstack/pip-tools-compile-impersonate rev: "4.8" hooks: - # ----- Packaging Requirements ------------------------------------------------------------------------------------> - id: pip-tools-compile alias: compile-pkg-linux-3.7-zmq-requirements name: Linux Packaging Py3.7 ZeroMQ Requirements @@ -1205,24 +1228,6 @@ repos: # <---- Security --------------------------------------------------------------------------------------------------- # ----- Pre-Commit ------------------------------------------------------------------------------------------------> - - repo: https://github.com/saltstack/invoke-pre-commit - rev: v1.9.0 - hooks: - - id: invoke - alias: check-docs - name: Check Docs - files: 
^(salt/.*\.py|doc/ref/.*\.rst)$ - args: - - docs.check - additional_dependencies: - - blessings==1.7 - - pyyaml==6.0.1 - - distro==1.7.0 - - jinja2==3.0.3 - - msgpack==1.0.3 - - packaging - - looseversion - - repo: https://github.com/saltstack/invoke-pre-commit rev: v1.9.0 hooks: @@ -1242,9 +1247,6 @@ repos: - packaging - looseversion - - repo: https://github.com/saltstack/invoke-pre-commit - rev: v1.9.0 - hooks: - id: invoke alias: loader-check-virtual name: Check loader modules __virtual__ @@ -1265,29 +1267,6 @@ repos: - packaging - looseversion - - repo: https://github.com/saltstack/invoke-pre-commit - rev: v1.9.0 - hooks: - - id: invoke - alias: check-docstrings - name: Check docstrings - files: salt/.*\.py$ - exclude: > - (?x)^( - templates/.*| - salt/ext/.*| - )$ - args: - - docstrings.check - additional_dependencies: - - blessings==1.7 - - pyyaml==6.0.1 - - distro==1.7.0 - - jinja2==3.0.3 - - msgpack==1.0.3 - - packaging - - looseversion - - repo: https://github.com/saltstack/invoke-pre-commit rev: v1.9.0 hooks: @@ -1314,13 +1293,17 @@ repos: - looseversion - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.0.0 + rev: v1.3.0 hooks: - id: mypy alias: mypy-tools name: Run mypy against tools files: ^tools/.*\.py$ - #args: [--strict] + exclude: > + (?x)^( + templates/.*| + salt/.*| + )$ additional_dependencies: - attrs - rich diff --git a/changelog/64374.fixed.md b/changelog/64374.fixed.md new file mode 100644 index 00000000000..479dc6c8c1b --- /dev/null +++ b/changelog/64374.fixed.md @@ -0,0 +1,4 @@ +Migrated some [`invoke`](https://www.pyinvoke.org/) tasks to [`python-tools-scripts`](https://github.com/s0undt3ch/python-tools-scripts). 
+ +* `tasks/docs.py` -> `tools/precommit/docs.py` +* `tasks/docstrings.py` -> `tools/precommit/docstrings.py` diff --git a/setup.cfg b/setup.cfg index f99baf45528..2f452d87695 100644 --- a/setup.cfg +++ b/setup.cfg @@ -3,10 +3,22 @@ owner = root group = root [mypy] +packages = tools +exclude = (?x)( + salt + | tests + ).*\.py implicit_optional = True show_error_codes = True warn_return_any = True warn_unused_configs = True -[mypy.tools] +[mypy-tools.*] +ignore_missing_imports = True + +[mypy-tools.precommit.docstrings] +follow_imports = silent + +[mypy-salt.*] +follow_imports = silent ignore_missing_imports = True diff --git a/tools/__init__.py b/tools/__init__.py index 22be82c40de..f78eaf92a2c 100644 --- a/tools/__init__.py +++ b/tools/__init__.py @@ -42,9 +42,11 @@ ptscripts.register_tools_module("tools.pkg.repo.publish") ptscripts.register_tools_module("tools.precommit") ptscripts.register_tools_module("tools.precommit.changelog") ptscripts.register_tools_module("tools.precommit.workflows") +ptscripts.register_tools_module("tools.precommit.docs") +ptscripts.register_tools_module("tools.precommit.docstrings") +ptscripts.register_tools_module("tools.release", venv_config=RELEASE_VENV_CONFIG) ptscripts.register_tools_module("tools.testsuite") ptscripts.register_tools_module("tools.testsuite.download") -ptscripts.register_tools_module("tools.release", venv_config=RELEASE_VENV_CONFIG) ptscripts.register_tools_module("tools.vm") for name in ("boto3", "botocore", "urllib3"): diff --git a/tasks/docs.py b/tools/precommit/docs.py similarity index 71% rename from tasks/docs.py rename to tools/precommit/docs.py index 323d14a0a1f..a549a6cecf3 100644 --- a/tasks/docs.py +++ b/tools/precommit/docs.py @@ -1,9 +1,8 @@ """ - tasks.docstrings - ~~~~~~~~~~~~~~~~ - - Check salt code base for for missing or wrong docstrings +Check salt code base for for missing or wrong docs """ +# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated +from __future__ import 
annotations import ast import collections @@ -11,21 +10,18 @@ import os import pathlib import re -from invoke import task # pylint: disable=3rd-party-module-not-gated +from ptscripts import Context, command_group -from tasks import utils +import tools.utils -CODE_DIR = pathlib.Path(__file__).resolve().parent.parent -DOCS_DIR = CODE_DIR / "doc" -SALT_CODE_DIR = CODE_DIR / "salt" +DOCS_DIR = tools.utils.REPO_ROOT / "doc" +SALT_CODE_DIR = tools.utils.REPO_ROOT / "salt" -os.chdir(str(CODE_DIR)) - -python_module_to_doc_path = {} -doc_path_to_python_module = {} +PYTHON_MODULE_TO_DOC_PATH = {} +DOC_PATH_TO_PYTHON_MODULE = {} -check_paths = ( +CHECK_PATHS = ( "salt/auth", "salt/beacons", "salt/cache", @@ -52,12 +48,14 @@ check_paths = ( "salt/tops", "salt/wheel", ) -exclude_paths = ( +EXCLUDE_PATHS = ( "salt/cloud/cli.py", "salt/cloud/exceptions.py", "salt/cloud/libcloudfuncs.py", ) +cgroup = command_group(name="docs", help=__doc__, parent="pre-commit") + def build_path_cache(): """ @@ -65,13 +63,13 @@ def build_path_cache(): """ for path in SALT_CODE_DIR.rglob("*.py"): - path = path.resolve().relative_to(CODE_DIR) + path = path.resolve().relative_to(tools.utils.REPO_ROOT) strpath = str(path) if strpath.endswith("__init__.py"): continue - if not strpath.startswith(check_paths): + if not strpath.startswith(CHECK_PATHS): continue - if strpath.startswith(exclude_paths): + if strpath.startswith(EXCLUDE_PATHS): continue parts = list(path.parts) @@ -113,32 +111,21 @@ def build_path_cache(): / "all" / str(path).replace(".py", ".rst").replace(os.sep, ".") ) - stub_path = stub_path.relative_to(CODE_DIR) - python_module_to_doc_path[path] = stub_path + stub_path = stub_path.relative_to(tools.utils.REPO_ROOT) + PYTHON_MODULE_TO_DOC_PATH[path] = stub_path if path.exists(): - doc_path_to_python_module[stub_path] = path + DOC_PATH_TO_PYTHON_MODULE[stub_path] = path build_path_cache() def build_file_list(files, extension): - # Unfortunately invoke does not support nargs. 
- # We migth have been passed --files="foo.py bar.py" - # Turn that into a list of paths - _files = [] - for path in files: - if not path: - continue - for spath in path.split(): - if not spath.endswith(extension): - continue - _files.append(spath) - if not _files: - _files = CODE_DIR.rglob("*{}".format(extension)) + if not files: + _files = tools.utils.REPO_ROOT.rglob("*{}".format(extension)) else: - _files = [pathlib.Path(fname).resolve() for fname in _files] - _files = [path.relative_to(CODE_DIR) for path in _files] + _files = [fpath.resolve() for fpath in files if fpath.suffix == extension] + _files = [path.relative_to(tools.utils.REPO_ROOT) for path in _files] return _files @@ -148,9 +135,9 @@ def build_python_module_paths(files): strpath = str(path) if strpath.endswith("__init__.py"): continue - if not strpath.startswith(check_paths): + if not strpath.startswith(CHECK_PATHS): continue - if strpath.startswith(exclude_paths): + if strpath.startswith(EXCLUDE_PATHS): continue _files.append(path) return _files @@ -160,8 +147,7 @@ def build_docs_paths(files): return build_file_list(files, ".rst") -@task(iterable=["files"], positional=["files"]) -def check_inline_markup(ctx, files): +def check_inline_markup(ctx: Context, files: list[pathlib.Path]) -> int: """ Check docstring for :doc: usage @@ -174,9 +160,6 @@ def check_inline_markup(ctx, files): https://github.com/saltstack/salt/issues/12788 """ - # CD into Salt's repo root directory - ctx.cd(CODE_DIR) - files = build_python_module_paths(files) exitcode = 0 @@ -188,18 +171,14 @@ def check_inline_markup(ctx, files): if not docstring: continue if ":doc:" in docstring: - utils.error( - "The {} function in {} contains ':doc:' usage", funcdef.name, path + ctx.error( + f"The {funcdef.name} function in {path} contains ':doc:' usage" ) exitcode += 1 return exitcode -@task(iterable=["files"]) -def check_stubs(ctx, files): - # CD into Salt's repo root directory - ctx.cd(CODE_DIR) - +def check_stubs(ctx: Context, files: 
list[pathlib.Path]) -> int: files = build_python_module_paths(files) exitcode = 0 @@ -207,21 +186,20 @@ def check_stubs(ctx, files): strpath = str(path) if strpath.endswith("__init__.py"): continue - if not strpath.startswith(check_paths): + if not strpath.startswith(CHECK_PATHS): continue - if strpath.startswith(exclude_paths): + if strpath.startswith(EXCLUDE_PATHS): continue - stub_path = python_module_to_doc_path[path] + stub_path = PYTHON_MODULE_TO_DOC_PATH[path] if not stub_path.exists(): exitcode += 1 - utils.error( - "The module at {} does not have a sphinx stub at {}", path, stub_path + ctx.error( + f"The module at {path} does not have a sphinx stub at {stub_path}" ) return exitcode -@task(iterable=["files"]) -def check_virtual(ctx, files): +def check_virtual(ctx: Context, files: list[pathlib.Path]) -> int: """ Check if .rst files for each module contains the text ".. _virtual" indicating it is a virtual doc page, and, in case a module exists by @@ -235,22 +213,16 @@ def check_virtual(ctx, files): try: contents = path.read_text() except Exception as exc: # pylint: disable=broad-except - utils.error( - "Error while processing '{}': {}".format( - path, - exc, - ) - ) + ctx.error(f"Error while processing '{path}': {exc}") exitcode += 1 continue if ".. 
_virtual-" in contents: try: - python_module = doc_path_to_python_module[path] - utils.error( - "The doc file at {} indicates that it's virtual, yet, there's a" - " python module at {} that will shaddow it.", - path, - python_module, + python_module = DOC_PATH_TO_PYTHON_MODULE[path] + ctx.error( + f"The doc file at {path} indicates that it's virtual, yet, " + f"there's a python module at {python_module} that will " + "shaddow it.", ) exitcode += 1 except KeyError: @@ -259,8 +231,7 @@ def check_virtual(ctx, files): return exitcode -@task(iterable=["files"]) -def check_module_indexes(ctx, files): +def check_module_indexes(ctx: Context, files: list[pathlib.Path]) -> int: exitcode = 0 files = build_docs_paths(files) for path in files: @@ -288,9 +259,8 @@ def check_module_indexes(ctx, files): ) if module_index != sorted(module_index): exitcode += 1 - utils.error( - "The autosummary mods in {} are not properly sorted. Please sort them.", - path, + ctx.error( + f"The autosummary mods in {path} are not properly sorted. 
Please sort them.", ) module_index_duplicates = [ @@ -298,8 +268,8 @@ def check_module_indexes(ctx, files): ] if module_index_duplicates: exitcode += 1 - utils.error( - "Module index {} contains duplicates: {}", path, module_index_duplicates + ctx.error( + f"Module index {path} contains duplicates: {module_index_duplicates}" ) # Let's check if all python modules are included in the index path_parts = list(path.parts) @@ -320,7 +290,7 @@ def check_module_indexes(ctx, files): package = "log_handlers" path_parts = [] python_package = SALT_CODE_DIR.joinpath(package, *path_parts).relative_to( - CODE_DIR + tools.utils.REPO_ROOT ) modules = set() for module in python_package.rglob("*.py"): @@ -358,26 +328,26 @@ def check_module_indexes(ctx, files): missing_modules_in_index = set(modules) - set(module_index) if missing_modules_in_index: exitcode += 1 - utils.error( - "The module index at {} is missing the following modules: {}", - path, - ", ".join(missing_modules_in_index), + ctx.error( + f"The module index at {path} is missing the following modules: " + f"{', '.join(missing_modules_in_index)}" ) extra_modules_in_index = set(module_index) - set(modules) if extra_modules_in_index: exitcode += 1 - utils.error( - "The module index at {} has extra modules(non existing): {}", - path, - ", ".join(extra_modules_in_index), + ctx.error( + f"The module index at {path} has extra modules(non existing): " + f"{', '.join(extra_modules_in_index)}" ) return exitcode -@task(iterable=["files"]) -def check_stray(ctx, files): +def check_stray(ctx: Context, files: list[pathlib.Path]) -> int: exitcode = 0 - exclude_paths = ( + exclude_pathlib_paths: tuple[pathlib.Path, ...] + exclude_paths: tuple[str, ...] 
+ + exclude_pathlib_paths = ( DOCS_DIR / "_inc", DOCS_DIR / "ref" / "cli" / "_includes", DOCS_DIR / "ref" / "cli", @@ -412,41 +382,50 @@ def check_stray(ctx, files): DOCS_DIR / "ref" / "states" / "writing.rst", DOCS_DIR / "topics", ) - exclude_paths = tuple(str(p.relative_to(CODE_DIR)) for p in exclude_paths) + exclude_paths = tuple( + str(p.relative_to(tools.utils.REPO_ROOT)) for p in exclude_pathlib_paths + ) files = build_docs_paths(files) for path in files: - if not str(path).startswith(str((DOCS_DIR / "ref").relative_to(CODE_DIR))): + if not str(path).startswith( + str((DOCS_DIR / "ref").relative_to(tools.utils.REPO_ROOT)) + ): continue if str(path).startswith(exclude_paths): continue if path.name in ("index.rst", "glossary.rst", "faq.rst", "README.rst"): continue - try: - python_module = doc_path_to_python_module[path] - except KeyError: + if path not in DOC_PATH_TO_PYTHON_MODULE: contents = path.read_text() if ".. _virtual-" in contents: continue exitcode += 1 - utils.error( - "The doc at {} doesn't have a corresponding python module and is" - " considered a stray doc. Please remove it.", - path, + ctx.error( + f"The doc at {path} doesn't have a corresponding python module " + "and is considered a stray doc. Please remove it." 
) return exitcode -@task(iterable=["files"]) -def check(ctx, files): +@cgroup.command( + name="check", + arguments={ + "files": { + "help": "List of files to check", + "nargs": "*", + } + }, +) +def check(ctx: Context, files: list[pathlib.Path]) -> None: exitcode = 0 - utils.info("Checking inline :doc: markup") + ctx.info("Checking inline :doc: markup") exitcode += check_inline_markup(ctx, files) - utils.info("Checking python module stubs") + ctx.info("Checking python module stubs") exitcode += check_stubs(ctx, files) - utils.info("Checking virtual modules") + ctx.info("Checking virtual modules") exitcode += check_virtual(ctx, files) - utils.info("Checking stray docs") + ctx.info("Checking stray docs") exitcode += check_stray(ctx, files) - utils.info("Checking doc module indexes") + ctx.info("Checking doc module indexes") exitcode += check_module_indexes(ctx, files) - utils.exit_invoke(exitcode) + ctx.exit(exitcode) diff --git a/tasks/docstrings.py b/tools/precommit/docstrings.py similarity index 87% rename from tasks/docstrings.py rename to tools/precommit/docstrings.py index 3aed5c7fa87..37aea8b8c16 100644 --- a/tasks/docstrings.py +++ b/tools/precommit/docstrings.py @@ -1,10 +1,10 @@ """ - tasks.docstrings - ~~~~~~~~~~~~~~~~ - - Docstrings related tasks +Check salt code base for for missing or wrong docstrings. 
""" -# pylint: disable=resource-leakage +# Skip mypy checks since it will follow into Salt which doesn't yet have proper types defined +# mypy: ignore-errors +# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated +from __future__ import annotations import ast import os @@ -13,16 +13,15 @@ import re import sys from typing import TYPE_CHECKING -from invoke import task # pylint: disable=3rd-party-module-not-gated +from ptscripts import Context, command_group +import tools.utils from salt.loader import SALT_INTERNAL_LOADERS_PATHS from salt.version import SaltStackVersion -from tasks import utils -CODE_DIR = pathlib.Path(__file__).resolve().parent.parent -SALT_CODE_DIR = CODE_DIR / "salt" +SALT_CODE_DIR = tools.utils.REPO_ROOT / "salt" SALT_MODULES_PATH = SALT_CODE_DIR / "modules" -THIS_FILE = pathlib.Path(__file__).relative_to(CODE_DIR) +THIS_FILE = pathlib.Path(__file__).relative_to(tools.utils.REPO_ROOT) MISSING_DOCSTRINGS = { "salt/auth/django.py": ["is_connection_usable"], @@ -141,7 +140,6 @@ MISSING_DOCSTRINGS = { "salt/pillar/gpg.py": ["ext_pillar"], "salt/pillar/makostack.py": ["ext_pillar"], "salt/pillar/nacl.py": ["ext_pillar"], - "salt/pillar/stack.py": ["ext_pillar"], "salt/proxy/cisconso.py": ["init"], "salt/proxy/esxi.py": ["is_connected_via_vcenter"], "salt/proxy/fx2.py": ["host"], @@ -297,7 +295,6 @@ MISSING_DOCSTRINGS = { "iter_entry_points", ], "salt/utils/error.py": ["pack_exception"], - "salt/utils/etcd_util.py": ["get_conn", "tree"], "salt/utils/find.py": ["path_depth"], "salt/utils/gzip_util.py": ["open_fileobj", "uncompress", "open"], "salt/utils/icinga2.py": ["get_certs_path"], @@ -308,7 +305,6 @@ MISSING_DOCSTRINGS = { "regex_escape", ], "salt/utils/listdiffer.py": ["list_diff"], - "salt/utils/master.py": ["get_master_key", "ping_all_connected_minions"], "salt/utils/namecheap.py": [ "atts_to_dict", "get_opts", @@ -332,7 +328,6 @@ MISSING_DOCSTRINGS = { ], "salt/utils/openstack/swift.py": ["mkdirs", "check_swift"], 
"salt/utils/pkg/__init__.py": ["split_comparison"], - "salt/utils/process.py": ["systemd_notify_call", "default_signals"], "salt/utils/profile.py": ["activate_profile", "output_profile"], "salt/utils/pyobjects.py": ["need_salt"], "salt/utils/reclass.py": [ @@ -360,13 +355,6 @@ MISSING_DOCSTRINGS = { "salt/utils/ssh.py": ["key_is_encrypted"], "salt/utils/stringio.py": ["is_writable", "is_stringio", "is_readable"], "salt/utils/stringutils.py": ["random"], - "salt/utils/templates.py": [ - "wrap_tmpl_func", - "render_mako_tmpl", - "render_jinja_tmpl", - "render_wempy_tmpl", - ], - "salt/utils/verify.py": ["verify_logs_filter"], "salt/utils/virtualbox.py": [ "machine_get_machinestate_str", "machine_get_machinestate_tuple", @@ -380,13 +368,10 @@ MISSING_DOCSTRINGS = { ], "salt/utils/yamlloader.py": ["load"], "salt/utils/yamlloader_old.py": ["load"], - "salt/utils/zeromq.py": ["check_ipc_path_max_len"], } MISSING_EXAMPLES = { "salt/modules/acme.py": ["has", "renew_by", "needs_renewal"], - "salt/modules/ansiblegate.py": ["help", "list_"], "salt/modules/apkpkg.py": ["purge"], - "salt/modules/aptpkg.py": ["expand_repo_def"], "salt/modules/arista_pyeapi.py": ["get_connection"], "salt/modules/artifactory.py": [ "get_latest_release", @@ -475,7 +460,6 @@ MISSING_EXAMPLES = { "salt/modules/boto_ssm.py": ["get_parameter", "delete_parameter", "put_parameter"], "salt/modules/capirca_acl.py": ["get_filter_pillar", "get_term_pillar"], "salt/modules/ceph.py": ["zap"], - "salt/modules/chroot.py": ["exist"], "salt/modules/ciscoconfparse_mod.py": [ "find_objects", "find_objects_wo_child", @@ -489,7 +473,6 @@ MISSING_EXAMPLES = { "set_data_value", "apply_rollback", ], - "salt/modules/cp.py": ["envs", "recv", "recv_chunked"], "salt/modules/cryptdev.py": ["active"], "salt/modules/datadog_api.py": ["post_event"], "salt/modules/defaults.py": ["deepcopy", "update"], @@ -608,7 +591,6 @@ MISSING_EXAMPLES = { "salt/modules/napalm_probes.py": ["delete_probes", "schedule_probes", "set_probes"], 
"salt/modules/netbox.py": ["get_", "filter_", "slugify"], "salt/modules/netmiko_mod.py": ["call", "multi_call", "get_connection"], - "salt/modules/network.py": ["fqdns"], "salt/modules/neutronng.py": [ "get_openstack_cloud", "compare_changes", @@ -763,21 +745,13 @@ MISSING_EXAMPLES = { "register_vm", "get_vm_config", "get_vm_config_file", - "list_licenses", "compare_vm_configs", "get_advanced_configs", "delete_advanced_configs", - "create_vmfs_datastore", "get_vm", ], "salt/modules/win_pkg.py": ["get_package_info"], "salt/modules/win_timezone.py": ["zone_compare"], - "salt/modules/zabbix.py": [ - "substitute_params", - "get_zabbix_id_mapper", - "get_object_id_by_params", - "compare_params", - ], "salt/modules/zk_concurrency.py": [ "lock", "party_members", @@ -827,8 +801,17 @@ you've made already. Whatever approach you decide to take, just drop a comment in the PR letting us know! """ +cgroup = command_group(name="docstrings", help=__doc__, parent="pre-commit") -def annotate(kind: str, fpath: str, start_lineno: int, end_lineno: int, message: str): + +def annotate( + ctx: Context, + kind: str, + fpath: pathlib.Path, + start_lineno: int, + end_lineno: int, + message: str, +) -> None: if kind not in ("warning", "error"): raise RuntimeError("The annotation kind can only be one of 'warning', 'error'.") if os.environ.get("GH_ACTIONS_ANNOTATE") is None: @@ -836,7 +819,7 @@ def annotate(kind: str, fpath: str, start_lineno: int, end_lineno: int, message: github_output = os.environ.get("GITHUB_OUTPUT") if github_output is None: - utils.warn("The 'GITHUB_OUTPUT' variable is not set. Not adding annotations.") + ctx.warn("The 'GITHUB_OUTPUT' variable is not set. 
Not adding annotations.") return if TYPE_CHECKING: @@ -846,40 +829,52 @@ def annotate(kind: str, fpath: str, start_lineno: int, end_lineno: int, message: message.rstrip().replace("%", "%25").replace("\r", "%0D").replace("\n", "%0A") ) # Print it to stdout so that the GitHub runner pick's it up and adds the annotation - print( + ctx.print( f"::{kind} file={fpath},line={start_lineno},endLine={end_lineno}::{message}", file=sys.stdout, flush=True, ) -@task(iterable=["files"], positional=["files"]) -def check(ctx, files, check_proper_formatting=False, error_on_known_failures=False): +@cgroup.command( + name="check", + arguments={ + "files": { + "help": "List of files to check", + "nargs": "*", + }, + "suppress_warnings": { + "help": "Supress warning messages on known issues", + }, + "check_proper_formatting": { + "help": "Run formatting checks on docstrings", + }, + "error_on_known_failures": { + "help": "Raise an error on known failures", + }, + }, +) +def check_docstrings( + ctx: Context, + files: list[pathlib.Path], + suppress_warnings: bool = False, + check_proper_formatting: bool = False, + error_on_known_failures: bool = False, +) -> None: """ Check salt's docstrings """ - # CD into Salt's repo root directory - ctx.cd(CODE_DIR) - - # Unfortunately invoke does not support nargs. 
- # We migth have been passed --files="foo.py bar.py" - # Turn that into a list of paths - _files = [] - for path in files: - if not path: - continue - _files.extend(path.split()) - if not _files: + if not files: _files = SALT_CODE_DIR.rglob("*.py") else: - _files = [pathlib.Path(fname) for fname in _files] - - _files = [path.resolve() for path in _files] + _files = [fpath.resolve() for fpath in files if fpath.suffix == ".py"] errors = 0 exitcode = 0 warnings = 0 for path in _files: + if str(path).startswith(str(tools.utils.REPO_ROOT / "salt" / "ext")): + continue contents = path.read_text() try: module = ast.parse(path.read_text(), filename=str(path)) @@ -889,10 +884,11 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal if error: errors += 1 exitcode = 1 - utils.error( - "The module '{}' does not provide a proper `{}` version: {!r} is not valid.", - path.relative_to(CODE_DIR), - *error, + ctx.error( + "The module '{}' does not provide a proper `{}` version: {!r} is not valid.".format( + path.relative_to(tools.utils.REPO_ROOT), + *error, + ) ) for funcdef in [ @@ -904,17 +900,19 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal if error: errors += 1 exitcode = 1 - utils.error( - "The module '{}' does not provide a proper `{}` version: {!r} is not valid.", - path.relative_to(CODE_DIR), - *error, + ctx.error( + "The module '{}' does not provide a proper `{}` version: {!r} is not valid.".format( + path, + *error, + ) ) annotate( + ctx, "error", - path.relative_to(CODE_DIR), + path, funcdef.lineno, funcdef.body[0].lineno, - "Version {1:r!} is not valid for {0!r}".format(*error), + "Version {1!r} is not valid for {0!r}".format(*error), ) if not str(path).startswith(SALT_INTERNAL_LOADERS_PATHS): @@ -922,7 +920,7 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal continue funcname = funcdef.name - relpath = str(path.relative_to(CODE_DIR)) + relpath = 
str(path.relative_to(tools.utils.REPO_ROOT)) # We're dealing with a salt loader module if funcname.startswith("_"): @@ -935,14 +933,14 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal and error_on_known_failures is False ): warnings += 1 - utils.warn( - "The function '{}' on '{}' does not have a docstring", - funcname, - relpath, - ) + if suppress_warnings is False: + ctx.warn( + f"The function '{funcname}' on '{relpath}' does not have a docstring" + ) annotate( + ctx, "warning", - path.relative_to(CODE_DIR), + path.relative_to(tools.utils.REPO_ROOT), funcdef.lineno, funcdef.body[0].lineno, "Missing docstring", @@ -950,14 +948,13 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal continue errors += 1 exitcode = 1 - utils.error( - "The function '{}' on '{}' does not have a docstring", - funcname, - relpath, + ctx.error( + f"The function '{funcname}' on '{relpath}' does not have a docstring" ) annotate( + ctx, "error", - path.relative_to(CODE_DIR), + path.relative_to(tools.utils.REPO_ROOT), funcdef.lineno, funcdef.body[0].lineno, "Missing docstring", @@ -966,14 +963,12 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal elif funcname in MISSING_DOCSTRINGS.get(relpath, ()): # This was previously a know function with a missing docstring. # Warn about it so that it get's removed from this list - warnings += 1 - utils.warn( - "The function '{}' on '{}' was previously known to not have a docstring, " - "which is no longer the case. Please remove it from 'MISSING_DOCSTRINGS' ." - "in '{}'", - funcname, - relpath, - THIS_FILE, + errors += 1 + exitcode = 1 + ctx.error( + f"The function '{funcname}' on '{relpath}' was previously known to not " + "have a docstring, which is no longer the case. 
Please remove it from " + f"'MISSING_DOCSTRINGS' in '{THIS_FILE}'" ) try: @@ -993,14 +988,15 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal and error_on_known_failures is False ): warnings += 1 - utils.warn( - "The function '{}' on '{}' does not have a 'CLI Example:' in its docstring", - funcname, - relpath, - ) + if suppress_warnings is False: + ctx.warn( + f"The function '{funcname}' on '{relpath}' does not have a " + "'CLI Example:' in its docstring" + ) annotate( + ctx, "warning", - path.relative_to(CODE_DIR), + path.relative_to(tools.utils.REPO_ROOT), funcdef.lineno, funcdef.body[0].lineno, "Missing 'CLI Example:' in docstring", @@ -1008,14 +1004,13 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal continue errors += 1 exitcode = 1 - utils.error( - "The function '{}' on '{}' does not have a 'CLI Example:' in its docstring", - funcname, - relpath, + ctx.error( + f"The function '{funcname}' on '{relpath}' does not have a 'CLI Example:' in its docstring" ) annotate( + ctx, "error", - path.relative_to(CODE_DIR), + path.relative_to(tools.utils.REPO_ROOT), funcdef.lineno, funcdef.body[0].lineno, "Missing 'CLI Example:' in docstring", @@ -1024,14 +1019,12 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal elif funcname in MISSING_EXAMPLES.get(relpath, ()): # This was previously a know function with a missing CLI example # Warn about it so that it get's removed from this list - warnings += 1 - utils.warn( - "The function '{}' on '{}' was previously known to not have a CLI Example, " - "which is no longer the case. Please remove it from 'MISSING_EXAMPLES'. " - "in '{}'", - funcname, - relpath, - THIS_FILE, + errors += 1 + exitcode = 1 + ctx.error( + f"The function '{funcname}' on '{relpath}' was previously known to not " + "have a CLI Example, which is no longer the case. 
Please remove it from " + f"'MISSING_EXAMPLES' in '{THIS_FILE}'" ) if check_proper_formatting is False: @@ -1042,20 +1035,22 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal if _check_cli_example_proper_formatting(docstring) is False: errors += 1 exitcode = 1 - utils.error( + ctx.error( "The function {!r} on '{}' does not have a proper 'CLI Example:' section in " "its docstring. The proper format is:\n" "CLI Example:\n" "\n" ".. code-block:: bash\n" "\n" - " salt '*' \n", - funcdef.name, - path.relative_to(CODE_DIR), + " salt '*' \n".format( + funcdef.name, + path.relative_to(tools.utils.REPO_ROOT), + ) ) annotate( + ctx, "warning", - path.relative_to(CODE_DIR), + path.relative_to(tools.utils.REPO_ROOT), funcdef.lineno, funcdef.body[0].lineno, "Wrong format in 'CLI Example:' in docstring.\n" @@ -1072,15 +1067,15 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal path.write_text(contents) if warnings: - utils.warn("Found {} warnings", warnings) + ctx.warn(f"Found {warnings} warnings") if exitcode: - utils.error("Found {} errors", errors) + ctx.error(f"Found {errors} errors") if os.environ.get("GH_ACTIONS_ANNOTATE") and (warnings or errors): github_step_summary = os.environ.get("GITHUB_STEP_SUMMARY") if github_step_summary: with open(github_step_summary, "w", encoding="utf-8") as wfh: wfh.write(SUMMARY) - utils.exit_invoke(exitcode) + ctx.exit(exitcode) CHECK_VALID_VERSION_RE = re.compile( From 06756cc08c27a451c777939d9ff928264d1af0b4 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 15 Nov 2023 16:14:39 +0000 Subject: [PATCH 176/196] Migrate `tasks/loader.py` -> `tools/precommit/loader.py` Refs #64374 Signed-off-by: Pedro Algarvio --- .pre-commit-config.yaml | 34 ++++++------- changelog/64374.fixed.md | 1 + tools/__init__.py | 1 + tools/precommit/__init__.py | 40 +++++++++++++++ tools/precommit/docstrings.py | 4 +- {tasks => tools/precommit}/loader.py | 74 ++++++++++++++-------------- 6 files 
changed, 94 insertions(+), 60 deletions(-) rename {tasks => tools/precommit}/loader.py (58%) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 038c8c1344d..7cf56cf8c9d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -115,6 +115,20 @@ repos: - docstrings - check + - id: tools + alias: loader-check-virtual + name: Check loader modules __virtual__ + files: salt/.*\.py$ + exclude: > + (?x)^( + templates/.*| + salt/ext/.*| + )$ + args: + - pre-commit + - salt-loaders + - check-virtual + # ----- Packaging Requirements ------------------------------------------------------------------------------------> - repo: https://github.com/saltstack/pip-tools-compile-impersonate @@ -1247,26 +1261,6 @@ repos: - packaging - looseversion - - id: invoke - alias: loader-check-virtual - name: Check loader modules __virtual__ - files: salt/.*\.py$ - exclude: > - (?x)^( - templates/.*| - salt/ext/.*| - )$ - args: - - loader.check-virtual - additional_dependencies: - - blessings==1.7 - - pyyaml==6.0.1 - - distro==1.7.0 - - jinja2==3.0.3 - - msgpack==1.0.3 - - packaging - - looseversion - - repo: https://github.com/saltstack/invoke-pre-commit rev: v1.9.0 hooks: diff --git a/changelog/64374.fixed.md b/changelog/64374.fixed.md index 479dc6c8c1b..8b94be869d7 100644 --- a/changelog/64374.fixed.md +++ b/changelog/64374.fixed.md @@ -2,3 +2,4 @@ Migrated some [`invoke`](https://www.pyinvoke.org/) tasks to [`python-tools-scri * `tasks/docs.py` -> `tools/precommit/docs.py` * `tasks/docstrings.py` -> `tools/precommit/docstrings.py` +* `tasks/loader.py` -> `tools/precommit/loader.py` diff --git a/tools/__init__.py b/tools/__init__.py index f78eaf92a2c..1b34b867966 100644 --- a/tools/__init__.py +++ b/tools/__init__.py @@ -44,6 +44,7 @@ ptscripts.register_tools_module("tools.precommit.changelog") ptscripts.register_tools_module("tools.precommit.workflows") ptscripts.register_tools_module("tools.precommit.docs") 
ptscripts.register_tools_module("tools.precommit.docstrings") +ptscripts.register_tools_module("tools.precommit.loader") ptscripts.register_tools_module("tools.release", venv_config=RELEASE_VENV_CONFIG) ptscripts.register_tools_module("tools.testsuite") ptscripts.register_tools_module("tools.testsuite.download") diff --git a/tools/precommit/__init__.py b/tools/precommit/__init__.py index 57d9d1ae62a..c10eadeb479 100644 --- a/tools/precommit/__init__.py +++ b/tools/precommit/__init__.py @@ -3,7 +3,47 @@ These commands, and sub-commands, are used by pre-commit. """ from ptscripts import command_group +import tools.utils + # Define the command group cgroup = command_group( name="pre-commit", help="Pre-Commit Related Commands", description=__doc__ ) + +SALT_BASE_PATH = tools.utils.REPO_ROOT / "salt" + +SALT_INTERNAL_LOADERS_PATHS = ( + # This is a 1:1 copy of SALT_INTERNAL_LOADERS_PATHS found in salt/loader/__init__.py + str(SALT_BASE_PATH / "auth"), + str(SALT_BASE_PATH / "beacons"), + str(SALT_BASE_PATH / "cache"), + str(SALT_BASE_PATH / "client" / "ssh" / "wrapper"), + str(SALT_BASE_PATH / "cloud" / "clouds"), + str(SALT_BASE_PATH / "engines"), + str(SALT_BASE_PATH / "executors"), + str(SALT_BASE_PATH / "fileserver"), + str(SALT_BASE_PATH / "grains"), + str(SALT_BASE_PATH / "log_handlers"), + str(SALT_BASE_PATH / "matchers"), + str(SALT_BASE_PATH / "metaproxy"), + str(SALT_BASE_PATH / "modules"), + str(SALT_BASE_PATH / "netapi"), + str(SALT_BASE_PATH / "output"), + str(SALT_BASE_PATH / "pillar"), + str(SALT_BASE_PATH / "proxy"), + str(SALT_BASE_PATH / "queues"), + str(SALT_BASE_PATH / "renderers"), + str(SALT_BASE_PATH / "returners"), + str(SALT_BASE_PATH / "roster"), + str(SALT_BASE_PATH / "runners"), + str(SALT_BASE_PATH / "sdb"), + str(SALT_BASE_PATH / "serializers"), + str(SALT_BASE_PATH / "spm" / "pkgdb"), + str(SALT_BASE_PATH / "spm" / "pkgfiles"), + str(SALT_BASE_PATH / "states"), + str(SALT_BASE_PATH / "thorium"), + str(SALT_BASE_PATH / "tokens"), + 
str(SALT_BASE_PATH / "tops"), + str(SALT_BASE_PATH / "utils"), + str(SALT_BASE_PATH / "wheel"), +) diff --git a/tools/precommit/docstrings.py b/tools/precommit/docstrings.py index 37aea8b8c16..9cbc5a848d0 100644 --- a/tools/precommit/docstrings.py +++ b/tools/precommit/docstrings.py @@ -16,8 +16,8 @@ from typing import TYPE_CHECKING from ptscripts import Context, command_group import tools.utils -from salt.loader import SALT_INTERNAL_LOADERS_PATHS from salt.version import SaltStackVersion +from tools.precommit import SALT_INTERNAL_LOADERS_PATHS SALT_CODE_DIR = tools.utils.REPO_ROOT / "salt" SALT_MODULES_PATH = SALT_CODE_DIR / "modules" @@ -865,7 +865,7 @@ def check_docstrings( Check salt's docstrings """ if not files: - _files = SALT_CODE_DIR.rglob("*.py") + _files = list(SALT_CODE_DIR.rglob("*.py")) else: _files = [fpath.resolve() for fpath in files if fpath.suffix == ".py"] diff --git a/tasks/loader.py b/tools/precommit/loader.py similarity index 58% rename from tasks/loader.py rename to tools/precommit/loader.py index d65e5e28591..bbec9c00f92 100644 --- a/tasks/loader.py +++ b/tools/precommit/loader.py @@ -1,24 +1,35 @@ """ - tasks.loader - ~~~~~~~~~~~~ - - Salt loader checks +Salt loader checks """ import ast import pathlib -from invoke import task # pylint: disable=3rd-party-module-not-gated +from ptscripts import Context, command_group -from salt.loader import SALT_INTERNAL_LOADERS_PATHS -from tasks import utils +import tools.utils +from tools.precommit import SALT_INTERNAL_LOADERS_PATHS -CODE_DIR = pathlib.Path(__file__).resolve().parent.parent -SALT_CODE_DIR = CODE_DIR / "salt" +SALT_CODE_DIR = tools.utils.REPO_ROOT / "salt" + +cgroup = command_group(name="salt-loaders", help=__doc__, parent="pre-commit") -@task(iterable=["files"], positional=["files"]) -def check_virtual(ctx, files, enforce_virtualname=False): +@cgroup.command( + name="check-virtual", + arguments={ + "files": { + "help": "List of files to check", + "nargs": "*", + }, + 
"enforce_virtualname": { + "help": "Enforce the usage of `__virtualname__`", + }, + }, +) +def check_virtual( + ctx: Context, files: list[pathlib.Path], enforce_virtualname: bool = False +) -> None: """ Check Salt loader modules for a defined `__virtualname__` attribute and `__virtual__` function. @@ -26,23 +37,10 @@ def check_virtual(ctx, files, enforce_virtualname=False): https://github.com/saltstack/salt/blob/27ae8260983b11fe6e32a18e777d550be9fe1dc2/tests/unit/test_virtualname.py """ - # CD into Salt's repo root directory - ctx.cd(CODE_DIR) - - # Unfortunately invoke does not support nargs. - # We migth have been passed --files="foo.py bar.py" - # Turn that into a list of paths - _files = [] - for path in files: - if not path: - continue - _files.extend(path.split()) - if not _files: - _files = SALT_CODE_DIR.rglob("*.py") + if not files: + _files = list(SALT_CODE_DIR.rglob("*.py")) else: - _files = [pathlib.Path(fname) for fname in _files] - - _files = [path.resolve() for path in _files] + _files = [fpath.resolve() for fpath in files if fpath.suffix == ".py"] errors = 0 exitcode = 0 @@ -78,14 +76,15 @@ def check_virtual(ctx, files, enforce_virtualname=False): continue if target.id == "__virtualname__": found_virtualname_attr = True - if node.value.s not in path.name: + if node.value.s not in path.name: # type: ignore[attr-defined] errors += 1 exitcode = 1 - utils.error( + ctx.error( 'The value of the __virtualname__ attribute, "{}"' - " is not part of {}", - node.value.s, - path.name, + " is not part of {}".format( + node.value.s, # type: ignore[attr-defined] + path.name, + ) ) if found_virtualname_attr: break @@ -93,11 +92,10 @@ def check_virtual(ctx, files, enforce_virtualname=False): if not found_virtualname_attr and enforce_virtualname: errors += 1 exitcode = 1 - utils.error( - "The salt loader module {} defines a __virtual__() function but does" - " not define a __virtualname__ attribute", - path.relative_to(CODE_DIR), + ctx.error( + f"The salt loader 
module {path.relative_to(tools.utils.REPO_ROOT)} defines " + "a __virtual__() function but does not define a __virtualname__ attribute" ) if exitcode: - utils.error("Found {} errors", errors) - utils.exit_invoke(exitcode) + ctx.error(f"Found {errors} errors") + ctx.exit(exitcode) From eeaa88b4e969e820f9d0bfdbcfa22c7b5be27cee Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Mon, 5 Jun 2023 11:31:08 +0100 Subject: [PATCH 177/196] Migrated `tasks/filemap.py` -> `tools/precommit/filemap.py` Refs #64374 Signed-off-by: Pedro Algarvio --- changelog/64374.fixed.md | 1 + tasks/filemap.py | 95 -------------------------------------- tools/__init__.py | 1 + tools/precommit/filemap.py | 91 ++++++++++++++++++++++++++++++++++++ 4 files changed, 93 insertions(+), 95 deletions(-) delete mode 100644 tasks/filemap.py create mode 100644 tools/precommit/filemap.py diff --git a/changelog/64374.fixed.md b/changelog/64374.fixed.md index 8b94be869d7..e56ef803036 100644 --- a/changelog/64374.fixed.md +++ b/changelog/64374.fixed.md @@ -3,3 +3,4 @@ Migrated some [`invoke`](https://www.pyinvoke.org/) tasks to [`python-tools-scri * `tasks/docs.py` -> `tools/precommit/docs.py` * `tasks/docstrings.py` -> `tools/precommit/docstrings.py` * `tasks/loader.py` -> `tools/precommit/loader.py` +* `tasks/filemap.py` -> `tools/precommit/filemap.py` diff --git a/tasks/filemap.py b/tasks/filemap.py deleted file mode 100644 index a1eb62c6b82..00000000000 --- a/tasks/filemap.py +++ /dev/null @@ -1,95 +0,0 @@ -""" - tasks.filemap - ~~~~~~~~~~~~~ - - tests/filename_map.yml validity checks -""" -import pathlib -import re - -import yaml -from invoke import task # pylint: disable=3rd-party-module-not-gated - -from tasks import utils - -CODE_DIR = pathlib.Path(__file__).resolve().parent.parent -FILENAME_MAP_PATH = CODE_DIR / "tests" / "filename_map.yml" - - -def _match_to_test_file(match): - tests_path = CODE_DIR / "tests" - parts = match.split(".") - parts[-1] += ".py" - return 
tests_path.joinpath(*parts).relative_to(CODE_DIR) - - -def _check_matches(rule, matches): - errors = 0 - for match in matches: - filematch = _match_to_test_file(match) - if not filematch.exists(): - utils.error( - "The match '{}' for rule '{}' points to a non existing test module" - " path: {}", - match, - rule, - filematch, - ) - errors += 1 - return errors - - -@task -def check(ctx): - exitcode = 0 - excludes = ("tasks/", "templates/", ".nox/") - full_filelist = [path.relative_to(CODE_DIR) for path in CODE_DIR.rglob("*.py")] - filelist = [ - str(path) for path in full_filelist if not str(path).startswith(excludes) - ] - filename_map = yaml.safe_load(FILENAME_MAP_PATH.read_text()) - checked = set() - for rule, matches in filename_map.items(): - if rule == "*": - exitcode += _check_matches(rule, matches) - elif "|" in rule: - # This is regex - for filepath in filelist: - if re.match(rule, filepath): - # Found at least one match, stop looking - break - else: - utils.error( - "Could not find a matching file in the salt repo for the rule '{}'", - rule, - ) - exitcode += 1 - continue - exitcode += _check_matches(rule, matches) - elif "*" in rule or "\\" in rule: - # Glob matching - process_matches = True - for filerule in CODE_DIR.glob(rule): - if not filerule.exists(): - utils.error( - "The rule '{}' points to a non existing path: {}", - rule, - filerule, - ) - exitcode += 1 - process_matches = False - if process_matches: - exitcode += _check_matches(rule, matches) - else: - # Direct file paths as rules - filerule = pathlib.Path(rule) - if not filerule.exists(): - utils.error( - "The rule '{}' points to a non existing path: {}", rule, filerule - ) - exitcode += 1 - continue - exitcode += _check_matches(rule, matches) - if exitcode: - utils.error("Found {} errors", exitcode) - utils.exit_invoke(exitcode) diff --git a/tools/__init__.py b/tools/__init__.py index 1b34b867966..8b08111dc8a 100644 --- a/tools/__init__.py +++ b/tools/__init__.py @@ -44,6 +44,7 @@ 
ptscripts.register_tools_module("tools.precommit.changelog") ptscripts.register_tools_module("tools.precommit.workflows") ptscripts.register_tools_module("tools.precommit.docs") ptscripts.register_tools_module("tools.precommit.docstrings") +ptscripts.register_tools_module("tools.precommit.filemap") ptscripts.register_tools_module("tools.precommit.loader") ptscripts.register_tools_module("tools.release", venv_config=RELEASE_VENV_CONFIG) ptscripts.register_tools_module("tools.testsuite") diff --git a/tools/precommit/filemap.py b/tools/precommit/filemap.py new file mode 100644 index 00000000000..96a662fa7e7 --- /dev/null +++ b/tools/precommit/filemap.py @@ -0,0 +1,91 @@ +""" +`tests/filename_map.yml` validity checks +""" +import pathlib +import re + +import yaml +from ptscripts import Context, command_group + +import tools.utils + +FILENAME_MAP_PATH = tools.utils.REPO_ROOT / "tests" / "filename_map.yml" + +cgroup = command_group(name="filemap", help=__doc__, parent="pre-commit") + + +def _match_to_test_file(match: str) -> pathlib.Path: + tests_path = tools.utils.REPO_ROOT / "tests" + parts = match.split(".") + parts[-1] += ".py" + return tests_path.joinpath(*parts).relative_to(tools.utils.REPO_ROOT) + + +def _check_matches(ctx: Context, rule: str, matches: list[str]) -> int: + errors = 0 + for match in matches: + filematch = _match_to_test_file(match) + if not filematch.exists(): + ctx.error( + f"The match '{match}' for rule '{rule}' points to a non " + f"existing test module path: {filematch}" + ) + errors += 1 + return errors + + +@cgroup.command( + name="check", +) +def check(ctx: Context) -> None: + exitcode = 0 + excludes = ("tools/", "templates/", ".nox/") + full_filelist = [ + path.relative_to(tools.utils.REPO_ROOT) + for path in tools.utils.REPO_ROOT.rglob("*.py") + ] + filelist = [ + str(path) for path in full_filelist if not str(path).startswith(excludes) + ] + filename_map = yaml.safe_load(FILENAME_MAP_PATH.read_text()) + for rule, matches in 
filename_map.items(): + if rule == "*": + exitcode += _check_matches(ctx, rule, matches) + elif "|" in rule: + # This is regex + for filepath in filelist: + if re.match(rule, filepath): + # Found at least one match, stop looking + break + else: + ctx.error( + f"Could not find a matching file in the salt repo for the rule '{rule}'" + ) + exitcode += 1 + continue + exitcode += _check_matches(ctx, rule, matches) + elif "*" in rule or "\\" in rule: + # Glob matching + process_matches = True + for filerule in tools.utils.REPO_ROOT.glob(rule): + if not filerule.exists(): + ctx.error( + f"The rule '{rule}' points to a non existing path: {filerule}" + ) + exitcode += 1 + process_matches = False + if process_matches: + exitcode += _check_matches(ctx, rule, matches) + else: + # Direct file paths as rules + filerule = pathlib.Path(rule) + if not filerule.exists(): + ctx.error( + f"The rule '{rule}' points to a non existing path: {filerule}" + ) + exitcode += 1 + continue + exitcode += _check_matches(ctx, rule, matches) + if exitcode: + ctx.error(f"Found {exitcode} errors") + ctx.exit(exitcode) From effd3da06fb4d39f99410000721dd87b8bdc2add Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 15 Nov 2023 16:17:39 +0000 Subject: [PATCH 178/196] Removed all remaining `invoke` support Fixes #64374 Signed-off-by: Pedro Algarvio --- .pre-commit-config.yaml | 104 +++-------------------- changelog/64374.fixed.md | 2 +- noxfile.py | 33 +------ requirements/static/ci/invoke.in | 5 -- requirements/static/ci/py3.10/invoke.txt | 18 ---- requirements/static/ci/py3.7/invoke.txt | 18 ---- requirements/static/ci/py3.8/invoke.txt | 18 ---- requirements/static/ci/py3.9/invoke.txt | 18 ---- tasks/README.md | 28 ------ tasks/__init__.py | 11 --- tasks/utils.py | 64 -------------- 11 files changed, 12 insertions(+), 307 deletions(-) delete mode 100644 requirements/static/ci/invoke.in delete mode 100644 requirements/static/ci/py3.10/invoke.txt delete mode 100644 
requirements/static/ci/py3.7/invoke.txt delete mode 100644 requirements/static/ci/py3.8/invoke.txt delete mode 100644 requirements/static/ci/py3.9/invoke.txt delete mode 100644 tasks/README.md delete mode 100644 tasks/__init__.py delete mode 100644 tasks/utils.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7cf56cf8c9d..d52ea63f244 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -129,6 +129,16 @@ repos: - salt-loaders - check-virtual + - id: tools + alias: check-filemap + name: Check Filename Map Change Matching + files: ^tests/(filename_map\.yml|.*\.py)$ + pass_filenames: false + args: + - pre-commit + - filemap + - check + # ----- Packaging Requirements ------------------------------------------------------------------------------------> - repo: https://github.com/saltstack/pip-tools-compile-impersonate @@ -1039,56 +1049,6 @@ repos: - requirements/static/ci/changelog.in # <---- Changelog -------------------------------------------------------------------------------------------------- - # ----- Invoke ----------------------------------------------------------------------------------------------------> - - id: pip-tools-compile - alias: compile-ci-invoke-3.7-requirements - name: Linux CI Py3.7 Invoke Requirements - files: ^requirements/static/ci/(invoke\.in|py3.7/(invoke|linux)\.txt)$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.7 - - --no-emit-index-url - - requirements/static/ci/invoke.in - - - id: pip-tools-compile - alias: compile-ci-invoke-3.8-requirements - name: Linux CI Py3.8 Invoke Requirements - files: ^requirements/static/ci/(invoke\.in|py3.8/(invoke|linux)\.txt)$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.8 - - --no-emit-index-url - - requirements/static/ci/invoke.in - - - id: pip-tools-compile - alias: compile-ci-invoke-3.9-requirements - name: Linux CI Py3.9 Invoke Requirements - files: 
^requirements/static/ci/(invoke\.in|py3.9/(invoke|linux)\.txt)$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.9 - - --no-emit-index-url - - requirements/static/ci/invoke.in - - - id: pip-tools-compile - alias: compile-ci-invoke-3.10-requirements - name: Linux CI Py3.10 Invoke Requirements - files: ^requirements/static/ci/(invoke\.in|py3.10/(invoke|linux)\.txt)$ - pass_filenames: false - args: - - -v - - --build-isolation - - --py-version=3.10 - - --no-emit-index-url - - requirements/static/ci/invoke.in - # <---- Invoke ----------------------------------------------------------------------------------------------------- - # ----- Tools ----------------------------------------------------------------------------------------------------> - id: pip-tools-compile alias: compile-ci-tools-3.9-requirements @@ -1242,50 +1202,6 @@ repos: # <---- Security --------------------------------------------------------------------------------------------------- # ----- Pre-Commit ------------------------------------------------------------------------------------------------> - - repo: https://github.com/saltstack/invoke-pre-commit - rev: v1.9.0 - hooks: - - id: invoke - alias: check-filemap - name: Check Filename Map Change Matching - files: ^tests/(filename_map\.yml|.*\.py)$ - pass_filenames: false - args: - - filemap.check - additional_dependencies: - - blessings==1.7 - - pyyaml==6.0.1 - - distro==1.7.0 - - jinja2==3.0.3 - - msgpack==1.0.3 - - packaging - - looseversion - - - repo: https://github.com/saltstack/invoke-pre-commit - rev: v1.9.0 - hooks: - - id: invoke - alias: check-known-missing-docstrings - name: Check Known Missing Docstrings - stages: [manual] - files: salt/.*\.py$ - exclude: > - (?x)^( - templates/.*| - salt/ext/.*| - )$ - args: - - docstrings.check - - --error-on-known-failures - additional_dependencies: - - blessings==1.7 - - pyyaml==6.0.1 - - distro==1.7.0 - - jinja2==3.0.3 - - msgpack==1.0.3 - - packaging - - looseversion - 
- repo: https://github.com/pre-commit/mirrors-mypy rev: v1.3.0 hooks: diff --git a/changelog/64374.fixed.md b/changelog/64374.fixed.md index e56ef803036..31dfc9b1b1d 100644 --- a/changelog/64374.fixed.md +++ b/changelog/64374.fixed.md @@ -1,4 +1,4 @@ -Migrated some [`invoke`](https://www.pyinvoke.org/) tasks to [`python-tools-scripts`](https://github.com/s0undt3ch/python-tools-scripts). +Migrated all [`invoke`](https://www.pyinvoke.org/) tasks to [`python-tools-scripts`](https://github.com/s0undt3ch/python-tools-scripts). * `tasks/docs.py` -> `tools/precommit/docs.py` * `tasks/docstrings.py` -> `tools/precommit/docstrings.py` diff --git a/noxfile.py b/noxfile.py index fddcf357f3e..c44ab354381 100644 --- a/noxfile.py +++ b/noxfile.py @@ -1536,7 +1536,7 @@ def lint_salt(session): paths = session.posargs else: # TBD replace paths entries when implement pyproject.toml - paths = ["setup.py", "noxfile.py", "salt/", "tasks/"] + paths = ["setup.py", "noxfile.py", "salt/"] _lint(session, ".pylintrc", flags, paths) @@ -1648,37 +1648,6 @@ def docs_man(session, compress, update, clean): os.chdir("..") -@nox.session(name="invoke", python="3") -def invoke(session): - """ - Run invoke tasks - """ - if _upgrade_pip_setuptools_and_wheel(session): - _install_requirements(session) - requirements_file = os.path.join( - "requirements", "static", "ci", _get_pydir(session), "invoke.txt" - ) - install_command = ["--progress-bar=off", "-r", requirements_file] - session.install(*install_command, silent=PIP_INSTALL_SILENT) - - cmd = ["inv"] - files = [] - - # Unfortunately, invoke doesn't support the nargs functionality like argpase does. 
- # Let's make it behave properly - for idx, posarg in enumerate(session.posargs): - if idx == 0: - cmd.append(posarg) - continue - if posarg.startswith("--"): - cmd.append(posarg) - continue - files.append(posarg) - if files: - cmd.append("--files={}".format(" ".join(files))) - session.run(*cmd) - - @nox.session(name="changelog", python="3") @nox.parametrize("draft", [False, True]) @nox.parametrize("force", [False, True]) diff --git a/requirements/static/ci/invoke.in b/requirements/static/ci/invoke.in deleted file mode 100644 index 4b924892386..00000000000 --- a/requirements/static/ci/invoke.in +++ /dev/null @@ -1,5 +0,0 @@ ---constraint=./py{py_version}/{platform}.txt - -invoke -blessings -pyyaml diff --git a/requirements/static/ci/py3.10/invoke.txt b/requirements/static/ci/py3.10/invoke.txt deleted file mode 100644 index fbeaead2dd4..00000000000 --- a/requirements/static/ci/py3.10/invoke.txt +++ /dev/null @@ -1,18 +0,0 @@ -# -# This file is autogenerated by pip-compile -# To update, run: -# -# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.10/invoke.txt requirements/static/ci/invoke.in -# -blessings==1.7 - # via -r requirements/static/ci/invoke.in -invoke==1.4.1 - # via -r requirements/static/ci/invoke.in -pyyaml==6.0.1 - # via - # -c requirements/static/ci/py3.10/linux.txt - # -r requirements/static/ci/invoke.in -six==1.16.0 - # via - # -c requirements/static/ci/py3.10/linux.txt - # blessings diff --git a/requirements/static/ci/py3.7/invoke.txt b/requirements/static/ci/py3.7/invoke.txt deleted file mode 100644 index dfc00dd752f..00000000000 --- a/requirements/static/ci/py3.7/invoke.txt +++ /dev/null @@ -1,18 +0,0 @@ -# -# This file is autogenerated by pip-compile -# To update, run: -# -# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.7/invoke.txt requirements/static/ci/invoke.in -# -blessings==1.7 - # via -r requirements/static/ci/invoke.in -invoke==1.4.1 - # via -r requirements/static/ci/invoke.in 
-pyyaml==6.0.1 - # via - # -c requirements/static/ci/py3.7/linux.txt - # -r requirements/static/ci/invoke.in -six==1.16.0 - # via - # -c requirements/static/ci/py3.7/linux.txt - # blessings diff --git a/requirements/static/ci/py3.8/invoke.txt b/requirements/static/ci/py3.8/invoke.txt deleted file mode 100644 index 11ecca4806f..00000000000 --- a/requirements/static/ci/py3.8/invoke.txt +++ /dev/null @@ -1,18 +0,0 @@ -# -# This file is autogenerated by pip-compile -# To update, run: -# -# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.8/invoke.txt requirements/static/ci/invoke.in -# -blessings==1.7 - # via -r requirements/static/ci/invoke.in -invoke==1.4.1 - # via -r requirements/static/ci/invoke.in -pyyaml==6.0.1 - # via - # -c requirements/static/ci/py3.8/linux.txt - # -r requirements/static/ci/invoke.in -six==1.16.0 - # via - # -c requirements/static/ci/py3.8/linux.txt - # blessings diff --git a/requirements/static/ci/py3.9/invoke.txt b/requirements/static/ci/py3.9/invoke.txt deleted file mode 100644 index aeb0bdab1c5..00000000000 --- a/requirements/static/ci/py3.9/invoke.txt +++ /dev/null @@ -1,18 +0,0 @@ -# -# This file is autogenerated by pip-compile -# To update, run: -# -# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.9/invoke.txt requirements/static/ci/invoke.in -# -blessings==1.7 - # via -r requirements/static/ci/invoke.in -invoke==1.4.1 - # via -r requirements/static/ci/invoke.in -pyyaml==6.0.1 - # via - # -c requirements/static/ci/py3.9/linux.txt - # -r requirements/static/ci/invoke.in -six==1.16.0 - # via - # -c requirements/static/ci/py3.9/linux.txt - # blessings diff --git a/tasks/README.md b/tasks/README.md deleted file mode 100644 index 6ff3fb10a7d..00000000000 --- a/tasks/README.md +++ /dev/null @@ -1,28 +0,0 @@ -# What is this directory? - -This directory contains python scripts which should be called by [invoke](https://pypi.org/project/invoke). 
- -Instead of having several multi-purpose python scripts scatered through multiple paths in the salt code base, -we will now concentrate them under an invoke task. - -## Calling Invoke - -Invoke can be called in the following ways. - -### Installed system-wide - -If invoke is installed system-wide, be sure you also have `blessings` installed if you want coloured output, although -it's not a hard requirement. - -``` -inv docs.check -``` - -### Using Nox - -Since salt already uses nox, and nox manages virtual environments and respective requirements, calling invoke is as -simple as: - -``` -nox -e invoke -- docs.check -``` diff --git a/tasks/__init__.py b/tasks/__init__.py deleted file mode 100644 index 5f5aac88cb8..00000000000 --- a/tasks/__init__.py +++ /dev/null @@ -1,11 +0,0 @@ -from invoke import Collection # pylint: disable=3rd-party-module-not-gated - -from . import docs, docstrings, filemap, loader - -ns = Collection() -ns.add_collection(Collection.from_module(docs, name="docs"), name="docs") -ns.add_collection( - Collection.from_module(docstrings, name="docstrings"), name="docstrings" -) -ns.add_collection(Collection.from_module(loader, name="loader"), name="loader") -ns.add_collection(Collection.from_module(filemap, name="filemap"), name="filemap") diff --git a/tasks/utils.py b/tasks/utils.py deleted file mode 100644 index e082508a5a3..00000000000 --- a/tasks/utils.py +++ /dev/null @@ -1,64 +0,0 @@ -""" - tasks.utils - ~~~~~~~~~~~ - - Invoke utilities -""" - -import sys - -try: - from blessings import Terminal - - try: - terminal = Terminal() - HAS_BLESSINGS = True - except Exception: # pylint: disable=broad-except - terminal = None - HAS_BLESSINGS = False -except ImportError: - terminal = None - HAS_BLESSINGS = False - - -def exit_invoke(exitcode, message=None, *args, **kwargs): - if message is not None: - if exitcode > 0: - warn(message, *args, **kwargs) - else: - info(message, *args, **kwargs) - sys.exit(exitcode) - - -def info(message, *args, 
**kwargs): - if not isinstance(message, str): - message = str(message) - message = message.format(*args, **kwargs) - if terminal: - message = terminal.bold(terminal.green(message)) - write_message(message) - - -def warn(message, *args, **kwargs): - if not isinstance(message, str): - message = str(message) - message = message.format(*args, **kwargs) - if terminal: - message = terminal.bold(terminal.yellow(message)) - write_message(message) - - -def error(message, *args, **kwargs): - if not isinstance(message, str): - message = str(message) - message = message.format(*args, **kwargs) - if terminal: - message = terminal.bold(terminal.red(message)) - write_message(message) - - -def write_message(message): - sys.stderr.write(message) - if not message.endswith("\n"): - sys.stderr.write("\n") - sys.stderr.flush() From 78e218131475b1907c8af46973507dd07f2ba5c5 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Sat, 27 May 2023 20:23:44 +0100 Subject: [PATCH 179/196] Echo the installed version Signed-off-by: Pedro Algarvio --- .github/actions/setup-python-tools-scripts/action.yml | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/.github/actions/setup-python-tools-scripts/action.yml b/.github/actions/setup-python-tools-scripts/action.yml index 85123e98fe5..7bba4321c8c 100644 --- a/.github/actions/setup-python-tools-scripts/action.yml +++ b/.github/actions/setup-python-tools-scripts/action.yml @@ -42,9 +42,6 @@ runs: with: path: ${{ inputs.cwd }}/.tools-venvs key: ${{ inputs.cache-prefix }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version-sha256sum }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}${{ inputs.cache-suffix && format('|{0}', inputs.cache-suffix) || '' }} - restore-keys: | - ${{ inputs.cache-prefix }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version-sha256sum }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}${{ inputs.cache-suffix && format('|{0}', 
inputs.cache-suffix) || '' }} - ${{ inputs.cache-prefix }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version-sha256sum }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }} - name: Install 'python-tools-scripts' shell: bash @@ -62,5 +59,7 @@ runs: shell: bash working-directory: ${{ inputs.cwd }} run: | - VERSION=$(tools --version) + # The first time `tools` runs with newer virtual enviroments we need to disregard the output + tools --debug --version + VERSION=$(tools --version | tail -n 1) echo "version=$VERSION" >> "${GITHUB_OUTPUT}" From 56570f887f4a3082b44acaf09eda147dfddc943a Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Tue, 6 Jun 2023 08:49:55 +0100 Subject: [PATCH 180/196] Bump cache seed Signed-off-by: Pedro Algarvio --- .github/workflows/ci.yml | 2 +- .github/workflows/nightly.yml | 2 +- .github/workflows/release.yml | 2 +- .github/workflows/scheduled.yml | 2 +- .github/workflows/staging.yml | 2 +- .github/workflows/templates/layout.yml.jinja | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 478f78ae0ad..c524934030f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -16,7 +16,7 @@ on: env: COLUMNS: 190 - CACHE_SEED: SEED-3 # Bump the number to invalidate all caches + CACHE_SEED: SEED-5 # Bump the number to invalidate all caches RELENV_DATA: "${{ github.workspace }}/.relenv" permissions: diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 54ed810e08f..a0308e5b785 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -22,7 +22,7 @@ on: env: COLUMNS: 190 - CACHE_SEED: SEED-3 # Bump the number to invalidate all caches + CACHE_SEED: SEED-5 # Bump the number to invalidate all caches RELENV_DATA: "${{ github.workspace }}/.relenv" permissions: diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 7d2d473ddaa..63a17faabe4 100644 --- 
a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -21,7 +21,7 @@ on: env: COLUMNS: 190 - CACHE_SEED: SEED-3 # Bump the number to invalidate all caches + CACHE_SEED: SEED-5 # Bump the number to invalidate all caches RELENV_DATA: "${{ github.workspace }}/.relenv" permissions: diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index 9650cf46f96..a801b5a8bc2 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -12,7 +12,7 @@ on: env: COLUMNS: 190 - CACHE_SEED: SEED-3 # Bump the number to invalidate all caches + CACHE_SEED: SEED-5 # Bump the number to invalidate all caches RELENV_DATA: "${{ github.workspace }}/.relenv" permissions: diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 7ce8aa13cfc..3a44e39b57d 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -37,7 +37,7 @@ on: env: COLUMNS: 190 - CACHE_SEED: SEED-3 # Bump the number to invalidate all caches + CACHE_SEED: SEED-5 # Bump the number to invalidate all caches RELENV_DATA: "${{ github.workspace }}/.relenv" permissions: diff --git a/.github/workflows/templates/layout.yml.jinja b/.github/workflows/templates/layout.yml.jinja index e16b70d4bd3..2d7afcb51bb 100644 --- a/.github/workflows/templates/layout.yml.jinja +++ b/.github/workflows/templates/layout.yml.jinja @@ -34,7 +34,7 @@ on: env: COLUMNS: 190 - CACHE_SEED: SEED-3 # Bump the number to invalidate all caches + CACHE_SEED: SEED-5 # Bump the number to invalidate all caches RELENV_DATA: "${{ github.workspace }}/.relenv" <%- endblock env %> From 80f39400be0b523334fca173a8434569b7fa4e64 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Wed, 15 Nov 2023 15:24:01 +0000 Subject: [PATCH 181/196] Colored pre-commit output Signed-off-by: Pedro Algarvio --- .github/workflows/ci.yml | 1 + .github/workflows/nightly.yml | 1 + .github/workflows/pre-commit-action.yml | 3 +++ .github/workflows/scheduled.yml | 1 + 
.github/workflows/staging.yml | 1 + .github/workflows/templates/ci.yml.jinja | 1 + 6 files changed, 8 insertions(+) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index c524934030f..34c4d8611bd 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -350,6 +350,7 @@ jobs: if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} env: SKIP: lint-salt,lint-tests + PRE_COMMIT_COLOR: always run: | # Run it twice so that pre-commit can fix anything that can be automatically fixed. git commit -am "Release v${{ needs.prepare-workflow.outputs.salt-version }}" || \ diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index a0308e5b785..34e055d4974 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -399,6 +399,7 @@ jobs: if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} env: SKIP: lint-salt,lint-tests + PRE_COMMIT_COLOR: always run: | # Run it twice so that pre-commit can fix anything that can be automatically fixed. git commit -am "Release v${{ needs.prepare-workflow.outputs.salt-version }}" || \ diff --git a/.github/workflows/pre-commit-action.yml b/.github/workflows/pre-commit-action.yml index a4f97ae5d33..2847ffe64d0 100644 --- a/.github/workflows/pre-commit-action.yml +++ b/.github/workflows/pre-commit-action.yml @@ -26,6 +26,9 @@ jobs: container: image: ghcr.io/saltstack/salt-ci-containers/python:3.10 + env: + PRE_COMMIT_COLOR: always + steps: - name: Install System Deps diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index a801b5a8bc2..a267fcb43f7 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -384,6 +384,7 @@ jobs: if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} env: SKIP: lint-salt,lint-tests + PRE_COMMIT_COLOR: always run: | # Run it twice so that pre-commit can fix anything that can be automatically fixed. 
git commit -am "Release v${{ needs.prepare-workflow.outputs.salt-version }}" || \ diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 3a44e39b57d..88c297dd5d0 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -389,6 +389,7 @@ jobs: if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} env: SKIP: lint-salt,lint-tests + PRE_COMMIT_COLOR: always run: | # Run it twice so that pre-commit can fix anything that can be automatically fixed. git commit -am "Release v${{ needs.prepare-workflow.outputs.salt-version }}" || \ diff --git a/.github/workflows/templates/ci.yml.jinja b/.github/workflows/templates/ci.yml.jinja index 79b322cc812..b3d771891e1 100644 --- a/.github/workflows/templates/ci.yml.jinja +++ b/.github/workflows/templates/ci.yml.jinja @@ -155,6 +155,7 @@ if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} env: SKIP: lint-salt,lint-tests + PRE_COMMIT_COLOR: always run: | # Run it twice so that pre-commit can fix anything that can be automatically fixed. 
git commit -am "Release v${{ needs.prepare-workflow.outputs.salt-version }}" || \ From 4fc766bca893c873d9e5398444f1a608c78baa2e Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Fri, 17 Nov 2023 18:44:47 +0000 Subject: [PATCH 182/196] Add Py3.11 requirements Signed-off-by: Pedro Algarvio --- .pre-commit-config.yaml | 271 +++++++ requirements/static/ci/py3.11/changelog.txt | 36 + requirements/static/ci/py3.11/cloud.txt | 686 +++++++++++++++++ .../static/ci/py3.11/darwin-crypto.txt | 10 + requirements/static/ci/py3.11/darwin.txt | 482 ++++++++++++ requirements/static/ci/py3.11/docs.txt | 196 +++++ .../static/ci/py3.11/freebsd-crypto.txt | 10 + requirements/static/ci/py3.11/freebsd.txt | 474 ++++++++++++ requirements/static/ci/py3.11/lint.txt | 687 ++++++++++++++++++ .../static/ci/py3.11/linux-crypto.txt | 10 + requirements/static/ci/py3.11/linux.txt | 523 +++++++++++++ .../static/ci/py3.11/tools-virustotal.txt | 28 + requirements/static/ci/py3.11/tools.txt | 50 +- .../static/ci/py3.11/windows-crypto.txt | 12 + requirements/static/ci/py3.11/windows.txt | 499 +++++++++++++ requirements/static/pkg/py3.11/darwin.txt | 123 ++++ requirements/static/pkg/py3.11/freebsd.txt | 107 +++ requirements/static/pkg/py3.11/linux.txt | 107 +++ requirements/static/pkg/py3.11/windows.txt | 141 ++++ 19 files changed, 4436 insertions(+), 16 deletions(-) create mode 100644 requirements/static/ci/py3.11/changelog.txt create mode 100644 requirements/static/ci/py3.11/cloud.txt create mode 100644 requirements/static/ci/py3.11/darwin-crypto.txt create mode 100644 requirements/static/ci/py3.11/darwin.txt create mode 100644 requirements/static/ci/py3.11/docs.txt create mode 100644 requirements/static/ci/py3.11/freebsd-crypto.txt create mode 100644 requirements/static/ci/py3.11/freebsd.txt create mode 100644 requirements/static/ci/py3.11/lint.txt create mode 100644 requirements/static/ci/py3.11/linux-crypto.txt create mode 100644 requirements/static/ci/py3.11/linux.txt create mode 100644 
requirements/static/ci/py3.11/tools-virustotal.txt create mode 100644 requirements/static/ci/py3.11/windows-crypto.txt create mode 100644 requirements/static/ci/py3.11/windows.txt create mode 100644 requirements/static/pkg/py3.11/darwin.txt create mode 100644 requirements/static/pkg/py3.11/freebsd.txt create mode 100644 requirements/static/pkg/py3.11/linux.txt create mode 100644 requirements/static/pkg/py3.11/windows.txt diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d52ea63f244..ee5beec9705 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -205,6 +205,21 @@ repos: - --no-emit-index-url - requirements/static/pkg/linux.in + - id: pip-tools-compile + alias: compile-pkg-linux-3.11-zmq-requirements + name: Linux Packaging Py3.11 ZeroMQ Requirements + files: ^requirements/((base|zeromq|crypto)\.txt|static/pkg/(linux\.in|py3\.11/linux\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --platform=linux + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --no-emit-index-url + - requirements/static/pkg/linux.in + - id: pip-tools-compile alias: compile-pkg-freebsd-3.7-zmq-requirements name: FreeBSD Packaging Py3.7 ZeroMQ Requirements @@ -265,6 +280,21 @@ repos: - --no-emit-index-url - requirements/static/pkg/freebsd.in + - id: pip-tools-compile + alias: compile-pkg-freebsd-3.11-zmq-requirements + name: FreeBSD Packaging Py3.11 ZeroMQ Requirements + files: ^requirements/((base|zeromq|crypto)\.txt|static/pkg/(freebsd\.in|py3\.11/freebsd\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --platform=freebsd + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --no-emit-index-url + - requirements/static/pkg/freebsd.in + - id: pip-tools-compile alias: compile-pkg-darwin-3.9-zmq-requirements name: Darwin Packaging Py3.9 ZeroMQ Requirements @@ -293,6 +323,20 @@ repos: - --no-emit-index-url - 
requirements/static/pkg/darwin.in + - id: pip-tools-compile + alias: compile-pkg-darwin-3.11-zmq-requirements + name: Darwin Packaging Py3.11 ZeroMQ Requirements + files: ^(requirements/((base|zeromq|crypto|darwin)\.txt|static/pkg/(darwin\.in|py3\.11/darwin\.txt)))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --platform=darwin + - --include=requirements/darwin.txt + - --no-emit-index-url + - requirements/static/pkg/darwin.in + - id: pip-tools-compile alias: compile-pkg-windows-3.7-zmq-requirements name: Windows Packaging Py3.7 ZeroMQ Requirements @@ -349,6 +393,20 @@ repos: - --no-emit-index-url - requirements/static/pkg/windows.in + - id: pip-tools-compile + alias: compile-pkg-windows-3.11-zmq-requirements + name: Windows Packaging Py3.11 ZeroMQ Requirements + files: ^requirements/((base|zeromq|crypto|windows)\.txt|static/pkg/(windows\.in|py3\.11/windows\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --platform=windows + - --include=requirements/windows.txt + - --no-emit-index-url + - requirements/static/pkg/windows.in + # <---- Packaging Requirements ------------------------------------------------------------------------------------- # ----- CI Requirements -------------------------------------------------------------------------------------------> @@ -424,6 +482,24 @@ repos: - --no-emit-index-url - requirements/static/ci/linux.in + - id: pip-tools-compile + alias: compile-ci-linux-3.11-zmq-requirements + name: Linux CI Py3.11 ZeroMQ Requirements + files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(linux\.in|common\.in)|py3\.11/linux\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --platform=linux + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --include=requirements/pytest.txt + - --include=requirements/static/pkg/linux.in + - --include=requirements/static/ci/common.in + - 
--no-emit-index-url + - requirements/static/ci/linux.in + - id: pip-tools-compile alias: compile-ci-linux-crypto-3.7-requirements name: Linux CI Py3.7 Crypto Requirements @@ -481,6 +557,21 @@ repos: - --no-emit-index-url - requirements/static/ci/crypto.in + - id: pip-tools-compile + alias: compile-ci-linux-crypto-3.11-requirements + name: Linux CI Py3.11 Crypto Requirements + files: ^requirements/(crypto\.txt|static/ci/crypto\.in)$ + files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.11/linux-crypto\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --platform=linux + - --out-prefix=linux + - --no-emit-index-url + - requirements/static/ci/crypto.in + - id: pip-tools-compile alias: compile-ci-freebsd-3.7-zmq-requirements @@ -554,6 +645,24 @@ repos: - --no-emit-index-url - requirements/static/ci/freebsd.in + - id: pip-tools-compile + alias: compile-ci-freebsd-3.11-zmq-requirements + name: FreeBSD CI Py3.11 ZeroMQ Requirements + files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(freebsd|common)\.in|py3\.11/freebsd\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --platform=freebsd + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --include=requirements/pytest.txt + - --include=requirements/static/pkg/freebsd.in + - --include=requirements/static/ci/common.in + - --no-emit-index-url + - requirements/static/ci/freebsd.in + - id: pip-tools-compile alias: compile-ci-freebsd-crypto-3.7-requirements name: FreeBSD CI Py3.7 Crypto Requirements @@ -611,6 +720,21 @@ repos: - --no-emit-index-url - requirements/static/ci/crypto.in + - id: pip-tools-compile + alias: compile-ci-freebsd-crypto-3.11-requirements + name: FreeBSD CI Py3.11 Crypto Requirements + files: ^requirements/(crypto\.txt|static/ci/crypto\.in)$ + files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.11/freebsd-crypto\.txt))$ + pass_filenames: false + args: + - -v + - 
--build-isolation + - --py-version=3.11 + - --platform=freebsd + - --out-prefix=freebsd + - --no-emit-index-url + - requirements/static/ci/crypto.in + - id: pip-tools-compile alias: compile-ci-darwin-3.9-zmq-requirements name: Darwin CI Py3.9 ZeroMQ Requirements @@ -645,6 +769,23 @@ repos: - --no-emit-index-url - requirements/static/ci/darwin.in + - id: pip-tools-compile + alias: compile-ci-darwin-3.11-zmq-requirements + name: Darwin CI Py3.11 ZeroMQ Requirements + files: ^(requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(darwin|common)\.in|py3\.11/darwin\.txt)))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --platform=darwin + - --include=requirements/darwin.txt + - --include=requirements/pytest.txt + - --include=requirements/static/pkg/darwin.in + - --include=requirements/static/ci/common.in + - --no-emit-index-url + - requirements/static/ci/darwin.in + - id: pip-tools-compile alias: compile-ci-darwin-crypto-3.9-requirements name: Darwin CI Py3.9 Crypto Requirements @@ -673,6 +814,20 @@ repos: - --no-emit-index-url - requirements/static/ci/crypto.in + - id: pip-tools-compile + alias: compile-ci-darwin-crypto-3.11-requirements + name: Darwin CI Py3.11 Crypto Requirements + files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.11/darwin-crypto\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --platform=darwin + - --out-prefix=darwin + - --no-emit-index-url + - requirements/static/ci/crypto.in + - id: pip-tools-compile alias: compile-ci-windows-3.7-zmq-requirements name: Windows CI Py3.7 ZeroMQ Requirements @@ -741,6 +896,23 @@ repos: - --no-emit-index-url - requirements/static/ci/windows.in + - id: pip-tools-compile + alias: compile-ci-windows-3.11-zmq-requirements + name: Windows CI Py3.11 ZeroMQ Requirements + files: requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(windows|common)\.in|py3\.11/windows\.txt))$ + pass_filenames: false + args: + - -v + - 
--build-isolation + - --py-version=3.11 + - --platform=windows + - --include=requirements/windows.txt + - --include=requirements/pytest.txt + - --include=requirements/static/pkg/windows.in + - --include=requirements/static/ci/common.in + - --no-emit-index-url + - requirements/static/ci/windows.in + - id: pip-tools-compile alias: compile-ci-windows-crypto-3.7-requirements name: Windows CI Py3.7 Crypto Requirements @@ -797,6 +969,20 @@ repos: - --no-emit-index-url - requirements/static/ci/crypto.in + - id: pip-tools-compile + alias: compile-ci-windows-crypto-3.11-requirements + name: Windows CI Py3.11 Crypto Requirements + files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.11/windows-crypto\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --platform=windows + - --out-prefix=windows + - --no-emit-index-url + - requirements/static/ci/crypto.in + # <---- CI Requirements -------------------------------------------------------------------------------------------- @@ -868,6 +1054,23 @@ repos: - --include=requirements/static/ci/common.in - --no-emit-index-url - requirements/static/ci/cloud.in + + - id: pip-tools-compile + alias: compile-ci-cloud-3.11-requirements + name: Cloud CI Py3.11 Requirements + files: ^requirements/((base|zeromq|pytest)\.txt|static/(pkg/linux\.in|ci/((cloud|common)\.in|py3\.11/cloud\.txt)))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --include=requirements/pytest.txt + - --include=requirements/static/pkg/linux.in + - --include=requirements/static/ci/common.in + - --no-emit-index-url + - requirements/static/ci/cloud.in # <---- Cloud CI Requirements -------------------------------------------------------------------------------------- # ----- Doc CI Requirements ---------------------------------------------------------------------------------------> @@ -931,6 +1134,21 @@ repos: - 
--no-emit-index-url - requirements/static/ci/docs.in + - id: pip-tools-compile + alias: compile-doc-requirements + name: Docs CI Py3.11 Requirements + files: ^requirements/((base|zeromq|pytest)\.txt|static/ci/(docs|common|linux)\.in|static/pkg/linux\.in|static/pkg/.*/linux\.txt)$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --platform=linux + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --no-emit-index-url + - requirements/static/ci/docs.in + # <---- Doc CI Requirements ---------------------------------------------------------------------------------------- # ----- Lint CI Requirements --------------------------------------------------------------------------------------> @@ -1006,6 +1224,24 @@ repos: - --no-emit-index-url - requirements/static/ci/lint.in + - id: pip-tools-compile + alias: compile-ci-lint-3.11-requirements + name: Lint CI Py3.11 Requirements + files: ^requirements/((base|zeromq)\.txt|static/(pkg/linux\.in|ci/(linux\.in|common\.in|lint\.in|py3\.11/linux\.txt)))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --platform=linux + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --include=requirements/static/pkg/linux.in + - --include=requirements/static/ci/linux.in + - --include=requirements/static/ci/common.in + - --no-emit-index-url + - requirements/static/ci/lint.in + # <---- Lint CI Requirements --------------------------------------------------------------------------------------- # ----- Changelog -------------------------------------------------------------------------------------------------> @@ -1047,6 +1283,19 @@ repos: - --platform=linux - --no-emit-index-url - requirements/static/ci/changelog.in + + - id: pip-tools-compile + alias: compile-ci-changelog-3.11-requirements + name: Changelog CI Py3.11 Requirements + files: ^requirements/static/ci/(changelog\.in|py3\.11/(changelog|linux)\.txt)$ + 
pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --platform=linux + - --no-emit-index-url + - requirements/static/ci/changelog.in # <---- Changelog -------------------------------------------------------------------------------------------------- # ----- Tools ----------------------------------------------------------------------------------------------------> @@ -1074,6 +1323,18 @@ repos: - --no-emit-index-url - requirements/static/ci/tools.in + - id: pip-tools-compile + alias: compile-ci-tools-3.11-requirements + name: Linux CI Py3.11 Tools Requirements + files: ^requirements/static/ci/(tools\.in|py3.11/(tools|linux)\.txt)$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --no-emit-index-url + - requirements/static/ci/tools.in + - id: pip-tools-compile alias: compile-ci-tools-virustotal-3.9-requirements name: Linux CI Py3.9 Tools virustotal Requirements @@ -1093,6 +1354,16 @@ repos: - -v - --py-version=3.10 - requirements/static/ci/tools-virustotal.in + + - id: pip-tools-compile + alias: compile-ci-tools-virustotal-3.11-requirements + name: Linux CI Py3.11 Tools virustotal Requirements + files: ^requirements/static/ci/(tools(-virustotal)?\.in|py3.11/(tools(-virustotal)?|linux)\.txt)$ + pass_filenames: false + args: + - -v + - --py-version=3.11 + - requirements/static/ci/tools-virustotal.in # <---- Tools ----------------------------------------------------------------------------------------------------- # ----- Code Formatting -------------------------------------------------------------------------------------------> diff --git a/requirements/static/ci/py3.11/changelog.txt b/requirements/static/ci/py3.11/changelog.txt new file mode 100644 index 00000000000..2aa97aa5da2 --- /dev/null +++ b/requirements/static/ci/py3.11/changelog.txt @@ -0,0 +1,36 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url 
--output-file=requirements/static/ci/py3.11/changelog.txt requirements/static/ci/changelog.in +# +click-default-group==1.2.2 + # via towncrier +click==7.1.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # click-default-group + # towncrier +incremental==17.5.0 + # via towncrier +jinja2==3.1.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # towncrier +looseversion==1.0.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/changelog.in +markupsafe==2.1.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # jinja2 +packaging==22.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/changelog.in +towncrier==22.12.0 + # via -r requirements/static/ci/changelog.in + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.11/cloud.txt b/requirements/static/ci/py3.11/cloud.txt new file mode 100644 index 00000000000..b2ff4c59338 --- /dev/null +++ b/requirements/static/ci/py3.11/cloud.txt @@ -0,0 +1,686 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/cloud.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/cloud.in requirements/static/ci/common.in requirements/static/pkg/linux.in requirements/zeromq.txt +# +aiohttp==3.8.5 + # via + # -c requirements/static/ci/py3.11/linux.txt + # etcd3-py +aiosignal==1.2.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # aiohttp +apache-libcloud==2.5.0 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/cloud.in + # -r requirements/static/ci/common.in +asn1crypto==1.3.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # certvalidator + # oscrypto +async-timeout==4.0.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # aiohttp +attrs==23.1.0 + # via + # -c 
requirements/static/ci/py3.11/linux.txt + # aiohttp + # jsonschema + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +backports.entry-points-selectable==1.1.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # virtualenv +bcrypt==3.1.6 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # paramiko +boto3==1.21.46 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # moto +boto==2.49.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +botocore==1.24.46 + # via + # -c requirements/static/ci/py3.11/linux.txt + # boto3 + # moto + # s3transfer +cachetools==4.2.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # google-auth +cassandra-driver==3.23.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # kubernetes + # requests +certvalidator==0.11.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # vcert +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # bcrypt + # cryptography + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # aiohttp + # requests +cheetah3==3.2.6.post2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +cheroot==8.5.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # cherrypy +cherrypy==18.6.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt 
+ # -r requirements/static/ci/common.in + # -r requirements/static/pkg/linux.in +click==7.1.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # geomet +clustershell==1.8.3 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt +croniter==0.3.29 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +cryptography==41.0.4 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in + # etcd3-py + # moto + # paramiko + # pyopenssl + # pyspnego + # requests-ntlm + # smbprotocol + # vcert +distlib==0.3.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # virtualenv +distro==1.5.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # pytest-skip-markers +dnspython==1.16.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # python-etcd +docker==6.1.3 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/pytest.txt +etcd3-py==0.1.6 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +filelock==3.0.12 + # via + # -c requirements/static/ci/py3.11/linux.txt + # virtualenv +flaky==3.7.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/pytest.txt +frozenlist==1.3.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # aiohttp + # aiosignal +genshi==0.7.5 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +geomet==0.1.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # cassandra-driver +gitdb==4.0.7 + # via + # -c 
requirements/static/ci/py3.11/linux.txt + # gitpython +gitpython==3.1.37 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +google-auth==2.1.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # kubernetes +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # etcd3-py + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # contextvars +importlib-metadata==6.0.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in +iniconfig==1.0.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # pytest +ipaddress==1.0.22 + # via + # -c requirements/static/ci/py3.11/linux.txt + # kubernetes +jaraco.classes==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # jaraco.collections +jaraco.collections==3.4.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # moto +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore +jsonschema==3.2.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r 
requirements/static/ci/common.in +jxmlease==1.0.1 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +kazoo==2.6.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +keyring==5.7.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +looseversion==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt +mako==1.2.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +markupsafe==2.1.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # jinja2 + # mako + # moto + # werkzeug +mock==5.1.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/pytest.txt +more-itertools==5.0.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/pytest.txt + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +moto==3.0.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +msgpack==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # pytest-salt-factories +multidict==6.0.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # aiohttp + # yarl +netaddr==0.7.19 + # via -r requirements/static/ci/cloud.in +ntlm-auth==1.3.0 + # via 
requests-ntlm +oscrypto==1.2.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # certvalidator +packaging==22.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # docker + # pytest +paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +passlib==1.7.4 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +pathtools==0.1.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # watchdog +platformdirs==2.2.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # virtualenv +pluggy==0.13.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # pytest +portend==2.4 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # cherrypy +profitbricks==4.1.3 + # via -r requirements/static/ci/cloud.in +psutil==5.8.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pyasn1-modules==0.2.4 + # via + # -c requirements/static/ci/py3.11/linux.txt + # google-auth +pyasn1==0.4.8 + # via + # -c requirements/static/ci/py3.11/linux.txt + # pyasn1-modules + # rsa +pycparser==2.21 ; python_version >= "3.9" + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # -r requirements/static/pkg/linux.in + # cffi +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/crypto.txt +pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" + # via + # -c 
requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +pynacl==1.5.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in + # etcd3-py +pypsexec==0.1.0 + # via -r requirements/static/ci/cloud.in +pyrsistent==0.17.3 + # via + # -c requirements/static/ci/py3.11/linux.txt + # jsonschema +pyspnego==0.8.0 + # via + # -r requirements/static/ci/cloud.in + # smbprotocol +pytest-custom-exit-code==0.3.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/pytest.txt +pytest-helpers-namespace==2021.4.29 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/pytest.txt + # pytest-salt-factories + # pytest-shell-utilities +pytest-httpserver==1.0.8 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/pytest.txt +pytest-salt-factories==1.0.0rc27 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/pytest.txt +pytest-shell-utilities==1.8.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # pytest-salt-factories +pytest-skip-markers==1.5.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pytest-subtests==0.4.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # pytest-salt-factories +pytest-timeout==1.4.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/pytest.txt +pytest==7.2.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/pytest.txt + # pytest-custom-exit-code + # pytest-helpers-namespace + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # 
pytest-subtests + # pytest-system-statistics + # pytest-timeout +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in + # botocore + # croniter + # kubernetes + # moto + # vcert +python-etcd==0.4.5 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +python-gnupg==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in +pytz==2022.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # moto + # tempora +pyvmomi==6.7.1.2018.12 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +pywinrm==0.3.0 + # via -r requirements/static/ci/cloud.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # clustershell + # kubernetes + # pytest-salt-factories +pyzmq==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/zeromq.txt + # pytest-salt-factories +requests-ntlm==1.1.0 + # via pywinrm +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # apache-libcloud + # docker + # etcd3-py + # kubernetes + # moto + # profitbricks + # pyvmomi + # pywinrm + # requests-ntlm + # responses + # vcert +responses==0.10.6 + # via + # -c requirements/static/ci/py3.11/linux.txt + # moto +rfc3987==1.3.8 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +rpm-vercmp==0.1.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c 
requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in +rsa==4.7.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # google-auth +s3transfer==0.5.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # boto3 +semantic-version==2.9.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # etcd3-py +setproctitle==1.3.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in +six==1.16.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # bcrypt + # cassandra-driver + # cheroot + # etcd3-py + # genshi + # geomet + # jsonschema + # kazoo + # kubernetes + # more-itertools + # paramiko + # profitbricks + # pypsexec + # python-dateutil + # pyvmomi + # pywinrm + # responses + # vcert + # virtualenv + # websocket-client +smbprotocol==1.10.1 + # via + # -r requirements/static/ci/cloud.in + # pypsexec +smmap==4.0.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # gitdb +sqlparse==0.4.4 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +tempora==4.1.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # portend +timelib==0.2.5 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in +toml==0.10.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +typing-extensions==4.8.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # pytest-shell-utilities + # pytest-system-statistics +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # botocore + # 
docker + # kubernetes + # python-etcd + # requests +vcert==0.7.4 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +virtualenv==20.7.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # pytest-salt-factories +watchdog==0.10.3 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # docker + # kubernetes +wempy==0.2.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # moto + # pytest-httpserver +xmltodict==0.12.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # moto + # pywinrm +yarl==1.7.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # aiohttp +zc.lockfile==1.4 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # cherrypy +zipp==3.6.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.11/darwin-crypto.txt b/requirements/static/ci/py3.11/darwin-crypto.txt new file mode 100644 index 00000000000..c0aacf41077 --- /dev/null +++ b/requirements/static/ci/py3.11/darwin-crypto.txt @@ -0,0 +1,10 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/darwin-crypto.txt requirements/static/ci/crypto.in +# +m2crypto==0.38.0 + # via -r requirements/static/ci/crypto.in +pycryptodome==3.9.7 + # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.11/darwin.txt b/requirements/static/ci/py3.11/darwin.txt new file 
mode 100644 index 00000000000..0c6824eb714 --- /dev/null +++ b/requirements/static/ci/py3.11/darwin.txt @@ -0,0 +1,482 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/darwin.txt requirements/darwin.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/darwin.in requirements/static/pkg/darwin.in +# +aiohttp==3.8.5 + # via etcd3-py +aiosignal==1.2.0 + # via aiohttp +apache-libcloud==2.5.0 ; sys_platform != "win32" + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt + # -r requirements/static/ci/common.in +asn1crypto==1.3.0 + # via + # certvalidator + # oscrypto +async-timeout==4.0.2 + # via aiohttp +attrs==23.1.0 + # via + # aiohttp + # jsonschema + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +backports.entry-points-selectable==1.1.0 + # via virtualenv +bcrypt==3.1.6 + # via -r requirements/static/ci/common.in +boto3==1.21.46 + # via + # -r requirements/static/ci/common.in + # moto +boto==2.49.0 + # via -r requirements/static/ci/common.in +botocore==1.24.46 + # via + # boto3 + # moto + # s3transfer +cachetools==3.1.0 + # via google-auth +cassandra-driver==3.23.0 + # via -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/static/ci/common.in + # kubernetes + # requests +certvalidator==0.11.1 + # via vcert +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/static/ci/common.in + # bcrypt + # cryptography + # pygit2 + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # aiohttp + # requests +cheetah3==3.2.6.post2 + # via -r requirements/static/ci/common.in +cheroot==8.5.2 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # cherrypy 
+cherrypy==18.6.1 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt + # -r requirements/static/ci/common.in +click==7.0 + # via geomet +clustershell==1.8.1 + # via -r requirements/static/ci/common.in +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/base.txt +croniter==0.3.29 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +cryptography==41.0.4 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt + # etcd3-py + # moto + # pyopenssl + # vcert +distlib==0.3.2 + # via virtualenv +distro==1.5.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/base.txt + # pytest-skip-markers +dnspython==1.16.0 + # via + # -r requirements/static/ci/common.in + # python-etcd +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 + # via -r requirements/static/ci/common.in +filelock==3.0.12 + # via virtualenv +flaky==3.7.0 + # via -r requirements/pytest.txt +frozenlist==1.3.0 + # via + # aiohttp + # aiosignal +genshi==0.7.5 + # via -r requirements/static/ci/common.in +geomet==0.1.2 + # via cassandra-driver +gitdb==4.0.7 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # gitpython +gitpython==3.1.37 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt + # -r requirements/static/ci/common.in +google-auth==2.1.0 + # via kubernetes +hglib==2.6.1 + # via -r requirements/static/ci/darwin.in +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt + # etcd3-py + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # contextvars +importlib-metadata==6.0.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt +iniconfig==1.0.1 + # via pytest +ipaddress==1.0.22 + # via kubernetes +jaraco.classes==3.2.1 + # 
via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # jaraco.collections +jaraco.collections==3.4.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/base.txt + # moto +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore +jsonschema==3.2.0 + # via -r requirements/static/ci/common.in +jxmlease==1.0.1 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +keyring==5.7.1 + # via -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via -r requirements/static/ci/common.in +linode-python==1.1.1 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt +looseversion==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/base.txt +mako==1.2.2 + # via -r requirements/static/ci/common.in +markupsafe==2.1.2 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/base.txt + # jinja2 + # mako + # moto + # werkzeug +mercurial==6.0.1 + # via -r requirements/static/ci/darwin.in +mock==5.1.0 + # via -r requirements/pytest.txt +more-itertools==8.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/pytest.txt + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +moto==3.0.1 + # via -r requirements/static/ci/common.in +msgpack==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/base.txt + # pytest-salt-factories +multidict==6.0.2 + # via + # aiohttp + # yarl +oscrypto==1.2.0 + # via certvalidator 
+packaging==22.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/base.txt + # docker + # pytest +passlib==1.7.4 + # via -r requirements/static/ci/common.in +pathspec==0.9.0 + # via yamllint +pathtools==0.1.2 + # via watchdog +platformdirs==2.2.0 + # via virtualenv +pluggy==0.13.1 + # via pytest +portend==2.6 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # cherrypy +psutil==5.8.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/base.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pyasn1-modules==0.2.4 + # via google-auth +pyasn1==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt + # pyasn1-modules + # rsa +pycparser==2.21 ; python_version >= "3.9" + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt + # -r requirements/static/ci/common.in + # cffi +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/crypto.txt +pygit2==1.13.1 + # via -r requirements/static/ci/darwin.in +pynacl==1.5.0 + # via -r requirements/static/ci/common.in +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt + # etcd3-py +pyrsistent==0.17.3 + # via jsonschema +pytest-custom-exit-code==0.3.0 + # via -r requirements/pytest.txt +pytest-helpers-namespace==2021.4.29 + # via + # -r requirements/pytest.txt + # pytest-salt-factories + # pytest-shell-utilities +pytest-httpserver==1.0.8 + # via -r requirements/pytest.txt +pytest-salt-factories==1.0.0rc27 + # via -r requirements/pytest.txt +pytest-shell-utilities==1.8.0 + # via pytest-salt-factories +pytest-skip-markers==1.5.0 + # via + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pytest-subtests==0.4.0 + # via -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via 
pytest-salt-factories +pytest-timeout==1.4.2 + # via -r requirements/pytest.txt +pytest==7.2.0 + # via + # -r requirements/pytest.txt + # pytest-custom-exit-code + # pytest-helpers-namespace + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-subtests + # pytest-system-statistics + # pytest-timeout +python-dateutil==2.8.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt + # botocore + # croniter + # kubernetes + # moto + # vcert +python-etcd==0.4.5 + # via -r requirements/static/ci/common.in +python-gnupg==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt +pytz==2022.1 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # moto + # tempora +pyvmomi==6.7.1.2018.12 + # via -r requirements/static/ci/common.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/base.txt + # clustershell + # kubernetes + # pytest-salt-factories + # yamllint + # yamlordereddictloader +pyzmq==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/zeromq.txt + # pytest-salt-factories +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # apache-libcloud + # docker + # etcd3-py + # kubernetes + # moto + # pyvmomi + # responses + # vcert + # vultr +responses==0.10.6 + # via moto +rfc3987==1.3.8 + # via -r requirements/static/ci/common.in +rsa==4.7.2 + # via google-auth +s3transfer==0.5.2 + # via boto3 +semantic-version==2.9.0 + # via etcd3-py +setproctitle==1.3.2 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt +six==1.16.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # bcrypt + # cassandra-driver + # cheroot + # etcd3-py + # genshi + # geomet + # jsonschema + # kubernetes + # python-dateutil + # pyvmomi + # 
responses + # vcert + # virtualenv + # websocket-client +smmap==4.0.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # gitdb +sqlparse==0.4.4 + # via -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via -r requirements/static/ci/common.in +tempora==4.1.1 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # portend +timelib==0.2.5 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt +toml==0.10.2 + # via -r requirements/static/ci/common.in +typing-extensions==4.2.0 + # via + # pytest-shell-utilities + # pytest-system-statistics +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # botocore + # docker + # kubernetes + # python-etcd + # requests +vcert==0.7.4 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +virtualenv==20.7.2 + # via + # -r requirements/static/ci/common.in + # pytest-salt-factories +vultr==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt +watchdog==0.10.3 + # via -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # docker + # kubernetes +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # moto + # pytest-httpserver +xmltodict==0.12.0 + # via moto +yamllint==1.26.3 + # via -r requirements/static/ci/darwin.in +yamlordereddictloader==0.4.0 + # via -r requirements/static/ci/darwin.in +yarl==1.7.2 + # via aiohttp +zc.lockfile==2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # cherrypy +zipp==3.12.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.11/docs.txt b/requirements/static/ci/py3.11/docs.txt new file mode 100644 index 00000000000..1a2bac96dca --- /dev/null +++ b/requirements/static/ci/py3.11/docs.txt @@ -0,0 +1,196 @@ +# 
+# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/docs.txt requirements/base.txt requirements/static/ci/docs.in requirements/zeromq.txt +# +alabaster==0.7.12 + # via sphinx +babel==2.9.1 + # via sphinx +certifi==2023.07.22 + # via + # -c requirements/static/ci/py3.11/linux.txt + # requests +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # requests +cheroot==8.5.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # cherrypy +cherrypy==18.6.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/docs.in +contextvars==2.4 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt +distro==1.5.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt +docutils==0.19 + # via sphinx +idna==3.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # requests +imagesize==1.4.1 + # via sphinx +immutables==0.15 + # via + # -c requirements/static/ci/py3.11/linux.txt + # contextvars +jaraco.classes==3.2.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # jaraco.collections +jaraco.collections==3.4.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # myst-docutils + # sphinx +jmespath==1.0.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt +linkify-it-py==1.0.3 + # via myst-docutils +looseversion==1.0.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt +markdown-it-py==2.2.0 + # via + # mdit-py-plugins + # myst-docutils +markupsafe==2.1.2 + # via + # 
-c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # jinja2 +mdit-py-plugins==0.3.3 + # via myst-docutils +mdurl==0.1.2 + # via markdown-it-py +more-itertools==5.0.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +msgpack==1.0.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt +myst-docutils[linkify]==0.18.1 + # via -r requirements/static/ci/docs.in +packaging==22.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # sphinx +portend==2.4 + # via + # -c requirements/static/ci/py3.11/linux.txt + # cherrypy +psutil==5.8.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/crypto.txt +pyenchant==3.2.2 + # via sphinxcontrib-spelling +pygments==2.14.0 + # via sphinx +pytz==2022.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # babel + # tempora +pyyaml==6.0.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # myst-docutils +pyzmq==23.2.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/zeromq.txt +requests==2.31.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # sphinx +six==1.16.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # cheroot + # more-itertools + # sphinxcontrib.httpdomain +snowballstemmer==2.1.0 + # via sphinx +sphinx==6.1.3 ; python_version >= "3.9" + # via + # -r requirements/static/ci/docs.in + # sphinxcontrib-spelling + # sphinxcontrib.httpdomain +sphinxcontrib-applehelp==1.0.2 + # via sphinx +sphinxcontrib-devhelp==1.0.2 + # via sphinx +sphinxcontrib-htmlhelp==2.0.0 + # via sphinx +sphinxcontrib-jsmath==1.0.1 + # via sphinx +sphinxcontrib-qthelp==1.0.3 + # via sphinx +sphinxcontrib-serializinghtml==1.1.5 + # via sphinx 
+sphinxcontrib-spelling==7.7.0 + # via -r requirements/static/ci/docs.in +sphinxcontrib.httpdomain==1.8.1 + # via -r requirements/static/ci/docs.in +tempora==4.1.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # portend +typing-extensions==4.8.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # myst-docutils +uc-micro-py==1.0.1 + # via linkify-it-py +urllib3==1.26.18 + # via + # -c requirements/static/ci/py3.11/linux.txt + # requests +zc.lockfile==1.4 + # via + # -c requirements/static/ci/py3.11/linux.txt + # cherrypy + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.11/freebsd-crypto.txt b/requirements/static/ci/py3.11/freebsd-crypto.txt new file mode 100644 index 00000000000..33399b9ff51 --- /dev/null +++ b/requirements/static/ci/py3.11/freebsd-crypto.txt @@ -0,0 +1,10 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/freebsd-crypto.txt requirements/static/ci/crypto.in +# +m2crypto==0.38.0 + # via -r requirements/static/ci/crypto.in +pycryptodome==3.9.7 + # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.11/freebsd.txt b/requirements/static/ci/py3.11/freebsd.txt new file mode 100644 index 00000000000..b290eea30b4 --- /dev/null +++ b/requirements/static/ci/py3.11/freebsd.txt @@ -0,0 +1,474 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/freebsd.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/freebsd.in requirements/static/pkg/freebsd.in requirements/zeromq.txt +# +aiohttp==3.8.5 + # via etcd3-py +aiosignal==1.2.0 + # via aiohttp +apache-libcloud==2.5.0 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +asn1crypto==1.3.0 + # via + # certvalidator + # 
oscrypto +async-timeout==4.0.2 + # via aiohttp +attrs==23.1.0 + # via + # aiohttp + # jsonschema + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +backports.entry-points-selectable==1.1.0 + # via virtualenv +bcrypt==3.1.6 + # via + # -r requirements/static/ci/common.in + # paramiko +boto3==1.21.46 + # via + # -r requirements/static/ci/common.in + # moto +boto==2.49.0 + # via -r requirements/static/ci/common.in +botocore==1.24.46 + # via + # boto3 + # moto + # s3transfer +cachetools==3.1.0 + # via google-auth +cassandra-driver==3.24.0 + # via -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/static/ci/common.in + # kubernetes + # requests +certvalidator==0.11.1 + # via vcert +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/static/ci/common.in + # bcrypt + # cryptography + # pygit2 + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # aiohttp + # requests +cheetah3==3.2.6.post2 + # via -r requirements/static/ci/common.in +cheroot==8.5.2 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # cherrypy +cherrypy==18.6.1 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/static/ci/common.in + # -r requirements/static/pkg/freebsd.in +click==7.1.2 + # via geomet +clustershell==1.8.3 + # via -r requirements/static/ci/common.in +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/base.txt +croniter==0.3.29 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +cryptography==41.0.4 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/static/pkg/freebsd.in + # etcd3-py + # moto + # paramiko + # pyopenssl + # vcert +distlib==0.3.2 + # via virtualenv +distro==1.5.0 + # via + # -c 
requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/base.txt + # -r requirements/static/pkg/freebsd.in + # pytest-skip-markers +dnspython==1.16.0 + # via + # -r requirements/static/ci/common.in + # python-etcd +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 + # via -r requirements/static/ci/common.in +filelock==3.0.12 + # via virtualenv +flaky==3.7.0 + # via -r requirements/pytest.txt +frozenlist==1.3.0 + # via + # aiohttp + # aiosignal +genshi==0.7.5 + # via -r requirements/static/ci/common.in +geomet==0.2.1.post1 + # via cassandra-driver +gitdb==4.0.7 + # via gitpython +gitpython==3.1.37 + # via -r requirements/static/ci/common.in +google-auth==2.1.0 + # via kubernetes +hglib==2.6.1 + # via -r requirements/static/ci/freebsd.in +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # etcd3-py + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # contextvars +importlib-metadata==6.0.0 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/static/pkg/freebsd.in +iniconfig==1.0.1 + # via pytest +ipaddress==1.0.22 + # via kubernetes +jaraco.classes==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # jaraco.collections +jaraco.collections==3.4.0 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/base.txt + # moto +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore +jsonschema==3.2.0 + # via -r requirements/static/ci/common.in 
+jxmlease==1.0.1 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +kazoo==2.6.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via -r requirements/static/ci/common.in +keyring==5.7.1 + # via -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via -r requirements/static/ci/common.in +libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via -r requirements/static/ci/common.in +looseversion==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/base.txt +mako==1.2.2 + # via -r requirements/static/ci/common.in +markupsafe==2.1.2 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/base.txt + # jinja2 + # mako + # moto + # werkzeug +mercurial==6.0.1 + # via -r requirements/static/ci/freebsd.in +mock==5.1.0 + # via -r requirements/pytest.txt +more-itertools==5.0.0 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/pytest.txt + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +moto==3.0.1 + # via -r requirements/static/ci/common.in +msgpack==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/base.txt + # pytest-salt-factories +multidict==6.0.2 + # via + # aiohttp + # yarl +oscrypto==1.2.0 + # via certvalidator +packaging==22.0 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/base.txt + # docker + # pytest +paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via -r requirements/static/ci/common.in +passlib==1.7.4 + # via -r requirements/static/ci/common.in +pathspec==0.9.0 + # via yamllint +pathtools==0.1.2 + # via watchdog +platformdirs==2.2.0 + # via virtualenv +pluggy==0.13.0 + # via pytest +portend==2.4 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # cherrypy +psutil==5.8.0 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/base.txt + # 
pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pyasn1-modules==0.2.4 + # via google-auth +pyasn1==0.4.8 + # via + # pyasn1-modules + # rsa +pycparser==2.21 ; python_version >= "3.9" + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/static/ci/common.in + # -r requirements/static/pkg/freebsd.in + # cffi +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/crypto.txt +pygit2==1.13.1 + # via -r requirements/static/ci/freebsd.in +pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" + # via -r requirements/static/ci/common.in +pynacl==1.5.0 + # via + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/static/pkg/freebsd.in + # etcd3-py +pyrsistent==0.17.3 + # via jsonschema +pytest-custom-exit-code==0.3.0 + # via -r requirements/pytest.txt +pytest-helpers-namespace==2021.4.29 + # via + # -r requirements/pytest.txt + # pytest-salt-factories + # pytest-shell-utilities +pytest-httpserver==1.0.8 + # via -r requirements/pytest.txt +pytest-salt-factories==1.0.0rc27 + # via -r requirements/pytest.txt +pytest-shell-utilities==1.8.0 + # via pytest-salt-factories +pytest-skip-markers==1.5.0 + # via + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pytest-subtests==0.4.0 + # via -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via pytest-salt-factories +pytest-timeout==1.4.2 + # via -r requirements/pytest.txt +pytest==7.2.0 + # via + # -r requirements/pytest.txt + # pytest-custom-exit-code + # pytest-helpers-namespace + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-subtests + # pytest-system-statistics + # pytest-timeout +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r 
requirements/static/pkg/freebsd.in + # botocore + # croniter + # kubernetes + # moto + # vcert +python-etcd==0.4.5 + # via -r requirements/static/ci/common.in +python-gnupg==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/static/pkg/freebsd.in +pytz==2022.1 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # moto + # tempora +pyvmomi==6.7.1.2018.12 + # via -r requirements/static/ci/common.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/base.txt + # clustershell + # kubernetes + # pytest-salt-factories + # yamllint +pyzmq==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/zeromq.txt + # pytest-salt-factories +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # apache-libcloud + # docker + # etcd3-py + # kubernetes + # moto + # pyvmomi + # responses + # vcert +responses==0.10.6 + # via moto +rfc3987==1.3.8 + # via -r requirements/static/ci/common.in +rsa==4.7.2 + # via google-auth +s3transfer==0.5.2 + # via boto3 +semantic-version==2.9.0 + # via etcd3-py +setproctitle==1.3.2 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/static/pkg/freebsd.in +six==1.16.0 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # bcrypt + # cassandra-driver + # cheroot + # etcd3-py + # genshi + # geomet + # jsonschema + # kazoo + # kubernetes + # more-itertools + # paramiko + # python-dateutil + # pyvmomi + # responses + # vcert + # virtualenv + # websocket-client +smmap==4.0.0 + # via gitdb +sqlparse==0.4.4 + # via -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via -r requirements/static/ci/common.in +tempora==4.1.1 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # portend +timelib==0.2.5 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # 
-r requirements/static/pkg/freebsd.in +toml==0.10.2 + # via -r requirements/static/ci/common.in +typing-extensions==4.8.0 + # via + # pytest-shell-utilities + # pytest-system-statistics +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # botocore + # docker + # kubernetes + # python-etcd + # requests +vcert==0.7.4 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +virtualenv==20.7.2 + # via + # -r requirements/static/ci/common.in + # pytest-salt-factories +watchdog==0.10.3 + # via -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # docker + # kubernetes +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # moto + # pytest-httpserver +xmltodict==0.12.0 + # via moto +yamllint==1.26.3 + # via -r requirements/static/ci/freebsd.in +yarl==1.7.2 + # via aiohttp +zc.lockfile==1.4 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # cherrypy +zipp==3.12.0 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.11/lint.txt b/requirements/static/ci/py3.11/lint.txt new file mode 100644 index 00000000000..0e9e87631dd --- /dev/null +++ b/requirements/static/ci/py3.11/lint.txt @@ -0,0 +1,687 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/lint.txt requirements/base.txt requirements/static/ci/common.in requirements/static/ci/lint.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt +# +aiohttp==3.8.5 + # via + # -c requirements/static/ci/py3.11/linux.txt + # etcd3-py +aiosignal==1.2.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # aiohttp +ansible-core==2.14.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # ansible +ansible==7.1.0 ; 
python_version >= "3.9" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/linux.in +apache-libcloud==2.5.0 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +apscheduler==3.6.3 + # via + # -c requirements/static/ci/py3.11/linux.txt + # python-telegram-bot +asn1crypto==1.3.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # certvalidator + # oscrypto +astroid==2.3.3 + # via pylint +async-timeout==4.0.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # aiohttp +attrs==23.1.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # aiohttp + # jsonschema +backports.entry-points-selectable==1.1.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # virtualenv +bcrypt==3.1.6 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # paramiko +boto3==1.21.46 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # moto +boto==2.49.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +botocore==1.24.46 + # via + # -c requirements/static/ci/py3.11/linux.txt + # boto3 + # moto + # s3transfer +cachetools==4.2.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # google-auth + # python-telegram-bot +cassandra-driver==3.23.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # kubernetes + # python-telegram-bot + # requests +certvalidator==0.11.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # vcert +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # bcrypt + # cryptography + # 
pygit2 + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # aiohttp + # requests +cheetah3==3.2.6.post2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +cheroot==8.5.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # cherrypy +cherrypy==18.6.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # -r requirements/static/pkg/linux.in +click==7.1.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # geomet +clustershell==1.8.3 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt +croniter==0.3.29 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +cryptography==41.0.4 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in + # ansible-core + # etcd3-py + # moto + # paramiko + # pyopenssl + # vcert +distlib==0.3.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # virtualenv +distro==1.5.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt +dnspython==1.16.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # python-etcd +docker==6.1.3 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/lint.in +etcd3-py==0.1.6 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +filelock==3.0.12 + # via + # 
-c requirements/static/ci/py3.11/linux.txt + # virtualenv +frozenlist==1.3.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # aiohttp + # aiosignal +genshi==0.7.5 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +geomet==0.1.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # cassandra-driver +gitdb==4.0.7 + # via + # -c requirements/static/ci/py3.11/linux.txt + # gitpython +gitpython==3.1.37 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +google-auth==2.1.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # kubernetes +hglib==2.6.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/linux.in +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # etcd3-py + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # contextvars +importlib-metadata==6.0.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in +ipaddress==1.0.22 + # via + # -c requirements/static/ci/py3.11/linux.txt + # kubernetes +isort==4.3.21 + # via pylint +jaraco.classes==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # jaraco.collections +jaraco.collections==3.4.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # jaraco.collections +jinja2==3.1.2 + # via + 
# -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # ansible-core + # moto +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore +jsonschema==3.2.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +jxmlease==1.0.1 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +kazoo==2.6.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +keyring==5.7.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +lazy-object-proxy==1.4.3 + # via astroid +libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +looseversion==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt +mako==1.2.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +markupsafe==2.1.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # jinja2 + # mako + # moto + # werkzeug +mccabe==0.6.1 + # via pylint +mercurial==6.0.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/linux.in +modernize==0.5 + # via saltpylint +more-itertools==5.0.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # 
cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +moto==3.0.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +msgpack==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt +multidict==6.0.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # aiohttp + # yarl +oscrypto==1.2.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # certvalidator +packaging==22.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # ansible-core + # docker +paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +passlib==1.7.4 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +pathspec==0.9.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # yamllint +pathtools==0.1.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # watchdog +platformdirs==2.2.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # virtualenv +portend==2.4 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # cherrypy +psutil==5.8.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt +pyasn1-modules==0.2.4 + # via + # -c requirements/static/ci/py3.11/linux.txt + # google-auth +pyasn1==0.4.8 + # via + # -c requirements/static/ci/py3.11/linux.txt + # pyasn1-modules + # rsa +pycodestyle==2.5.0 + # via saltpylint +pycparser==2.21 ; python_version >= "3.9" + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # -r requirements/static/pkg/linux.in + # cffi 
+pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/crypto.txt +pygit2==1.13.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/linux.in +pyiface==0.0.11 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/linux.in +pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +pyjwt==2.4.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # twilio +pylint==2.4.4 + # via + # -r requirements/static/ci/lint.in + # saltpylint +pymysql==1.0.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/linux.in +pynacl==1.5.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in + # etcd3-py +pyrsistent==0.17.3 + # via + # -c requirements/static/ci/py3.11/linux.txt + # jsonschema +python-consul==1.1.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/linux.in +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in + # botocore + # croniter + # kubernetes + # moto + # vcert +python-etcd==0.4.5 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +python-gnupg==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in +python-telegram-bot==13.7 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/linux.in 
+pytz==2022.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # apscheduler + # moto + # python-telegram-bot + # tempora + # twilio +pyvmomi==6.7.1.2018.12 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # ansible-core + # clustershell + # kubernetes + # yamllint +pyzmq==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/zeromq.txt +redis-py-cluster==2.1.3 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/linux.in +redis==3.5.3 + # via + # -c requirements/static/ci/py3.11/linux.txt + # redis-py-cluster +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # apache-libcloud + # docker + # etcd3-py + # kubernetes + # moto + # python-consul + # pyvmomi + # responses + # twilio + # vcert +resolvelib==0.5.4 + # via + # -c requirements/static/ci/py3.11/linux.txt + # ansible-core +responses==0.10.6 + # via + # -c requirements/static/ci/py3.11/linux.txt + # moto +rfc3987==1.3.8 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +rpm-vercmp==0.1.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in +rsa==4.7.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # google-auth +s3transfer==0.5.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # boto3 +saltpylint==2023.8.3 + # via -r requirements/static/ci/lint.in +semantic-version==2.9.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # etcd3-py 
+setproctitle==1.3.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in +six==1.16.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # apscheduler + # astroid + # bcrypt + # cassandra-driver + # cheroot + # etcd3-py + # genshi + # geomet + # jsonschema + # kazoo + # kubernetes + # more-itertools + # paramiko + # python-consul + # python-dateutil + # pyvmomi + # responses + # vcert + # virtualenv + # websocket-client +slack-bolt==1.15.5 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/linux.in +slack-sdk==3.19.5 + # via + # -c requirements/static/ci/py3.11/linux.txt + # slack-bolt +smmap==4.0.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # gitdb +sqlparse==0.4.4 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +tempora==4.1.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # portend +timelib==0.2.5 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in +toml==0.10.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # -r requirements/static/ci/lint.in +tornado==6.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # python-telegram-bot +twilio==7.9.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/linux.in +tzlocal==3.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # apscheduler +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # botocore + # docker + # kubernetes + # 
python-etcd + # requests +vcert==0.7.4 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +virtualenv==20.7.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +watchdog==0.10.3 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # docker + # kubernetes +wempy==0.2.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # moto +wrapt==1.11.1 + # via astroid +xmltodict==0.12.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # moto +yamllint==1.26.3 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/linux.in +yarl==1.7.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # aiohttp +zc.lockfile==1.4 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # cherrypy +zipp==3.6.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.11/linux-crypto.txt b/requirements/static/ci/py3.11/linux-crypto.txt new file mode 100644 index 00000000000..89873b20c9e --- /dev/null +++ b/requirements/static/ci/py3.11/linux-crypto.txt @@ -0,0 +1,10 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/linux-crypto.txt requirements/static/ci/crypto.in +# +m2crypto==0.38.0 + # via -r requirements/static/ci/crypto.in +pycryptodome==3.9.7 + # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.11/linux.txt 
b/requirements/static/ci/py3.11/linux.txt new file mode 100644 index 00000000000..8530773540b --- /dev/null +++ b/requirements/static/ci/py3.11/linux.txt @@ -0,0 +1,523 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/linux.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt +# +aiohttp==3.8.5 + # via etcd3-py +aiosignal==1.2.0 + # via aiohttp +ansible-core==2.14.1 + # via ansible +ansible==7.1.0 ; python_version >= "3.9" + # via -r requirements/static/ci/linux.in +apache-libcloud==2.5.0 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +apscheduler==3.6.3 + # via python-telegram-bot +asn1crypto==1.3.0 + # via + # certvalidator + # oscrypto +async-timeout==4.0.2 + # via aiohttp +attrs==23.1.0 + # via + # aiohttp + # jsonschema + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +backports.entry-points-selectable==1.1.0 + # via virtualenv +bcrypt==3.1.6 + # via + # -r requirements/static/ci/common.in + # paramiko +boto3==1.21.46 + # via + # -r requirements/static/ci/common.in + # moto +boto==2.49.0 + # via -r requirements/static/ci/common.in +botocore==1.24.46 + # via + # boto3 + # moto + # s3transfer +cachetools==4.2.2 + # via + # google-auth + # python-telegram-bot +cassandra-driver==3.23.0 + # via -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/static/ci/common.in + # kubernetes + # python-telegram-bot + # requests +certvalidator==0.11.1 + # via vcert +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/static/ci/common.in + # bcrypt + # cryptography + # pygit2 + # pynacl +charset-normalizer==3.2.0 + # via + # -c 
requirements/static/ci/../pkg/py3.11/linux.txt + # aiohttp + # requests +cheetah3==3.2.6.post2 + # via -r requirements/static/ci/common.in +cheroot==8.5.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # cherrypy +cherrypy==18.6.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/static/ci/common.in + # -r requirements/static/pkg/linux.in +click==7.1.1 + # via geomet +clustershell==1.8.3 + # via -r requirements/static/ci/common.in +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt +croniter==0.3.29 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +cryptography==41.0.4 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/static/pkg/linux.in + # ansible-core + # etcd3-py + # moto + # paramiko + # pyopenssl + # vcert +distlib==0.3.2 + # via virtualenv +distro==1.5.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # pytest-skip-markers +dnspython==1.16.0 + # via + # -r requirements/static/ci/common.in + # python-etcd +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 + # via -r requirements/static/ci/common.in +filelock==3.0.12 + # via virtualenv +flaky==3.7.0 + # via -r requirements/pytest.txt +frozenlist==1.3.0 + # via + # aiohttp + # aiosignal +genshi==0.7.5 + # via -r requirements/static/ci/common.in +geomet==0.1.2 + # via cassandra-driver +gitdb==4.0.7 + # via gitpython +gitpython==3.1.37 + # via -r requirements/static/ci/common.in +google-auth==2.1.0 + # via kubernetes +hglib==2.6.1 + # via -r requirements/static/ci/linux.in +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # etcd3-py + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # contextvars +importlib-metadata==6.0.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r 
requirements/static/pkg/linux.in +iniconfig==1.0.1 + # via pytest +ipaddress==1.0.22 + # via kubernetes +jaraco.classes==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # jaraco.collections +jaraco.collections==3.4.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # ansible-core + # moto +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore +jsonschema==3.2.0 + # via -r requirements/static/ci/common.in +jxmlease==1.0.1 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +kazoo==2.6.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via -r requirements/static/ci/common.in +keyring==5.7.1 + # via -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via -r requirements/static/ci/common.in +libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via -r requirements/static/ci/common.in +looseversion==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt +mako==1.2.2 + # via -r requirements/static/ci/common.in +markupsafe==2.1.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # jinja2 + # mako + # moto + # werkzeug +mercurial==6.0.1 + # via -r requirements/static/ci/linux.in +mock==5.1.0 + # via -r requirements/pytest.txt +more-itertools==5.0.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/pytest.txt + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +moto==3.0.1 + # via -r 
requirements/static/ci/common.in +msgpack==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # pytest-salt-factories +multidict==6.0.2 + # via + # aiohttp + # yarl +oscrypto==1.2.0 + # via certvalidator +packaging==22.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # ansible-core + # docker + # pytest +paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via -r requirements/static/ci/common.in +passlib==1.7.4 + # via -r requirements/static/ci/common.in +pathspec==0.9.0 + # via yamllint +pathtools==0.1.2 + # via watchdog +platformdirs==2.2.0 + # via virtualenv +pluggy==0.13.0 + # via pytest +portend==2.4 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # cherrypy +psutil==5.8.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pyasn1-modules==0.2.4 + # via google-auth +pyasn1==0.4.8 + # via + # pyasn1-modules + # rsa +pycparser==2.21 ; python_version >= "3.9" + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/static/ci/common.in + # -r requirements/static/pkg/linux.in + # cffi +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/crypto.txt +pygit2==1.13.1 + # via -r requirements/static/ci/linux.in +pyiface==0.0.11 + # via -r requirements/static/ci/linux.in +pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" + # via -r requirements/static/ci/common.in +pyjwt==2.4.0 + # via twilio +pymysql==1.0.2 + # via -r requirements/static/ci/linux.in +pynacl==1.5.0 + # via + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/static/pkg/linux.in + # etcd3-py +pyrsistent==0.17.3 + # via 
jsonschema +pytest-custom-exit-code==0.3.0 + # via -r requirements/pytest.txt +pytest-helpers-namespace==2021.4.29 + # via + # -r requirements/pytest.txt + # pytest-salt-factories + # pytest-shell-utilities +pytest-httpserver==1.0.8 + # via -r requirements/pytest.txt +pytest-salt-factories==1.0.0rc27 + # via -r requirements/pytest.txt +pytest-shell-utilities==1.8.0 + # via pytest-salt-factories +pytest-skip-markers==1.5.0 + # via + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pytest-subtests==0.4.0 + # via -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via pytest-salt-factories +pytest-timeout==1.4.2 + # via -r requirements/pytest.txt +pytest==7.2.0 + # via + # -r requirements/pytest.txt + # pytest-custom-exit-code + # pytest-helpers-namespace + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-subtests + # pytest-system-statistics + # pytest-timeout +python-consul==1.1.0 + # via -r requirements/static/ci/linux.in +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/static/pkg/linux.in + # botocore + # croniter + # kubernetes + # moto + # vcert +python-etcd==0.4.5 + # via -r requirements/static/ci/common.in +python-gnupg==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/static/pkg/linux.in +python-telegram-bot==13.7 + # via -r requirements/static/ci/linux.in +pytz==2022.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # apscheduler + # moto + # python-telegram-bot + # tempora + # twilio +pyvmomi==6.7.1.2018.12 + # via -r requirements/static/ci/common.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # ansible-core + # clustershell + # kubernetes + # pytest-salt-factories + # yamllint +pyzmq==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/zeromq.txt + # 
pytest-salt-factories +redis-py-cluster==2.1.3 + # via -r requirements/static/ci/linux.in +redis==3.5.3 + # via redis-py-cluster +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # apache-libcloud + # docker + # etcd3-py + # kubernetes + # moto + # python-consul + # pyvmomi + # responses + # twilio + # vcert +resolvelib==0.5.4 + # via ansible-core +responses==0.10.6 + # via moto +rfc3987==1.3.8 + # via -r requirements/static/ci/common.in +rpm-vercmp==0.1.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/static/pkg/linux.in +rsa==4.7.2 + # via google-auth +s3transfer==0.5.2 + # via boto3 +semantic-version==2.9.0 + # via etcd3-py +setproctitle==1.3.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/static/pkg/linux.in +six==1.16.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # apscheduler + # bcrypt + # cassandra-driver + # cheroot + # etcd3-py + # genshi + # geomet + # jsonschema + # kazoo + # kubernetes + # more-itertools + # paramiko + # python-consul + # python-dateutil + # pyvmomi + # responses + # vcert + # virtualenv + # websocket-client +slack-bolt==1.15.5 + # via -r requirements/static/ci/linux.in +slack-sdk==3.19.5 + # via slack-bolt +smmap==4.0.0 + # via gitdb +sqlparse==0.4.4 + # via -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via -r requirements/static/ci/common.in +tempora==4.1.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # portend +timelib==0.2.5 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/static/pkg/linux.in +toml==0.10.2 + # via -r requirements/static/ci/common.in +tornado==6.1 + # via python-telegram-bot +twilio==7.9.2 + # via -r requirements/static/ci/linux.in +typing-extensions==4.8.0 + # via + # pytest-shell-utilities + # pytest-system-statistics +tzlocal==3.0 + # via apscheduler 
+urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # botocore + # docker + # kubernetes + # python-etcd + # requests +vcert==0.7.4 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +virtualenv==20.7.2 + # via + # -r requirements/static/ci/common.in + # pytest-salt-factories +watchdog==0.10.3 + # via -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # docker + # kubernetes +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # moto + # pytest-httpserver +xmltodict==0.12.0 + # via moto +yamllint==1.26.3 + # via -r requirements/static/ci/linux.in +yarl==1.7.2 + # via aiohttp +zc.lockfile==1.4 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # cherrypy +zipp==3.6.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.11/tools-virustotal.txt b/requirements/static/ci/py3.11/tools-virustotal.txt new file mode 100644 index 00000000000..1b0f1bd5b8e --- /dev/null +++ b/requirements/static/ci/py3.11/tools-virustotal.txt @@ -0,0 +1,28 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --output-file=requirements/static/ci/py3.11/tools-virustotal.txt requirements/static/ci/tools-virustotal.in +# +certifi==2023.7.22 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # requests +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # requests +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # requests +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # virustotal3 +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # requests +virustotal3==1.0.8 + # via -r requirements/static/ci/tools-virustotal.in diff --git 
a/requirements/static/ci/py3.11/tools.txt b/requirements/static/ci/py3.11/tools.txt index 771b449b952..06046989a38 100644 --- a/requirements/static/ci/py3.11/tools.txt +++ b/requirements/static/ci/py3.11/tools.txt @@ -15,44 +15,62 @@ botocore==1.24.46 # boto3 # s3transfer certifi==2023.07.22 - # via requests -charset-normalizer==3.0.1 - # via requests + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # requests +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # requests commonmark==0.9.1 # via rich idna==3.2 - # via requests + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # requests jinja2==3.1.2 - # via -r requirements/static/ci/tools.in + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/static/ci/tools.in jmespath==1.0.1 # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt # boto3 # botocore markupsafe==2.1.2 - # via jinja2 -packaging==23.0 - # via -r requirements/static/ci/tools.in + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # jinja2 +packaging==22.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/static/ci/tools.in pygments==2.13.0 # via rich -python-dateutil==2.8.2 - # via botocore -python-tools-scripts==0.18.1 +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # botocore +python-tools-scripts==0.18.3 # via -r requirements/static/ci/tools.in pyyaml==6.0.1 - # via -r requirements/static/ci/tools.in + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/static/ci/tools.in requests==2.31.0 # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt # python-tools-scripts - # virustotal3 rich==12.5.1 # via python-tools-scripts s3transfer==0.5.2 # via boto3 six==1.16.0 - # via python-dateutil + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # python-dateutil urllib3==1.26.18 # via + # -c 
requirements/static/ci/../pkg/py3.11/linux.txt # botocore # requests -virustotal3==1.0.8 - # via -r requirements/static/ci/tools.in diff --git a/requirements/static/ci/py3.11/windows-crypto.txt b/requirements/static/ci/py3.11/windows-crypto.txt new file mode 100644 index 00000000000..25f318a71ba --- /dev/null +++ b/requirements/static/ci/py3.11/windows-crypto.txt @@ -0,0 +1,12 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/windows-crypto.txt requirements/static/ci/crypto.in +# +m2crypto==0.37.1 + # via -r requirements/static/ci/crypto.in +parameterized==0.8.1 + # via m2crypto +pycryptodome==3.10.1 + # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.11/windows.txt b/requirements/static/ci/py3.11/windows.txt new file mode 100644 index 00000000000..1565296a17f --- /dev/null +++ b/requirements/static/ci/py3.11/windows.txt @@ -0,0 +1,499 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/windows.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/windows.in requirements/static/pkg/windows.in requirements/windows.txt +# +aiohttp==3.8.5 + # via etcd3-py +aiosignal==1.3.1 + # via aiohttp +async-timeout==4.0.2 + # via aiohttp +attrs==23.1.0 + # via + # aiohttp + # jsonschema + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +bcrypt==4.0.1 + # via -r requirements/static/ci/common.in +boto3==1.21.46 + # via + # -r requirements/static/ci/common.in + # moto +boto==2.49.0 + # via -r requirements/static/ci/common.in +botocore==1.24.46 + # via + # boto3 + # moto + # s3transfer +cachetools==3.1.0 + # via google-auth +cassandra-driver==3.23.0 + # via -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c 
requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/static/ci/common.in + # -r requirements/windows.txt + # kubernetes + # requests +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/static/ci/common.in + # -r requirements/windows.txt + # clr-loader + # cryptography + # pygit2 + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # aiohttp + # requests +cheetah3==3.2.6.post1 + # via -r requirements/static/ci/common.in +cheroot==8.5.2 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # cherrypy +cherrypy==18.6.1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/static/ci/common.in + # -r requirements/windows.txt +click==7.1.2 + # via geomet +clr-loader==0.2.4 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # pythonnet +clustershell==1.8.3 + # via -r requirements/static/ci/common.in +colorama==0.4.1 + # via pytest +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt +cryptography==41.0.4 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt + # etcd3-py + # moto + # pyopenssl + # requests-ntlm +distlib==0.3.6 + # via virtualenv +distro==1.5.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # pytest-skip-markers +dmidecode==0.9.0 + # via -r requirements/static/ci/windows.in +dnspython==1.16.0 + # via + # -r requirements/static/ci/common.in + # python-etcd +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 + # via -r requirements/static/ci/common.in +filelock==3.8.0 + # via virtualenv +flaky==3.7.0 + # via -r requirements/pytest.txt +frozenlist==1.3.3 + # via + # aiohttp + # aiosignal +genshi==0.7.5 + # via -r requirements/static/ci/common.in +geomet==0.1.2 + # via cassandra-driver +gitdb==4.0.7 + # via + # -c 
requirements/static/ci/../pkg/py3.11/windows.txt + # gitpython +gitpython==3.1.37 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/static/ci/common.in + # -r requirements/windows.txt +google-auth==2.1.0 + # via kubernetes +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # etcd3-py + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # contextvars +importlib-metadata==6.0.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt +iniconfig==1.0.1 + # via pytest +ioloop==0.1a0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt +ipaddress==1.0.22 + # via kubernetes +jaraco.classes==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # jaraco.collections +jaraco.collections==3.3.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # moto +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore +jsonschema==3.2.0 + # via -r requirements/static/ci/common.in +keyring==5.7.1 + # via -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via -r requirements/static/ci/common.in +looseversion==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt +lxml==4.9.1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt +mako==1.2.2 + # via -r requirements/static/ci/common.in +markupsafe==2.1.2 + # 
via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # jinja2 + # mako + # moto + # werkzeug +mock==5.1.0 + # via -r requirements/pytest.txt +more-itertools==8.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/pytest.txt + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +moto==3.0.1 + # via -r requirements/static/ci/common.in +msgpack==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # pytest-salt-factories +multidict==6.0.2 + # via + # aiohttp + # yarl +ntlm-auth==1.5.0 + # via requests-ntlm +packaging==22.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # docker + # pytest +passlib==1.7.4 + # via -r requirements/static/ci/common.in +patch==1.16 + # via -r requirements/static/ci/windows.in +pathspec==0.10.2 + # via yamllint +pathtools==0.1.2 + # via watchdog +platformdirs==2.5.4 + # via virtualenv +pluggy==0.13.0 + # via pytest +portend==2.6 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # cherrypy +psutil==5.8.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pyasn1-modules==0.2.4 + # via google-auth +pyasn1==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt + # pyasn1-modules + # rsa +pycparser==2.21 ; python_version >= "3.9" + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/static/ci/common.in + # -r requirements/windows.txt + # cffi +pycryptodomex==3.10.1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/crypto.txt +pygit2==1.13.1 + # via -r requirements/static/ci/windows.in +pymssql==2.2.7 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt +pymysql==1.0.2 + # via + 
# -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt +pynacl==1.5.0 + # via -r requirements/static/ci/common.in +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt + # etcd3-py +pyrsistent==0.17.3 + # via jsonschema +pytest-custom-exit-code==0.3.0 + # via -r requirements/pytest.txt +pytest-helpers-namespace==2021.12.29 + # via + # -r requirements/pytest.txt + # pytest-salt-factories + # pytest-shell-utilities +pytest-httpserver==1.0.8 + # via -r requirements/pytest.txt +pytest-salt-factories==1.0.0rc27 + # via -r requirements/pytest.txt +pytest-shell-utilities==1.8.0 + # via pytest-salt-factories +pytest-skip-markers==1.5.0 + # via + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pytest-subtests==0.4.0 + # via -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via pytest-salt-factories +pytest-timeout==2.1.0 + # via -r requirements/pytest.txt +pytest==7.2.0 + # via + # -r requirements/pytest.txt + # pytest-custom-exit-code + # pytest-helpers-namespace + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-subtests + # pytest-system-statistics + # pytest-timeout +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt + # botocore + # kubernetes + # moto +python-etcd==0.4.5 + # via -r requirements/static/ci/common.in +python-gnupg==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt +pythonnet==3.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt +pytz==2022.1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # moto + # tempora +pyvmomi==6.7.1.2018.12 + # via -r requirements/static/ci/common.in +pywin32==305 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt + # docker + # 
pytest-skip-markers + # wmi +pywinrm==0.4.1 + # via -r requirements/static/ci/windows.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # clustershell + # kubernetes + # pytest-salt-factories + # yamllint +pyzmq==25.0.2 ; sys_platform == "win32" + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/zeromq.txt + # pytest-salt-factories +requests-ntlm==1.1.0 + # via pywinrm +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # -r requirements/windows.txt + # docker + # etcd3-py + # kubernetes + # moto + # pyvmomi + # pywinrm + # requests-ntlm + # responses +responses==0.10.6 + # via moto +rfc3987==1.3.8 + # via -r requirements/static/ci/common.in +rsa==4.7.2 + # via google-auth +s3transfer==0.5.2 + # via boto3 +sed==0.3.1 + # via -r requirements/static/ci/windows.in +semantic-version==2.10.0 + # via etcd3-py +setproctitle==1.3.2 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt +six==1.15.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # cassandra-driver + # cheroot + # etcd3-py + # genshi + # geomet + # jsonschema + # kubernetes + # python-dateutil + # pyvmomi + # pywinrm + # responses + # websocket-client +smmap==4.0.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # gitdb +sqlparse==0.4.4 + # via -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via -r requirements/static/ci/common.in +tempora==4.1.1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # portend +timelib==0.2.5 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt +toml==0.10.2 + # via -r requirements/static/ci/common.in +typing-extensions==4.4.0 + # via + # pytest-shell-utilities + # pytest-system-statistics +urllib3==1.26.18 + # via + # -c 
requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt + # botocore + # docker + # kubernetes + # python-etcd + # requests +virtualenv==20.17.0 + # via + # -r requirements/static/ci/common.in + # pytest-salt-factories +watchdog==0.10.3 + # via -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # docker + # kubernetes +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # moto + # pytest-httpserver +wheel==0.38.4 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt +wmi==1.5.1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt +xmltodict==0.12.0 + # via + # moto + # pywinrm +yamllint==1.28.0 + # via -r requirements/static/ci/windows.in +yarl==1.8.1 + # via aiohttp +zc.lockfile==2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # cherrypy +zipp==3.12.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/pkg/py3.11/darwin.txt b/requirements/static/pkg/py3.11/darwin.txt new file mode 100644 index 00000000000..666aeb92e76 --- /dev/null +++ b/requirements/static/pkg/py3.11/darwin.txt @@ -0,0 +1,123 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.11/darwin.txt requirements/darwin.txt requirements/static/pkg/darwin.in +# +apache-libcloud==2.5.0 + # via -r requirements/darwin.txt +certifi==2023.07.22 + # via requests +cffi==1.14.6 + # via cryptography +charset-normalizer==3.2.0 + # via requests +cheroot==8.5.2 + # via cherrypy +cherrypy==18.6.1 + # via -r requirements/darwin.txt +contextvars==2.4 + # via -r requirements/base.txt +cryptography==41.0.4 + # via + # -r requirements/darwin.txt + # pyopenssl +distro==1.5.0 + # via -r 
requirements/base.txt +gitdb==4.0.7 + # via gitpython +gitpython==3.1.37 + # via -r requirements/darwin.txt +idna==3.2 + # via + # -r requirements/darwin.txt + # requests +immutables==0.15 + # via contextvars +importlib-metadata==6.0.0 + # via -r requirements/darwin.txt +jaraco.classes==3.2.1 + # via jaraco.collections +jaraco.collections==3.4.0 + # via cherrypy +jaraco.functools==2.0 + # via + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via jaraco.collections +jinja2==3.1.2 + # via -r requirements/base.txt +jmespath==1.0.1 + # via -r requirements/base.txt +linode-python==1.1.1 + # via -r requirements/darwin.txt +looseversion==1.0.2 + # via -r requirements/base.txt +markupsafe==2.1.2 + # via + # -r requirements/base.txt + # jinja2 +more-itertools==8.2.0 + # via + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +msgpack==1.0.2 + # via -r requirements/base.txt +packaging==22.0 + # via -r requirements/base.txt +portend==2.6 + # via cherrypy +psutil==5.8.0 + # via -r requirements/base.txt +pyasn1==0.4.8 + # via -r requirements/darwin.txt +pycparser==2.21 + # via + # -r requirements/darwin.txt + # cffi +pycryptodomex==3.9.8 + # via -r requirements/crypto.txt +pyopenssl==23.2.0 + # via -r requirements/darwin.txt +python-dateutil==2.8.0 + # via -r requirements/darwin.txt +python-gnupg==0.4.8 + # via -r requirements/darwin.txt +pytz==2022.1 + # via tempora +pyyaml==6.0.1 + # via -r requirements/base.txt +pyzmq==23.2.0 + # via -r requirements/zeromq.txt +requests==2.31.0 + # via + # -r requirements/base.txt + # apache-libcloud + # vultr +setproctitle==1.3.2 + # via -r requirements/darwin.txt +six==1.16.0 + # via + # cheroot + # python-dateutil +smmap==4.0.0 + # via gitdb +tempora==4.1.1 + # via portend +timelib==0.2.5 + # via -r requirements/darwin.txt +urllib3==1.26.18 + # via requests +vultr==1.0.1 + # via -r requirements/darwin.txt +zc.lockfile==2.0 + # via cherrypy +zipp==3.12.0 + # via importlib-metadata + +# The following packages are 
considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/pkg/py3.11/freebsd.txt b/requirements/static/pkg/py3.11/freebsd.txt new file mode 100644 index 00000000000..a722d417d33 --- /dev/null +++ b/requirements/static/pkg/py3.11/freebsd.txt @@ -0,0 +1,107 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.11/freebsd.txt requirements/base.txt requirements/static/pkg/freebsd.in requirements/zeromq.txt +# +certifi==2023.07.22 + # via requests +cffi==1.14.6 + # via cryptography +charset-normalizer==3.2.0 + # via requests +cheroot==8.5.2 + # via cherrypy +cherrypy==18.6.1 + # via -r requirements/static/pkg/freebsd.in +contextvars==2.4 + # via -r requirements/base.txt +cryptography==41.0.4 + # via + # -r requirements/static/pkg/freebsd.in + # pyopenssl +distro==1.5.0 + # via + # -r requirements/base.txt + # -r requirements/static/pkg/freebsd.in +idna==3.2 + # via requests +immutables==0.15 + # via contextvars +importlib-metadata==6.0.0 + # via -r requirements/static/pkg/freebsd.in +jaraco.classes==3.2.1 + # via jaraco.collections +jaraco.collections==3.4.0 + # via cherrypy +jaraco.functools==2.0 + # via + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via jaraco.collections +jinja2==3.1.2 + # via -r requirements/base.txt +jmespath==1.0.1 + # via -r requirements/base.txt +looseversion==1.0.2 + # via -r requirements/base.txt +markupsafe==2.1.2 + # via + # -r requirements/base.txt + # jinja2 +more-itertools==5.0.0 + # via + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +msgpack==1.0.2 + # via -r requirements/base.txt +packaging==22.0 + # via -r requirements/base.txt +portend==2.4 + # via cherrypy +psutil==5.8.0 + # via -r requirements/base.txt +pycparser==2.21 ; python_version >= "3.9" + # via + # -r requirements/static/pkg/freebsd.in + # cffi +pycryptodomex==3.9.8 + # via -r requirements/crypto.txt 
+pyopenssl==23.2.0 + # via -r requirements/static/pkg/freebsd.in +python-dateutil==2.8.1 + # via -r requirements/static/pkg/freebsd.in +python-gnupg==0.4.8 + # via -r requirements/static/pkg/freebsd.in +pytz==2022.1 + # via tempora +pyyaml==6.0.1 + # via -r requirements/base.txt +pyzmq==23.2.0 + # via -r requirements/zeromq.txt +requests==2.31.0 + # via -r requirements/base.txt +setproctitle==1.3.2 + # via -r requirements/static/pkg/freebsd.in +six==1.16.0 + # via + # cheroot + # more-itertools + # python-dateutil +tempora==4.1.1 + # via portend +timelib==0.2.5 + # via -r requirements/static/pkg/freebsd.in +urllib3==1.26.18 + # via requests +zc.lockfile==1.4 + # via cherrypy +zipp==3.12.0 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/pkg/py3.11/linux.txt b/requirements/static/pkg/py3.11/linux.txt new file mode 100644 index 00000000000..9b21c922da3 --- /dev/null +++ b/requirements/static/pkg/py3.11/linux.txt @@ -0,0 +1,107 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.11/linux.txt requirements/base.txt requirements/static/pkg/linux.in requirements/zeromq.txt +# +certifi==2023.07.22 + # via requests +cffi==1.14.6 + # via cryptography +charset-normalizer==3.2.0 + # via requests +cheroot==8.5.2 + # via cherrypy +cherrypy==18.6.1 + # via -r requirements/static/pkg/linux.in +contextvars==2.4 + # via -r requirements/base.txt +cryptography==41.0.4 + # via + # -r requirements/static/pkg/linux.in + # pyopenssl +distro==1.5.0 + # via -r requirements/base.txt +idna==3.2 + # via requests +immutables==0.15 + # via contextvars +importlib-metadata==6.0.0 + # via -r requirements/static/pkg/linux.in +jaraco.classes==3.2.1 + # via jaraco.collections +jaraco.collections==3.4.0 + # via cherrypy +jaraco.functools==2.0 + # via + # cheroot + # jaraco.text + # tempora 
+jaraco.text==3.5.1 + # via jaraco.collections +jinja2==3.1.2 + # via -r requirements/base.txt +jmespath==1.0.1 + # via -r requirements/base.txt +looseversion==1.0.2 + # via -r requirements/base.txt +markupsafe==2.1.2 + # via + # -r requirements/base.txt + # jinja2 +more-itertools==5.0.0 + # via + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +msgpack==1.0.2 + # via -r requirements/base.txt +packaging==22.0 + # via -r requirements/base.txt +portend==2.4 + # via cherrypy +psutil==5.8.0 + # via -r requirements/base.txt +pycparser==2.21 ; python_version >= "3.9" + # via + # -r requirements/static/pkg/linux.in + # cffi +pycryptodomex==3.9.8 + # via -r requirements/crypto.txt +pyopenssl==23.2.0 + # via -r requirements/static/pkg/linux.in +python-dateutil==2.8.1 + # via -r requirements/static/pkg/linux.in +python-gnupg==0.4.8 + # via -r requirements/static/pkg/linux.in +pytz==2022.1 + # via tempora +pyyaml==6.0.1 + # via -r requirements/base.txt +pyzmq==23.2.0 + # via -r requirements/zeromq.txt +requests==2.31.0 + # via -r requirements/base.txt +rpm-vercmp==0.1.2 + # via -r requirements/static/pkg/linux.in +setproctitle==1.3.2 + # via -r requirements/static/pkg/linux.in +six==1.16.0 + # via + # cheroot + # more-itertools + # python-dateutil +tempora==4.1.1 + # via portend +timelib==0.2.5 + # via -r requirements/static/pkg/linux.in +urllib3==1.26.18 + # via requests +zc.lockfile==1.4 + # via cherrypy +zipp==3.6.0 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/pkg/py3.11/windows.txt b/requirements/static/pkg/py3.11/windows.txt new file mode 100644 index 00000000000..9beb774218d --- /dev/null +++ b/requirements/static/pkg/py3.11/windows.txt @@ -0,0 +1,141 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.11/windows.txt requirements/static/pkg/windows.in 
requirements/windows.txt +# +certifi==2023.07.22 + # via + # -r requirements/windows.txt + # requests +cffi==1.14.6 + # via + # -r requirements/windows.txt + # clr-loader + # cryptography +charset-normalizer==3.2.0 + # via requests +cheroot==8.5.2 + # via cherrypy +cherrypy==18.6.1 + # via -r requirements/windows.txt +clr-loader==0.2.4 + # via pythonnet +contextvars==2.4 + # via -r requirements/base.txt +cryptography==41.0.4 + # via + # -r requirements/windows.txt + # pyopenssl +distro==1.5.0 + # via -r requirements/base.txt +gitdb==4.0.7 + # via gitpython +gitpython==3.1.37 + # via -r requirements/windows.txt +idna==3.2 + # via requests +immutables==0.15 + # via contextvars +importlib-metadata==6.0.0 + # via -r requirements/windows.txt +ioloop==0.1a0 + # via -r requirements/windows.txt +jaraco.classes==3.2.1 + # via jaraco.collections +jaraco.collections==3.3.0 + # via cherrypy +jaraco.functools==2.0 + # via + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.0 + # via jaraco.collections +jinja2==3.1.2 + # via -r requirements/base.txt +jmespath==1.0.1 + # via -r requirements/base.txt +looseversion==1.0.2 + # via -r requirements/base.txt +lxml==4.9.1 + # via -r requirements/windows.txt +markupsafe==2.1.2 + # via + # -r requirements/base.txt + # jinja2 +more-itertools==8.2.0 + # via + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +msgpack==1.0.2 + # via -r requirements/base.txt +packaging==22.0 + # via -r requirements/base.txt +portend==2.6 + # via cherrypy +psutil==5.8.0 + # via -r requirements/base.txt +pyasn1==0.4.8 + # via -r requirements/windows.txt +pycparser==2.21 + # via + # -r requirements/windows.txt + # cffi +pycryptodomex==3.10.1 + # via -r requirements/crypto.txt +pymssql==2.2.7 + # via -r requirements/windows.txt +pymysql==1.0.2 + # via -r requirements/windows.txt +pyopenssl==23.2.0 + # via -r requirements/windows.txt +python-dateutil==2.8.1 + # via -r requirements/windows.txt +python-gnupg==0.4.8 + # via -r 
requirements/windows.txt +pythonnet==3.0.1 + # via -r requirements/windows.txt +pytz==2022.1 + # via tempora +pywin32==305 + # via + # -r requirements/windows.txt + # wmi +pyyaml==6.0.1 + # via -r requirements/base.txt +pyzmq==25.0.2 ; sys_platform == "win32" + # via -r requirements/zeromq.txt +requests==2.31.0 + # via + # -r requirements/base.txt + # -r requirements/windows.txt +setproctitle==1.3.2 + # via -r requirements/windows.txt +six==1.15.0 + # via + # cheroot + # python-dateutil +smmap==4.0.0 + # via gitdb +tempora==4.1.1 + # via portend +timelib==0.2.5 + # via -r requirements/windows.txt +urllib3==1.26.18 + # via + # -r requirements/windows.txt + # requests +wheel==0.38.4 + # via -r requirements/windows.txt +wmi==1.5.1 + # via -r requirements/windows.txt +zc.lockfile==2.0 + # via cherrypy +zipp==3.12.0 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools From a0127c04f7c541247fead3da98147835550f2561 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 16 Nov 2023 18:49:28 +0000 Subject: [PATCH 183/196] Set `TOOLS_VIRTUALENV_CACHE_SEED` Signed-off-by: Pedro Algarvio --- .github/actions/setup-python-tools-scripts/action.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/actions/setup-python-tools-scripts/action.yml b/.github/actions/setup-python-tools-scripts/action.yml index 7bba4321c8c..e7b15b679ef 100644 --- a/.github/actions/setup-python-tools-scripts/action.yml +++ b/.github/actions/setup-python-tools-scripts/action.yml @@ -37,6 +37,12 @@ runs: with: python-binary: python3 + - name: Set `TOOLS_VIRTUALENV_CACHE_SEED` + shell: bash + run: | + TOOLS_VIRTUALENV_CACHE_SEED="${{ inputs.cache-prefix }}|${{ steps.get-python-version.outputs.version-sha256sum }}|${{ inputs.cache-suffix || '' }}" + echo "TOOLS_VIRTUALENV_CACHE_SEED=${TOOLS_VIRTUALENV_CACHE_SEED}" | tee -a "${GITHUB_ENV}" + - name: Restore Python Tools Virtualenvs Cache uses: actions/cache@v3 with: From 
7294d8bc72e7ff750e68339f8dfea38661934864 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Fri, 17 Nov 2023 20:05:33 +0000 Subject: [PATCH 184/196] Install tools in a virtualenv Signed-off-by: Pedro Algarvio --- .../setup-python-tools-scripts/action.yml | 25 +++++++++++++------ 1 file changed, 18 insertions(+), 7 deletions(-) diff --git a/.github/actions/setup-python-tools-scripts/action.yml b/.github/actions/setup-python-tools-scripts/action.yml index e7b15b679ef..845f3383800 100644 --- a/.github/actions/setup-python-tools-scripts/action.yml +++ b/.github/actions/setup-python-tools-scripts/action.yml @@ -37,27 +37,38 @@ runs: with: python-binary: python3 - - name: Set `TOOLS_VIRTUALENV_CACHE_SEED` + - name: Define Cache Hash + id: venv-hash shell: bash run: | - TOOLS_VIRTUALENV_CACHE_SEED="${{ inputs.cache-prefix }}|${{ steps.get-python-version.outputs.version-sha256sum }}|${{ inputs.cache-suffix || '' }}" - echo "TOOLS_VIRTUALENV_CACHE_SEED=${TOOLS_VIRTUALENV_CACHE_SEED}" | tee -a "${GITHUB_ENV}" + VENV_NAME_HASH=$(echo "${{ inputs.cache-prefix }}|${{ github.workflow }}|${{ + steps.get-python-version.outputs.version-sha256sum }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}${{ + inputs.cache-suffix || '' }}" | sha256sum | cut -d ' ' -f 1) + echo "TOOLS_VIRTUALENV_CACHE_SEED=$VENV_NAME_HASH" | tee -a "${GITHUB_ENV}" + echo "venv-hash=$VENV_NAME_HASH" | tee -a "${GITHUB_OUTPUT}" + + - uses: ./.github/actions/cached-virtualenv + id: tools-virtualenv + with: + name: tools.${{ steps.venv-hash.outputs.venv-hash }} + cache-seed: tools|${{ steps.venv-hash.outputs.venv-hash }} - name: Restore Python Tools Virtualenvs Cache uses: actions/cache@v3 with: path: ${{ inputs.cwd }}/.tools-venvs - key: ${{ inputs.cache-prefix }}|${{ github.workflow }}|tools-venvs|${{ steps.get-python-version.outputs.version-sha256sum }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}${{ inputs.cache-suffix && format('|{0}', inputs.cache-suffix) || '' }} + key: ${{ 
inputs.cache-prefix }}|${{ steps.venv-hash.outputs.venv-hash }} - name: Install 'python-tools-scripts' shell: bash working-directory: ${{ inputs.cwd }} run: | - (python3 -m pip install --help | grep break-system-packages > /dev/null 2>&1) && exitcode=0 || exitcode=1 + PYTHON_EXE=${{ steps.tools-virtualenv.outputs.python-executable }} + (${PYTHON_EXE} -m pip install --help | grep break-system-packages > /dev/null 2>&1) && exitcode=0 || exitcode=1 if [ $exitcode -eq 0 ]; then - python3 -m pip install --break-system-packages -r requirements/static/ci/py${{ steps.get-python-version.outputs.version }}/tools.txt + ${PYTHON_EXE} -m pip install --break-system-packages -r requirements/static/ci/py${{ steps.get-python-version.outputs.version }}/tools.txt else - python3 -m pip install -r requirements/static/ci/py${{ steps.get-python-version.outputs.version }}/tools.txt + ${PYTHON_EXE} -m pip install -r requirements/static/ci/py${{ steps.get-python-version.outputs.version }}/tools.txt fi - name: Get 'python-tools-scripts' Version From a2092541e6c360bcfd53855aa984cd3879295705 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Fri, 17 Nov 2023 18:22:13 +0000 Subject: [PATCH 185/196] Fix the python executable path for the cached-virtualenv action output Signed-off-by: Pedro Algarvio --- .github/actions/cached-virtualenv/action.yml | 36 ++++++++++++++++---- 1 file changed, 29 insertions(+), 7 deletions(-) diff --git a/.github/actions/cached-virtualenv/action.yml b/.github/actions/cached-virtualenv/action.yml index 23ac4a410ff..7620e52c399 100644 --- a/.github/actions/cached-virtualenv/action.yml +++ b/.github/actions/cached-virtualenv/action.yml @@ -42,19 +42,29 @@ runs: run: | echo "cache-key=${{ inputs.cache-seed }}|${{ runner.os }}|${{ runner.arch }}|cached-venv|${{ steps.get-python-version.outputs.version }}|${{ inputs.name }}" >> "${GITHUB_OUTPUT}" + - name: Define VirtualEnv path + shell: bash + id: virtualenv-path + run: | + cd ${{ github.workspace }} > /dev/null 2>&1 || 
true + VENVS_PATH=$(echo ".venvs/py${{ steps.get-python-version.outputs.version }}" | python3 -c 'import sys, pathlib; sys.stdout.write(pathlib.Path.cwd().joinpath(sys.stdin.read()).as_posix())') + echo "venvs-path=$VENVS_PATH" | tee -a "$GITHUB_OUTPUT" + VENV_PATH=$(echo ".venvs/py${{ steps.get-python-version.outputs.version }}/${{ inputs.name }}" | python3 -c 'import sys, pathlib; sys.stdout.write(pathlib.Path.cwd().joinpath(sys.stdin.read()).as_posix())') + echo "venv-path=$VENV_PATH" | tee -a "$GITHUB_OUTPUT" + - name: Cache VirtualEnv id: cache-virtualenv uses: actions/cache@v3 with: key: ${{ steps.setup-cache-key.outputs.cache-key }} - path: ${{ github.workspace }}/.venvs/py${{ steps.get-python-version.outputs.version }}/${{ inputs.name }} + path: ${{ steps.virtualenv-path.outputs.venv-path }} - name: Create Virtualenv shell: bash if: ${{ steps.cache-virtualenv.outputs.cache-hit != 'true' }} run: | - mkdir -p ${{ github.workspace }}/.venvs/py${{ steps.get-python-version.outputs.version }} - python3 -m venv --upgrade ${{ github.workspace }}/.venvs/py${{ steps.get-python-version.outputs.version }}/${{ inputs.name }} + mkdir -p ${{ steps.virtualenv-path.outputs.venvs-path }} + python3 -m venv --upgrade ${{ steps.virtualenv-path.outputs.venv-path }} - name: Define python executable output shell: bash @@ -62,10 +72,22 @@ runs: run: | shopt -s nocasematch if [[ "${{ runner.os }}" =~ "win" ]]; then - BIN_DIR="${{ github.workspace }}/.venvs/py${{ steps.get-python-version.outputs.version }}/${{ inputs.name }}/Scripts" + BIN_DIR="${{ steps.virtualenv-path.outputs.venv-path }}/Scripts" + PY_EXE="$BIN_DIR/python.exe" else - BIN_DIR="${{ github.workspace }}/.venvs/py${{ steps.get-python-version.outputs.version }}/${{ inputs.name }}/bin" + BIN_DIR="${{ steps.virtualenv-path.outputs.venv-path }}/bin" + PY_EXE="$BIN_DIR/python3" + if [ ! -f "$PY_EXE" ]; then + echo "The '${PY_EXE}' binary does not exist. Setting it to '$BIN_DIR/python' ..." 
+ PY_EXE="$BIN_DIR/python" + fi + if [ ! -f "$PY_EXE" ]; then + echo "The '${PY_EXE}' binary does not exist. Showing the tree output for '${BIN_DIR}' ..." + tree -a "$BIN_DIR" + exit 1 + fi fi shopt -u nocasematch - echo "python-executable=$BIN_DIR/python" >> "${GITHUB_OUTPUT}" - echo "${BIN_DIR}" >> "${GITHUB_PATH}" + $PY_EXE --version + echo "python-executable=$PY_EXE" | tee -a "${GITHUB_OUTPUT}" + echo "${BIN_DIR}" | tee -a "${GITHUB_PATH}" From 3ce935eb21f182c96cd10c0b1458eb323f35d660 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Fri, 17 Nov 2023 20:31:44 +0000 Subject: [PATCH 186/196] Setup relenv after python-tools-scripts Signed-off-by: Pedro Algarvio --- .github/workflows/build-deps-onedir-linux.yml | 9 ++++++--- .github/workflows/build-deps-onedir-macos.yml | 9 ++++++--- .github/workflows/build-deps-onedir-windows.yml | 9 ++++++--- 3 files changed, 18 insertions(+), 9 deletions(-) diff --git a/.github/workflows/build-deps-onedir-linux.yml b/.github/workflows/build-deps-onedir-linux.yml index ebd686defdf..a5718071f0e 100644 --- a/.github/workflows/build-deps-onedir-linux.yml +++ b/.github/workflows/build-deps-onedir-linux.yml @@ -59,6 +59,12 @@ jobs: - uses: actions/checkout@v4 + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-seed }} + cache-suffix: build-deps-linux-${{ matrix.arch }} + - name: Setup Relenv id: setup-relenv uses: ./.github/actions/setup-relenv @@ -69,9 +75,6 @@ jobs: cache-seed: ${{ inputs.cache-seed }} python-version: ${{ inputs.python-version }} - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - - name: Install Salt Packaging Dependencies into Relenv Onedir uses: ./.github/actions/build-onedir-deps with: diff --git a/.github/workflows/build-deps-onedir-macos.yml b/.github/workflows/build-deps-onedir-macos.yml index 033a650d0a4..9f0dbc4ec75 100644 --- a/.github/workflows/build-deps-onedir-macos.yml +++ 
b/.github/workflows/build-deps-onedir-macos.yml @@ -61,6 +61,12 @@ jobs: with: python-version: "3.10" + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-seed }} + cache-suffix: build-deps-macos + - name: Setup Relenv id: setup-relenv uses: ./.github/actions/setup-relenv @@ -71,9 +77,6 @@ jobs: cache-seed: ${{ inputs.cache-seed }} python-version: ${{ inputs.python-version }} - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - - name: Install Salt Packaging Dependencies into Relenv Onedir uses: ./.github/actions/build-onedir-deps with: diff --git a/.github/workflows/build-deps-onedir-windows.yml b/.github/workflows/build-deps-onedir-windows.yml index bb7538a6ef0..fe0fb2d8253 100644 --- a/.github/workflows/build-deps-onedir-windows.yml +++ b/.github/workflows/build-deps-onedir-windows.yml @@ -62,6 +62,12 @@ jobs: with: python-version: "3.10" + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-seed }} + cache-suffix: build-deps-windows-${{ matrix.arch }} + - name: Setup Relenv id: setup-relenv uses: ./.github/actions/setup-relenv @@ -72,9 +78,6 @@ jobs: cache-seed: ${{ inputs.cache-seed }} python-version: ${{ inputs.python-version }} - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - - name: Install Salt Packaging Dependencies into Relenv Onedir uses: ./.github/actions/build-onedir-deps with: From 234693b4aa7f54666ad84e068342ff931cc61f92 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Sat, 18 Nov 2023 19:14:11 +0000 Subject: [PATCH 187/196] Drop the `cache-suffix` from the `setup-python-tools-scripts` action Signed-off-by: Pedro Algarvio --- .github/actions/setup-python-tools-scripts/action.yml | 8 ++------ .github/workflows/build-deps-onedir-linux.yml | 3 +-- .github/workflows/build-deps-onedir-macos.yml | 3 +-- 
.github/workflows/build-deps-onedir-windows.yml | 3 +-- .github/workflows/ci.yml | 9 +++------ .github/workflows/nightly.yml | 9 +++------ .github/workflows/scheduled.yml | 9 +++------ .github/workflows/staging.yml | 6 ++---- .github/workflows/templates/ci.yml.jinja | 9 +++------ 9 files changed, 19 insertions(+), 40 deletions(-) diff --git a/.github/actions/setup-python-tools-scripts/action.yml b/.github/actions/setup-python-tools-scripts/action.yml index 845f3383800..eec3c4e4e96 100644 --- a/.github/actions/setup-python-tools-scripts/action.yml +++ b/.github/actions/setup-python-tools-scripts/action.yml @@ -7,10 +7,6 @@ inputs: required: true type: string description: Seed used to invalidate caches - cache-suffix: - required: false - type: string - description: Seed used to invalidate caches cwd: type: string description: The directory the salt checkout is located in @@ -42,8 +38,8 @@ runs: shell: bash run: | VENV_NAME_HASH=$(echo "${{ inputs.cache-prefix }}|${{ github.workflow }}|${{ - steps.get-python-version.outputs.version-sha256sum }}|${{ hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}${{ - inputs.cache-suffix || '' }}" | sha256sum | cut -d ' ' -f 1) + steps.get-python-version.outputs.version-sha256sum }}|${{ + hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}" | sha256sum | cut -d ' ' -f 1) echo "TOOLS_VIRTUALENV_CACHE_SEED=$VENV_NAME_HASH" | tee -a "${GITHUB_ENV}" echo "venv-hash=$VENV_NAME_HASH" | tee -a "${GITHUB_OUTPUT}" diff --git a/.github/workflows/build-deps-onedir-linux.yml b/.github/workflows/build-deps-onedir-linux.yml index a5718071f0e..8d149c46261 100644 --- a/.github/workflows/build-deps-onedir-linux.yml +++ b/.github/workflows/build-deps-onedir-linux.yml @@ -62,8 +62,7 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts with: - cache-prefix: ${{ inputs.cache-seed }} - cache-suffix: build-deps-linux-${{ matrix.arch }} + cache-prefix: ${{ inputs.cache-seed }}-build-deps-linux-${{ 
matrix.arch }} - name: Setup Relenv id: setup-relenv diff --git a/.github/workflows/build-deps-onedir-macos.yml b/.github/workflows/build-deps-onedir-macos.yml index 9f0dbc4ec75..02cf21c5365 100644 --- a/.github/workflows/build-deps-onedir-macos.yml +++ b/.github/workflows/build-deps-onedir-macos.yml @@ -64,8 +64,7 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts with: - cache-prefix: ${{ inputs.cache-seed }} - cache-suffix: build-deps-macos + cache-prefix: ${{ inputs.cache-seed }}-build-deps-macos - name: Setup Relenv id: setup-relenv diff --git a/.github/workflows/build-deps-onedir-windows.yml b/.github/workflows/build-deps-onedir-windows.yml index fe0fb2d8253..af741e06224 100644 --- a/.github/workflows/build-deps-onedir-windows.yml +++ b/.github/workflows/build-deps-onedir-windows.yml @@ -65,8 +65,7 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts with: - cache-prefix: ${{ inputs.cache-seed }} - cache-suffix: build-deps-windows-${{ matrix.arch }} + cache-prefix: ${{ inputs.cache-seed }}-build-deps-windows-${{ matrix.arch }} - name: Setup Relenv id: setup-relenv diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 34c4d8611bd..bb17af3705c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -279,8 +279,7 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts with: - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - cache-suffix: changelog + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-changelog - name: Setup Salt Version id: setup-salt-version @@ -400,8 +399,7 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts with: - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - cache-suffix: build + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-build - name: Setup Salt Version id: setup-salt-version 
@@ -2741,8 +2739,7 @@ jobs: id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts with: - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - cache-suffix: coverage + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-coverage - name: Install Nox run: | diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 34e055d4974..96403e91c3c 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -323,8 +323,7 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts with: - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - cache-suffix: changelog + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-changelog - name: Setup Salt Version id: setup-salt-version @@ -449,8 +448,7 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts with: - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - cache-suffix: build + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-build - name: Setup Salt Version id: setup-salt-version @@ -2802,8 +2800,7 @@ jobs: id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts with: - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - cache-suffix: coverage + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-coverage - name: Install Nox run: | diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index a267fcb43f7..c373dbe34fd 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -313,8 +313,7 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts with: - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - cache-suffix: changelog + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-changelog - name: Setup Salt Version id: setup-salt-version @@ -434,8 +433,7 @@ jobs: 
- name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts with: - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - cache-suffix: build + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-build - name: Setup Salt Version id: setup-salt-version @@ -2775,8 +2773,7 @@ jobs: id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts with: - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - cache-suffix: coverage + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-coverage - name: Install Nox run: | diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 88c297dd5d0..a34f7f177d3 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -317,8 +317,7 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts with: - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - cache-suffix: changelog + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-changelog - name: Setup Salt Version id: setup-salt-version @@ -439,8 +438,7 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts with: - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - cache-suffix: build + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-build - name: Setup Salt Version id: setup-salt-version diff --git a/.github/workflows/templates/ci.yml.jinja b/.github/workflows/templates/ci.yml.jinja index b3d771891e1..b02604c40d8 100644 --- a/.github/workflows/templates/ci.yml.jinja +++ b/.github/workflows/templates/ci.yml.jinja @@ -71,8 +71,7 @@ - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts with: - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - cache-suffix: changelog + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-changelog - name: Setup Salt Version id: 
setup-salt-version @@ -218,8 +217,7 @@ - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts with: - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - cache-suffix: build + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-build - name: Setup Salt Version id: setup-salt-version @@ -330,8 +328,7 @@ id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts with: - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - cache-suffix: coverage + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-coverage - name: Install Nox run: | From 8ec9843bb39a8beef10e8c8a6579c9707913d526 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Fri, 24 Nov 2023 22:02:52 +0000 Subject: [PATCH 188/196] Fix/De-complicate the performance test scenarios Signed-off-by: Pedro Algarvio --- .../pytests/scenarios/performance/conftest.py | 37 +-- .../scenarios/performance/test_performance.py | 260 +++++++++--------- 2 files changed, 132 insertions(+), 165 deletions(-) diff --git a/tests/pytests/scenarios/performance/conftest.py b/tests/pytests/scenarios/performance/conftest.py index d156535ff1d..13fbb831d7c 100644 --- a/tests/pytests/scenarios/performance/conftest.py +++ b/tests/pytests/scenarios/performance/conftest.py @@ -5,17 +5,10 @@ import logging import shutil import pytest -from saltfactories.daemons.container import Container +from saltfactories.utils import random_string -import salt.utils.path from tests.support.sminion import create_sminion -docker = pytest.importorskip("docker") -# pylint: disable=3rd-party-module-not-gated,no-name-in-module -from docker.errors import DockerException # isort:skip - -# pylint: enable=3rd-party-module-not-gated,no-name-in-module - pytestmark = [ pytest.mark.slow_test, pytest.mark.skip_if_binaries_missing("docker"), @@ -26,36 +19,18 @@ log = logging.getLogger(__name__) @pytest.fixture(scope="session") -def docker_client(): - if docker is None: - 
pytest.skip("The docker python library is not available") - - if salt.utils.path.which("docker") is None: - pytest.skip("The docker binary is not available") - try: - client = docker.from_env() - connectable = Container.client_connectable(client) - if connectable is not True: # pragma: no cover - pytest.skip(connectable) - return client - except DockerException: - pytest.skip("Failed to get a connection to docker running on the system") +def docker_network_name(): + return random_string("salt-perf-", uppercase=False) @pytest.fixture(scope="session") -def network(): - return "salt-performance" - - -@pytest.fixture(scope="session") -def host_docker_network_ip_address(network): +def host_docker_network_ip_address(docker_network_name): sminion = create_sminion() - network_name = network network_subnet = "10.0.21.0/24" network_gateway = "10.0.21.1" try: ret = sminion.states.docker_network.present( - network_name, + docker_network_name, driver="bridge", ipam_pools=[{"subnet": network_subnet, "gateway": network_gateway}], ) @@ -66,7 +41,7 @@ def host_docker_network_ip_address(network): pytest.skip("Failed to create docker network: {}".format(ret)) yield network_gateway finally: - sminion.states.docker_network.absent(network_name) + sminion.states.docker_network.absent(docker_network_name) @pytest.fixture(scope="session") diff --git a/tests/pytests/scenarios/performance/test_performance.py b/tests/pytests/scenarios/performance/test_performance.py index 85b92ed986e..22aad753bda 100644 --- a/tests/pytests/scenarios/performance/test_performance.py +++ b/tests/pytests/scenarios/performance/test_performance.py @@ -1,7 +1,9 @@ +import logging import os import shutil -import time +import sys +import attr import pytest from pytestshellutils.utils import ports from saltfactories.daemons import master @@ -9,32 +11,34 @@ from saltfactories.daemons.container import SaltDaemon, SaltMinion from saltfactories.utils import random_string from salt.version import SaltVersionsInfo, 
__version__ +from tests.conftest import CODE_DIR -pytestmark = [pytest.mark.skip_if_binaries_missing("docker")] +log = logging.getLogger(__name__) + +pytestmark = [ + pytest.mark.skip_if_binaries_missing("docker"), +] -class ContainerMaster(SaltDaemon, master.SaltMaster): +@attr.s(kw_only=True, slots=True) +class SaltMaster(SaltDaemon, master.SaltMaster): """ - Containerized salt master that has no check events + Salt minion daemon implementation running in a docker container. """ def get_display_name(self): + """ + Returns a human readable name for the factory. + """ return master.SaltMaster.get_display_name(self) def get_check_events(self): - return [] + """ + Return salt events to check. - -class ContainerMinion(SaltMinion): - """ - Containerized salt minion that has no check events - """ - - def get_check_events(self): - return [] - - -# ---------------------- Previous Version Setup ---------------------- + Return a list of tuples in the form of `(master_id, event_tag)` check against to ensure the daemon is running + """ + return master.SaltMaster.get_check_events(self) @pytest.fixture @@ -49,7 +53,7 @@ def curr_version(): @pytest.fixture def prev_master_id(): - return random_string("master-performance-prev-", uppercase=False) + return random_string("master-perf-prev-", uppercase=False) @pytest.fixture @@ -57,9 +61,8 @@ def prev_master( request, salt_factories, host_docker_network_ip_address, - network, + docker_network_name, prev_version, - docker_client, prev_master_id, ): root_dir = salt_factories.get_root_dir_for_daemon(prev_master_id) @@ -69,35 +72,36 @@ def prev_master( config_defaults = { "root_dir": str(root_dir), "transport": request.config.getoption("--transport"), - "user": False, + "user": "root", } - publish_port = ports.get_unused_localhost_port() - ret_port = ports.get_unused_localhost_port() config_overrides = { + "open_mode": True, "interface": "0.0.0.0", - "publish_port": publish_port, - "ret_port": ret_port, + "publish_port": 
ports.get_unused_localhost_port(), + "ret_port": ports.get_unused_localhost_port(), "log_level_logfile": "quiet", "pytest-master": { "log": {"host": host_docker_network_ip_address}, + "returner_address": {"host": host_docker_network_ip_address}, }, } factory = salt_factories.salt_master_daemon( prev_master_id, + name=prev_master_id, defaults=config_defaults, overrides=config_overrides, - factory_class=ContainerMaster, - image="ghcr.io/saltstack/salt-ci-containers/salt:{}".format(prev_version), + factory_class=SaltMaster, base_script_args=["--log-level=debug"], + image=f"ghcr.io/saltstack/salt-ci-containers/salt:{prev_version}", container_run_kwargs={ - "network": network, + "network": docker_network_name, "hostname": prev_master_id, }, - docker_client=docker_client, - name=prev_master_id, start_timeout=120, max_start_attempts=1, + pull_before_start=True, + skip_on_pull_failure=True, skip_if_docker_client_not_connectable=True, ) with factory.started(): @@ -122,7 +126,7 @@ def prev_salt_run_cli(prev_master): @pytest.fixture def prev_minion_id(): return random_string( - "minion-performance-prev-", + "minion-perf-prev-", uppercase=False, ) @@ -131,34 +135,37 @@ def prev_minion_id(): def prev_minion( prev_minion_id, prev_master, - docker_client, prev_version, host_docker_network_ip_address, - network, - prev_master_id, + docker_network_name, ): config_overrides = { - "master": prev_master_id, - "user": False, - "pytest-minion": {"log": {"host": host_docker_network_ip_address}}, + "master": prev_master.id, + "open_mode": True, + "user": "root", + "pytest-minion": { + "log": {"host": host_docker_network_ip_address}, + "returner_address": {"host": host_docker_network_ip_address}, + }, } factory = prev_master.salt_minion_daemon( prev_minion_id, - overrides=config_overrides, - factory_class=ContainerMinion, - # SaltMinion kwargs name=prev_minion_id, - image="ghcr.io/saltstack/salt-ci-containers/salt:{}".format(prev_version), - docker_client=docker_client, - 
start_timeout=120, - pull_before_start=False, - skip_if_docker_client_not_connectable=True, + overrides=config_overrides, + factory_class=SaltMinion, + base_script_args=["--log-level=debug"], + image=f"ghcr.io/saltstack/salt-ci-containers/salt:{prev_version}", container_run_kwargs={ - "network": network, + "network": docker_network_name, "hostname": prev_minion_id, }, + start_timeout=60, max_start_attempts=1, + pull_before_start=True, + skip_on_pull_failure=True, + skip_if_docker_client_not_connectable=True, ) + factory.python_executable = "python3" factory.after_terminate( pytest.helpers.remove_stale_minion_key, prev_master, factory.id ) @@ -172,21 +179,38 @@ def prev_sls(sls_contents, state_tree, tmp_path): location = tmp_path / "prev" / "testfile" location.parent.mkdir() with pytest.helpers.temp_file( - "{}.sls".format(sls_name), sls_contents.format(path=str(location)), state_tree + f"{sls_name}.sls", sls_contents.format(path=str(location)), state_tree ): yield sls_name -# ---------------------- Current Version Setup ---------------------- +def _install_salt_in_container(container): + ret = container.run( + "python3", + "-c", + "import sys; sys.stderr.write('{}.{}'.format(*sys.version_info))", + ) + assert ret.returncode == 0 + if not ret.stdout: + requirements_py_version = "{}.{}".format(*sys.version_info) + else: + requirements_py_version = ret.stdout.strip() - -def _install_local_salt(factory): - factory.run("pip install /saltcode") + ret = container.run( + "python3", + "-m", + "pip", + "install", + f"--constraint=/salt/requirements/static/ci/py{requirements_py_version}/linux.txt", + "/salt", + ) + log.debug("Install Salt in the container: %s", ret) + assert ret.returncode == 0 @pytest.fixture def curr_master_id(): - return random_string("master-performance-", uppercase=False) + return random_string("master-perf-curr-", uppercase=False) @pytest.fixture @@ -194,8 +218,7 @@ def curr_master( request, salt_factories, host_docker_network_ip_address, - network, - 
docker_client, + docker_network_name, curr_master_id, ): root_dir = salt_factories.get_root_dir_for_daemon(curr_master_id) @@ -205,43 +228,46 @@ def curr_master( config_defaults = { "root_dir": str(root_dir), "transport": request.config.getoption("--transport"), - "user": False, + "user": "root", } publish_port = ports.get_unused_localhost_port() ret_port = ports.get_unused_localhost_port() config_overrides = { + "open_mode": True, "interface": "0.0.0.0", "publish_port": publish_port, "ret_port": ret_port, "log_level_logfile": "quiet", "pytest-master": { "log": {"host": host_docker_network_ip_address}, + "returner_address": {"host": host_docker_network_ip_address}, }, } factory = salt_factories.salt_master_daemon( curr_master_id, + name=curr_master_id, defaults=config_defaults, overrides=config_overrides, - factory_class=ContainerMaster, - image="ghcr.io/saltstack/salt-ci-containers/salt:current", + factory_class=SaltMaster, base_script_args=["--log-level=debug"], + image="ghcr.io/saltstack/salt-ci-containers/salt:current", container_run_kwargs={ - "network": network, + "network": docker_network_name, "hostname": curr_master_id, # Bind the current code to a directory for pip installing "volumes": { - os.environ["REPO_ROOT_DIR"]: {"bind": "/saltcode", "mode": "z"} + str(CODE_DIR): {"bind": "/salt", "mode": "z"}, }, }, - docker_client=docker_client, - name=curr_master_id, start_timeout=120, max_start_attempts=1, + pull_before_start=True, + skip_on_pull_failure=True, skip_if_docker_client_not_connectable=True, ) - factory.before_start(_install_local_salt, factory) + factory.before_start(_install_salt_in_container, factory) with factory.started(): yield factory @@ -264,7 +290,7 @@ def curr_salt_key_cli(curr_master): @pytest.fixture def curr_minion_id(): return random_string( - "minion-performance-curr-", + "minion-perf-curr-", uppercase=False, ) @@ -273,38 +299,40 @@ def curr_minion_id(): def curr_minion( curr_minion_id, curr_master, - docker_client, 
host_docker_network_ip_address, - network, - curr_master_id, + docker_network_name, ): config_overrides = { - "master": curr_master_id, - "user": False, - "pytest-minion": {"log": {"host": host_docker_network_ip_address}}, + "master": curr_master.id, + "open_mode": True, + "user": "root", + "pytest-minion": { + "log": {"host": host_docker_network_ip_address}, + "returner_address": {"host": host_docker_network_ip_address}, + }, } factory = curr_master.salt_minion_daemon( curr_minion_id, - overrides=config_overrides, - factory_class=ContainerMinion, - # SaltMinion kwargs name=curr_minion_id, + overrides=config_overrides, + factory_class=SaltMinion, + base_script_args=["--log-level=debug"], image="ghcr.io/saltstack/salt-ci-containers/salt:current", - docker_client=docker_client, - start_timeout=120, - pull_before_start=False, - skip_if_docker_client_not_connectable=True, container_run_kwargs={ - "network": network, + "network": docker_network_name, "hostname": curr_minion_id, # Bind the current code to a directory for pip installing "volumes": { - os.environ["REPO_ROOT_DIR"]: {"bind": "/saltcode", "mode": "z"} + str(CODE_DIR): {"bind": "/salt", "mode": "z"}, }, }, + start_timeout=120, max_start_attempts=1, + pull_before_start=True, + skip_on_pull_failure=True, + skip_if_docker_client_not_connectable=True, ) - factory.before_start(_install_local_salt, factory) + factory.before_start(_install_salt_in_container, factory) factory.after_terminate( pytest.helpers.remove_stale_minion_key, curr_master, factory.id ) @@ -318,25 +346,25 @@ def curr_sls(sls_contents, state_tree, tmp_path): location = tmp_path / "curr" / "testfile" location.parent.mkdir() with pytest.helpers.temp_file( - "{}.sls".format(sls_name), sls_contents.format(path=str(location)), state_tree + f"{sls_name}.sls", sls_contents.format(path=str(location)), state_tree ): yield sls_name -def _wait_for_stdout(expected, func, *args, timeout=120, **kwargs): - start = time.time() - while time.time() < start + 
timeout: - ret = func(*args, **kwargs) - if ret and ret.stdout and expected in ret.stdout: - break - time.sleep(1) - else: - pytest.skip( - f"Skipping test, one or more daemons failed to start: {expected} not found in {ret}" - ) +@pytest.fixture +def perf_state_name(state_tree, curr_master, prev_master): + + # Copy all of the needed files to both master file roots directories + subdir = random_string("perf-state-") + shutil.copytree( + state_tree, os.path.join(curr_master.config["file_roots"]["base"][0], subdir) + ) + shutil.copytree( + state_tree, os.path.join(prev_master.config["file_roots"]["base"][0], subdir) + ) + return subdir -@pytest.mark.flaky(max_runs=4) def test_performance( prev_salt_cli, prev_minion, @@ -353,48 +381,8 @@ def test_performance( prev_sls, curr_sls, curr_version, + perf_state_name, ): - # Copy all of the needed files to both master file roots directories - subdir = random_string("performance-") - shutil.copytree( - state_tree, os.path.join(curr_master.config["file_roots"]["base"][0], subdir) - ) - shutil.copytree( - state_tree, os.path.join(prev_master.config["file_roots"]["base"][0], subdir) - ) - - # Wait for the old master and minion to start - _wait_for_stdout( - prev_version, prev_master.run, *prev_salt_run_cli.cmdline("--version") - ) - salt_key_cmd = [ - comp - for comp in prev_salt_key_cli.cmdline("-Ay") - if not comp.startswith("--log-level") - ] - _wait_for_stdout(prev_minion.id, prev_master.run, *salt_key_cmd) - _wait_for_stdout( - "Salt: {}".format(prev_version), - prev_master.run, - *prev_salt_cli.cmdline("test.versions", minion_tgt=prev_minion.id), - ) - - # Wait for the new master and minion to start - _wait_for_stdout( - curr_version, curr_master.run, *curr_salt_run_cli.cmdline("--version") - ) - curr_key_cmd = [ - comp - for comp in curr_salt_key_cli.cmdline("-Ay") - if not comp.startswith("--log-level") - ] - _wait_for_stdout(curr_minion.id, curr_master.run, *curr_key_cmd) - _wait_for_stdout( - "Salt: 
{}".format(curr_version), - curr_master.run, - *curr_salt_cli.cmdline("test.versions", minion_tgt=curr_minion.id), - ) - # Let's now apply the states applies = os.environ.get("SALT_PERFORMANCE_TEST_APPLIES", 3) @@ -423,7 +411,9 @@ def test_performance( for _ in range(applies): prev_state_ret = prev_master.run( *prev_salt_cli.cmdline( - "state.apply", f"{subdir}.{prev_sls}", minion_tgt=prev_minion.id + "state.apply", + f"{perf_state_name}.{prev_sls}", + minion_tgt=prev_minion.id, ) ) prev_duration += _gather_durations(prev_state_ret, prev_minion.id) @@ -431,7 +421,9 @@ def test_performance( for _ in range(applies): curr_state_ret = curr_master.run( *curr_salt_cli.cmdline( - "state.apply", f"{subdir}.{curr_sls}", minion_tgt=curr_minion.id + "state.apply", + f"{perf_state_name}.{curr_sls}", + minion_tgt=curr_minion.id, ) ) curr_duration += _gather_durations(curr_state_ret, curr_minion.id) From ac98d83aa8296461a40a4710997e7a2a3d89af54 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Fri, 24 Nov 2023 22:03:08 +0000 Subject: [PATCH 189/196] Be sure to set the returner address Signed-off-by: Pedro Algarvio --- tests/pytests/scenarios/compat/test_with_versions.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/tests/pytests/scenarios/compat/test_with_versions.py b/tests/pytests/scenarios/compat/test_with_versions.py index 75a2b87f24c..ecb3a73de1a 100644 --- a/tests/pytests/scenarios/compat/test_with_versions.py +++ b/tests/pytests/scenarios/compat/test_with_versions.py @@ -29,7 +29,7 @@ pytestmark = [ def _get_test_versions_ids(value): - return "SaltMinion~={}".format(value) + return f"SaltMinion~={value}" @pytest.fixture( @@ -41,13 +41,13 @@ def compat_salt_version(request): @pytest.fixture(scope="module") def minion_image_name(compat_salt_version): - return "salt-{}".format(compat_salt_version) + return f"salt-{compat_salt_version}" @pytest.fixture(scope="function") def minion_id(compat_salt_version): return random_string( - 
"salt-{}-".format(compat_salt_version), + f"salt-{compat_salt_version}-", uppercase=False, ) @@ -70,7 +70,10 @@ def salt_minion( config_overrides = { "master": salt_master.config["interface"], "user": False, - "pytest-minion": {"log": {"host": host_docker_network_ip_address}}, + "pytest-minion": { + "log": {"host": host_docker_network_ip_address}, + "returner_address": {"host": host_docker_network_ip_address}, + }, # We also want to scrutinize the key acceptance "open_mode": False, } From 27e6e91a7b50a9236957f54b8bc03aae89a88a88 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 23 Nov 2023 11:56:37 +0000 Subject: [PATCH 190/196] Add `tools.in` Py3.12 requirements Signed-off-by: Pedro Algarvio --- .pre-commit-config.yaml | 271 +++++++ requirements/static/ci/py3.12/changelog.txt | 36 + requirements/static/ci/py3.12/cloud.txt | 686 +++++++++++++++++ .../static/ci/py3.12/darwin-crypto.txt | 10 + requirements/static/ci/py3.12/darwin.txt | 482 ++++++++++++ requirements/static/ci/py3.12/docs.txt | 196 +++++ .../static/ci/py3.12/freebsd-crypto.txt | 10 + requirements/static/ci/py3.12/freebsd.txt | 474 ++++++++++++ requirements/static/ci/py3.12/lint.txt | 687 ++++++++++++++++++ .../static/ci/py3.12/linux-crypto.txt | 10 + requirements/static/ci/py3.12/linux.txt | 523 +++++++++++++ .../static/ci/py3.12/tools-virustotal.txt | 28 + requirements/static/ci/py3.12/tools.txt | 76 ++ .../static/ci/py3.12/windows-crypto.txt | 12 + requirements/static/ci/py3.12/windows.txt | 499 +++++++++++++ requirements/static/pkg/py3.12/darwin.txt | 123 ++++ requirements/static/pkg/py3.12/freebsd.txt | 107 +++ requirements/static/pkg/py3.12/linux.txt | 107 +++ requirements/static/pkg/py3.12/windows.txt | 141 ++++ 19 files changed, 4478 insertions(+) create mode 100644 requirements/static/ci/py3.12/changelog.txt create mode 100644 requirements/static/ci/py3.12/cloud.txt create mode 100644 requirements/static/ci/py3.12/darwin-crypto.txt create mode 100644 
requirements/static/ci/py3.12/darwin.txt create mode 100644 requirements/static/ci/py3.12/docs.txt create mode 100644 requirements/static/ci/py3.12/freebsd-crypto.txt create mode 100644 requirements/static/ci/py3.12/freebsd.txt create mode 100644 requirements/static/ci/py3.12/lint.txt create mode 100644 requirements/static/ci/py3.12/linux-crypto.txt create mode 100644 requirements/static/ci/py3.12/linux.txt create mode 100644 requirements/static/ci/py3.12/tools-virustotal.txt create mode 100644 requirements/static/ci/py3.12/tools.txt create mode 100644 requirements/static/ci/py3.12/windows-crypto.txt create mode 100644 requirements/static/ci/py3.12/windows.txt create mode 100644 requirements/static/pkg/py3.12/darwin.txt create mode 100644 requirements/static/pkg/py3.12/freebsd.txt create mode 100644 requirements/static/pkg/py3.12/linux.txt create mode 100644 requirements/static/pkg/py3.12/windows.txt diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ee5beec9705..6a62fc6210d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -220,6 +220,21 @@ repos: - --no-emit-index-url - requirements/static/pkg/linux.in + - id: pip-tools-compile + alias: compile-pkg-linux-3.12-zmq-requirements + name: Linux Packaging Py3.12 ZeroMQ Requirements + files: ^requirements/((base|zeromq|crypto)\.txt|static/pkg/(linux\.in|py3\.12/linux\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=linux + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --no-emit-index-url + - requirements/static/pkg/linux.in + - id: pip-tools-compile alias: compile-pkg-freebsd-3.7-zmq-requirements name: FreeBSD Packaging Py3.7 ZeroMQ Requirements @@ -295,6 +310,21 @@ repos: - --no-emit-index-url - requirements/static/pkg/freebsd.in + - id: pip-tools-compile + alias: compile-pkg-freebsd-3.12-zmq-requirements + name: FreeBSD Packaging Py3.12 ZeroMQ Requirements + files: 
^requirements/((base|zeromq|crypto)\.txt|static/pkg/(freebsd\.in|py3\.12/freebsd\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=freebsd + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --no-emit-index-url + - requirements/static/pkg/freebsd.in + - id: pip-tools-compile alias: compile-pkg-darwin-3.9-zmq-requirements name: Darwin Packaging Py3.9 ZeroMQ Requirements @@ -337,6 +367,20 @@ repos: - --no-emit-index-url - requirements/static/pkg/darwin.in + - id: pip-tools-compile + alias: compile-pkg-darwin-3.12-zmq-requirements + name: Darwin Packaging Py3.12 ZeroMQ Requirements + files: ^(requirements/((base|zeromq|crypto|darwin)\.txt|static/pkg/(darwin\.in|py3\.12/darwin\.txt)))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=darwin + - --include=requirements/darwin.txt + - --no-emit-index-url + - requirements/static/pkg/darwin.in + - id: pip-tools-compile alias: compile-pkg-windows-3.7-zmq-requirements name: Windows Packaging Py3.7 ZeroMQ Requirements @@ -407,6 +451,20 @@ repos: - --no-emit-index-url - requirements/static/pkg/windows.in + - id: pip-tools-compile + alias: compile-pkg-windows-3.12-zmq-requirements + name: Windows Packaging Py3.12 ZeroMQ Requirements + files: ^requirements/((base|zeromq|crypto|windows)\.txt|static/pkg/(windows\.in|py3\.12/windows\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=windows + - --include=requirements/windows.txt + - --no-emit-index-url + - requirements/static/pkg/windows.in + # <---- Packaging Requirements ------------------------------------------------------------------------------------- # ----- CI Requirements -------------------------------------------------------------------------------------------> @@ -500,6 +558,24 @@ repos: - --no-emit-index-url - requirements/static/ci/linux.in + - id: pip-tools-compile + alias: 
compile-ci-linux-3.12-zmq-requirements + name: Linux CI Py3.12 ZeroMQ Requirements + files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(linux\.in|common\.in)|py3\.12/linux\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=linux + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --include=requirements/pytest.txt + - --include=requirements/static/pkg/linux.in + - --include=requirements/static/ci/common.in + - --no-emit-index-url + - requirements/static/ci/linux.in + - id: pip-tools-compile alias: compile-ci-linux-crypto-3.7-requirements name: Linux CI Py3.7 Crypto Requirements @@ -572,6 +648,21 @@ repos: - --no-emit-index-url - requirements/static/ci/crypto.in + - id: pip-tools-compile + alias: compile-ci-linux-crypto-3.12-requirements + name: Linux CI Py3.12 Crypto Requirements + files: ^requirements/(crypto\.txt|static/ci/crypto\.in)$ + files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.12/linux-crypto\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=linux + - --out-prefix=linux + - --no-emit-index-url + - requirements/static/ci/crypto.in + - id: pip-tools-compile alias: compile-ci-freebsd-3.7-zmq-requirements @@ -663,6 +754,24 @@ repos: - --no-emit-index-url - requirements/static/ci/freebsd.in + - id: pip-tools-compile + alias: compile-ci-freebsd-3.12-zmq-requirements + name: FreeBSD CI Py3.12 ZeroMQ Requirements + files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(freebsd|common)\.in|py3\.12/freebsd\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=freebsd + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --include=requirements/pytest.txt + - --include=requirements/static/pkg/freebsd.in + - --include=requirements/static/ci/common.in + - --no-emit-index-url + - requirements/static/ci/freebsd.in + - id: 
pip-tools-compile alias: compile-ci-freebsd-crypto-3.7-requirements name: FreeBSD CI Py3.7 Crypto Requirements @@ -735,6 +844,20 @@ repos: - --no-emit-index-url - requirements/static/ci/crypto.in + - id: pip-tools-compile + alias: compile-ci-freebsd-crypto-3.12-requirements + name: FreeBSD CI Py3.12 Crypto Requirements + files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.12/freebsd-crypto\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=freebsd + - --out-prefix=freebsd + - --no-emit-index-url + - requirements/static/ci/crypto.in + - id: pip-tools-compile alias: compile-ci-darwin-3.9-zmq-requirements name: Darwin CI Py3.9 ZeroMQ Requirements @@ -786,6 +910,23 @@ repos: - --no-emit-index-url - requirements/static/ci/darwin.in + - id: pip-tools-compile + alias: compile-ci-darwin-3.12-zmq-requirements + name: Darwin CI Py3.12 ZeroMQ Requirements + files: ^(requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(darwin|common)\.in|py3\.12/darwin\.txt)))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=darwin + - --include=requirements/darwin.txt + - --include=requirements/pytest.txt + - --include=requirements/static/pkg/darwin.in + - --include=requirements/static/ci/common.in + - --no-emit-index-url + - requirements/static/ci/darwin.in + - id: pip-tools-compile alias: compile-ci-darwin-crypto-3.9-requirements name: Darwin CI Py3.9 Crypto Requirements @@ -828,6 +969,20 @@ repos: - --no-emit-index-url - requirements/static/ci/crypto.in + - id: pip-tools-compile + alias: compile-ci-darwin-crypto-3.12-requirements + name: Darwin CI Py3.12 Crypto Requirements + files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.12/darwin-crypto\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=darwin + - --out-prefix=darwin + - --no-emit-index-url + - 
requirements/static/ci/crypto.in + - id: pip-tools-compile alias: compile-ci-windows-3.7-zmq-requirements name: Windows CI Py3.7 ZeroMQ Requirements @@ -913,6 +1068,23 @@ repos: - --no-emit-index-url - requirements/static/ci/windows.in + - id: pip-tools-compile + alias: compile-ci-windows-3.12-zmq-requirements + name: Windows CI Py3.12 ZeroMQ Requirements + files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(windows|common)\.in|py3\.12/windows\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=windows + - --include=requirements/windows.txt + - --include=requirements/pytest.txt + - --include=requirements/static/pkg/windows.in + - --include=requirements/static/ci/common.in + - --no-emit-index-url + - requirements/static/ci/windows.in + - id: pip-tools-compile alias: compile-ci-windows-crypto-3.7-requirements name: Windows CI Py3.7 Crypto Requirements @@ -983,6 +1155,20 @@ repos: - --no-emit-index-url - requirements/static/ci/crypto.in + - id: pip-tools-compile + alias: compile-ci-windows-crypto-3.12-requirements + name: Windows CI Py3.12 Crypto Requirements + files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.12/windows-crypto\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=windows + - --out-prefix=windows + - --no-emit-index-url + - requirements/static/ci/crypto.in + # <---- CI Requirements -------------------------------------------------------------------------------------------- @@ -1071,6 +1257,23 @@ repos: - --include=requirements/static/ci/common.in - --no-emit-index-url - requirements/static/ci/cloud.in + + - id: pip-tools-compile + alias: compile-ci-cloud-3.12-requirements + name: Cloud CI Py3.12 Requirements + files: ^requirements/((base|zeromq|pytest)\.txt|static/(pkg/linux\.in|ci/((cloud|common)\.in|py3\.12/cloud\.txt)))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - 
--include=requirements/base.txt + - --include=requirements/zeromq.txt + - --include=requirements/pytest.txt + - --include=requirements/static/pkg/linux.in + - --include=requirements/static/ci/common.in + - --no-emit-index-url + - requirements/static/ci/cloud.in # <---- Cloud CI Requirements -------------------------------------------------------------------------------------- # ----- Doc CI Requirements ---------------------------------------------------------------------------------------> @@ -1149,6 +1352,21 @@ repos: - --no-emit-index-url - requirements/static/ci/docs.in + - id: pip-tools-compile + alias: compile-doc-requirements + name: Docs CI Py3.12 Requirements + files: ^requirements/((base|zeromq|pytest)\.txt|static/ci/(docs|common|linux)\.in|static/pkg/linux\.in|static/pkg/.*/linux\.txt)$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=linux + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --no-emit-index-url + - requirements/static/ci/docs.in + # <---- Doc CI Requirements ---------------------------------------------------------------------------------------- # ----- Lint CI Requirements --------------------------------------------------------------------------------------> @@ -1242,6 +1460,24 @@ repos: - --no-emit-index-url - requirements/static/ci/lint.in + - id: pip-tools-compile + alias: compile-ci-lint-3.12-requirements + name: Lint CI Py3.12 Requirements + files: ^requirements/((base|zeromq)\.txt|static/(pkg/linux\.in|ci/(linux\.in|common\.in|lint\.in|py3\.12/linux\.txt)))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=linux + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --include=requirements/static/pkg/linux.in + - --include=requirements/static/ci/linux.in + - --include=requirements/static/ci/common.in + - --no-emit-index-url + - requirements/static/ci/lint.in + # <---- Lint CI 
Requirements --------------------------------------------------------------------------------------- # ----- Changelog -------------------------------------------------------------------------------------------------> @@ -1296,6 +1532,19 @@ repos: - --platform=linux - --no-emit-index-url - requirements/static/ci/changelog.in + + - id: pip-tools-compile + alias: compile-ci-changelog-3.12-requirements + name: Changelog CI Py3.12 Requirements + files: ^requirements/static/ci/(changelog\.in|py3\.12/(changelog|linux)\.txt)$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=linux + - --no-emit-index-url + - requirements/static/ci/changelog.in # <---- Changelog -------------------------------------------------------------------------------------------------- # ----- Tools ----------------------------------------------------------------------------------------------------> @@ -1335,6 +1584,18 @@ repos: - --no-emit-index-url - requirements/static/ci/tools.in + - id: pip-tools-compile + alias: compile-ci-tools-3.12-requirements + name: Linux CI Py3.12 Tools Requirements + files: ^requirements/static/ci/(tools\.in|py3.12/(tools|linux)\.txt)$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --no-emit-index-url + - requirements/static/ci/tools.in + - id: pip-tools-compile alias: compile-ci-tools-virustotal-3.9-requirements name: Linux CI Py3.9 Tools virustotal Requirements @@ -1364,6 +1625,16 @@ repos: - -v - --py-version=3.11 - requirements/static/ci/tools-virustotal.in + + - id: pip-tools-compile + alias: compile-ci-tools-virustotal-3.12-requirements + name: Linux CI Py3.12 Tools virustotal Requirements + files: ^requirements/static/ci/(tools(-virustotal)?\.in|py3.12/(tools(-virustotal)?|linux)\.txt)$ + pass_filenames: false + args: + - -v + - --py-version=3.12 + - requirements/static/ci/tools-virustotal.in # <---- Tools 
----------------------------------------------------------------------------------------------------- # ----- Code Formatting -------------------------------------------------------------------------------------------> diff --git a/requirements/static/ci/py3.12/changelog.txt b/requirements/static/ci/py3.12/changelog.txt new file mode 100644 index 00000000000..6a252d0cd70 --- /dev/null +++ b/requirements/static/ci/py3.12/changelog.txt @@ -0,0 +1,36 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/changelog.txt requirements/static/ci/changelog.in +# +click-default-group==1.2.2 + # via towncrier +click==7.1.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # click-default-group + # towncrier +incremental==17.5.0 + # via towncrier +jinja2==3.1.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # towncrier +looseversion==1.0.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/changelog.in +markupsafe==2.1.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # jinja2 +packaging==22.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/changelog.in +towncrier==22.12.0 + # via -r requirements/static/ci/changelog.in + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.12/cloud.txt b/requirements/static/ci/py3.12/cloud.txt new file mode 100644 index 00000000000..436b9041cf3 --- /dev/null +++ b/requirements/static/ci/py3.12/cloud.txt @@ -0,0 +1,686 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/cloud.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/cloud.in requirements/static/ci/common.in requirements/static/pkg/linux.in requirements/zeromq.txt +# +aiohttp==3.8.5 + # via + # -c 
requirements/static/ci/py3.12/linux.txt + # etcd3-py +aiosignal==1.2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp +apache-libcloud==2.5.0 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/cloud.in + # -r requirements/static/ci/common.in +asn1crypto==1.3.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # certvalidator + # oscrypto +async-timeout==4.0.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp +attrs==23.1.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp + # jsonschema + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +backports.entry-points-selectable==1.1.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # virtualenv +bcrypt==3.1.6 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # paramiko +boto3==1.21.46 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # moto +boto==2.49.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +botocore==1.24.46 + # via + # -c requirements/static/ci/py3.12/linux.txt + # boto3 + # moto + # s3transfer +cachetools==4.2.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # google-auth +cassandra-driver==3.23.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # kubernetes + # requests +certvalidator==0.11.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # vcert +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # bcrypt + # cryptography + # 
pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp + # requests +cheetah3==3.2.6.post2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +cheroot==8.5.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +cherrypy==18.6.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # -r requirements/static/pkg/linux.in +click==7.1.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # geomet +clustershell==1.8.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +croniter==0.3.29 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +cryptography==41.0.4 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in + # etcd3-py + # moto + # paramiko + # pyopenssl + # pyspnego + # requests-ntlm + # smbprotocol + # vcert +distlib==0.3.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # virtualenv +distro==1.5.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # pytest-skip-markers +dnspython==1.16.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # python-etcd +docker==6.1.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt +etcd3-py==0.1.6 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r 
requirements/static/ci/common.in +filelock==3.0.12 + # via + # -c requirements/static/ci/py3.12/linux.txt + # virtualenv +flaky==3.7.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt +frozenlist==1.3.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp + # aiosignal +genshi==0.7.5 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +geomet==0.1.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # cassandra-driver +gitdb==4.0.7 + # via + # -c requirements/static/ci/py3.12/linux.txt + # gitpython +gitpython==3.1.37 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +google-auth==2.1.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # kubernetes +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # etcd3-py + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # contextvars +importlib-metadata==6.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +iniconfig==1.0.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # pytest +ipaddress==1.0.22 + # via + # -c requirements/static/ci/py3.12/linux.txt + # kubernetes +jaraco.classes==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # jaraco.collections +jaraco.collections==3.4.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via + # -c 
requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # moto +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore +jsonschema==3.2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +jxmlease==1.0.1 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +kazoo==2.6.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +keyring==5.7.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +looseversion==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +mako==1.2.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +markupsafe==2.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # jinja2 + # mako + # moto + # werkzeug +mock==5.1.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt +more-itertools==5.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c 
requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +moto==3.0.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +msgpack==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # pytest-salt-factories +multidict==6.0.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp + # yarl +netaddr==0.7.19 + # via -r requirements/static/ci/cloud.in +ntlm-auth==1.3.0 + # via requests-ntlm +oscrypto==1.2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # certvalidator +packaging==22.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # docker + # pytest +paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +passlib==1.7.4 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +pathtools==0.1.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # watchdog +platformdirs==2.2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # virtualenv +pluggy==0.13.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # pytest +portend==2.4 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +profitbricks==4.1.3 + # via -r requirements/static/ci/cloud.in +psutil==5.8.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pyasn1-modules==0.2.4 + # via + # -c requirements/static/ci/py3.12/linux.txt + # google-auth +pyasn1==0.4.8 + # via + # -c 
requirements/static/ci/py3.12/linux.txt + # pyasn1-modules + # rsa +pycparser==2.21 ; python_version >= "3.9" + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # -r requirements/static/pkg/linux.in + # cffi +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/crypto.txt +pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +pynacl==1.5.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in + # etcd3-py +pypsexec==0.1.0 + # via -r requirements/static/ci/cloud.in +pyrsistent==0.17.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # jsonschema +pyspnego==0.8.0 + # via + # -r requirements/static/ci/cloud.in + # smbprotocol +pytest-custom-exit-code==0.3.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt +pytest-helpers-namespace==2021.4.29 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt + # pytest-salt-factories + # pytest-shell-utilities +pytest-httpserver==1.0.8 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt +pytest-salt-factories==1.0.0rc27 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt +pytest-shell-utilities==1.8.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # pytest-salt-factories +pytest-skip-markers==1.5.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # pytest-salt-factories + # pytest-shell-utilities + # 
pytest-system-statistics +pytest-subtests==0.4.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # pytest-salt-factories +pytest-timeout==1.4.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt +pytest==7.2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt + # pytest-custom-exit-code + # pytest-helpers-namespace + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-subtests + # pytest-system-statistics + # pytest-timeout +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in + # botocore + # croniter + # kubernetes + # moto + # vcert +python-etcd==0.4.5 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +python-gnupg==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +pytz==2022.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # moto + # tempora +pyvmomi==6.7.1.2018.12 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +pywinrm==0.3.0 + # via -r requirements/static/ci/cloud.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # clustershell + # kubernetes + # pytest-salt-factories +pyzmq==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/zeromq.txt + # pytest-salt-factories +requests-ntlm==1.1.0 + # via pywinrm +requests==2.31.0 + # via + # -c 
requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # apache-libcloud + # docker + # etcd3-py + # kubernetes + # moto + # profitbricks + # pyvmomi + # pywinrm + # requests-ntlm + # responses + # vcert +responses==0.10.6 + # via + # -c requirements/static/ci/py3.12/linux.txt + # moto +rfc3987==1.3.8 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +rpm-vercmp==0.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +rsa==4.7.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # google-auth +s3transfer==0.5.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # boto3 +semantic-version==2.9.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # etcd3-py +setproctitle==1.3.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +six==1.16.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # bcrypt + # cassandra-driver + # cheroot + # etcd3-py + # genshi + # geomet + # jsonschema + # kazoo + # kubernetes + # more-itertools + # paramiko + # profitbricks + # pypsexec + # python-dateutil + # pyvmomi + # pywinrm + # responses + # vcert + # virtualenv + # websocket-client +smbprotocol==1.10.1 + # via + # -r requirements/static/ci/cloud.in + # pypsexec +smmap==4.0.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # gitdb +sqlparse==0.4.4 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +tempora==4.1.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c 
requirements/static/ci/py3.12/linux.txt + # portend +timelib==0.2.5 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +toml==0.10.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +typing-extensions==4.8.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # pytest-shell-utilities + # pytest-system-statistics +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # botocore + # docker + # kubernetes + # python-etcd + # requests +vcert==0.7.4 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +virtualenv==20.7.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # pytest-salt-factories +watchdog==0.10.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # docker + # kubernetes +wempy==0.2.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # moto + # pytest-httpserver +xmltodict==0.12.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # moto + # pywinrm +yarl==1.7.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp +zc.lockfile==1.4 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +zipp==3.6.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.12/darwin-crypto.txt 
b/requirements/static/ci/py3.12/darwin-crypto.txt new file mode 100644 index 00000000000..e67841ff8fa --- /dev/null +++ b/requirements/static/ci/py3.12/darwin-crypto.txt @@ -0,0 +1,10 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/darwin-crypto.txt requirements/static/ci/crypto.in +# +m2crypto==0.38.0 + # via -r requirements/static/ci/crypto.in +pycryptodome==3.9.7 + # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.12/darwin.txt b/requirements/static/ci/py3.12/darwin.txt new file mode 100644 index 00000000000..0f39978897b --- /dev/null +++ b/requirements/static/ci/py3.12/darwin.txt @@ -0,0 +1,482 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/darwin.txt requirements/darwin.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/darwin.in requirements/static/pkg/darwin.in +# +aiohttp==3.8.5 + # via etcd3-py +aiosignal==1.2.0 + # via aiohttp +apache-libcloud==2.5.0 ; sys_platform != "win32" + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/darwin.txt + # -r requirements/static/ci/common.in +asn1crypto==1.3.0 + # via + # certvalidator + # oscrypto +async-timeout==4.0.2 + # via aiohttp +attrs==23.1.0 + # via + # aiohttp + # jsonschema + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +backports.entry-points-selectable==1.1.0 + # via virtualenv +bcrypt==3.1.6 + # via -r requirements/static/ci/common.in +boto3==1.21.46 + # via + # -r requirements/static/ci/common.in + # moto +boto==2.49.0 + # via -r requirements/static/ci/common.in +botocore==1.24.46 + # via + # boto3 + # moto + # s3transfer +cachetools==3.1.0 + # via google-auth +cassandra-driver==3.23.0 + # via -r requirements/static/ci/common.in 
+certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/static/ci/common.in + # kubernetes + # requests +certvalidator==0.11.1 + # via vcert +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/static/ci/common.in + # bcrypt + # cryptography + # pygit2 + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # aiohttp + # requests +cheetah3==3.2.6.post2 + # via -r requirements/static/ci/common.in +cheroot==8.5.2 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # cherrypy +cherrypy==18.6.1 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/darwin.txt + # -r requirements/static/ci/common.in +click==7.0 + # via geomet +clustershell==1.8.1 + # via -r requirements/static/ci/common.in +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt +croniter==0.3.29 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +cryptography==41.0.4 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/darwin.txt + # etcd3-py + # moto + # pyopenssl + # vcert +distlib==0.3.2 + # via virtualenv +distro==1.5.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # pytest-skip-markers +dnspython==1.16.0 + # via + # -r requirements/static/ci/common.in + # python-etcd +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 + # via -r requirements/static/ci/common.in +filelock==3.0.12 + # via virtualenv +flaky==3.7.0 + # via -r requirements/pytest.txt +frozenlist==1.3.0 + # via + # aiohttp + # aiosignal +genshi==0.7.5 + # via -r requirements/static/ci/common.in +geomet==0.1.2 + # via cassandra-driver +gitdb==4.0.7 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # gitpython +gitpython==3.1.37 + # via + # -c 
requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/darwin.txt + # -r requirements/static/ci/common.in +google-auth==2.1.0 + # via kubernetes +hglib==2.6.1 + # via -r requirements/static/ci/darwin.in +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/darwin.txt + # etcd3-py + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # contextvars +importlib-metadata==6.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/darwin.txt +iniconfig==1.0.1 + # via pytest +ipaddress==1.0.22 + # via kubernetes +jaraco.classes==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # jaraco.collections +jaraco.collections==3.4.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # moto +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore +jsonschema==3.2.0 + # via -r requirements/static/ci/common.in +jxmlease==1.0.1 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +keyring==5.7.1 + # via -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via -r requirements/static/ci/common.in +linode-python==1.1.1 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/darwin.txt +looseversion==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt +mako==1.2.2 + # via -r requirements/static/ci/common.in +markupsafe==2.1.2 + # via + # -c 
requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # jinja2 + # mako + # moto + # werkzeug +mercurial==6.0.1 + # via -r requirements/static/ci/darwin.in +mock==5.1.0 + # via -r requirements/pytest.txt +more-itertools==8.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/pytest.txt + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +moto==3.0.1 + # via -r requirements/static/ci/common.in +msgpack==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # pytest-salt-factories +multidict==6.0.2 + # via + # aiohttp + # yarl +oscrypto==1.2.0 + # via certvalidator +packaging==22.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # docker + # pytest +passlib==1.7.4 + # via -r requirements/static/ci/common.in +pathspec==0.9.0 + # via yamllint +pathtools==0.1.2 + # via watchdog +platformdirs==2.2.0 + # via virtualenv +pluggy==0.13.1 + # via pytest +portend==2.6 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # cherrypy +psutil==5.8.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pyasn1-modules==0.2.4 + # via google-auth +pyasn1==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/darwin.txt + # pyasn1-modules + # rsa +pycparser==2.21 ; python_version >= "3.9" + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/darwin.txt + # -r requirements/static/ci/common.in + # cffi +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/crypto.txt +pygit2==1.13.1 + # via -r requirements/static/ci/darwin.in +pynacl==1.5.0 + # via -r requirements/static/ci/common.in +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r 
requirements/darwin.txt + # etcd3-py +pyrsistent==0.17.3 + # via jsonschema +pytest-custom-exit-code==0.3.0 + # via -r requirements/pytest.txt +pytest-helpers-namespace==2021.4.29 + # via + # -r requirements/pytest.txt + # pytest-salt-factories + # pytest-shell-utilities +pytest-httpserver==1.0.8 + # via -r requirements/pytest.txt +pytest-salt-factories==1.0.0rc27 + # via -r requirements/pytest.txt +pytest-shell-utilities==1.8.0 + # via pytest-salt-factories +pytest-skip-markers==1.5.0 + # via + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pytest-subtests==0.4.0 + # via -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via pytest-salt-factories +pytest-timeout==1.4.2 + # via -r requirements/pytest.txt +pytest==7.2.0 + # via + # -r requirements/pytest.txt + # pytest-custom-exit-code + # pytest-helpers-namespace + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-subtests + # pytest-system-statistics + # pytest-timeout +python-dateutil==2.8.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/darwin.txt + # botocore + # croniter + # kubernetes + # moto + # vcert +python-etcd==0.4.5 + # via -r requirements/static/ci/common.in +python-gnupg==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/darwin.txt +pytz==2022.1 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # moto + # tempora +pyvmomi==6.7.1.2018.12 + # via -r requirements/static/ci/common.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # clustershell + # kubernetes + # pytest-salt-factories + # yamllint + # yamlordereddictloader +pyzmq==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/zeromq.txt + # pytest-salt-factories +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # -r 
requirements/static/ci/common.in + # apache-libcloud + # docker + # etcd3-py + # kubernetes + # moto + # pyvmomi + # responses + # vcert + # vultr +responses==0.10.6 + # via moto +rfc3987==1.3.8 + # via -r requirements/static/ci/common.in +rsa==4.7.2 + # via google-auth +s3transfer==0.5.2 + # via boto3 +semantic-version==2.9.0 + # via etcd3-py +setproctitle==1.3.2 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/darwin.txt +six==1.16.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # bcrypt + # cassandra-driver + # cheroot + # etcd3-py + # genshi + # geomet + # jsonschema + # kubernetes + # python-dateutil + # pyvmomi + # responses + # vcert + # virtualenv + # websocket-client +smmap==4.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # gitdb +sqlparse==0.4.4 + # via -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via -r requirements/static/ci/common.in +tempora==4.1.1 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # portend +timelib==0.2.5 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/darwin.txt +toml==0.10.2 + # via -r requirements/static/ci/common.in +typing-extensions==4.2.0 + # via + # pytest-shell-utilities + # pytest-system-statistics +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # botocore + # docker + # kubernetes + # python-etcd + # requests +vcert==0.7.4 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +virtualenv==20.7.2 + # via + # -r requirements/static/ci/common.in + # pytest-salt-factories +vultr==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/darwin.txt +watchdog==0.10.3 + # via -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # docker + # kubernetes +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # moto + # pytest-httpserver +xmltodict==0.12.0 + # via 
moto +yamllint==1.26.3 + # via -r requirements/static/ci/darwin.in +yamlordereddictloader==0.4.0 + # via -r requirements/static/ci/darwin.in +yarl==1.7.2 + # via aiohttp +zc.lockfile==2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # cherrypy +zipp==3.12.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.12/docs.txt b/requirements/static/ci/py3.12/docs.txt new file mode 100644 index 00000000000..70ddc3f6eb8 --- /dev/null +++ b/requirements/static/ci/py3.12/docs.txt @@ -0,0 +1,196 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/docs.txt requirements/base.txt requirements/static/ci/docs.in requirements/zeromq.txt +# +alabaster==0.7.12 + # via sphinx +babel==2.9.1 + # via sphinx +certifi==2023.07.22 + # via + # -c requirements/static/ci/py3.12/linux.txt + # requests +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # requests +cheroot==8.5.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +cherrypy==18.6.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/docs.in +contextvars==2.4 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +distro==1.5.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +docutils==0.19 + # via sphinx +idna==3.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # requests +imagesize==1.4.1 + # via sphinx +immutables==0.15 + # via + # -c requirements/static/ci/py3.12/linux.txt + # contextvars +jaraco.classes==3.2.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # jaraco.collections +jaraco.collections==3.4.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy 
+jaraco.functools==2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # myst-docutils + # sphinx +jmespath==1.0.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +linkify-it-py==1.0.3 + # via myst-docutils +looseversion==1.0.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +markdown-it-py==2.2.0 + # via + # mdit-py-plugins + # myst-docutils +markupsafe==2.1.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # jinja2 +mdit-py-plugins==0.3.3 + # via myst-docutils +mdurl==0.1.2 + # via markdown-it-py +more-itertools==5.0.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +msgpack==1.0.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +myst-docutils[linkify]==0.18.1 + # via -r requirements/static/ci/docs.in +packaging==22.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # sphinx +portend==2.4 + # via + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +psutil==5.8.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/crypto.txt +pyenchant==3.2.2 + # via sphinxcontrib-spelling +pygments==2.14.0 + # via sphinx +pytz==2022.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # babel + # tempora +pyyaml==6.0.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # myst-docutils +pyzmq==23.2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/zeromq.txt +requests==2.31.0 + # via + # 
-c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # sphinx +six==1.16.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # cheroot + # more-itertools + # sphinxcontrib.httpdomain +snowballstemmer==2.1.0 + # via sphinx +sphinx==6.1.3 ; python_version >= "3.9" + # via + # -r requirements/static/ci/docs.in + # sphinxcontrib-spelling + # sphinxcontrib.httpdomain +sphinxcontrib-applehelp==1.0.2 + # via sphinx +sphinxcontrib-devhelp==1.0.2 + # via sphinx +sphinxcontrib-htmlhelp==2.0.0 + # via sphinx +sphinxcontrib-jsmath==1.0.1 + # via sphinx +sphinxcontrib-qthelp==1.0.3 + # via sphinx +sphinxcontrib-serializinghtml==1.1.5 + # via sphinx +sphinxcontrib-spelling==7.7.0 + # via -r requirements/static/ci/docs.in +sphinxcontrib.httpdomain==1.8.1 + # via -r requirements/static/ci/docs.in +tempora==4.1.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # portend +typing-extensions==4.8.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # myst-docutils +uc-micro-py==1.0.1 + # via linkify-it-py +urllib3==1.26.18 + # via + # -c requirements/static/ci/py3.12/linux.txt + # requests +zc.lockfile==1.4 + # via + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.12/freebsd-crypto.txt b/requirements/static/ci/py3.12/freebsd-crypto.txt new file mode 100644 index 00000000000..7bdbdbc6cad --- /dev/null +++ b/requirements/static/ci/py3.12/freebsd-crypto.txt @@ -0,0 +1,10 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/freebsd-crypto.txt requirements/static/ci/crypto.in +# +m2crypto==0.38.0 + # via -r requirements/static/ci/crypto.in +pycryptodome==3.9.7 + # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.12/freebsd.txt b/requirements/static/ci/py3.12/freebsd.txt new file 
mode 100644 index 00000000000..6a881aab279 --- /dev/null +++ b/requirements/static/ci/py3.12/freebsd.txt @@ -0,0 +1,474 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/freebsd.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/freebsd.in requirements/static/pkg/freebsd.in requirements/zeromq.txt +# +aiohttp==3.8.5 + # via etcd3-py +aiosignal==1.2.0 + # via aiohttp +apache-libcloud==2.5.0 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +asn1crypto==1.3.0 + # via + # certvalidator + # oscrypto +async-timeout==4.0.2 + # via aiohttp +attrs==23.1.0 + # via + # aiohttp + # jsonschema + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +backports.entry-points-selectable==1.1.0 + # via virtualenv +bcrypt==3.1.6 + # via + # -r requirements/static/ci/common.in + # paramiko +boto3==1.21.46 + # via + # -r requirements/static/ci/common.in + # moto +boto==2.49.0 + # via -r requirements/static/ci/common.in +botocore==1.24.46 + # via + # boto3 + # moto + # s3transfer +cachetools==3.1.0 + # via google-auth +cassandra-driver==3.24.0 + # via -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/static/ci/common.in + # kubernetes + # requests +certvalidator==0.11.1 + # via vcert +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/static/ci/common.in + # bcrypt + # cryptography + # pygit2 + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # aiohttp + # requests +cheetah3==3.2.6.post2 + # via -r requirements/static/ci/common.in +cheroot==8.5.2 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # cherrypy +cherrypy==18.6.1 + # via + # -c 
requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/static/ci/common.in + # -r requirements/static/pkg/freebsd.in +click==7.1.2 + # via geomet +clustershell==1.8.3 + # via -r requirements/static/ci/common.in +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt +croniter==0.3.29 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +cryptography==41.0.4 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/static/pkg/freebsd.in + # etcd3-py + # moto + # paramiko + # pyopenssl + # vcert +distlib==0.3.2 + # via virtualenv +distro==1.5.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # -r requirements/static/pkg/freebsd.in + # pytest-skip-markers +dnspython==1.16.0 + # via + # -r requirements/static/ci/common.in + # python-etcd +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 + # via -r requirements/static/ci/common.in +filelock==3.0.12 + # via virtualenv +flaky==3.7.0 + # via -r requirements/pytest.txt +frozenlist==1.3.0 + # via + # aiohttp + # aiosignal +genshi==0.7.5 + # via -r requirements/static/ci/common.in +geomet==0.2.1.post1 + # via cassandra-driver +gitdb==4.0.7 + # via gitpython +gitpython==3.1.37 + # via -r requirements/static/ci/common.in +google-auth==2.1.0 + # via kubernetes +hglib==2.6.1 + # via -r requirements/static/ci/freebsd.in +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # etcd3-py + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # contextvars +importlib-metadata==6.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/static/pkg/freebsd.in +iniconfig==1.0.1 + # via pytest +ipaddress==1.0.22 + # via kubernetes +jaraco.classes==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # jaraco.collections +jaraco.collections==3.4.0 
+ # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # moto +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore +jsonschema==3.2.0 + # via -r requirements/static/ci/common.in +jxmlease==1.0.1 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +kazoo==2.6.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via -r requirements/static/ci/common.in +keyring==5.7.1 + # via -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via -r requirements/static/ci/common.in +libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via -r requirements/static/ci/common.in +looseversion==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt +mako==1.2.2 + # via -r requirements/static/ci/common.in +markupsafe==2.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # jinja2 + # mako + # moto + # werkzeug +mercurial==6.0.1 + # via -r requirements/static/ci/freebsd.in +mock==5.1.0 + # via -r requirements/pytest.txt +more-itertools==5.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/pytest.txt + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +moto==3.0.1 + # via -r requirements/static/ci/common.in +msgpack==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # pytest-salt-factories +multidict==6.0.2 + # via + # aiohttp + # yarl +oscrypto==1.2.0 + # via certvalidator 
+packaging==22.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # docker + # pytest +paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via -r requirements/static/ci/common.in +passlib==1.7.4 + # via -r requirements/static/ci/common.in +pathspec==0.9.0 + # via yamllint +pathtools==0.1.2 + # via watchdog +platformdirs==2.2.0 + # via virtualenv +pluggy==0.13.0 + # via pytest +portend==2.4 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # cherrypy +psutil==5.8.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pyasn1-modules==0.2.4 + # via google-auth +pyasn1==0.4.8 + # via + # pyasn1-modules + # rsa +pycparser==2.21 ; python_version >= "3.9" + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/static/ci/common.in + # -r requirements/static/pkg/freebsd.in + # cffi +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/crypto.txt +pygit2==1.13.1 + # via -r requirements/static/ci/freebsd.in +pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" + # via -r requirements/static/ci/common.in +pynacl==1.5.0 + # via + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/static/pkg/freebsd.in + # etcd3-py +pyrsistent==0.17.3 + # via jsonschema +pytest-custom-exit-code==0.3.0 + # via -r requirements/pytest.txt +pytest-helpers-namespace==2021.4.29 + # via + # -r requirements/pytest.txt + # pytest-salt-factories + # pytest-shell-utilities +pytest-httpserver==1.0.8 + # via -r requirements/pytest.txt +pytest-salt-factories==1.0.0rc27 + # via -r requirements/pytest.txt +pytest-shell-utilities==1.8.0 + # via pytest-salt-factories 
+pytest-skip-markers==1.5.0 + # via + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pytest-subtests==0.4.0 + # via -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via pytest-salt-factories +pytest-timeout==1.4.2 + # via -r requirements/pytest.txt +pytest==7.2.0 + # via + # -r requirements/pytest.txt + # pytest-custom-exit-code + # pytest-helpers-namespace + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-subtests + # pytest-system-statistics + # pytest-timeout +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/static/pkg/freebsd.in + # botocore + # croniter + # kubernetes + # moto + # vcert +python-etcd==0.4.5 + # via -r requirements/static/ci/common.in +python-gnupg==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/static/pkg/freebsd.in +pytz==2022.1 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # moto + # tempora +pyvmomi==6.7.1.2018.12 + # via -r requirements/static/ci/common.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # clustershell + # kubernetes + # pytest-salt-factories + # yamllint +pyzmq==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/zeromq.txt + # pytest-salt-factories +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # apache-libcloud + # docker + # etcd3-py + # kubernetes + # moto + # pyvmomi + # responses + # vcert +responses==0.10.6 + # via moto +rfc3987==1.3.8 + # via -r requirements/static/ci/common.in +rsa==4.7.2 + # via google-auth +s3transfer==0.5.2 + # via boto3 +semantic-version==2.9.0 + # via etcd3-py +setproctitle==1.3.2 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/static/pkg/freebsd.in 
+six==1.16.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # bcrypt + # cassandra-driver + # cheroot + # etcd3-py + # genshi + # geomet + # jsonschema + # kazoo + # kubernetes + # more-itertools + # paramiko + # python-dateutil + # pyvmomi + # responses + # vcert + # virtualenv + # websocket-client +smmap==4.0.0 + # via gitdb +sqlparse==0.4.4 + # via -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via -r requirements/static/ci/common.in +tempora==4.1.1 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # portend +timelib==0.2.5 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/static/pkg/freebsd.in +toml==0.10.2 + # via -r requirements/static/ci/common.in +typing-extensions==4.8.0 + # via + # pytest-shell-utilities + # pytest-system-statistics +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # botocore + # docker + # kubernetes + # python-etcd + # requests +vcert==0.7.4 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +virtualenv==20.7.2 + # via + # -r requirements/static/ci/common.in + # pytest-salt-factories +watchdog==0.10.3 + # via -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # docker + # kubernetes +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # moto + # pytest-httpserver +xmltodict==0.12.0 + # via moto +yamllint==1.26.3 + # via -r requirements/static/ci/freebsd.in +yarl==1.7.2 + # via aiohttp +zc.lockfile==1.4 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # cherrypy +zipp==3.12.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.12/lint.txt b/requirements/static/ci/py3.12/lint.txt new file mode 100644 index 00000000000..312bce9f5ee --- /dev/null +++ 
b/requirements/static/ci/py3.12/lint.txt @@ -0,0 +1,687 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/lint.txt requirements/base.txt requirements/static/ci/common.in requirements/static/ci/lint.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt +# +aiohttp==3.8.5 + # via + # -c requirements/static/ci/py3.12/linux.txt + # etcd3-py +aiosignal==1.2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp +ansible-core==2.14.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # ansible +ansible==7.1.0 ; python_version >= "3.9" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +apache-libcloud==2.5.0 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +apscheduler==3.6.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # python-telegram-bot +asn1crypto==1.3.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # certvalidator + # oscrypto +astroid==2.3.3 + # via pylint +async-timeout==4.0.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp +attrs==23.1.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp + # jsonschema +backports.entry-points-selectable==1.1.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # virtualenv +bcrypt==3.1.6 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # paramiko +boto3==1.21.46 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # moto +boto==2.49.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +botocore==1.24.46 + # via + # -c requirements/static/ci/py3.12/linux.txt + # boto3 + # moto + # s3transfer +cachetools==4.2.2 + # via + # -c 
requirements/static/ci/py3.12/linux.txt + # google-auth + # python-telegram-bot +cassandra-driver==3.23.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # kubernetes + # python-telegram-bot + # requests +certvalidator==0.11.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # vcert +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # bcrypt + # cryptography + # pygit2 + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp + # requests +cheetah3==3.2.6.post2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +cheroot==8.5.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +cherrypy==18.6.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # -r requirements/static/pkg/linux.in +click==7.1.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # geomet +clustershell==1.8.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +croniter==0.3.29 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +cryptography==41.0.4 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r 
requirements/static/pkg/linux.in + # ansible-core + # etcd3-py + # moto + # paramiko + # pyopenssl + # vcert +distlib==0.3.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # virtualenv +distro==1.5.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +dnspython==1.16.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # python-etcd +docker==6.1.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/lint.in +etcd3-py==0.1.6 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +filelock==3.0.12 + # via + # -c requirements/static/ci/py3.12/linux.txt + # virtualenv +frozenlist==1.3.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp + # aiosignal +genshi==0.7.5 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +geomet==0.1.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # cassandra-driver +gitdb==4.0.7 + # via + # -c requirements/static/ci/py3.12/linux.txt + # gitpython +gitpython==3.1.37 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +google-auth==2.1.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # kubernetes +hglib==2.6.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # etcd3-py + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # contextvars +importlib-metadata==6.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +ipaddress==1.0.22 + # via + # -c 
requirements/static/ci/py3.12/linux.txt + # kubernetes +isort==4.3.21 + # via pylint +jaraco.classes==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # jaraco.collections +jaraco.collections==3.4.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # ansible-core + # moto +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore +jsonschema==3.2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +jxmlease==1.0.1 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +kazoo==2.6.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +keyring==5.7.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +lazy-object-proxy==1.4.3 + # via astroid +libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +looseversion==1.0.2 + # via + # -c 
requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +mako==1.2.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +markupsafe==2.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # jinja2 + # mako + # moto + # werkzeug +mccabe==0.6.1 + # via pylint +mercurial==6.0.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +modernize==0.5 + # via saltpylint +more-itertools==5.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +moto==3.0.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +msgpack==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +multidict==6.0.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp + # yarl +oscrypto==1.2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # certvalidator +packaging==22.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # ansible-core + # docker +paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +passlib==1.7.4 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +pathspec==0.9.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # yamllint +pathtools==0.1.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # watchdog +platformdirs==2.2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # virtualenv +portend==2.4 + # via + # 
-c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +psutil==5.8.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +pyasn1-modules==0.2.4 + # via + # -c requirements/static/ci/py3.12/linux.txt + # google-auth +pyasn1==0.4.8 + # via + # -c requirements/static/ci/py3.12/linux.txt + # pyasn1-modules + # rsa +pycodestyle==2.5.0 + # via saltpylint +pycparser==2.21 ; python_version >= "3.9" + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # -r requirements/static/pkg/linux.in + # cffi +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/crypto.txt +pygit2==1.13.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +pyiface==0.0.11 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +pyjwt==2.4.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # twilio +pylint==2.4.4 + # via + # -r requirements/static/ci/lint.in + # saltpylint +pymysql==1.0.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +pynacl==1.5.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in + # etcd3-py +pyrsistent==0.17.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # jsonschema +python-consul==1.1.0 
+ # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in + # botocore + # croniter + # kubernetes + # moto + # vcert +python-etcd==0.4.5 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +python-gnupg==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +python-telegram-bot==13.7 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +pytz==2022.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # apscheduler + # moto + # python-telegram-bot + # tempora + # twilio +pyvmomi==6.7.1.2018.12 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # ansible-core + # clustershell + # kubernetes + # yamllint +pyzmq==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/zeromq.txt +redis-py-cluster==2.1.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +redis==3.5.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # redis-py-cluster +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # apache-libcloud + # docker + # etcd3-py + # kubernetes + # moto + # python-consul + # pyvmomi + # responses + # twilio + # vcert +resolvelib==0.5.4 + # via + # -c 
requirements/static/ci/py3.12/linux.txt + # ansible-core +responses==0.10.6 + # via + # -c requirements/static/ci/py3.12/linux.txt + # moto +rfc3987==1.3.8 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +rpm-vercmp==0.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +rsa==4.7.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # google-auth +s3transfer==0.5.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # boto3 +saltpylint==2023.8.3 + # via -r requirements/static/ci/lint.in +semantic-version==2.9.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # etcd3-py +setproctitle==1.3.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +six==1.16.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # apscheduler + # astroid + # bcrypt + # cassandra-driver + # cheroot + # etcd3-py + # genshi + # geomet + # jsonschema + # kazoo + # kubernetes + # more-itertools + # paramiko + # python-consul + # python-dateutil + # pyvmomi + # responses + # vcert + # virtualenv + # websocket-client +slack-bolt==1.15.5 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +slack-sdk==3.19.5 + # via + # -c requirements/static/ci/py3.12/linux.txt + # slack-bolt +smmap==4.0.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # gitdb +sqlparse==0.4.4 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +tempora==4.1.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # portend +timelib==0.2.5 + # via + # 
-c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +toml==0.10.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # -r requirements/static/ci/lint.in +tornado==6.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # python-telegram-bot +twilio==7.9.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +tzlocal==3.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # apscheduler +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # botocore + # docker + # kubernetes + # python-etcd + # requests +vcert==0.7.4 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +virtualenv==20.7.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +watchdog==0.10.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # docker + # kubernetes +wempy==0.2.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # moto +wrapt==1.11.1 + # via astroid +xmltodict==0.12.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # moto +yamllint==1.26.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +yarl==1.7.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp +zc.lockfile==1.4 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +zipp==3.6.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # 
importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.12/linux-crypto.txt b/requirements/static/ci/py3.12/linux-crypto.txt new file mode 100644 index 00000000000..be01a017e8b --- /dev/null +++ b/requirements/static/ci/py3.12/linux-crypto.txt @@ -0,0 +1,10 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/linux-crypto.txt requirements/static/ci/crypto.in +# +m2crypto==0.38.0 + # via -r requirements/static/ci/crypto.in +pycryptodome==3.9.7 + # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.12/linux.txt b/requirements/static/ci/py3.12/linux.txt new file mode 100644 index 00000000000..e1c4b8b2a40 --- /dev/null +++ b/requirements/static/ci/py3.12/linux.txt @@ -0,0 +1,523 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/linux.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt +# +aiohttp==3.8.5 + # via etcd3-py +aiosignal==1.2.0 + # via aiohttp +ansible-core==2.14.1 + # via ansible +ansible==7.1.0 ; python_version >= "3.9" + # via -r requirements/static/ci/linux.in +apache-libcloud==2.5.0 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +apscheduler==3.6.3 + # via python-telegram-bot +asn1crypto==1.3.0 + # via + # certvalidator + # oscrypto +async-timeout==4.0.2 + # via aiohttp +attrs==23.1.0 + # via + # aiohttp + # jsonschema + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +backports.entry-points-selectable==1.1.0 + # via virtualenv +bcrypt==3.1.6 + # via + # -r requirements/static/ci/common.in + # paramiko +boto3==1.21.46 + # via + 
# -r requirements/static/ci/common.in + # moto +boto==2.49.0 + # via -r requirements/static/ci/common.in +botocore==1.24.46 + # via + # boto3 + # moto + # s3transfer +cachetools==4.2.2 + # via + # google-auth + # python-telegram-bot +cassandra-driver==3.23.0 + # via -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/static/ci/common.in + # kubernetes + # python-telegram-bot + # requests +certvalidator==0.11.1 + # via vcert +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/static/ci/common.in + # bcrypt + # cryptography + # pygit2 + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # aiohttp + # requests +cheetah3==3.2.6.post2 + # via -r requirements/static/ci/common.in +cheroot==8.5.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # cherrypy +cherrypy==18.6.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/static/ci/common.in + # -r requirements/static/pkg/linux.in +click==7.1.1 + # via geomet +clustershell==1.8.3 + # via -r requirements/static/ci/common.in +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/base.txt +croniter==0.3.29 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +cryptography==41.0.4 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/static/pkg/linux.in + # ansible-core + # etcd3-py + # moto + # paramiko + # pyopenssl + # vcert +distlib==0.3.2 + # via virtualenv +distro==1.5.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/base.txt + # pytest-skip-markers +dnspython==1.16.0 + # via + # -r requirements/static/ci/common.in + # python-etcd +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 + # via -r requirements/static/ci/common.in +filelock==3.0.12 + # via virtualenv 
+flaky==3.7.0 + # via -r requirements/pytest.txt +frozenlist==1.3.0 + # via + # aiohttp + # aiosignal +genshi==0.7.5 + # via -r requirements/static/ci/common.in +geomet==0.1.2 + # via cassandra-driver +gitdb==4.0.7 + # via gitpython +gitpython==3.1.37 + # via -r requirements/static/ci/common.in +google-auth==2.1.0 + # via kubernetes +hglib==2.6.1 + # via -r requirements/static/ci/linux.in +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # etcd3-py + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # contextvars +importlib-metadata==6.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +iniconfig==1.0.1 + # via pytest +ipaddress==1.0.22 + # via kubernetes +jaraco.classes==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # jaraco.collections +jaraco.collections==3.4.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/base.txt + # ansible-core + # moto +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore +jsonschema==3.2.0 + # via -r requirements/static/ci/common.in +jxmlease==1.0.1 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +kazoo==2.6.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via -r requirements/static/ci/common.in +keyring==5.7.1 + # via -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via -r requirements/static/ci/common.in +libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" + # 
via -r requirements/static/ci/common.in +looseversion==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/base.txt +mako==1.2.2 + # via -r requirements/static/ci/common.in +markupsafe==2.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/base.txt + # jinja2 + # mako + # moto + # werkzeug +mercurial==6.0.1 + # via -r requirements/static/ci/linux.in +mock==5.1.0 + # via -r requirements/pytest.txt +more-itertools==5.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/pytest.txt + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +moto==3.0.1 + # via -r requirements/static/ci/common.in +msgpack==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/base.txt + # pytest-salt-factories +multidict==6.0.2 + # via + # aiohttp + # yarl +oscrypto==1.2.0 + # via certvalidator +packaging==22.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/base.txt + # ansible-core + # docker + # pytest +paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via -r requirements/static/ci/common.in +passlib==1.7.4 + # via -r requirements/static/ci/common.in +pathspec==0.9.0 + # via yamllint +pathtools==0.1.2 + # via watchdog +platformdirs==2.2.0 + # via virtualenv +pluggy==0.13.0 + # via pytest +portend==2.4 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # cherrypy +psutil==5.8.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/base.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pyasn1-modules==0.2.4 + # via google-auth +pyasn1==0.4.8 + # via + # pyasn1-modules + # rsa +pycparser==2.21 ; python_version >= "3.9" + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/static/ci/common.in + # -r requirements/static/pkg/linux.in + # cffi +pycryptodomex==3.9.8 + # via + # -c 
requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/crypto.txt +pygit2==1.13.1 + # via -r requirements/static/ci/linux.in +pyiface==0.0.11 + # via -r requirements/static/ci/linux.in +pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" + # via -r requirements/static/ci/common.in +pyjwt==2.4.0 + # via twilio +pymysql==1.0.2 + # via -r requirements/static/ci/linux.in +pynacl==1.5.0 + # via + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/static/pkg/linux.in + # etcd3-py +pyrsistent==0.17.3 + # via jsonschema +pytest-custom-exit-code==0.3.0 + # via -r requirements/pytest.txt +pytest-helpers-namespace==2021.4.29 + # via + # -r requirements/pytest.txt + # pytest-salt-factories + # pytest-shell-utilities +pytest-httpserver==1.0.8 + # via -r requirements/pytest.txt +pytest-salt-factories==1.0.0rc27 + # via -r requirements/pytest.txt +pytest-shell-utilities==1.8.0 + # via pytest-salt-factories +pytest-skip-markers==1.5.0 + # via + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pytest-subtests==0.4.0 + # via -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via pytest-salt-factories +pytest-timeout==1.4.2 + # via -r requirements/pytest.txt +pytest==7.2.0 + # via + # -r requirements/pytest.txt + # pytest-custom-exit-code + # pytest-helpers-namespace + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-subtests + # pytest-system-statistics + # pytest-timeout +python-consul==1.1.0 + # via -r requirements/static/ci/linux.in +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/static/pkg/linux.in + # botocore + # croniter + # kubernetes + # moto + # vcert +python-etcd==0.4.5 + # via -r requirements/static/ci/common.in +python-gnupg==0.4.8 + # via + # -c 
requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +python-telegram-bot==13.7 + # via -r requirements/static/ci/linux.in +pytz==2022.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # apscheduler + # moto + # python-telegram-bot + # tempora + # twilio +pyvmomi==6.7.1.2018.12 + # via -r requirements/static/ci/common.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/base.txt + # ansible-core + # clustershell + # kubernetes + # pytest-salt-factories + # yamllint +pyzmq==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/zeromq.txt + # pytest-salt-factories +redis-py-cluster==2.1.3 + # via -r requirements/static/ci/linux.in +redis==3.5.3 + # via redis-py-cluster +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # apache-libcloud + # docker + # etcd3-py + # kubernetes + # moto + # python-consul + # pyvmomi + # responses + # twilio + # vcert +resolvelib==0.5.4 + # via ansible-core +responses==0.10.6 + # via moto +rfc3987==1.3.8 + # via -r requirements/static/ci/common.in +rpm-vercmp==0.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +rsa==4.7.2 + # via google-auth +s3transfer==0.5.2 + # via boto3 +semantic-version==2.9.0 + # via etcd3-py +setproctitle==1.3.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +six==1.16.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # apscheduler + # bcrypt + # cassandra-driver + # cheroot + # etcd3-py + # genshi + # geomet + # jsonschema + # kazoo + # kubernetes + # more-itertools + # paramiko + # python-consul + # python-dateutil + # pyvmomi + # responses + # vcert + # virtualenv + # websocket-client +slack-bolt==1.15.5 + # via -r requirements/static/ci/linux.in 
+slack-sdk==3.19.5 + # via slack-bolt +smmap==4.0.0 + # via gitdb +sqlparse==0.4.4 + # via -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via -r requirements/static/ci/common.in +tempora==4.1.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # portend +timelib==0.2.5 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +toml==0.10.2 + # via -r requirements/static/ci/common.in +tornado==6.1 + # via python-telegram-bot +twilio==7.9.2 + # via -r requirements/static/ci/linux.in +typing-extensions==4.8.0 + # via + # pytest-shell-utilities + # pytest-system-statistics +tzlocal==3.0 + # via apscheduler +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # botocore + # docker + # kubernetes + # python-etcd + # requests +vcert==0.7.4 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +virtualenv==20.7.2 + # via + # -r requirements/static/ci/common.in + # pytest-salt-factories +watchdog==0.10.3 + # via -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # docker + # kubernetes +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # moto + # pytest-httpserver +xmltodict==0.12.0 + # via moto +yamllint==1.26.3 + # via -r requirements/static/ci/linux.in +yarl==1.7.2 + # via aiohttp +zc.lockfile==1.4 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # cherrypy +zipp==3.6.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.12/tools-virustotal.txt b/requirements/static/ci/py3.12/tools-virustotal.txt new file mode 100644 index 00000000000..af03eeef1b2 --- /dev/null +++ b/requirements/static/ci/py3.12/tools-virustotal.txt @@ -0,0 +1,28 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile 
--output-file=requirements/static/ci/py3.12/tools-virustotal.txt requirements/static/ci/tools-virustotal.in +# +certifi==2023.7.22 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # requests +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # requests +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # requests +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # virustotal3 +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # requests +virustotal3==1.0.8 + # via -r requirements/static/ci/tools-virustotal.in diff --git a/requirements/static/ci/py3.12/tools.txt b/requirements/static/ci/py3.12/tools.txt new file mode 100644 index 00000000000..170d7243ba4 --- /dev/null +++ b/requirements/static/ci/py3.12/tools.txt @@ -0,0 +1,76 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/tools.txt requirements/static/ci/tools.in +# +attrs==22.1.0 + # via + # -r requirements/static/ci/tools.in + # python-tools-scripts +boto3==1.21.46 + # via -r requirements/static/ci/tools.in +botocore==1.24.46 + # via + # boto3 + # s3transfer +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # requests +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # requests +commonmark==0.9.1 + # via rich +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # requests +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/static/ci/tools.in +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # boto3 + # botocore +markupsafe==2.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # jinja2 +packaging==22.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r 
requirements/static/ci/tools.in +pygments==2.13.0 + # via rich +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # botocore +python-tools-scripts==0.18.3 + # via -r requirements/static/ci/tools.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/static/ci/tools.in +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # python-tools-scripts +rich==12.5.1 + # via python-tools-scripts +s3transfer==0.5.2 + # via boto3 +six==1.16.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # python-dateutil +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # botocore + # requests diff --git a/requirements/static/ci/py3.12/windows-crypto.txt b/requirements/static/ci/py3.12/windows-crypto.txt new file mode 100644 index 00000000000..ec84d96324e --- /dev/null +++ b/requirements/static/ci/py3.12/windows-crypto.txt @@ -0,0 +1,12 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/windows-crypto.txt requirements/static/ci/crypto.in +# +m2crypto==0.37.1 + # via -r requirements/static/ci/crypto.in +parameterized==0.8.1 + # via m2crypto +pycryptodome==3.10.1 + # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.12/windows.txt b/requirements/static/ci/py3.12/windows.txt new file mode 100644 index 00000000000..7d2ef5f7584 --- /dev/null +++ b/requirements/static/ci/py3.12/windows.txt @@ -0,0 +1,499 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/windows.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/windows.in requirements/static/pkg/windows.in requirements/windows.txt +# +aiohttp==3.8.5 + # via etcd3-py +aiosignal==1.3.1 + # via aiohttp +async-timeout==4.0.2 + # via 
aiohttp +attrs==23.1.0 + # via + # aiohttp + # jsonschema + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +bcrypt==4.0.1 + # via -r requirements/static/ci/common.in +boto3==1.21.46 + # via + # -r requirements/static/ci/common.in + # moto +boto==2.49.0 + # via -r requirements/static/ci/common.in +botocore==1.24.46 + # via + # boto3 + # moto + # s3transfer +cachetools==3.1.0 + # via google-auth +cassandra-driver==3.23.0 + # via -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/static/ci/common.in + # -r requirements/windows.txt + # kubernetes + # requests +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/static/ci/common.in + # -r requirements/windows.txt + # clr-loader + # cryptography + # pygit2 + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # aiohttp + # requests +cheetah3==3.2.6.post1 + # via -r requirements/static/ci/common.in +cheroot==8.5.2 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # cherrypy +cherrypy==18.6.1 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/static/ci/common.in + # -r requirements/windows.txt +click==7.1.2 + # via geomet +clr-loader==0.2.6 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # pythonnet +clustershell==1.8.3 + # via -r requirements/static/ci/common.in +colorama==0.4.1 + # via pytest +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/base.txt +cryptography==41.0.4 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt + # etcd3-py + # moto + # pyopenssl + # requests-ntlm +distlib==0.3.6 + # via virtualenv +distro==1.5.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r 
requirements/base.txt + # pytest-skip-markers +dmidecode==0.9.0 + # via -r requirements/static/ci/windows.in +dnspython==1.16.0 + # via + # -r requirements/static/ci/common.in + # python-etcd +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 + # via -r requirements/static/ci/common.in +filelock==3.8.0 + # via virtualenv +flaky==3.7.0 + # via -r requirements/pytest.txt +frozenlist==1.3.3 + # via + # aiohttp + # aiosignal +genshi==0.7.5 + # via -r requirements/static/ci/common.in +geomet==0.1.2 + # via cassandra-driver +gitdb==4.0.7 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # gitpython +gitpython==3.1.37 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/static/ci/common.in + # -r requirements/windows.txt +google-auth==2.1.0 + # via kubernetes +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # etcd3-py + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # contextvars +importlib-metadata==6.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt +iniconfig==1.0.1 + # via pytest +ioloop==0.1a0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt +ipaddress==1.0.22 + # via kubernetes +jaraco.classes==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # jaraco.collections +jaraco.collections==3.3.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/base.txt + # moto +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r 
requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore +jsonschema==3.2.0 + # via -r requirements/static/ci/common.in +keyring==5.7.1 + # via -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via -r requirements/static/ci/common.in +looseversion==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/base.txt +lxml==4.9.1 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt +mako==1.2.2 + # via -r requirements/static/ci/common.in +markupsafe==2.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/base.txt + # jinja2 + # mako + # moto + # werkzeug +mock==5.1.0 + # via -r requirements/pytest.txt +more-itertools==8.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/pytest.txt + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +moto==3.0.1 + # via -r requirements/static/ci/common.in +msgpack==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/base.txt + # pytest-salt-factories +multidict==6.0.2 + # via + # aiohttp + # yarl +ntlm-auth==1.5.0 + # via requests-ntlm +packaging==22.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/base.txt + # docker + # pytest +passlib==1.7.4 + # via -r requirements/static/ci/common.in +patch==1.16 + # via -r requirements/static/ci/windows.in +pathspec==0.10.2 + # via yamllint +pathtools==0.1.2 + # via watchdog +platformdirs==2.5.4 + # via virtualenv +pluggy==0.13.0 + # via pytest +portend==2.6 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # cherrypy +psutil==5.8.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/base.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pyasn1-modules==0.2.4 + # via google-auth +pyasn1==0.4.8 + # via + # -c 
requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt + # pyasn1-modules + # rsa +pycparser==2.21 ; python_version >= "3.9" + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/static/ci/common.in + # -r requirements/windows.txt + # cffi +pycryptodomex==3.10.1 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/crypto.txt +pygit2==1.13.1 + # via -r requirements/static/ci/windows.in +pymssql==2.2.7 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt +pymysql==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt +pynacl==1.5.0 + # via -r requirements/static/ci/common.in +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt + # etcd3-py +pyrsistent==0.17.3 + # via jsonschema +pytest-custom-exit-code==0.3.0 + # via -r requirements/pytest.txt +pytest-helpers-namespace==2021.12.29 + # via + # -r requirements/pytest.txt + # pytest-salt-factories + # pytest-shell-utilities +pytest-httpserver==1.0.8 + # via -r requirements/pytest.txt +pytest-salt-factories==1.0.0rc27 + # via -r requirements/pytest.txt +pytest-shell-utilities==1.8.0 + # via pytest-salt-factories +pytest-skip-markers==1.5.0 + # via + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pytest-subtests==0.4.0 + # via -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via pytest-salt-factories +pytest-timeout==2.1.0 + # via -r requirements/pytest.txt +pytest==7.2.0 + # via + # -r requirements/pytest.txt + # pytest-custom-exit-code + # pytest-helpers-namespace + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-subtests + # pytest-system-statistics + # pytest-timeout +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt + # botocore 
+ # kubernetes + # moto +python-etcd==0.4.5 + # via -r requirements/static/ci/common.in +python-gnupg==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt +pythonnet==3.0.3 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt +pytz==2022.1 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # moto + # tempora +pyvmomi==6.7.1.2018.12 + # via -r requirements/static/ci/common.in +pywin32==306 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt + # docker + # pytest-skip-markers + # wmi +pywinrm==0.4.1 + # via -r requirements/static/ci/windows.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/base.txt + # clustershell + # kubernetes + # pytest-salt-factories + # yamllint +pyzmq==25.0.2 ; sys_platform == "win32" + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/zeromq.txt + # pytest-salt-factories +requests-ntlm==1.1.0 + # via pywinrm +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # -r requirements/windows.txt + # docker + # etcd3-py + # kubernetes + # moto + # pyvmomi + # pywinrm + # requests-ntlm + # responses +responses==0.10.6 + # via moto +rfc3987==1.3.8 + # via -r requirements/static/ci/common.in +rsa==4.7.2 + # via google-auth +s3transfer==0.5.2 + # via boto3 +sed==0.3.1 + # via -r requirements/static/ci/windows.in +semantic-version==2.10.0 + # via etcd3-py +setproctitle==1.3.2 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt +six==1.15.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # cassandra-driver + # cheroot + # etcd3-py + # genshi + # geomet + # jsonschema + # kubernetes + # python-dateutil + # pyvmomi + # pywinrm + # responses + # websocket-client 
+smmap==4.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # gitdb +sqlparse==0.4.4 + # via -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via -r requirements/static/ci/common.in +tempora==4.1.1 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # portend +timelib==0.2.5 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt +toml==0.10.2 + # via -r requirements/static/ci/common.in +typing-extensions==4.4.0 + # via + # pytest-shell-utilities + # pytest-system-statistics +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt + # botocore + # docker + # kubernetes + # python-etcd + # requests +virtualenv==20.17.0 + # via + # -r requirements/static/ci/common.in + # pytest-salt-factories +watchdog==0.10.3 + # via -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # docker + # kubernetes +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # moto + # pytest-httpserver +wheel==0.38.4 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt +wmi==1.5.1 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt +xmltodict==0.12.0 + # via + # moto + # pywinrm +yamllint==1.28.0 + # via -r requirements/static/ci/windows.in +yarl==1.8.1 + # via aiohttp +zc.lockfile==2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # cherrypy +zipp==3.12.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/pkg/py3.12/darwin.txt b/requirements/static/pkg/py3.12/darwin.txt new file mode 100644 index 00000000000..dd48cc6762c --- /dev/null +++ b/requirements/static/pkg/py3.12/darwin.txt @@ -0,0 +1,123 @@ +# +# This file is autogenerated by 
pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.12/darwin.txt requirements/darwin.txt requirements/static/pkg/darwin.in +# +apache-libcloud==2.5.0 + # via -r requirements/darwin.txt +certifi==2023.07.22 + # via requests +cffi==1.14.6 + # via cryptography +charset-normalizer==3.2.0 + # via requests +cheroot==8.5.2 + # via cherrypy +cherrypy==18.6.1 + # via -r requirements/darwin.txt +contextvars==2.4 + # via -r requirements/base.txt +cryptography==41.0.4 + # via + # -r requirements/darwin.txt + # pyopenssl +distro==1.5.0 + # via -r requirements/base.txt +gitdb==4.0.7 + # via gitpython +gitpython==3.1.37 + # via -r requirements/darwin.txt +idna==3.2 + # via + # -r requirements/darwin.txt + # requests +immutables==0.15 + # via contextvars +importlib-metadata==6.0.0 + # via -r requirements/darwin.txt +jaraco.classes==3.2.1 + # via jaraco.collections +jaraco.collections==3.4.0 + # via cherrypy +jaraco.functools==2.0 + # via + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via jaraco.collections +jinja2==3.1.2 + # via -r requirements/base.txt +jmespath==1.0.1 + # via -r requirements/base.txt +linode-python==1.1.1 + # via -r requirements/darwin.txt +looseversion==1.0.2 + # via -r requirements/base.txt +markupsafe==2.1.2 + # via + # -r requirements/base.txt + # jinja2 +more-itertools==8.2.0 + # via + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +msgpack==1.0.2 + # via -r requirements/base.txt +packaging==22.0 + # via -r requirements/base.txt +portend==2.6 + # via cherrypy +psutil==5.8.0 + # via -r requirements/base.txt +pyasn1==0.4.8 + # via -r requirements/darwin.txt +pycparser==2.21 + # via + # -r requirements/darwin.txt + # cffi +pycryptodomex==3.9.8 + # via -r requirements/crypto.txt +pyopenssl==23.2.0 + # via -r requirements/darwin.txt +python-dateutil==2.8.0 + # via -r requirements/darwin.txt +python-gnupg==0.4.8 + # via -r requirements/darwin.txt +pytz==2022.1 + # via 
tempora +pyyaml==6.0.1 + # via -r requirements/base.txt +pyzmq==23.2.0 + # via -r requirements/zeromq.txt +requests==2.31.0 + # via + # -r requirements/base.txt + # apache-libcloud + # vultr +setproctitle==1.3.2 + # via -r requirements/darwin.txt +six==1.16.0 + # via + # cheroot + # python-dateutil +smmap==4.0.0 + # via gitdb +tempora==4.1.1 + # via portend +timelib==0.2.5 + # via -r requirements/darwin.txt +urllib3==1.26.18 + # via requests +vultr==1.0.1 + # via -r requirements/darwin.txt +zc.lockfile==2.0 + # via cherrypy +zipp==3.12.0 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/pkg/py3.12/freebsd.txt b/requirements/static/pkg/py3.12/freebsd.txt new file mode 100644 index 00000000000..f8e48894965 --- /dev/null +++ b/requirements/static/pkg/py3.12/freebsd.txt @@ -0,0 +1,107 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.12/freebsd.txt requirements/base.txt requirements/static/pkg/freebsd.in requirements/zeromq.txt +# +certifi==2023.07.22 + # via requests +cffi==1.14.6 + # via cryptography +charset-normalizer==3.2.0 + # via requests +cheroot==8.5.2 + # via cherrypy +cherrypy==18.6.1 + # via -r requirements/static/pkg/freebsd.in +contextvars==2.4 + # via -r requirements/base.txt +cryptography==41.0.4 + # via + # -r requirements/static/pkg/freebsd.in + # pyopenssl +distro==1.5.0 + # via + # -r requirements/base.txt + # -r requirements/static/pkg/freebsd.in +idna==3.2 + # via requests +immutables==0.15 + # via contextvars +importlib-metadata==6.0.0 + # via -r requirements/static/pkg/freebsd.in +jaraco.classes==3.2.1 + # via jaraco.collections +jaraco.collections==3.4.0 + # via cherrypy +jaraco.functools==2.0 + # via + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via jaraco.collections +jinja2==3.1.2 + # via -r requirements/base.txt 
+jmespath==1.0.1 + # via -r requirements/base.txt +looseversion==1.0.2 + # via -r requirements/base.txt +markupsafe==2.1.2 + # via + # -r requirements/base.txt + # jinja2 +more-itertools==5.0.0 + # via + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +msgpack==1.0.2 + # via -r requirements/base.txt +packaging==22.0 + # via -r requirements/base.txt +portend==2.4 + # via cherrypy +psutil==5.8.0 + # via -r requirements/base.txt +pycparser==2.21 ; python_version >= "3.9" + # via + # -r requirements/static/pkg/freebsd.in + # cffi +pycryptodomex==3.9.8 + # via -r requirements/crypto.txt +pyopenssl==23.2.0 + # via -r requirements/static/pkg/freebsd.in +python-dateutil==2.8.1 + # via -r requirements/static/pkg/freebsd.in +python-gnupg==0.4.8 + # via -r requirements/static/pkg/freebsd.in +pytz==2022.1 + # via tempora +pyyaml==6.0.1 + # via -r requirements/base.txt +pyzmq==23.2.0 + # via -r requirements/zeromq.txt +requests==2.31.0 + # via -r requirements/base.txt +setproctitle==1.3.2 + # via -r requirements/static/pkg/freebsd.in +six==1.16.0 + # via + # cheroot + # more-itertools + # python-dateutil +tempora==4.1.1 + # via portend +timelib==0.2.5 + # via -r requirements/static/pkg/freebsd.in +urllib3==1.26.18 + # via requests +zc.lockfile==1.4 + # via cherrypy +zipp==3.12.0 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/pkg/py3.12/linux.txt b/requirements/static/pkg/py3.12/linux.txt new file mode 100644 index 00000000000..3527eab687b --- /dev/null +++ b/requirements/static/pkg/py3.12/linux.txt @@ -0,0 +1,107 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.12/linux.txt requirements/base.txt requirements/static/pkg/linux.in requirements/zeromq.txt +# +certifi==2023.07.22 + # via requests +cffi==1.14.6 + # via cryptography +charset-normalizer==3.2.0 + # 
via requests +cheroot==8.5.2 + # via cherrypy +cherrypy==18.6.1 + # via -r requirements/static/pkg/linux.in +contextvars==2.4 + # via -r requirements/base.txt +cryptography==41.0.4 + # via + # -r requirements/static/pkg/linux.in + # pyopenssl +distro==1.5.0 + # via -r requirements/base.txt +idna==3.2 + # via requests +immutables==0.15 + # via contextvars +importlib-metadata==6.0.0 + # via -r requirements/static/pkg/linux.in +jaraco.classes==3.2.1 + # via jaraco.collections +jaraco.collections==3.4.0 + # via cherrypy +jaraco.functools==2.0 + # via + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via jaraco.collections +jinja2==3.1.2 + # via -r requirements/base.txt +jmespath==1.0.1 + # via -r requirements/base.txt +looseversion==1.0.2 + # via -r requirements/base.txt +markupsafe==2.1.2 + # via + # -r requirements/base.txt + # jinja2 +more-itertools==5.0.0 + # via + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +msgpack==1.0.2 + # via -r requirements/base.txt +packaging==22.0 + # via -r requirements/base.txt +portend==2.4 + # via cherrypy +psutil==5.8.0 + # via -r requirements/base.txt +pycparser==2.21 ; python_version >= "3.9" + # via + # -r requirements/static/pkg/linux.in + # cffi +pycryptodomex==3.9.8 + # via -r requirements/crypto.txt +pyopenssl==23.2.0 + # via -r requirements/static/pkg/linux.in +python-dateutil==2.8.1 + # via -r requirements/static/pkg/linux.in +python-gnupg==0.4.8 + # via -r requirements/static/pkg/linux.in +pytz==2022.1 + # via tempora +pyyaml==6.0.1 + # via -r requirements/base.txt +pyzmq==23.2.0 + # via -r requirements/zeromq.txt +requests==2.31.0 + # via -r requirements/base.txt +rpm-vercmp==0.1.2 + # via -r requirements/static/pkg/linux.in +setproctitle==1.3.2 + # via -r requirements/static/pkg/linux.in +six==1.16.0 + # via + # cheroot + # more-itertools + # python-dateutil +tempora==4.1.1 + # via portend +timelib==0.2.5 + # via -r requirements/static/pkg/linux.in +urllib3==1.26.18 + # via requests 
+zc.lockfile==1.4 + # via cherrypy +zipp==3.6.0 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/pkg/py3.12/windows.txt b/requirements/static/pkg/py3.12/windows.txt new file mode 100644 index 00000000000..a684cf1f5d7 --- /dev/null +++ b/requirements/static/pkg/py3.12/windows.txt @@ -0,0 +1,141 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.12/windows.txt requirements/static/pkg/windows.in requirements/windows.txt +# +certifi==2023.07.22 + # via + # -r requirements/windows.txt + # requests +cffi==1.14.6 + # via + # -r requirements/windows.txt + # clr-loader + # cryptography +charset-normalizer==3.2.0 + # via requests +cheroot==8.5.2 + # via cherrypy +cherrypy==18.6.1 + # via -r requirements/windows.txt +clr-loader==0.2.6 + # via pythonnet +contextvars==2.4 + # via -r requirements/base.txt +cryptography==41.0.4 + # via + # -r requirements/windows.txt + # pyopenssl +distro==1.5.0 + # via -r requirements/base.txt +gitdb==4.0.7 + # via gitpython +gitpython==3.1.37 + # via -r requirements/windows.txt +idna==3.2 + # via requests +immutables==0.15 + # via contextvars +importlib-metadata==6.0.0 + # via -r requirements/windows.txt +ioloop==0.1a0 + # via -r requirements/windows.txt +jaraco.classes==3.2.1 + # via jaraco.collections +jaraco.collections==3.3.0 + # via cherrypy +jaraco.functools==2.0 + # via + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.0 + # via jaraco.collections +jinja2==3.1.2 + # via -r requirements/base.txt +jmespath==1.0.1 + # via -r requirements/base.txt +looseversion==1.0.2 + # via -r requirements/base.txt +lxml==4.9.1 + # via -r requirements/windows.txt +markupsafe==2.1.2 + # via + # -r requirements/base.txt + # jinja2 +more-itertools==8.2.0 + # via + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +msgpack==1.0.2 + # via 
-r requirements/base.txt +packaging==22.0 + # via -r requirements/base.txt +portend==2.6 + # via cherrypy +psutil==5.8.0 + # via -r requirements/base.txt +pyasn1==0.4.8 + # via -r requirements/windows.txt +pycparser==2.21 + # via + # -r requirements/windows.txt + # cffi +pycryptodomex==3.10.1 + # via -r requirements/crypto.txt +pymssql==2.2.7 + # via -r requirements/windows.txt +pymysql==1.0.2 + # via -r requirements/windows.txt +pyopenssl==23.2.0 + # via -r requirements/windows.txt +python-dateutil==2.8.1 + # via -r requirements/windows.txt +python-gnupg==0.4.8 + # via -r requirements/windows.txt +pythonnet==3.0.3 + # via -r requirements/windows.txt +pytz==2022.1 + # via tempora +pywin32==306 + # via + # -r requirements/windows.txt + # wmi +pyyaml==6.0.1 + # via -r requirements/base.txt +pyzmq==25.0.2 ; sys_platform == "win32" + # via -r requirements/zeromq.txt +requests==2.31.0 + # via + # -r requirements/base.txt + # -r requirements/windows.txt +setproctitle==1.3.2 + # via -r requirements/windows.txt +six==1.15.0 + # via + # cheroot + # python-dateutil +smmap==4.0.0 + # via gitdb +tempora==4.1.1 + # via portend +timelib==0.2.5 + # via -r requirements/windows.txt +urllib3==1.26.18 + # via + # -r requirements/windows.txt + # requests +wheel==0.38.4 + # via -r requirements/windows.txt +wmi==1.5.1 + # via -r requirements/windows.txt +zc.lockfile==2.0 + # via cherrypy +zipp==3.12.0 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools From 8e5c1da7a34f9d20f64dd437fffd7e9216524472 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Thu, 23 Nov 2023 18:17:47 +0000 Subject: [PATCH 191/196] Bump to `python-tools-scripts>=0.18.5` Signed-off-by: Pedro Algarvio --- .pre-commit-config.yaml | 4 ++-- requirements/static/ci/py3.10/tools.txt | 2 +- requirements/static/ci/py3.11/tools.txt | 2 +- requirements/static/ci/py3.12/tools.txt | 2 +- requirements/static/ci/py3.9/tools.txt | 2 +- 
requirements/static/ci/tools.in | 2 +- 6 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 6a62fc6210d..9dfcdf6d4c0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -46,7 +46,7 @@ repos: )$ - repo: https://github.com/s0undt3ch/python-tools-scripts - rev: "0.18.3" + rev: "0.18.5" hooks: - id: tools alias: check-changelog-entries @@ -1762,7 +1762,7 @@ repos: - types-attrs - types-pyyaml - types-requests - - python-tools-scripts>=0.18.3 + - python-tools-scripts>=0.18.4 - repo: https://github.com/saltstack/mirrors-nox rev: v2021.6.12 diff --git a/requirements/static/ci/py3.10/tools.txt b/requirements/static/ci/py3.10/tools.txt index 69f0c3896c0..199f02dba78 100644 --- a/requirements/static/ci/py3.10/tools.txt +++ b/requirements/static/ci/py3.10/tools.txt @@ -53,7 +53,7 @@ python-dateutil==2.8.1 # via # -c requirements/static/ci/../pkg/py3.10/linux.txt # botocore -python-tools-scripts==0.18.3 +python-tools-scripts==0.18.5 # via -r requirements/static/ci/tools.in pyyaml==6.0.1 # via diff --git a/requirements/static/ci/py3.11/tools.txt b/requirements/static/ci/py3.11/tools.txt index 06046989a38..14ba73f19c0 100644 --- a/requirements/static/ci/py3.11/tools.txt +++ b/requirements/static/ci/py3.11/tools.txt @@ -51,7 +51,7 @@ python-dateutil==2.8.1 # via # -c requirements/static/ci/../pkg/py3.11/linux.txt # botocore -python-tools-scripts==0.18.3 +python-tools-scripts==0.18.5 # via -r requirements/static/ci/tools.in pyyaml==6.0.1 # via diff --git a/requirements/static/ci/py3.12/tools.txt b/requirements/static/ci/py3.12/tools.txt index 170d7243ba4..1d163af7579 100644 --- a/requirements/static/ci/py3.12/tools.txt +++ b/requirements/static/ci/py3.12/tools.txt @@ -51,7 +51,7 @@ python-dateutil==2.8.1 # via # -c requirements/static/ci/../pkg/py3.12/linux.txt # botocore -python-tools-scripts==0.18.3 +python-tools-scripts==0.18.5 # via -r requirements/static/ci/tools.in pyyaml==6.0.1 # via diff 
--git a/requirements/static/ci/py3.9/tools.txt b/requirements/static/ci/py3.9/tools.txt index 018373ce635..a8be31ff28d 100644 --- a/requirements/static/ci/py3.9/tools.txt +++ b/requirements/static/ci/py3.9/tools.txt @@ -53,7 +53,7 @@ python-dateutil==2.8.1 # via # -c requirements/static/ci/../pkg/py3.9/linux.txt # botocore -python-tools-scripts==0.18.3 +python-tools-scripts==0.18.5 # via -r requirements/static/ci/tools.in pyyaml==6.0.1 # via diff --git a/requirements/static/ci/tools.in b/requirements/static/ci/tools.in index 143cab05113..367eb857b4a 100644 --- a/requirements/static/ci/tools.in +++ b/requirements/static/ci/tools.in @@ -1,7 +1,7 @@ --constraint=../pkg/py{py_version}/{platform}.txt attrs -python-tools-scripts >= 0.18.3 +python-tools-scripts >= 0.18.5 boto3 pyyaml jinja2 From 72aa076c59f839338db55559accb53f6e79de793 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Sat, 25 Nov 2023 11:54:01 +0000 Subject: [PATCH 192/196] Fix the libvirt tests Signed-off-by: Pedro Algarvio --- .../pytests/integration/modules/test_virt.py | 34 ++++++++++++++++++- 1 file changed, 33 insertions(+), 1 deletion(-) diff --git a/tests/pytests/integration/modules/test_virt.py b/tests/pytests/integration/modules/test_virt.py index b2c72d60747..adafc517448 100644 --- a/tests/pytests/integration/modules/test_virt.py +++ b/tests/pytests/integration/modules/test_virt.py @@ -2,6 +2,7 @@ Validate the virt module """ import logging +import sys from numbers import Number from xml.etree import ElementTree @@ -16,11 +17,34 @@ log = logging.getLogger(__name__) pytestmark = [ pytest.mark.slow_test, + pytest.mark.skip_unless_on_linux, pytest.mark.skip_if_binaries_missing("docker"), ] def _install_salt_dependencies(container): + ret = container.run("bash", "-c", "echo $SALT_PY_VERSION") + assert ret.returncode == 0 + if not ret.stdout: + log.warning( + "The 'SALT_PY_VERSION' environment variable is not set on the container" + ) + salt_py_version = 3 + ret = container.run( + "python3", + 
"-c", + "import sys; sys.stderr.write('{}.{}'.format(*sys.version_info))", + ) + assert ret.returncode == 0 + if not ret.stdout: + requirements_py_version = "py{}.{}".format(*sys.version_info) + else: + requirements_py_version = ret.stdout.strip() + else: + salt_py_version = requirements_py_version = ret.stdout.strip() + + container.python_executable = f"python{salt_py_version}" + dependencies = [] for package, version in salt.version.dependency_information(): if package not in ("packaging", "looseversion"): @@ -29,8 +53,16 @@ def _install_salt_dependencies(container): continue dependencies.append(f"{package}=={version}") if dependencies: - ret = container.run("python3", "-m", "pip", "install", *dependencies) + ret = container.run( + container.python_executable, + "-m", + "pip", + "install", + f"--constraint=/salt/requirements/static/ci/py{requirements_py_version}/linux.txt", + *dependencies, + ) log.debug("Install missing dependecies ret: %s", ret) + assert ret.returncode == 0 @pytest.fixture(scope="module") From 4ee029f1f7024bc3fa7691c1a1c6661ed46d8948 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Sat, 25 Nov 2023 12:00:17 +0000 Subject: [PATCH 193/196] Move the install salt routine to the custom salt minion class implementation Signed-off-by: Pedro Algarvio --- .../pytests/integration/modules/test_virt.py | 48 ------------------- tests/support/virt.py | 39 +++++++++++++++ 2 files changed, 39 insertions(+), 48 deletions(-) diff --git a/tests/pytests/integration/modules/test_virt.py b/tests/pytests/integration/modules/test_virt.py index adafc517448..5114f39c9a6 100644 --- a/tests/pytests/integration/modules/test_virt.py +++ b/tests/pytests/integration/modules/test_virt.py @@ -2,13 +2,11 @@ Validate the virt module """ import logging -import sys from numbers import Number from xml.etree import ElementTree import pytest -import salt.version from tests.support.virt import SaltVirtMinionContainerFactory docker = pytest.importorskip("docker") @@ -17,54 +15,10 @@ 
log = logging.getLogger(__name__) pytestmark = [ pytest.mark.slow_test, - pytest.mark.skip_unless_on_linux, pytest.mark.skip_if_binaries_missing("docker"), ] -def _install_salt_dependencies(container): - ret = container.run("bash", "-c", "echo $SALT_PY_VERSION") - assert ret.returncode == 0 - if not ret.stdout: - log.warning( - "The 'SALT_PY_VERSION' environment variable is not set on the container" - ) - salt_py_version = 3 - ret = container.run( - "python3", - "-c", - "import sys; sys.stderr.write('{}.{}'.format(*sys.version_info))", - ) - assert ret.returncode == 0 - if not ret.stdout: - requirements_py_version = "py{}.{}".format(*sys.version_info) - else: - requirements_py_version = ret.stdout.strip() - else: - salt_py_version = requirements_py_version = ret.stdout.strip() - - container.python_executable = f"python{salt_py_version}" - - dependencies = [] - for package, version in salt.version.dependency_information(): - if package not in ("packaging", "looseversion"): - # These are newer base dependencies which the container might not - # yet have - continue - dependencies.append(f"{package}=={version}") - if dependencies: - ret = container.run( - container.python_executable, - "-m", - "pip", - "install", - f"--constraint=/salt/requirements/static/ci/py{requirements_py_version}/linux.txt", - *dependencies, - ) - log.debug("Install missing dependecies ret: %s", ret) - assert ret.returncode == 0 - - @pytest.fixture(scope="module") def virt_minion_0_id(): return "virt-minion-0" @@ -105,7 +59,6 @@ def virt_minion_0( skip_on_pull_failure=True, skip_if_docker_client_not_connectable=True, ) - factory.before_start(_install_salt_dependencies, factory) factory.after_terminate( pytest.helpers.remove_stale_minion_key, salt_master, factory.id ) @@ -143,7 +96,6 @@ def virt_minion_1( skip_on_pull_failure=True, skip_if_docker_client_not_connectable=True, ) - factory.before_start(_install_salt_dependencies, factory) factory.after_terminate( 
pytest.helpers.remove_stale_minion_key, salt_master, factory.id ) diff --git a/tests/support/virt.py b/tests/support/virt.py index f374d243d5c..213b88b4d9c 100644 --- a/tests/support/virt.py +++ b/tests/support/virt.py @@ -1,3 +1,5 @@ +import logging +import sys import time import uuid @@ -7,6 +9,8 @@ from saltfactories.daemons.container import SaltMinion from tests.conftest import CODE_DIR +log = logging.getLogger(__name__) + @attr.s(kw_only=True, slots=True) class SaltVirtMinionContainerFactory(SaltMinion): @@ -64,6 +68,7 @@ class SaltVirtMinionContainerFactory(SaltMinion): self.container_start_check(self._check_script_path_exists) for port in (self.sshd_port, self.libvirt_tcp_port, self.libvirt_tls_port): self.check_ports[port] = port + self.before_start(self._install_salt_in_container) def _check_script_path_exists(self, timeout_at): while time.time() <= timeout_at: @@ -76,3 +81,37 @@ class SaltVirtMinionContainerFactory(SaltMinion): else: return False return True + + def _install_salt_in_container(self): + ret = self.run("bash", "-c", "echo $SALT_PY_VERSION") + assert ret.returncode == 0 + if not ret.stdout: + log.warning( + "The 'SALT_PY_VERSION' environment variable is not set on the container" + ) + salt_py_version = 3 + ret = self.run( + "python3", + "-c", + "import sys; sys.stderr.write('{}.{}'.format(*sys.version_info))", + ) + assert ret.returncode == 0 + if not ret.stdout: + requirements_py_version = "{}.{}".format(*sys.version_info) + else: + requirements_py_version = ret.stdout.strip() + else: + salt_py_version = requirements_py_version = ret.stdout.strip() + + self.python_executable = f"python{salt_py_version}" + + ret = self.run( + self.python_executable, + "-m", + "pip", + "install", + f"--constraint=/salt/requirements/static/ci/py{requirements_py_version}/linux.txt", + "/salt", + ) + log.debug("Install Salt in the container: %s", ret) + assert ret.returncode == 0 From 705581130a1a55666c4374f022772be0971542b1 Mon Sep 17 00:00:00 2001 From: Pedro 
Algarvio Date: Sat, 25 Nov 2023 21:58:47 +0000 Subject: [PATCH 194/196] Bump to `pytest-salt-factories==1.0.0rc28` Signed-off-by: Pedro Algarvio --- requirements/pytest.txt | 2 +- requirements/static/ci/py3.10/cloud.txt | 2 +- requirements/static/ci/py3.10/darwin.txt | 2 +- requirements/static/ci/py3.10/freebsd.txt | 2 +- requirements/static/ci/py3.10/linux.txt | 2 +- requirements/static/ci/py3.10/windows.txt | 2 +- requirements/static/ci/py3.11/cloud.txt | 2 +- requirements/static/ci/py3.11/darwin.txt | 2 +- requirements/static/ci/py3.11/freebsd.txt | 2 +- requirements/static/ci/py3.11/linux.txt | 2 +- requirements/static/ci/py3.11/windows.txt | 2 +- requirements/static/ci/py3.12/cloud.txt | 2 +- requirements/static/ci/py3.12/darwin.txt | 2 +- requirements/static/ci/py3.12/freebsd.txt | 2 +- requirements/static/ci/py3.12/linux.txt | 2 +- requirements/static/ci/py3.12/windows.txt | 2 +- requirements/static/ci/py3.7/cloud.txt | 2 +- requirements/static/ci/py3.7/freebsd.txt | 2 +- requirements/static/ci/py3.7/linux.txt | 2 +- requirements/static/ci/py3.7/windows.txt | 2 +- requirements/static/ci/py3.8/cloud.txt | 2 +- requirements/static/ci/py3.8/freebsd.txt | 2 +- requirements/static/ci/py3.8/linux.txt | 2 +- requirements/static/ci/py3.8/windows.txt | 2 +- requirements/static/ci/py3.9/cloud.txt | 2 +- requirements/static/ci/py3.9/darwin.txt | 2 +- requirements/static/ci/py3.9/freebsd.txt | 2 +- requirements/static/ci/py3.9/linux.txt | 2 +- requirements/static/ci/py3.9/windows.txt | 2 +- 29 files changed, 29 insertions(+), 29 deletions(-) diff --git a/requirements/pytest.txt b/requirements/pytest.txt index cafa2ec25a1..c497736194f 100644 --- a/requirements/pytest.txt +++ b/requirements/pytest.txt @@ -2,7 +2,7 @@ mock >= 3.0.0 # PyTest docker pytest >= 7.2.0 -pytest-salt-factories >= 1.0.0rc27 +pytest-salt-factories >= 1.0.0rc28 pytest-helpers-namespace >= 2019.1.8 pytest-subtests pytest-timeout diff --git a/requirements/static/ci/py3.10/cloud.txt 
b/requirements/static/ci/py3.10/cloud.txt index 55c1479cf3f..dce9c865d08 100644 --- a/requirements/static/ci/py3.10/cloud.txt +++ b/requirements/static/ci/py3.10/cloud.txt @@ -466,7 +466,7 @@ pytest-httpserver==1.0.8 # via # -c requirements/static/ci/py3.10/linux.txt # -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via # -c requirements/static/ci/py3.10/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.10/darwin.txt b/requirements/static/ci/py3.10/darwin.txt index 5e0b7277879..ad65da63fdf 100644 --- a/requirements/static/ci/py3.10/darwin.txt +++ b/requirements/static/ci/py3.10/darwin.txt @@ -328,7 +328,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.10/freebsd.txt b/requirements/static/ci/py3.10/freebsd.txt index 2caa3f55787..f54efd23613 100644 --- a/requirements/static/ci/py3.10/freebsd.txt +++ b/requirements/static/ci/py3.10/freebsd.txt @@ -321,7 +321,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.10/linux.txt b/requirements/static/ci/py3.10/linux.txt index 8b70902a83d..28410e4582c 100644 --- a/requirements/static/ci/py3.10/linux.txt +++ b/requirements/static/ci/py3.10/linux.txt @@ -338,7 +338,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # 
via pytest-salt-factories diff --git a/requirements/static/ci/py3.10/windows.txt b/requirements/static/ci/py3.10/windows.txt index 65f3feaa099..7cbfcb3d76b 100644 --- a/requirements/static/ci/py3.10/windows.txt +++ b/requirements/static/ci/py3.10/windows.txt @@ -305,7 +305,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.11/cloud.txt b/requirements/static/ci/py3.11/cloud.txt index b2ff4c59338..00380143eda 100644 --- a/requirements/static/ci/py3.11/cloud.txt +++ b/requirements/static/ci/py3.11/cloud.txt @@ -435,7 +435,7 @@ pytest-httpserver==1.0.8 # via # -c requirements/static/ci/py3.11/linux.txt # -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via # -c requirements/static/ci/py3.11/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.11/darwin.txt b/requirements/static/ci/py3.11/darwin.txt index 0c6824eb714..b345717fc5c 100644 --- a/requirements/static/ci/py3.11/darwin.txt +++ b/requirements/static/ci/py3.11/darwin.txt @@ -302,7 +302,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.11/freebsd.txt b/requirements/static/ci/py3.11/freebsd.txt index b290eea30b4..7e3b8dde4b6 100644 --- a/requirements/static/ci/py3.11/freebsd.txt +++ b/requirements/static/ci/py3.11/freebsd.txt @@ -301,7 +301,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 
+pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.11/linux.txt b/requirements/static/ci/py3.11/linux.txt index 8530773540b..7642f663711 100644 --- a/requirements/static/ci/py3.11/linux.txt +++ b/requirements/static/ci/py3.11/linux.txt @@ -318,7 +318,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.11/windows.txt b/requirements/static/ci/py3.11/windows.txt index 1565296a17f..42783c12d3d 100644 --- a/requirements/static/ci/py3.11/windows.txt +++ b/requirements/static/ci/py3.11/windows.txt @@ -304,7 +304,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.12/cloud.txt b/requirements/static/ci/py3.12/cloud.txt index 436b9041cf3..f961291258b 100644 --- a/requirements/static/ci/py3.12/cloud.txt +++ b/requirements/static/ci/py3.12/cloud.txt @@ -435,7 +435,7 @@ pytest-httpserver==1.0.8 # via # -c requirements/static/ci/py3.12/linux.txt # -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via # -c requirements/static/ci/py3.12/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.12/darwin.txt b/requirements/static/ci/py3.12/darwin.txt index 0f39978897b..d4af3029d59 100644 --- a/requirements/static/ci/py3.12/darwin.txt +++ b/requirements/static/ci/py3.12/darwin.txt @@ -302,7 +302,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities 
pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.12/freebsd.txt b/requirements/static/ci/py3.12/freebsd.txt index 6a881aab279..4756e3b84f2 100644 --- a/requirements/static/ci/py3.12/freebsd.txt +++ b/requirements/static/ci/py3.12/freebsd.txt @@ -301,7 +301,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.12/linux.txt b/requirements/static/ci/py3.12/linux.txt index e1c4b8b2a40..4159822b1ad 100644 --- a/requirements/static/ci/py3.12/linux.txt +++ b/requirements/static/ci/py3.12/linux.txt @@ -318,7 +318,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.12/windows.txt b/requirements/static/ci/py3.12/windows.txt index 7d2ef5f7584..29054277a7a 100644 --- a/requirements/static/ci/py3.12/windows.txt +++ b/requirements/static/ci/py3.12/windows.txt @@ -304,7 +304,7 @@ pytest-helpers-namespace==2021.12.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.7/cloud.txt b/requirements/static/ci/py3.7/cloud.txt index abc60cb0cd3..07718e2ebf0 100644 --- a/requirements/static/ci/py3.7/cloud.txt +++ 
b/requirements/static/ci/py3.7/cloud.txt @@ -517,7 +517,7 @@ pytest-httpserver==1.0.6 # via # -c requirements/static/ci/py3.7/linux.txt # -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via # -c requirements/static/ci/py3.7/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.7/freebsd.txt b/requirements/static/ci/py3.7/freebsd.txt index 691ca070cd1..dee9c44a021 100644 --- a/requirements/static/ci/py3.7/freebsd.txt +++ b/requirements/static/ci/py3.7/freebsd.txt @@ -363,7 +363,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.6 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.7/linux.txt b/requirements/static/ci/py3.7/linux.txt index fa6e4a13411..c28094bdfb9 100644 --- a/requirements/static/ci/py3.7/linux.txt +++ b/requirements/static/ci/py3.7/linux.txt @@ -382,7 +382,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.6 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.7/windows.txt b/requirements/static/ci/py3.7/windows.txt index 1c42e998471..f8a5429f15d 100644 --- a/requirements/static/ci/py3.7/windows.txt +++ b/requirements/static/ci/py3.7/windows.txt @@ -319,7 +319,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.6 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.8/cloud.txt b/requirements/static/ci/py3.8/cloud.txt index 
0234878abc3..1a186d6b0e1 100644 --- a/requirements/static/ci/py3.8/cloud.txt +++ b/requirements/static/ci/py3.8/cloud.txt @@ -504,7 +504,7 @@ pytest-httpserver==1.0.8 # via # -c requirements/static/ci/py3.8/linux.txt # -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via # -c requirements/static/ci/py3.8/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.8/freebsd.txt b/requirements/static/ci/py3.8/freebsd.txt index fdd96b6f0ca..135a969033a 100644 --- a/requirements/static/ci/py3.8/freebsd.txt +++ b/requirements/static/ci/py3.8/freebsd.txt @@ -350,7 +350,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.8/linux.txt b/requirements/static/ci/py3.8/linux.txt index 304a5afb7b8..7654faf88a8 100644 --- a/requirements/static/ci/py3.8/linux.txt +++ b/requirements/static/ci/py3.8/linux.txt @@ -369,7 +369,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.8/windows.txt b/requirements/static/ci/py3.8/windows.txt index 9bbaf88cdbe..12b19475df9 100644 --- a/requirements/static/ci/py3.8/windows.txt +++ b/requirements/static/ci/py3.8/windows.txt @@ -306,7 +306,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git 
a/requirements/static/ci/py3.9/cloud.txt b/requirements/static/ci/py3.9/cloud.txt index 138ed879cb1..d10bc1ebe05 100644 --- a/requirements/static/ci/py3.9/cloud.txt +++ b/requirements/static/ci/py3.9/cloud.txt @@ -506,7 +506,7 @@ pytest-httpserver==1.0.8 # via # -c requirements/static/ci/py3.9/linux.txt # -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via # -c requirements/static/ci/py3.9/linux.txt # -r requirements/pytest.txt diff --git a/requirements/static/ci/py3.9/darwin.txt b/requirements/static/ci/py3.9/darwin.txt index 0cd59678c16..8a92c77bc02 100644 --- a/requirements/static/ci/py3.9/darwin.txt +++ b/requirements/static/ci/py3.9/darwin.txt @@ -359,7 +359,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.9/freebsd.txt b/requirements/static/ci/py3.9/freebsd.txt index 822fbfcfbe9..066fe2df855 100644 --- a/requirements/static/ci/py3.9/freebsd.txt +++ b/requirements/static/ci/py3.9/freebsd.txt @@ -352,7 +352,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.9/linux.txt b/requirements/static/ci/py3.9/linux.txt index 8b0445ed84f..182fff79edb 100644 --- a/requirements/static/ci/py3.9/linux.txt +++ b/requirements/static/ci/py3.9/linux.txt @@ -369,7 +369,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt 
pytest-shell-utilities==1.8.0 # via pytest-salt-factories diff --git a/requirements/static/ci/py3.9/windows.txt b/requirements/static/ci/py3.9/windows.txt index 7da6d02dcc9..fe1c11883fc 100644 --- a/requirements/static/ci/py3.9/windows.txt +++ b/requirements/static/ci/py3.9/windows.txt @@ -307,7 +307,7 @@ pytest-helpers-namespace==2021.4.29 # pytest-shell-utilities pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc27 +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt pytest-shell-utilities==1.8.0 # via pytest-salt-factories From e37e984427e2ed60f17a6113bc0a07b895859523 Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Sun, 26 Nov 2023 07:23:23 +0000 Subject: [PATCH 195/196] Pull the containers prior to starting the daemons. Signed-off-by: Pedro Algarvio --- .../scenarios/performance/test_performance.py | 75 +++++++++---------- 1 file changed, 37 insertions(+), 38 deletions(-) diff --git a/tests/pytests/scenarios/performance/test_performance.py b/tests/pytests/scenarios/performance/test_performance.py index 22aad753bda..12749a6afd8 100644 --- a/tests/pytests/scenarios/performance/test_performance.py +++ b/tests/pytests/scenarios/performance/test_performance.py @@ -3,14 +3,12 @@ import os import shutil import sys -import attr import pytest from pytestshellutils.utils import ports -from saltfactories.daemons import master -from saltfactories.daemons.container import SaltDaemon, SaltMinion +from saltfactories.daemons.container import SaltMaster, SaltMinion from saltfactories.utils import random_string -from salt.version import SaltVersionsInfo, __version__ +from salt.version import SaltVersionsInfo from tests.conftest import CODE_DIR log = logging.getLogger(__name__) @@ -20,37 +18,34 @@ pytestmark = [ ] -@attr.s(kw_only=True, slots=True) -class SaltMaster(SaltDaemon, master.SaltMaster): - """ - Salt minion daemon implementation running in a docker container. 
- """ - - def get_display_name(self): - """ - Returns a human readable name for the factory. - """ - return master.SaltMaster.get_display_name(self) - - def get_check_events(self): - """ - Return salt events to check. - - Return a list of tuples in the form of `(master_id, event_tag)` check against to ensure the daemon is running - """ - return master.SaltMaster.get_check_events(self) - - @pytest.fixture def prev_version(): return str(SaltVersionsInfo.previous_release().info[0]) +@pytest.fixture +def prev_container_image(shell, prev_version): + container = f"ghcr.io/saltstack/salt-ci-containers/salt:{prev_version}" + ret = shell.run("docker", "pull", container, check=False) + if ret.returncode: + pytest.skip(f"Failed to pull docker image '{container}':\n{ret}") + return container + + @pytest.fixture def curr_version(): return str(SaltVersionsInfo.current_release().info[0]) +@pytest.fixture +def curr_container_image(shell): + container = "ghcr.io/saltstack/salt-ci-containers/salt:latest" + ret = shell.run("docker", "pull", container, check=False) + if ret.returncode: + pytest.skip(f"Failed to pull docker image '{container}':\n{ret}") + return container + + @pytest.fixture def prev_master_id(): return random_string("master-perf-prev-", uppercase=False) @@ -64,6 +59,7 @@ def prev_master( docker_network_name, prev_version, prev_master_id, + prev_container_image, ): root_dir = salt_factories.get_root_dir_for_daemon(prev_master_id) conf_dir = root_dir / "conf" @@ -93,14 +89,14 @@ def prev_master( overrides=config_overrides, factory_class=SaltMaster, base_script_args=["--log-level=debug"], - image=f"ghcr.io/saltstack/salt-ci-containers/salt:{prev_version}", + image=prev_container_image, container_run_kwargs={ "network": docker_network_name, "hostname": prev_master_id, }, start_timeout=120, - max_start_attempts=1, - pull_before_start=True, + max_start_attempts=3, + pull_before_start=False, skip_on_pull_failure=True, skip_if_docker_client_not_connectable=True, ) @@ -138,6 
+134,7 @@ def prev_minion( prev_version, host_docker_network_ip_address, docker_network_name, + prev_container_image, ): config_overrides = { "master": prev_master.id, @@ -154,14 +151,14 @@ def prev_minion( overrides=config_overrides, factory_class=SaltMinion, base_script_args=["--log-level=debug"], - image=f"ghcr.io/saltstack/salt-ci-containers/salt:{prev_version}", + image=prev_container_image, container_run_kwargs={ "network": docker_network_name, "hostname": prev_minion_id, }, - start_timeout=60, - max_start_attempts=1, - pull_before_start=True, + start_timeout=120, + max_start_attempts=3, + pull_before_start=False, skip_on_pull_failure=True, skip_if_docker_client_not_connectable=True, ) @@ -220,6 +217,7 @@ def curr_master( host_docker_network_ip_address, docker_network_name, curr_master_id, + curr_container_image, ): root_dir = salt_factories.get_root_dir_for_daemon(curr_master_id) conf_dir = root_dir / "conf" @@ -251,7 +249,7 @@ def curr_master( overrides=config_overrides, factory_class=SaltMaster, base_script_args=["--log-level=debug"], - image="ghcr.io/saltstack/salt-ci-containers/salt:current", + image=curr_container_image, container_run_kwargs={ "network": docker_network_name, "hostname": curr_master_id, @@ -261,8 +259,8 @@ def curr_master( }, }, start_timeout=120, - max_start_attempts=1, - pull_before_start=True, + max_start_attempts=3, + pull_before_start=False, skip_on_pull_failure=True, skip_if_docker_client_not_connectable=True, ) @@ -301,6 +299,7 @@ def curr_minion( curr_master, host_docker_network_ip_address, docker_network_name, + curr_container_image, ): config_overrides = { "master": curr_master.id, @@ -317,7 +316,7 @@ def curr_minion( overrides=config_overrides, factory_class=SaltMinion, base_script_args=["--log-level=debug"], - image="ghcr.io/saltstack/salt-ci-containers/salt:current", + image=curr_container_image, container_run_kwargs={ "network": docker_network_name, "hostname": curr_minion_id, @@ -327,8 +326,8 @@ def curr_minion( }, }, 
start_timeout=120, - max_start_attempts=1, - pull_before_start=True, + max_start_attempts=3, + pull_before_start=False, skip_on_pull_failure=True, skip_if_docker_client_not_connectable=True, ) From a1bf32c8814eed0514a730be68234d351867995f Mon Sep 17 00:00:00 2001 From: Pedro Algarvio Date: Sun, 26 Nov 2023 11:12:51 +0000 Subject: [PATCH 196/196] Skip performance tests on PhotonOS. They were also getting skipped previously. Signed-off-by: Pedro Algarvio --- tests/pytests/scenarios/performance/test_performance.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/pytests/scenarios/performance/test_performance.py b/tests/pytests/scenarios/performance/test_performance.py index 12749a6afd8..e9e0d0def65 100644 --- a/tests/pytests/scenarios/performance/test_performance.py +++ b/tests/pytests/scenarios/performance/test_performance.py @@ -14,6 +14,7 @@ from tests.conftest import CODE_DIR log = logging.getLogger(__name__) pytestmark = [ + pytest.mark.skip_on_photonos, pytest.mark.skip_if_binaries_missing("docker"), ]