Mirror of https://github.com/saltstack/salt.git
Commit 4886b6de2d: Merge 3006.x into master

86 changed files with 3005 additions and 2519 deletions
@@ -1190,7 +1190,7 @@ repos:
   - repo: https://github.com/s0undt3ch/salt-rewrite
     # Automatically rewrite code with known rules
-    rev: 2.0.0
+    rev: 2.4.3
     hooks:
       - id: salt-rewrite
         alias: rewrite-docstrings

@@ -1202,10 +1202,6 @@ repos:
        salt/ext/.*
      )$
  - repo: https://github.com/s0undt3ch/salt-rewrite
    # Automatically rewrite code with known rules
    rev: 2.0.0
    hooks:
      - id: salt-rewrite
        alias: rewrite-tests
        name: Rewrite Salt's Test Suite
changelog/61049.fixed.md (new file)

@@ -0,0 +1 @@
+Do not update the credentials dictionary in `utils/aws.py` while iterating over it, and use the correct delete functionality
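As a quick illustration of the pitfall this entry refers to (a generic sketch, not the Salt code; the cache name and timings are invented for the example): Python 3 raises a RuntimeError if a dict is resized while it is being iterated, so expired entries have to be collected by iterating over a copy.

```python
# Illustration only: the cache layout and the 120-second threshold are invented.
import copy
import time

cache = {
    "role-a": {"Expiration": time.time() + 30},   # about to expire
    "role-b": {"Expiration": time.time() + 900},  # still valid
}

# Wrong: deleting from the dict while iterating over it raises
# "RuntimeError: dictionary changed size during iteration".
# for key, creds in cache.items():
#     if creds["Expiration"] - time.time() <= 120:
#         del cache[key]

# Safe: iterate over a copy, delete from the original.
for key, creds in copy.deepcopy(cache).items():
    if creds["Expiration"] - time.time() <= 120:
        del cache[key]

print(sorted(cache))  # ['role-b'] -- only the entry that is not about to expire remains
```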
changelog/63296.fixed.md (new file)

@@ -0,0 +1,2 @@
+Fixes an issue with failing subsequent state runs with the lgpo state module.
+The ``lgpo.get_policy`` function now returns all boolean settings.
changelog/63473.fixed.md (new file)

@@ -0,0 +1,2 @@
+Fixes an issue with boolean settings not being reported after being set. The
+``lgpo.get_policy`` function now returns all boolean settings.
changelog/64237.fixed.md (new file)

@@ -0,0 +1 @@
+Remove the hard-coded Python version from the error message.
changelog/64280.fixed.md (new file)

@@ -0,0 +1 @@
+Fixed file client private attribute reference on `SaltMakoTemplateLookup`
changelog/64318.fixed.md (new file)

@@ -0,0 +1 @@
+Ensure SELinux values are handled as lowercase
changelog/64401.fixed.md (new file)

@@ -0,0 +1,5 @@
+Fixed an issue with ``lgpo_reg`` where existing entries for the same key in
+``Registry.pol`` were being overwritten in subsequent runs if the value name in
+the subsequent run was contained in the existing value name. For example, a
+key named ``SetUpdateNotificationLevel`` would be overwritten by a subsequent
+run attempting to set ``UpdateNotificationLevel``
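To make the described overwrite concrete, here is a simplified sketch of the matching logic (plain Python, not the ``lgpo_reg`` module itself): a substring test matches ``SetUpdateNotificationLevel`` when looking for ``UpdateNotificationLevel``, while an exact comparison that only ignores the ``**del.`` disable prefix does not.

```python
# Illustration only: simplified stand-in for the Registry.pol value-name lookup.
existing_names = ["SetUpdateNotificationLevel", "**del.SomeOtherValue"]

def find_substring(v_name):
    # Buggy approach: "UpdateNotificationLevel" is a substring of
    # "SetUpdateNotificationLevel", so the wrong entry is matched and overwritten.
    for p_name in existing_names:
        if v_name.lower() in p_name.lower():
            return p_name
    return None

def find_exact(v_name):
    # Fixed approach: compare the full name, ignoring only a leading
    # "**del." prefix that marks a disabled value.
    for p_name in existing_names:
        candidate = p_name.lower()
        if candidate.startswith("**del."):
            candidate = candidate[len("**del."):]
        if v_name.lower() == candidate:
            return p_name
    return None

print(find_substring("UpdateNotificationLevel"))  # SetUpdateNotificationLevel (wrong match)
print(find_exact("UpdateNotificationLevel"))      # None (correctly treated as a new value)
```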
changelog/64430.fixed.md (new file)

@@ -0,0 +1 @@
+Fix regression for user.present on handling groups with duplicate GIDs
|
@@ -46,3 +46,4 @@ xmldiff>=2.4
 genshi>=0.7.3
 cheetah3>=3.2.2
 mako
+wempy

@@ -555,6 +555,8 @@ websocket-client==0.40.0
     # via
     #   docker
     #   kubernetes
+wempy==0.2.1
+    # via -r requirements/static/ci/common.in
 werkzeug==2.2.3
     # via
     #   moto

@@ -544,6 +544,8 @@ websocket-client==0.40.0
     # via
     #   docker
     #   kubernetes
+wempy==0.2.1
+    # via -r requirements/static/ci/common.in
 werkzeug==2.2.3
     # via
     #   moto

@@ -607,6 +607,8 @@ websocket-client==0.40.0
     # via
     #   docker
     #   kubernetes
+wempy==0.2.1
+    # via -r requirements/static/ci/common.in
 werkzeug==2.2.3
     # via
     #   moto

@@ -483,6 +483,8 @@ websocket-client==0.40.0
     # via
     #   docker
     #   kubernetes
+wempy==0.2.1
+    # via -r requirements/static/ci/common.in
 werkzeug==2.2.3
     # via
     #   moto

@@ -551,6 +551,8 @@ websocket-client==0.40.0
     # via
     #   docker
     #   kubernetes
+wempy==0.2.1
+    # via -r requirements/static/ci/common.in
 werkzeug==2.2.3
     # via
     #   moto

@@ -540,6 +540,8 @@ websocket-client==0.40.0
     # via
     #   docker
     #   kubernetes
+wempy==0.2.1
+    # via -r requirements/static/ci/common.in
 werkzeug==2.2.3
     # via
     #   moto

@@ -601,6 +601,8 @@ websocket-client==0.40.0
     # via
     #   docker
     #   kubernetes
+wempy==0.2.1
+    # via -r requirements/static/ci/common.in
 werkzeug==2.2.3
     # via
     #   moto

@@ -479,6 +479,8 @@ websocket-client==0.40.0
     # via
     #   docker
     #   kubernetes
+wempy==0.2.1
+    # via -r requirements/static/ci/common.in
 werkzeug==2.2.3
     # via
     #   moto

@@ -547,6 +547,8 @@ websocket-client==0.40.0
     # via
     #   docker
     #   kubernetes
+wempy==0.2.1
+    # via -r requirements/static/ci/common.in
 werkzeug==2.2.3
     # via
     #   moto

@@ -611,6 +611,8 @@ websocket-client==0.40.0
     # via
     #   docker
     #   kubernetes
+wempy==0.2.1
+    # via -r requirements/static/ci/common.in
 werkzeug==2.2.3
     # via
     #   moto

@@ -491,6 +491,8 @@ websocket-client==0.40.0
     # via
     #   docker
     #   kubernetes
+wempy==0.2.1
+    # via -r requirements/static/ci/common.in
 werkzeug==2.2.3
     # via
     #   moto

@@ -555,6 +555,8 @@ websocket-client==0.40.0
     # via
     #   docker
     #   kubernetes
+wempy==0.2.1
+    # via -r requirements/static/ci/common.in
 werkzeug==2.2.3
     # via
     #   moto

@@ -544,6 +544,8 @@ websocket-client==0.40.0
     # via
     #   docker
     #   kubernetes
+wempy==0.2.1
+    # via -r requirements/static/ci/common.in
 werkzeug==2.2.3
     # via
     #   moto

@@ -609,6 +609,8 @@ websocket-client==0.40.0
     # via
     #   docker
     #   kubernetes
+wempy==0.2.1
+    # via -r requirements/static/ci/common.in
 werkzeug==2.2.3
     # via
     #   moto

@@ -487,6 +487,8 @@ websocket-client==0.40.0
     # via
     #   docker
     #   kubernetes
+wempy==0.2.1
+    # via -r requirements/static/ci/common.in
 werkzeug==2.2.3
     # via
     #   moto
|
||||
|
|
|
@@ -87,7 +87,7 @@ class DeferredStreamHandler(StreamHandler):
     If anything goes wrong before logging is properly setup, all stored messages
     will be flushed to the handler's stream, ie, written to console.

-    .. versionadded:: 3005.0
+    .. versionadded:: 3005
     """

     def __init__(self, stream, max_queue_size=10000):
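For readers unfamiliar with the pattern described in this docstring, the buffer-then-flush behaviour is similar to the standard library's ``logging.handlers.MemoryHandler``; a minimal standalone sketch (not Salt's implementation):

```python
# Illustration only: buffer log records in memory, then flush them to a
# stream handler once (or if) real logging is configured.
import logging
import logging.handlers
import sys

console = logging.StreamHandler(stream=sys.stderr)
# Keep up to 10000 records; a record at ERROR or worse flushes immediately.
buffered = logging.handlers.MemoryHandler(
    capacity=10000, flushLevel=logging.ERROR, target=console
)

log = logging.getLogger("early-startup")
log.addHandler(buffered)
log.setLevel(logging.DEBUG)

log.debug("queued until flush")    # held in memory
log.error("something went wrong")  # triggers a flush of everything queued so far

buffered.close()  # flushes any remaining records on shutdown
```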
|
||||
|
|
salt/cache/consul.py (vendored, 2 lines changed)

@@ -3,7 +3,7 @@ Minion data cache plugin for Consul key/value data store.

 .. versionadded:: 2016.11.2

-.. versionchanged:: 3005.0
+.. versionchanged:: 3005

     Timestamp/cache updated support added.
|
||||
|
||||
|
|
|
@ -17,7 +17,7 @@ except ImportError:
|
|||
|
||||
def get(key, default="", merge=False, delimiter=DEFAULT_TARGET_DELIM):
|
||||
"""
|
||||
.. versionadded:: 0.14
|
||||
.. versionadded:: 0.14.0
|
||||
|
||||
Attempt to retrieve the named value from pillar, if the named value is not
|
||||
available return the passed default. The default return is an empty string.
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
CenturyLink Cloud Module
|
||||
========================
|
||||
|
||||
.. versionadded:: 2018.3
|
||||
.. versionadded:: 2018.3.0
|
||||
|
||||
The CLC cloud module allows you to manage CLC Via the CLC SDK.
|
||||
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
Grains from cloud metadata servers at 169.254.169.254 in
|
||||
google compute engine
|
||||
|
||||
.. versionadded:: 3005.0
|
||||
.. versionadded:: 3005
|
||||
|
||||
:depends: requests
|
||||
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
ACME / Let's Encrypt module
|
||||
===========================
|
||||
|
||||
.. versionadded:: 2016.3
|
||||
.. versionadded:: 2016.3.0
|
||||
|
||||
This module currently looks for certbot script in the $PATH as
|
||||
- certbot,
|
||||
|
|
|
@@ -3517,7 +3517,7 @@ def _get_http_proxy_url():

 def list_downloaded(root=None, **kwargs):
     """
-    .. versionadded:: 3000?
+    .. versionadded:: 3000

     List prefetched packages downloaded by apt in the local disk.
|
||||
|
||||
|
|
|
@@ -1019,7 +1019,7 @@ def clone(
     https_user
         Set HTTP Basic Auth username. Only accepted for HTTPS URLs.

-        .. versionadded:: 20515.5.0
+        .. versionadded:: 2015.5.0

     https_pass
         Set HTTP Basic Auth password. Only accepted for HTTPS URLs.
|
||||
|
|
|
@ -89,7 +89,7 @@ def get_str(
|
|||
):
|
||||
"""
|
||||
.. versionadded:: 2014.7.0
|
||||
.. versionchanged:: 3004.0
|
||||
.. versionchanged:: 3004
|
||||
|
||||
Changed the default character set used to include symbols and implemented arguments to control the used character set.
|
||||
|
||||
|
@ -99,14 +99,14 @@ def get_str(
|
|||
Any valid number of bytes.
|
||||
|
||||
chars : None
|
||||
.. versionadded:: 3004.0
|
||||
.. versionadded:: 3004
|
||||
|
||||
String with any character that should be used to generate random string.
|
||||
|
||||
This argument supersedes all other character controlling arguments.
|
||||
|
||||
lowercase : True
|
||||
.. versionadded:: 3004.0
|
||||
.. versionadded:: 3004
|
||||
|
||||
Use lowercase letters in generated random string.
|
||||
(see :py:data:`string.ascii_lowercase`)
|
||||
|
@ -114,7 +114,7 @@ def get_str(
|
|||
This argument is superseded by chars.
|
||||
|
||||
uppercase : True
|
||||
.. versionadded:: 3004.0
|
||||
.. versionadded:: 3004
|
||||
|
||||
Use uppercase letters in generated random string.
|
||||
(see :py:data:`string.ascii_uppercase`)
|
||||
|
@ -122,7 +122,7 @@ def get_str(
|
|||
This argument is superseded by chars.
|
||||
|
||||
digits : True
|
||||
.. versionadded:: 3004.0
|
||||
.. versionadded:: 3004
|
||||
|
||||
Use digits in generated random string.
|
||||
(see :py:data:`string.digits`)
|
||||
|
@ -130,7 +130,7 @@ def get_str(
|
|||
This argument is superseded by chars.
|
||||
|
||||
printable : False
|
||||
.. versionadded:: 3004.0
|
||||
.. versionadded:: 3004
|
||||
|
||||
Use printable characters in generated random string and includes lowercase, uppercase,
|
||||
digits, punctuation and whitespace.
|
||||
|
@ -143,7 +143,7 @@ def get_str(
|
|||
This argument is superseded by chars.
|
||||
|
||||
punctuation : True
|
||||
.. versionadded:: 3004.0
|
||||
.. versionadded:: 3004
|
||||
|
||||
Use punctuation characters in generated random string.
|
||||
(see :py:data:`string.punctuation`)
|
||||
|
@ -151,7 +151,7 @@ def get_str(
|
|||
This argument is superseded by chars.
|
||||
|
||||
whitespace : False
|
||||
.. versionadded:: 3004.0
|
||||
.. versionadded:: 3004
|
||||
|
||||
Use whitespace characters in generated random string.
|
||||
(see :py:data:`string.whitespace`)
|
||||
|
|
|
@ -193,11 +193,11 @@ def bridge_create(br, may_exist=True, parent=None, vlan=None):
|
|||
parent : string
|
||||
name of the parent bridge (if the bridge shall be created as a fake
|
||||
bridge). If specified, vlan must also be specified.
|
||||
.. versionadded:: 3006
|
||||
.. versionadded:: 3006.0
|
||||
vlan : int
|
||||
VLAN ID of the bridge (if the bridge shall be created as a fake
|
||||
bridge). If specified, parent must also be specified.
|
||||
.. versionadded:: 3006
|
||||
.. versionadded:: 3006.0
|
||||
|
||||
Returns:
|
||||
True on success, else False.
|
||||
|
@ -252,7 +252,7 @@ def bridge_delete(br, if_exists=True):
|
|||
|
||||
def bridge_to_parent(br):
|
||||
"""
|
||||
.. versionadded:: 3006
|
||||
.. versionadded:: 3006.0
|
||||
|
||||
Returns the parent bridge of a bridge.
|
||||
|
||||
|
@ -280,7 +280,7 @@ def bridge_to_parent(br):
|
|||
|
||||
def bridge_to_vlan(br):
|
||||
"""
|
||||
.. versionadded:: 3006
|
||||
.. versionadded:: 3006.0
|
||||
|
||||
Returns the VLAN ID of a bridge.
|
||||
|
||||
|
@ -599,7 +599,7 @@ def port_create_vxlan(br, port, id, remote, dst_port=None):
|
|||
|
||||
def db_get(table, record, column, if_exists=False):
|
||||
"""
|
||||
.. versionadded:: 3006
|
||||
.. versionadded:: 3006.0
|
||||
|
||||
Gets a column's value for a specific record.
|
||||
|
||||
|
@ -638,7 +638,7 @@ def db_get(table, record, column, if_exists=False):
|
|||
|
||||
def db_set(table, record, column, value, if_exists=False):
|
||||
"""
|
||||
.. versionadded:: 3006
|
||||
.. versionadded:: 3006.0
|
||||
|
||||
Sets a column's value for a specific record.
|
||||
|
||||
|
|
|
@ -32,7 +32,7 @@ def get(
|
|||
saltenv=None,
|
||||
):
|
||||
"""
|
||||
.. versionadded:: 0.14
|
||||
.. versionadded:: 0.14.0
|
||||
|
||||
Attempt to retrieve the named value from :ref:`in-memory pillar data
|
||||
<pillar-in-memory>`. If the pillar key is not present in the in-memory
|
||||
|
|
|
@ -545,7 +545,7 @@ def delete(name, **kwargs):
|
|||
"""
|
||||
Delete the named service
|
||||
|
||||
.. versionadded:: 2016.3
|
||||
.. versionadded:: 2016.3.0
|
||||
|
||||
CLI Example:
|
||||
|
||||
|
|
|
@@ -135,13 +135,13 @@ def setenforce(mode):
     if isinstance(mode, str):
         if mode.lower() == "enforcing":
             mode = "1"
-            modestring = "Enforcing"
+            modestring = "enforcing"
         elif mode.lower() == "permissive":
             mode = "0"
-            modestring = "Permissive"
+            modestring = "permissive"
         elif mode.lower() == "disabled":
             mode = "0"
-            modestring = "Disabled"
+            modestring = "disabled"
         else:
             return "Invalid mode {}".format(mode)
     elif isinstance(mode, int):
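The lowercase ``modestring`` matters because ``/etc/selinux/config`` expects lowercase values such as ``SELINUX=enforcing``. A small standalone sketch of the idea (not the module's actual file-handling code):

```python
# Illustration only: rewrite the SELINUX= line with a lowercase mode value.
def rewrite_selinux_config(text, mode):
    lines = []
    for line in text.splitlines():
        if line.startswith("SELINUX="):
            line = f"SELINUX={mode.lower()}"
        lines.append(line)
    return "\n".join(lines) + "\n"

print(rewrite_selinux_config("SELINUX=disabled\nSELINUXTYPE=targeted\n", "Enforcing"))
# SELINUX=enforcing
# SELINUXTYPE=targeted
```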
|
||||
|
|
|
@ -546,7 +546,7 @@ def list_state_functions(*args, **kwargs): # pylint: disable=unused-argument
|
|||
salt '*' sys.list_state_functions 'file.*'
|
||||
salt '*' sys.list_state_functions 'file.s*'
|
||||
|
||||
.. versionadded:: 2016.9
|
||||
.. versionadded:: 2016.9.0
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
|
|
|
@ -6927,7 +6927,7 @@ def _checkAllAdmxPolicies(
|
|||
|
||||
if etree.QName(child_item).localname == "boolean":
|
||||
# https://msdn.microsoft.com/en-us/library/dn605978(v=vs.85).aspx
|
||||
if child_item is not None:
|
||||
if len(child_item) > 0:
|
||||
if (
|
||||
TRUE_VALUE_XPATH(child_item)
|
||||
and this_element_name not in configured_elements
|
||||
|
@ -9195,7 +9195,7 @@ def _get_policy_adm_setting(
|
|||
)
|
||||
if etree.QName(child_item).localname == "boolean":
|
||||
# https://msdn.microsoft.com/en-us/library/dn605978(v=vs.85).aspx
|
||||
if child_item is not None:
|
||||
if len(child_item) > 0:
|
||||
if (
|
||||
TRUE_VALUE_XPATH(child_item)
|
||||
and this_element_name not in configured_elements
|
||||
|
|
|
@ -367,29 +367,40 @@ def set_value(
|
|||
if key.lower() == p_key.lower():
|
||||
found_key = p_key
|
||||
for p_name in pol_data[p_key]:
|
||||
if v_name.lower() in p_name.lower():
|
||||
if v_name.lower() == p_name.lower().lstrip("**del."):
|
||||
found_name = p_name
|
||||
|
||||
if found_key:
|
||||
if found_name:
|
||||
if "**del." in found_name:
|
||||
log.debug(f"LGPO_REG Mod: Found disabled name: {found_name}")
|
||||
pol_data[found_key][v_name] = pol_data[found_key].pop(found_name)
|
||||
found_name = v_name
|
||||
log.debug(f"LGPO_REG Mod: Updating value: {found_name}")
|
||||
pol_data[found_key][found_name] = {"data": v_data, "type": v_type}
|
||||
else:
|
||||
log.debug(f"LGPO_REG Mod: Setting new value: {found_name}")
|
||||
pol_data[found_key][v_name] = {"data": v_data, "type": v_type}
|
||||
else:
|
||||
log.debug(f"LGPO_REG Mod: Adding new key and value: {found_name}")
|
||||
pol_data[key] = {v_name: {"data": v_data, "type": v_type}}
|
||||
|
||||
write_reg_pol(pol_data, policy_class=policy_class)
|
||||
success = True
|
||||
if not write_reg_pol(pol_data, policy_class=policy_class):
|
||||
log.error("LGPO_REG Mod: Failed to write registry.pol file")
|
||||
success = False
|
||||
|
||||
return salt.utils.win_reg.set_value(
|
||||
if not salt.utils.win_reg.set_value(
|
||||
hive=hive,
|
||||
key=key,
|
||||
vname=v_name,
|
||||
vdata=v_data,
|
||||
vtype=v_type,
|
||||
)
|
||||
):
|
||||
log.error("LGPO_REG Mod: Failed to set registry entry")
|
||||
success = False
|
||||
|
||||
return success
|
||||
|
||||
|
||||
def disable_value(key, v_name, policy_class="machine"):
|
||||
|
@ -445,28 +456,42 @@ def disable_value(key, v_name, policy_class="machine"):
|
|||
if key.lower() == p_key.lower():
|
||||
found_key = p_key
|
||||
for p_name in pol_data[p_key]:
|
||||
if v_name.lower() in p_name.lower():
|
||||
if v_name.lower() == p_name.lower().lstrip("**del."):
|
||||
found_name = p_name
|
||||
|
||||
if found_key:
|
||||
if found_name:
|
||||
if "**del." in found_name:
|
||||
# Already set to delete... do nothing
|
||||
log.debug(f"LGPO_REG Mod: Already disabled: {v_name}")
|
||||
return None
|
||||
log.debug(f"LGPO_REG Mod: Disabling value name: {v_name}")
|
||||
pol_data[found_key].pop(found_name)
|
||||
found_name = "**del.{}".format(found_name)
|
||||
pol_data[found_key][found_name] = {"data": " ", "type": "REG_SZ"}
|
||||
else:
|
||||
log.debug(f"LGPO_REG Mod: Setting new disabled value name: {v_name}")
|
||||
pol_data[found_key]["**del.{}".format(v_name)] = {
|
||||
"data": " ",
|
||||
"type": "REG_SZ",
|
||||
}
|
||||
else:
|
||||
log.debug(f"LGPO_REG Mod: Adding new key and disabled value name: {found_name}")
|
||||
pol_data[key] = {"**del.{}".format(v_name): {"data": " ", "type": "REG_SZ"}}
|
||||
|
||||
write_reg_pol(pol_data, policy_class=policy_class)
|
||||
success = True
|
||||
if not write_reg_pol(pol_data, policy_class=policy_class):
|
||||
log.error("LGPO_REG Mod: Failed to write registry.pol file")
|
||||
success = False
|
||||
|
||||
return salt.utils.win_reg.delete_value(hive=hive, key=key, vname=v_name)
|
||||
ret = salt.utils.win_reg.delete_value(hive=hive, key=key, vname=v_name)
|
||||
if not ret:
|
||||
if ret is None:
|
||||
log.debug("LGPO_REG Mod: Registry key/value already missing")
|
||||
else:
|
||||
log.error("LGPO_REG Mod: Failed to remove registry entry")
|
||||
success = False
|
||||
|
||||
return success
|
||||
|
||||
|
||||
def delete_value(key, v_name, policy_class="Machine"):
|
||||
|
@ -523,20 +548,37 @@ def delete_value(key, v_name, policy_class="Machine"):
|
|||
if key.lower() == p_key.lower():
|
||||
found_key = p_key
|
||||
for p_name in pol_data[p_key]:
|
||||
if v_name.lower() in p_name.lower():
|
||||
if v_name.lower() == p_name.lower().lstrip("**del."):
|
||||
found_name = p_name
|
||||
|
||||
if found_key:
|
||||
if found_name:
|
||||
log.debug(f"LGPO_REG Mod: Removing value name: {found_name}")
|
||||
pol_data[found_key].pop(found_name)
|
||||
else:
|
||||
log.debug(f"LGPO_REG Mod: Value name not found: {v_name}")
|
||||
return None
|
||||
if len(pol_data[found_key]) == 0:
|
||||
log.debug(f"LGPO_REG Mod: Removing empty key: {found_key}")
|
||||
pol_data.pop(found_key)
|
||||
else:
|
||||
log.debug(f"LGPO_REG Mod: Key not found: {key}")
|
||||
return None
|
||||
|
||||
write_reg_pol(pol_data, policy_class=policy_class)
|
||||
success = True
|
||||
if not write_reg_pol(pol_data, policy_class=policy_class):
|
||||
log.error("LGPO_REG Mod: Failed to write registry.pol file")
|
||||
success = False
|
||||
|
||||
return salt.utils.win_reg.delete_value(hive=hive, key=key, vname=v_name)
|
||||
ret = salt.utils.win_reg.delete_value(hive=hive, key=key, vname=v_name)
|
||||
if not ret:
|
||||
if ret is None:
|
||||
log.debug("LGPO_REG Mod: Registry key/value already missing")
|
||||
else:
|
||||
log.error("LGPO_REG Mod: Failed to remove registry entry")
|
||||
success = False
|
||||
|
||||
return success
|
||||
|
||||
|
||||
# This is for testing different settings and verifying that we are writing the
|
||||
|
|
|
@ -755,7 +755,7 @@ def join_domain(
|
|||
``True`` will restart the computer after a successful join. Default
|
||||
is ``False``
|
||||
|
||||
.. versionadded:: 2015.8.2/2015.5.7
|
||||
.. versionadded:: 2015.5.7,2015.8.2
|
||||
|
||||
Returns:
|
||||
dict: Returns a dictionary if successful, otherwise ``False``
|
||||
|
@ -889,7 +889,7 @@ def unjoin_domain(
|
|||
workgroup (str):
|
||||
The workgroup to join the computer to. Default is ``WORKGROUP``
|
||||
|
||||
.. versionadded:: 2015.8.2/2015.5.7
|
||||
.. versionadded:: 2015.5.7,2015.8.2
|
||||
|
||||
disable (bool):
|
||||
``True`` to disable the computer account in Active Directory.
|
||||
|
@ -899,7 +899,7 @@ def unjoin_domain(
|
|||
``True`` will restart the computer after successful unjoin. Default
|
||||
is ``False``
|
||||
|
||||
.. versionadded:: 2015.8.2/2015.5.7
|
||||
.. versionadded:: 2015.5.7,2015.8.2
|
||||
|
||||
Returns:
|
||||
dict: Returns a dictionary if successful, otherwise ``False``
|
||||
|
|
|
@ -1905,7 +1905,7 @@ def upgrade(
|
|||
Disable exclude from main, for a repo or for everything.
|
||||
(e.g., ``yum --disableexcludes='main'``)
|
||||
|
||||
.. versionadded:: 2014.7
|
||||
.. versionadded:: 2014.7.0
|
||||
|
||||
name
|
||||
The name of the package to be upgraded. Note that this parameter is
|
||||
|
@ -2446,7 +2446,7 @@ def unhold(name=None, pkgs=None, sources=None, **kwargs): # pylint: disable=W06
|
|||
|
||||
def list_holds(pattern=__HOLD_PATTERN, full=True):
|
||||
r"""
|
||||
.. versionchanged:: 2016.3.0,2015.8.4,2015.5.10
|
||||
.. versionchanged:: 2015.5.10,2015.8.4,2016.3.0
|
||||
Function renamed from ``pkg.get_locked_pkgs`` to ``pkg.list_holds``.
|
||||
|
||||
List information on locked packages
|
||||
|
@ -2573,7 +2573,7 @@ def group_list():
|
|||
def group_info(name, expand=False, ignore_groups=None):
|
||||
"""
|
||||
.. versionadded:: 2014.1.0
|
||||
.. versionchanged:: 3001,2016.3.0,2015.8.4,2015.5.10
|
||||
.. versionchanged:: 2015.5.10,2015.8.4,2016.3.0,3001
|
||||
The return data has changed. A new key ``type`` has been added to
|
||||
distinguish environment groups from package groups. Also, keys for the
|
||||
group name and group ID have been added. The ``mandatory packages``,
|
||||
|
@ -2683,7 +2683,7 @@ def group_info(name, expand=False, ignore_groups=None):
|
|||
def group_diff(name):
|
||||
"""
|
||||
.. versionadded:: 2014.1.0
|
||||
.. versionchanged:: 2016.3.0,2015.8.4,2015.5.10
|
||||
.. versionchanged:: 2015.5.10,2015.8.4,2016.3.0
|
||||
Environment groups are now supported. The key names have been renamed,
|
||||
similar to the changes made in :py:func:`pkg.group_info
|
||||
<salt.modules.yumpkg.group_info>`.
|
||||
|
|
|
@ -146,7 +146,7 @@ def _query(method, params, url, auth=None):
|
|||
|
||||
:return: Response from API with desired data in JSON format. In case of error returns more specific description.
|
||||
|
||||
.. versionchanged:: 2017.7
|
||||
.. versionchanged:: 2017.7.0
|
||||
"""
|
||||
|
||||
unauthenticated_methods = [
|
||||
|
@ -311,7 +311,7 @@ def _map_to_list_of_dicts(source, key):
|
|||
|
||||
def get_zabbix_id_mapper():
|
||||
"""
|
||||
.. versionadded:: 2017.7
|
||||
.. versionadded:: 2017.7.0
|
||||
|
||||
Make ZABBIX_ID_MAPPER constant available to state modules.
|
||||
|
||||
|
@ -328,7 +328,7 @@ def get_zabbix_id_mapper():
|
|||
|
||||
def substitute_params(input_object, extend_params=None, filter_key="name", **kwargs):
|
||||
"""
|
||||
.. versionadded:: 2017.7
|
||||
.. versionadded:: 2017.7.0
|
||||
|
||||
Go through Zabbix object params specification and if needed get given object ID from Zabbix API and put it back
|
||||
as a value. Definition of the object is done via dict with keys "query_object" and "query_name".
|
||||
|
@ -385,7 +385,7 @@ def substitute_params(input_object, extend_params=None, filter_key="name", **kwa
|
|||
# pylint: disable=too-many-return-statements,too-many-nested-blocks
|
||||
def compare_params(defined, existing, return_old_value=False):
|
||||
"""
|
||||
.. versionadded:: 2017.7
|
||||
.. versionadded:: 2017.7.0
|
||||
|
||||
Compares Zabbix object definition against existing Zabbix object.
|
||||
|
||||
|
@ -471,7 +471,7 @@ def compare_params(defined, existing, return_old_value=False):
|
|||
|
||||
def get_object_id_by_params(obj, params=None, **connection_args):
|
||||
"""
|
||||
.. versionadded:: 2017.7
|
||||
.. versionadded:: 2017.7.0
|
||||
|
||||
Get ID of single Zabbix object specified by its name.
|
||||
|
||||
|
@ -2703,7 +2703,7 @@ def run_query(method, params, **connection_args):
|
|||
|
||||
def configuration_import(config_file, rules=None, file_format="xml", **connection_args):
|
||||
"""
|
||||
.. versionadded:: 2017.7
|
||||
.. versionadded:: 2017.7.0
|
||||
|
||||
Imports Zabbix configuration specified in file to Zabbix server.
|
||||
|
||||
|
|
|
@ -56,28 +56,28 @@ site_prefixes: ``True``
|
|||
Whether should retrieve the prefixes of the site the device belongs to.
|
||||
|
||||
devices: ``True``
|
||||
.. versionadded:: 3004.0
|
||||
.. versionadded:: 3004
|
||||
|
||||
Whether should retrieve physical devices.
|
||||
|
||||
virtual_machines: ``False``
|
||||
.. versionadded:: 3004.0
|
||||
.. versionadded:: 3004
|
||||
|
||||
Whether should retrieve virtual machines.
|
||||
|
||||
interfaces: ``False``
|
||||
.. versionadded:: 3004.0
|
||||
.. versionadded:: 3004
|
||||
|
||||
Whether should retrieve the interfaces of the device.
|
||||
|
||||
interface_ips: ``False``
|
||||
.. versionadded:: 3004.0
|
||||
.. versionadded:: 3004
|
||||
|
||||
Whether should retrieve the IP addresses for interfaces of the device.
|
||||
(interfaces must be set to True as well)
|
||||
|
||||
api_query_result_limit: ``Use NetBox default``
|
||||
.. versionadded:: 3004.0
|
||||
.. versionadded:: 3004
|
||||
|
||||
An integer specifying how many results should be returned for each query
|
||||
to the NetBox API. Leaving this unset will use NetBox's default value.
|
||||
|
|
|
@ -7,16 +7,16 @@ Flat inventory files should be in the regular ansible inventory format.
|
|||
|
||||
# /tmp/example_roster
|
||||
[servers]
|
||||
salt.gtmanfred.com ansible_ssh_user=gtmanfred ansible_ssh_host=127.0.0.1 ansible_ssh_port=22 ansible_ssh_pass='password'
|
||||
salt.gtmanfred.com ansible_ssh_user=gtmanfred ansible_ssh_host=127.0.0.1 ansible_ssh_port=22 ansible_ssh_pass='password' ansible_sudo_pass='password'
|
||||
|
||||
[desktop]
|
||||
home ansible_ssh_user=gtmanfred ansible_ssh_host=12.34.56.78 ansible_ssh_port=23 ansible_ssh_pass='password'
|
||||
home ansible_ssh_user=gtmanfred ansible_ssh_host=12.34.56.78 ansible_ssh_port=23 ansible_ssh_pass='password' ansible_sudo_pass='password'
|
||||
|
||||
[computers:children]
|
||||
desktop
|
||||
servers
|
||||
|
||||
[names:vars]
|
||||
[computers:vars]
|
||||
http_port=80
|
||||
|
||||
then salt-ssh can be used to hit any of them
|
||||
|
@ -47,35 +47,40 @@ There is also the option of specifying a dynamic inventory, and generating it on
|
|||
#!/bin/bash
|
||||
# filename: /etc/salt/hosts
|
||||
echo '{
|
||||
"servers": [
|
||||
"salt.gtmanfred.com"
|
||||
],
|
||||
"desktop": [
|
||||
"home"
|
||||
],
|
||||
"computers": {
|
||||
"hosts": [],
|
||||
"children": [
|
||||
"desktop",
|
||||
"servers"
|
||||
]
|
||||
},
|
||||
"_meta": {
|
||||
"hostvars": {
|
||||
"salt.gtmanfred.com": {
|
||||
"ansible_ssh_user": "gtmanfred",
|
||||
"ansible_ssh_host": "127.0.0.1",
|
||||
"ansible_sudo_pass": "password",
|
||||
"ansible_ssh_port": 22
|
||||
},
|
||||
"home": {
|
||||
"ansible_ssh_user": "gtmanfred",
|
||||
"ansible_ssh_host": "12.34.56.78",
|
||||
"ansible_sudo_pass": "password",
|
||||
"ansible_ssh_port": 23
|
||||
}
|
||||
"servers": [
|
||||
"salt.gtmanfred.com"
|
||||
],
|
||||
"desktop": [
|
||||
"home"
|
||||
],
|
||||
"computers": {
|
||||
"hosts": [],
|
||||
"children": [
|
||||
"desktop",
|
||||
"servers"
|
||||
],
|
||||
"vars": {
|
||||
"http_port": 80
|
||||
}
|
||||
},
|
||||
"_meta": {
|
||||
"hostvars": {
|
||||
"salt.gtmanfred.com": {
|
||||
"ansible_ssh_user": "gtmanfred",
|
||||
"ansible_ssh_host": "127.0.0.1",
|
||||
"ansible_sudo_pass": "password",
|
||||
"ansible_ssh_pass": "password",
|
||||
"ansible_ssh_port": 22
|
||||
},
|
||||
"home": {
|
||||
"ansible_ssh_user": "gtmanfred",
|
||||
"ansible_ssh_host": "12.34.56.78",
|
||||
"ansible_sudo_pass": "password",
|
||||
"ansible_ssh_pass": "password",
|
||||
"ansible_ssh_port": 23
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}'
|
||||
|
||||
This is the format that an inventory script needs to output to work with ansible, and thus here.
|
||||
|
|
|
@ -86,7 +86,7 @@ def orchestrate(
|
|||
|
||||
Runner uses the pillar variable
|
||||
|
||||
.. versionchanged:: 2017.5
|
||||
.. versionchanged:: 2017.5.0
|
||||
|
||||
Runner uses the pillar_enc variable that allows renderers to render the pillar.
|
||||
This is usable when supplying the contents of a file as pillar, and the file contains
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
ACME / Let's Encrypt certificate management state
|
||||
=================================================
|
||||
|
||||
.. versionadded:: 2016.3
|
||||
.. versionadded:: 2016.3.0
|
||||
|
||||
See also the module documentation
|
||||
|
||||
|
|
|
@ -36,7 +36,7 @@ the above word between angle brackets (<>).
|
|||
- FollowSymlinks
|
||||
AllowOverride: All
|
||||
|
||||
.. versionchanged:: 2018.3
|
||||
.. versionchanged:: 2018.3.0
|
||||
|
||||
Allows having the same section container multiple times (e.g. <Directory /path/to/dir>).
|
||||
|
||||
|
|
|
@ -1987,7 +1987,7 @@ def tidied(
|
|||
**kwargs
|
||||
):
|
||||
"""
|
||||
.. versionchanged:: 3006.0,3005
|
||||
.. versionchanged:: 3005,3006.0
|
||||
|
||||
Remove unwanted files based on specific criteria.
|
||||
|
||||
|
|
|
@ -274,7 +274,7 @@ def lv_present(
|
|||
force
|
||||
Assume yes to all prompts
|
||||
|
||||
.. versionadded:: 3002.0
|
||||
.. versionadded:: 3002
|
||||
|
||||
resizefs
|
||||
Use fsadm to resize the logical volume filesystem if needed
|
||||
|
|
|
@ -22,11 +22,11 @@ def present(name, parent=None, vlan=None):
|
|||
parent : string
|
||||
name of the parent bridge (if the bridge shall be created as a fake
|
||||
bridge). If specified, vlan must also be specified.
|
||||
.. versionadded:: 3006
|
||||
.. versionadded:: 3006.0
|
||||
vlan: int
|
||||
VLAN ID of the bridge (if the bridge shall be created as a fake
|
||||
bridge). If specified, parent must also be specified.
|
||||
.. versionadded:: 3006
|
||||
.. versionadded:: 3006.0
|
||||
|
||||
"""
|
||||
ret = {"name": name, "changes": {}, "result": False, "comment": ""}
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
"""
|
||||
Management of Open vSwitch database records.
|
||||
|
||||
.. versionadded:: 3006
|
||||
.. versionadded:: 3006.0
|
||||
"""
|
||||
|
||||
|
||||
|
|
|
@@ -5,7 +5,7 @@ Management of Pacemaker/Corosync clusters with PCS
 A state module to manage Pacemaker/Corosync clusters
 with the Pacemaker/Corosync configuration system (PCS)

-.. versionadded:: 2016.110
+.. versionadded:: 2016.11.0

 :depends: pcs
|
||||
|
||||
|
|
|
@ -174,7 +174,7 @@ def _check_pkg_version_format(pkg):
|
|||
|
||||
if not HAS_PIP:
|
||||
ret["comment"] = (
|
||||
"An importable Python 2 pip module is required but could not be "
|
||||
"An importable Python pip module is required but could not be "
|
||||
"found on your system. This usually means that the system's pip "
|
||||
"package is not installed properly."
|
||||
)
|
||||
|
|
|
@@ -40,11 +40,29 @@ def _group_changes(cur, wanted, remove=False):
     """
     Determine if the groups need to be changed
     """
-    old = set(cur)
-    new = set(wanted)
-    if (remove and old != new) or (not remove and not new.issubset(old)):
-        return True
-    return False
+    cur = set(cur)
+    wanted = set(wanted)
+
+    if cur == wanted or (not remove and wanted.issubset(cur)):
+        return False
+
+    all_grps = {name: __salt__["group.info"](name) for name in cur.union(wanted)}
+
+    if remove:
+        diff = wanted.symmetric_difference(cur)
+    else:
+        diff = wanted.difference(cur)
+
+    remain = list(diff)
+    for diff_grp in diff:
+        for grp, info in all_grps.items():
+            if grp == diff_grp:
+                continue
+            if all_grps[diff_grp]["gid"] == info["gid"]:
+                # dupe detected
+                remain.remove(diff_grp)
+
+    return bool(remain)


 def _get_root_args(local):
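As an aside, a standalone sketch of the comparison idea introduced above (plain Python, with a hard-coded dict standing in for the ``group.info`` lookups the real helper makes through ``__salt__``): a wanted group only counts as missing if no current group already provides the same GID.

```python
# Illustration only: group data is hard-coded instead of coming from group.info.
group_info = {
    "games": {"gid": 50},
    "users": {"gid": 100},
    "dupe-of-games": {"gid": 50},  # same GID as "games"
}

def groups_need_change(cur, wanted, remove=False):
    cur, wanted = set(cur), set(wanted)
    if cur == wanted or (not remove and wanted.issubset(cur)):
        return False
    diff = wanted.symmetric_difference(cur) if remove else wanted.difference(cur)
    remain = list(diff)
    for name in diff:
        for other, info in group_info.items():
            if other != name and group_info[name]["gid"] == info["gid"]:
                # another group already provides this GID, so no change is needed
                remain.remove(name)
                break
    return bool(remain)

# "dupe-of-games" shares GID 50 with "games", which the user already has,
# so no group change is reported.
print(groups_need_change(cur=["games", "users"], wanted=["dupe-of-games", "users"]))  # False
```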
|
||||
|
@ -110,6 +128,15 @@ def _changes(
|
|||
|
||||
change = {}
|
||||
wanted_groups = sorted(set((groups or []) + (optional_groups or [])))
|
||||
lusr_groups_gids = [
|
||||
__salt__["file.group_to_gid"](gname) for gname in lusr["groups"]
|
||||
]
|
||||
dupe_groups = {}
|
||||
for idx, _gid in enumerate(lusr_groups_gids):
|
||||
if lusr_groups_gids.count(_gid) > 1:
|
||||
if _gid not in dupe_groups:
|
||||
dupe_groups[_gid] = []
|
||||
dupe_groups[_gid].append(lusr["groups"][idx])
|
||||
if not remove_groups:
|
||||
wanted_groups = sorted(set(wanted_groups + lusr["groups"]))
|
||||
if uid and lusr["uid"] != uid:
|
||||
|
@ -119,24 +146,44 @@ def _changes(
|
|||
default_grp = __salt__["file.gid_to_group"](gid if gid is not None else lusr["gid"])
|
||||
old_default_grp = __salt__["file.gid_to_group"](lusr["gid"])
|
||||
# Remove the default group from the list for comparison purposes.
|
||||
if default_grp in lusr["groups"]:
|
||||
lusr["groups"].remove(default_grp)
|
||||
# Remove default group from wanted_groups, as this requirement is
|
||||
# already met
|
||||
if default_grp in lusr["groups"] or default_grp in wanted_groups:
|
||||
if default_grp in salt.utils.data.flatten(dupe_groups.values()):
|
||||
dupe_gid = __salt__["file.group_to_gid"](default_grp)
|
||||
for gname in dupe_groups[dupe_gid]:
|
||||
if gname in lusr["groups"]:
|
||||
lusr["groups"].remove(gname)
|
||||
if gname in wanted_groups:
|
||||
wanted_groups.remove(gname)
|
||||
else:
|
||||
if default_grp in lusr["groups"]:
|
||||
lusr["groups"].remove(default_grp)
|
||||
if default_grp in wanted_groups:
|
||||
wanted_groups.remove(default_grp)
|
||||
# If the group is being changed, make sure that the old primary group is
|
||||
# also removed from the list. Otherwise, if a user's gid is being changed
|
||||
# and their old primary group is reassigned as an additional group, Salt
|
||||
# will not properly detect the need for the change.
|
||||
if old_default_grp != default_grp and old_default_grp in lusr["groups"]:
|
||||
lusr["groups"].remove(old_default_grp)
|
||||
if old_default_grp in salt.utils.data.flatten(dupe_groups.values()):
|
||||
dupe_gid = __salt__["file.group_to_gid"](old_default_grp)
|
||||
for gname in dupe_groups[dupe_gid]:
|
||||
lusr["groups"].remove(gname)
|
||||
else:
|
||||
lusr["groups"].remove(old_default_grp)
|
||||
# If there's a group by the same name as the user, remove it from the list
|
||||
# for comparison purposes.
|
||||
if name in lusr["groups"] and name not in wanted_groups:
|
||||
lusr["groups"].remove(name)
|
||||
# Remove default group from wanted_groups, as this requirement is
|
||||
# already met
|
||||
if default_grp in wanted_groups:
|
||||
wanted_groups.remove(default_grp)
|
||||
if name in salt.utils.data.flatten(dupe_groups.values()):
|
||||
dupe_gid = __salt__["file.group_to_gid"](name)
|
||||
for gname in dupe_groups[dupe_gid]:
|
||||
lusr["groups"].remove(gname)
|
||||
else:
|
||||
lusr["groups"].remove(name)
|
||||
if _group_changes(lusr["groups"], wanted_groups, remove_groups):
|
||||
change["groups"] = wanted_groups
|
||||
if wanted_groups or remove_groups:
|
||||
change["groups"] = wanted_groups
|
||||
if home and lusr["home"] != home:
|
||||
change["home"] = home
|
||||
if createhome:
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
"""
|
||||
Management of Zabbix Action object over Zabbix API.
|
||||
|
||||
.. versionadded:: 2017.7
|
||||
.. versionadded:: 2017.7.0
|
||||
|
||||
:codeauthor: Jakub Sliva <jakub.sliva@ultimum.io>
|
||||
"""
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
"""
|
||||
.. versionadded:: 2017.7
|
||||
.. versionadded:: 2017.7.0
|
||||
|
||||
Management of Zabbix Template object over Zabbix API.
|
||||
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
"""
|
||||
Management of Zabbix Valuemap object over Zabbix API.
|
||||
|
||||
.. versionadded:: 2017.7
|
||||
.. versionadded:: 2017.7.0
|
||||
|
||||
:codeauthor: Jakub Sliva <jakub.sliva@ultimum.io>
|
||||
"""
|
||||
|
|
|
@@ -8,6 +8,7 @@ This is a base library used by a number of AWS services.
 :depends: requests
 """

+import copy
 import hashlib
 import hmac
 import logging

@@ -230,9 +231,9 @@ def assumed_creds(prov_dict, role_arn, location=None):
     # current time in epoch seconds
     now = time.mktime(datetime.utcnow().timetuple())

-    for key, creds in __AssumeCache__.items():
+    for key, creds in copy.deepcopy(__AssumeCache__).items():
         if (creds["Expiration"] - now) <= 120:
-            __AssumeCache__[key].delete()
+            del __AssumeCache__[key]

     if role_arn in __AssumeCache__:
         c = __AssumeCache__[role_arn]
|
||||
|
|
|
@@ -99,8 +99,10 @@ if HAS_MAKO:
             )

         def destroy(self):
-            if self.client:
+            if self._file_client:
+                file_client = self._file_client
+                self._file_client = None
                 try:
-                    self.client.destroy()
+                    file_client.destroy()
                 except AttributeError:
                     pass
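As a side note, the swap-then-clear pattern above is a common way to make teardown idempotent; a generic sketch (not Salt code, class and client names are invented):

```python
# Illustration only: clear the attribute first so a second destroy() is a no-op
# and a half-torn-down object never exposes a dead client.
class Lookup:
    def __init__(self, client):
        self._file_client = client

    def destroy(self):
        if self._file_client:
            file_client = self._file_client
            self._file_client = None
            try:
                file_client.destroy()
            except AttributeError:
                pass

class FakeClient:
    def destroy(self):
        print("client destroyed")

lookup = Lookup(FakeClient())
lookup.destroy()  # prints "client destroyed"
lookup.destroy()  # second call does nothing
```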
|
||||
|
|
|
@@ -96,7 +96,7 @@ def read_reg_pol_file(reg_pol_path):
     """
     return_data = None
     if os.path.exists(reg_pol_path):
-        log.debug("LGPO_REG Utils: Reading from %s", reg_pol_path)
+        log.debug("LGPO_REG Util: Reading from %s", reg_pol_path)
         with salt.utils.files.fopen(reg_pol_path, "rb") as pol_file:
             return_data = pol_file.read()
     return return_data
|
||||
|
|
|
@ -2,7 +2,7 @@ import pytest
|
|||
|
||||
import salt.modules.selinux as selinux
|
||||
from salt.exceptions import SaltInvocationError
|
||||
from tests.support.mock import MagicMock, patch
|
||||
from tests.support.mock import MagicMock, mock_open, patch
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
|
@ -293,3 +293,86 @@ def test_fcontext_policy_parsing_fail():
|
|||
"retcode": 1,
|
||||
"error": "Unrecognized response from restorecon command.",
|
||||
}
|
||||
|
||||
|
||||
def test_selinux_config_enforcing():
|
||||
"""
|
||||
Test values written to /etc/selinux/config are lowercase
|
||||
"""
|
||||
mock_file = """
|
||||
# This file controls the state of SELinux on the system.
|
||||
# SELINUX= can take one of these three values:
|
||||
# enforcing - SELinux security policy is enforced.
|
||||
# permissive - SELinux prints warnings instead of enforcing.
|
||||
# disabled - No SELinux policy is loaded.
|
||||
## SELINUX=disabled
|
||||
SELINUX=permissive
|
||||
# SELINUXTYPE= can take one of these three values:
|
||||
# targeted - Targeted processes are protected,
|
||||
# minimum - Modification of targeted policy. Only selected processes are protected.
|
||||
# mls - Multi Level Security protection.
|
||||
SELINUXTYPE=targeted
|
||||
|
||||
"""
|
||||
with patch("salt.utils.files.fopen", mock_open(read_data=mock_file)) as m_open:
|
||||
selinux.setenforce("Enforcing")
|
||||
writes = m_open.write_calls()
|
||||
assert writes
|
||||
for line in writes:
|
||||
if line.startswith("SELINUX="):
|
||||
assert line == "SELINUX=enforcing"
|
||||
|
||||
|
||||
def test_selinux_config_permissive():
|
||||
"""
|
||||
Test values written to /etc/selinux/config are lowercase
|
||||
"""
|
||||
mock_file = """
|
||||
# This file controls the state of SELinux on the system.
|
||||
# SELINUX= can take one of these three values:
|
||||
# enforcing - SELinux security policy is enforced.
|
||||
# permissive - SELinux prints warnings instead of enforcing.
|
||||
# disabled - No SELinux policy is loaded.
|
||||
SELINUX=disabled
|
||||
# SELINUXTYPE= can take one of these three values:
|
||||
# targeted - Targeted processes are protected,
|
||||
# minimum - Modification of targeted policy. Only selected processes are protected.
|
||||
# mls - Multi Level Security protection.
|
||||
SELINUXTYPE=targeted
|
||||
|
||||
"""
|
||||
with patch("salt.utils.files.fopen", mock_open(read_data=mock_file)) as m_open:
|
||||
selinux.setenforce("Permissive")
|
||||
writes = m_open.write_calls()
|
||||
assert writes
|
||||
for line in writes:
|
||||
if line.startswith("SELINUX="):
|
||||
assert line == "SELINUX=permissive"
|
||||
|
||||
|
||||
def test_selinux_config_disabled():
|
||||
"""
|
||||
Test values written to /etc/selinux/config are lowercase
|
||||
"""
|
||||
mock_file = """
|
||||
# This file controls the state of SELinux on the system.
|
||||
# SELINUX= can take one of these three values:
|
||||
# enforcing - SELinux security policy is enforced.
|
||||
# permissive - SELinux prints warnings instead of enforcing.
|
||||
# disabled - No SELinux policy is loaded.
|
||||
## SELINUX=disabled
|
||||
SELINUX=permissive
|
||||
# SELINUXTYPE= can take one of these three values:
|
||||
# targeted - Targeted processes are protected,
|
||||
# minimum - Modification of targeted policy. Only selected processes are protected.
|
||||
# mls - Multi Level Security protection.
|
||||
SELINUXTYPE=targeted
|
||||
|
||||
"""
|
||||
with patch("salt.utils.files.fopen", mock_open(read_data=mock_file)) as m_open:
|
||||
selinux.setenforce("Disabled")
|
||||
writes = m_open.write_calls()
|
||||
assert writes
|
||||
for line in writes:
|
||||
if line.startswith("SELINUX="):
|
||||
assert line == "SELINUX=disabled"
|
||||
|
|
|
@ -107,154 +107,6 @@ def lgpo_bin():
|
|||
yield str(sys_dir / "lgpo.exe")
|
||||
|
||||
|
||||
def test_get_policy_name(osrelease):
|
||||
if osrelease == "2022Server":
|
||||
pytest.skip(f"Test is failing on {osrelease}")
|
||||
if osrelease == "11":
|
||||
policy_name = "Allow Diagnostic Data"
|
||||
else:
|
||||
policy_name = "Allow Telemetry"
|
||||
result = win_lgpo.get_policy(
|
||||
policy_name=policy_name,
|
||||
policy_class="machine",
|
||||
return_value_only=True,
|
||||
return_full_policy_names=True,
|
||||
hierarchical_return=False,
|
||||
)
|
||||
expected = "Not Configured"
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_get_policy_id():
|
||||
result = win_lgpo.get_policy(
|
||||
policy_name="AllowTelemetry",
|
||||
policy_class="machine",
|
||||
return_value_only=True,
|
||||
return_full_policy_names=True,
|
||||
hierarchical_return=False,
|
||||
)
|
||||
expected = "Not Configured"
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_get_policy_name_full_return_full_names(osrelease):
|
||||
if osrelease == "2022Server":
|
||||
pytest.skip(f"Test is failing on {osrelease}")
|
||||
if osrelease == "11":
|
||||
policy_name = "Allow Diagnostic Data"
|
||||
else:
|
||||
policy_name = "Allow Telemetry"
|
||||
result = win_lgpo.get_policy(
|
||||
policy_name=policy_name,
|
||||
policy_class="machine",
|
||||
return_value_only=False,
|
||||
return_full_policy_names=True,
|
||||
hierarchical_return=False,
|
||||
)
|
||||
key = "Windows Components\\Data Collection and Preview Builds\\{}"
|
||||
expected = {key.format(policy_name): "Not Configured"}
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_get_policy_id_full_return_full_names(osrelease):
|
||||
if osrelease == "2022Server":
|
||||
pytest.skip(f"Test is failing on {osrelease}")
|
||||
if osrelease == "11":
|
||||
policy_name = "Allow Diagnostic Data"
|
||||
else:
|
||||
policy_name = "Allow Telemetry"
|
||||
result = win_lgpo.get_policy(
|
||||
policy_name="AllowTelemetry",
|
||||
policy_class="machine",
|
||||
return_value_only=False,
|
||||
return_full_policy_names=True,
|
||||
hierarchical_return=False,
|
||||
)
|
||||
key = "Windows Components\\Data Collection and Preview Builds\\{}"
|
||||
expected = {key.format(policy_name): "Not Configured"}
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_get_policy_name_full_return_ids(osrelease):
|
||||
if osrelease == "2022Server":
|
||||
pytest.skip(f"Test is failing on {osrelease}")
|
||||
if osrelease == "11":
|
||||
policy_name = "Allow Diagnostic Data"
|
||||
else:
|
||||
policy_name = "Allow Telemetry"
|
||||
result = win_lgpo.get_policy(
|
||||
policy_name=policy_name,
|
||||
policy_class="machine",
|
||||
return_value_only=False,
|
||||
return_full_policy_names=False,
|
||||
hierarchical_return=False,
|
||||
)
|
||||
expected = {"AllowTelemetry": "Not Configured"}
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_get_policy_id_full_return_ids():
|
||||
result = win_lgpo.get_policy(
|
||||
policy_name="AllowTelemetry",
|
||||
policy_class="machine",
|
||||
return_value_only=False,
|
||||
return_full_policy_names=False,
|
||||
hierarchical_return=False,
|
||||
)
|
||||
expected = {"AllowTelemetry": "Not Configured"}
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_get_policy_id_full_return_ids_hierarchical():
|
||||
result = win_lgpo.get_policy(
|
||||
policy_name="AllowTelemetry",
|
||||
policy_class="machine",
|
||||
return_value_only=False,
|
||||
return_full_policy_names=False,
|
||||
hierarchical_return=True,
|
||||
)
|
||||
expected = {
|
||||
"Computer Configuration": {
|
||||
"Administrative Templates": {
|
||||
"WindowsComponents": {
|
||||
"DataCollectionAndPreviewBuilds": {
|
||||
"AllowTelemetry": "Not Configured"
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_get_policy_name_return_full_names_hierarchical(osrelease):
|
||||
if osrelease == "2022Server":
|
||||
pytest.skip(f"Test is failing on {osrelease}")
|
||||
if osrelease == "11":
|
||||
policy_name = "Allow Diagnostic Data"
|
||||
else:
|
||||
policy_name = "Allow Telemetry"
|
||||
result = win_lgpo.get_policy(
|
||||
policy_name=policy_name,
|
||||
policy_class="machine",
|
||||
return_value_only=False,
|
||||
return_full_policy_names=True,
|
||||
hierarchical_return=True,
|
||||
)
|
||||
expected = {
|
||||
"Computer Configuration": {
|
||||
"Administrative Templates": {
|
||||
"Windows Components": {
|
||||
"Data Collection and Preview Builds": {
|
||||
policy_name: "Not Configured"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
assert result == expected
|
||||
|
||||
|
||||
@pytest.mark.destructive_test
|
||||
def test__load_policy_definitions():
|
||||
"""
|
||||
|
|
229
tests/pytests/unit/modules/win_lgpo/test_get_policy.py
Normal file
229
tests/pytests/unit/modules/win_lgpo/test_get_policy.py
Normal file
|
@ -0,0 +1,229 @@
|
|||
import copy
|
||||
import logging
|
||||
import os
|
||||
import pathlib
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.grains.core
|
||||
import salt.modules.win_file as win_file
|
||||
import salt.modules.win_lgpo as win_lgpo
|
||||
import salt.utils.files
|
||||
import salt.utils.win_dacl as win_dacl
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.windows_whitelisted,
|
||||
pytest.mark.skip_unless_on_windows,
|
||||
pytest.mark.slow_test,
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules(minion_opts):
|
||||
return {
|
||||
win_lgpo: {
|
||||
"__opts__": minion_opts,
|
||||
"__salt__": {
|
||||
"file.file_exists": win_file.file_exists,
|
||||
"file.makedirs": win_file.makedirs_,
|
||||
},
|
||||
},
|
||||
win_file: {
|
||||
"__utils__": {
|
||||
"dacl.set_perms": win_dacl.set_perms,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def osrelease():
|
||||
grains = salt.grains.core.os_data()
|
||||
yield grains.get("osrelease", None)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def clean_comp():
|
||||
reg_pol = pathlib.Path(
|
||||
os.getenv("SystemRoot"), "System32", "GroupPolicy", "Machine", "Registry.pol"
|
||||
)
|
||||
reg_pol.unlink(missing_ok=True)
|
||||
try:
|
||||
yield reg_pol
|
||||
finally:
|
||||
reg_pol.unlink(missing_ok=True)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def checkbox_policy():
|
||||
policy_name = "Configure Corporate Windows Error Reporting"
|
||||
policy_settings = {
|
||||
"Connect using SSL": False,
|
||||
"Corporate server name": "fakeserver.com",
|
||||
"Only upload on free networks": False,
|
||||
"Server port": 1273,
|
||||
}
|
||||
win_lgpo.set_computer_policy(name=policy_name, setting=copy.copy(policy_settings))
|
||||
try:
|
||||
yield policy_name, policy_settings
|
||||
finally:
|
||||
win_lgpo.set_computer_policy(name=policy_name, setting="Not Configured")
|
||||
|
||||
|
||||
def test_name(osrelease):
|
||||
if osrelease == "2022Server":
|
||||
pytest.skip(f"Test is failing on {osrelease}")
|
||||
if osrelease == "11":
|
||||
policy_name = "Allow Diagnostic Data"
|
||||
else:
|
||||
policy_name = "Allow Telemetry"
|
||||
result = win_lgpo.get_policy(
|
||||
policy_name=policy_name,
|
||||
policy_class="machine",
|
||||
return_value_only=True,
|
||||
return_full_policy_names=True,
|
||||
hierarchical_return=False,
|
||||
)
|
||||
expected = "Not Configured"
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_id():
|
||||
result = win_lgpo.get_policy(
|
||||
policy_name="AllowTelemetry",
|
||||
policy_class="machine",
|
||||
return_value_only=True,
|
||||
return_full_policy_names=True,
|
||||
hierarchical_return=False,
|
||||
)
|
||||
expected = "Not Configured"
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_name_full_return_full_names(osrelease):
|
||||
if osrelease == "2022Server":
|
||||
pytest.skip(f"Test is failing on {osrelease}")
|
||||
if osrelease == "11":
|
||||
policy_name = "Allow Diagnostic Data"
|
||||
else:
|
||||
policy_name = "Allow Telemetry"
|
||||
result = win_lgpo.get_policy(
|
||||
policy_name=policy_name,
|
||||
policy_class="machine",
|
||||
return_value_only=False,
|
||||
return_full_policy_names=True,
|
||||
hierarchical_return=False,
|
||||
)
|
||||
key = "Windows Components\\Data Collection and Preview Builds\\{}"
|
||||
expected = {key.format(policy_name): "Not Configured"}
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_id_full_return_full_names(osrelease):
|
||||
if osrelease == "2022Server":
|
||||
pytest.skip(f"Test is failing on {osrelease}")
|
||||
if osrelease == "11":
|
||||
policy_name = "Allow Diagnostic Data"
|
||||
else:
|
||||
policy_name = "Allow Telemetry"
|
||||
result = win_lgpo.get_policy(
|
||||
policy_name="AllowTelemetry",
|
||||
policy_class="machine",
|
||||
return_value_only=False,
|
||||
return_full_policy_names=True,
|
||||
hierarchical_return=False,
|
||||
)
|
||||
key = "Windows Components\\Data Collection and Preview Builds\\{}"
|
||||
expected = {key.format(policy_name): "Not Configured"}
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_name_full_return_ids(osrelease):
|
||||
if osrelease == "2022Server":
|
||||
pytest.skip(f"Test is failing on {osrelease}")
|
||||
if osrelease == "11":
|
||||
policy_name = "Allow Diagnostic Data"
|
||||
else:
|
||||
policy_name = "Allow Telemetry"
|
||||
result = win_lgpo.get_policy(
|
||||
policy_name=policy_name,
|
||||
policy_class="machine",
|
||||
return_value_only=False,
|
||||
return_full_policy_names=False,
|
||||
hierarchical_return=False,
|
||||
)
|
||||
expected = {"AllowTelemetry": "Not Configured"}
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_id_full_return_ids():
|
||||
result = win_lgpo.get_policy(
|
||||
policy_name="AllowTelemetry",
|
||||
policy_class="machine",
|
||||
return_value_only=False,
|
||||
return_full_policy_names=False,
|
||||
hierarchical_return=False,
|
||||
)
|
||||
expected = {"AllowTelemetry": "Not Configured"}
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_id_full_return_ids_hierarchical():
|
||||
result = win_lgpo.get_policy(
|
||||
policy_name="AllowTelemetry",
|
||||
policy_class="machine",
|
||||
return_value_only=False,
|
||||
return_full_policy_names=False,
|
||||
hierarchical_return=True,
|
||||
)
|
||||
expected = {
|
||||
"Computer Configuration": {
|
||||
"Administrative Templates": {
|
||||
"WindowsComponents": {
|
||||
"DataCollectionAndPreviewBuilds": {
|
||||
"AllowTelemetry": "Not Configured"
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_name_return_full_names_hierarchical(osrelease):
|
||||
if osrelease == "2022Server":
|
||||
pytest.skip(f"Test is failing on {osrelease}")
|
||||
if osrelease == "11":
|
||||
policy_name = "Allow Diagnostic Data"
|
||||
else:
|
||||
policy_name = "Allow Telemetry"
|
||||
result = win_lgpo.get_policy(
|
||||
policy_name=policy_name,
|
||||
policy_class="machine",
|
||||
return_value_only=False,
|
||||
return_full_policy_names=True,
|
||||
hierarchical_return=True,
|
||||
)
|
||||
expected = {
|
||||
"Computer Configuration": {
|
||||
"Administrative Templates": {
|
||||
"Windows Components": {
|
||||
"Data Collection and Preview Builds": {
|
||||
policy_name: "Not Configured"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
assert result == expected
|
||||
|
||||
|
||||
def test_checkboxes(checkbox_policy):
|
||||
"""
|
||||
Test scenario where sometimes checkboxes aren't returned in the results
|
||||
"""
|
||||
policy_name, expected = checkbox_policy
|
||||
result = win_lgpo.get_policy(policy_name=policy_name, policy_class="Machine")
|
||||
assert result == expected
|
|
@ -62,6 +62,28 @@ def expected_targets_return():
|
|||
}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def expected_docs_targets_return():
|
||||
return {
|
||||
"home": {
|
||||
"passwd": "password",
|
||||
"sudo": "password",
|
||||
"host": "12.34.56.78",
|
||||
"port": 23,
|
||||
"user": "gtmanfred",
|
||||
"minion_opts": {"http_port": 80},
|
||||
},
|
||||
"salt.gtmanfred.com": {
|
||||
"passwd": "password",
|
||||
"sudo": "password",
|
||||
"host": "127.0.0.1",
|
||||
"port": 22,
|
||||
"user": "gtmanfred",
|
||||
"minion_opts": {"http_port": 80},
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture(scope="module")
|
||||
def roster_dir(tmp_path_factory):
|
||||
dpath = tmp_path_factory.mktemp("roster")
|
||||
|
@ -136,6 +158,59 @@ def roster_dir(tmp_path_factory):
|
|||
children:
|
||||
southeast:
|
||||
"""
|
||||
docs_ini_contents = """
|
||||
[servers]
|
||||
salt.gtmanfred.com ansible_ssh_user=gtmanfred ansible_ssh_host=127.0.0.1 ansible_ssh_port=22 ansible_ssh_pass='password' ansible_sudo_pass='password'
|
||||
|
||||
[desktop]
|
||||
home ansible_ssh_user=gtmanfred ansible_ssh_host=12.34.56.78 ansible_ssh_port=23 ansible_ssh_pass='password' ansible_sudo_pass='password'
|
||||
|
||||
[computers:children]
|
||||
desktop
|
||||
servers
|
||||
|
||||
[computers:vars]
|
||||
http_port=80
|
||||
"""
|
||||
docs_script_contents = """
|
||||
#!/bin/bash
|
||||
echo '{
|
||||
"servers": [
|
||||
"salt.gtmanfred.com"
|
||||
],
|
||||
"desktop": [
|
||||
"home"
|
||||
],
|
||||
"computers": {
|
||||
"hosts": [],
|
||||
"children": [
|
||||
"desktop",
|
||||
"servers"
|
||||
],
|
||||
"vars": {
|
||||
"http_port": 80
|
||||
}
|
||||
},
|
||||
"_meta": {
|
||||
"hostvars": {
|
||||
"salt.gtmanfred.com": {
|
||||
"ansible_ssh_user": "gtmanfred",
|
||||
"ansible_ssh_host": "127.0.0.1",
|
||||
"ansible_sudo_pass": "password",
|
||||
"ansible_ssh_pass": "password",
|
||||
"ansible_ssh_port": 22
|
||||
},
|
||||
"home": {
|
||||
"ansible_ssh_user": "gtmanfred",
|
||||
"ansible_ssh_host": "12.34.56.78",
|
||||
"ansible_sudo_pass": "password",
|
||||
"ansible_ssh_pass": "password",
|
||||
"ansible_ssh_port": 23
|
||||
}
|
||||
}
|
||||
}
|
||||
}'
|
||||
"""
|
||||
with pytest.helpers.temp_file(
|
||||
"roster.py", roster_py_contents, directory=dpath
|
||||
) as py_roster:
|
||||
|
@ -144,11 +219,17 @@ def roster_dir(tmp_path_factory):
|
|||
"roster.ini", roster_ini_contents, directory=dpath
|
||||
), pytest.helpers.temp_file(
|
||||
"roster.yml", roster_yaml_contents, directory=dpath
|
||||
), pytest.helpers.temp_file(
|
||||
"roster-docs.ini", docs_ini_contents, directory=dpath
|
||||
):
|
||||
try:
|
||||
yield dpath
|
||||
finally:
|
||||
shutil.rmtree(str(dpath), ignore_errors=True)
|
||||
with pytest.helpers.temp_file(
|
||||
"roster-docs.sh", docs_script_contents, directory=dpath
|
||||
) as script_roster:
|
||||
script_roster.chmod(0o755)
|
||||
try:
|
||||
yield dpath
|
||||
finally:
|
||||
shutil.rmtree(str(dpath), ignore_errors=True)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
|
@ -179,3 +260,17 @@ def test_script(roster_opts, roster_dir, expected_targets_return):
|
|||
with patch.dict(ansible.__opts__, roster_opts):
|
||||
ret = ansible.targets("*")
|
||||
assert ret == expected_targets_return
|
||||
|
||||
|
||||
def test_docs_ini(roster_opts, roster_dir, expected_docs_targets_return):
|
||||
roster_opts["roster_file"] = str(roster_dir / "roster-docs.ini")
|
||||
with patch.dict(ansible.__opts__, roster_opts):
|
||||
ret = ansible.targets("*")
|
||||
assert ret == expected_docs_targets_return
|
||||
|
||||
|
||||
def test_docs_script(roster_opts, roster_dir, expected_docs_targets_return):
|
||||
roster_opts["roster_file"] = str(roster_dir / "roster-docs.sh")
|
||||
with patch.dict(ansible.__opts__, roster_opts):
|
||||
ret = ansible.targets("*")
|
||||
assert ret == expected_docs_targets_return
|
||||
|
|
752
tests/pytests/unit/states/test_jboss7.py
Normal file
@@ -0,0 +1,752 @@
# pylint: disable=unused-argument


import pytest

import salt.states.jboss7 as jboss7
from salt.exceptions import CommandExecutionError
from tests.support.mock import MagicMock, patch


@pytest.fixture
def configure_loader_modules():
    return {
        jboss7: {
            "__salt__": {
                "jboss7.read_datasource": MagicMock(),
                "jboss7.create_datasource": MagicMock(),
                "jboss7.update_datasource": MagicMock(),
                "jboss7.remove_datasource": MagicMock(),
                "jboss7.read_simple_binding": MagicMock(),
                "jboss7.create_simple_binding": MagicMock(),
                "jboss7.update_simple_binding": MagicMock(),
                "jboss7.undeploy": MagicMock(),
                "jboss7.deploy": MagicMock,
                "file.get_managed": MagicMock,
                "file.manage_file": MagicMock,
                "jboss7.list_deployments": MagicMock,
            },
            "__env__": "base",
        }
    }


def test_should_not_redeploy_unchanged():
|
||||
# given
|
||||
parameters = {
|
||||
"target_file": "some_artifact",
|
||||
"undeploy_force": False,
|
||||
"undeploy": "some_artifact",
|
||||
"source": "some_artifact_on_master",
|
||||
}
|
||||
jboss_conf = {"cli_path": "somewhere", "controller": "some_controller"}
|
||||
|
||||
def list_deployments(jboss_config):
|
||||
return ["some_artifact"]
|
||||
|
||||
def file_get_managed(
|
||||
name,
|
||||
template,
|
||||
source,
|
||||
source_hash,
|
||||
source_hash_name,
|
||||
user,
|
||||
group,
|
||||
mode,
|
||||
attrs,
|
||||
saltenv,
|
||||
context,
|
||||
defaults,
|
||||
skip_verify,
|
||||
kwargs,
|
||||
):
|
||||
return "sfn", "hash", ""
|
||||
|
||||
def file_manage_file(
|
||||
name,
|
||||
sfn,
|
||||
ret,
|
||||
source,
|
||||
source_sum,
|
||||
user,
|
||||
group,
|
||||
mode,
|
||||
attrs,
|
||||
saltenv,
|
||||
backup,
|
||||
makedirs,
|
||||
template,
|
||||
show_diff,
|
||||
contents,
|
||||
dir_mode,
|
||||
):
|
||||
return {"result": True, "changes": False}
|
||||
|
||||
jboss7_undeploy_mock = MagicMock()
|
||||
jboss7_deploy_mock = MagicMock()
|
||||
file_get_managed = MagicMock(side_effect=file_get_managed)
|
||||
file_manage_file = MagicMock(side_effect=file_manage_file)
|
||||
list_deployments_mock = MagicMock(side_effect=list_deployments)
|
||||
with patch.dict(
|
||||
jboss7.__salt__,
|
||||
{
|
||||
"jboss7.undeploy": jboss7_undeploy_mock,
|
||||
"jboss7.deploy": jboss7_deploy_mock,
|
||||
"file.get_managed": file_get_managed,
|
||||
"file.manage_file": file_manage_file,
|
||||
"jboss7.list_deployments": list_deployments_mock,
|
||||
},
|
||||
):
|
||||
# when
|
||||
result = jboss7.deployed(
|
||||
name="unchanged", jboss_config=jboss_conf, salt_source=parameters
|
||||
)
|
||||
|
||||
# then
|
||||
assert not jboss7_undeploy_mock.called
|
||||
assert not jboss7_deploy_mock.called
|
||||
|
||||
|
||||
def test_should_redeploy_changed():
|
||||
# given
|
||||
parameters = {
|
||||
"target_file": "some_artifact",
|
||||
"undeploy_force": False,
|
||||
"undeploy": "some_artifact",
|
||||
"source": "some_artifact_on_master",
|
||||
}
|
||||
jboss_conf = {"cli_path": "somewhere", "controller": "some_controller"}
|
||||
|
||||
def list_deployments(jboss_config):
|
||||
return ["some_artifact"]
|
||||
|
||||
def file_get_managed(
|
||||
name,
|
||||
template,
|
||||
source,
|
||||
source_hash,
|
||||
source_hash_name,
|
||||
user,
|
||||
group,
|
||||
mode,
|
||||
attrs,
|
||||
saltenv,
|
||||
context,
|
||||
defaults,
|
||||
skip_verify,
|
||||
kwargs,
|
||||
):
|
||||
return "sfn", "hash", ""
|
||||
|
||||
def file_manage_file(
|
||||
name,
|
||||
sfn,
|
||||
ret,
|
||||
source,
|
||||
source_sum,
|
||||
user,
|
||||
group,
|
||||
mode,
|
||||
attrs,
|
||||
saltenv,
|
||||
backup,
|
||||
makedirs,
|
||||
template,
|
||||
show_diff,
|
||||
contents,
|
||||
dir_mode,
|
||||
):
|
||||
return {"result": True, "changes": True}
|
||||
|
||||
jboss7_undeploy_mock = MagicMock()
|
||||
jboss7_deploy_mock = MagicMock()
|
||||
file_get_managed = MagicMock(side_effect=file_get_managed)
|
||||
file_manage_file = MagicMock(side_effect=file_manage_file)
|
||||
list_deployments_mock = MagicMock(side_effect=list_deployments)
|
||||
with patch.dict(
|
||||
jboss7.__salt__,
|
||||
{
|
||||
"jboss7.undeploy": jboss7_undeploy_mock,
|
||||
"jboss7.deploy": jboss7_deploy_mock,
|
||||
"file.get_managed": file_get_managed,
|
||||
"file.manage_file": file_manage_file,
|
||||
"jboss7.list_deployments": list_deployments_mock,
|
||||
},
|
||||
):
|
||||
# when
|
||||
result = jboss7.deployed(
|
||||
name="unchanged", jboss_config=jboss_conf, salt_source=parameters
|
||||
)
|
||||
|
||||
# then
|
||||
assert jboss7_undeploy_mock.called
|
||||
assert jboss7_deploy_mock.called
|
||||
|
||||
|
||||
def test_should_deploy_different_artifact():
|
||||
# given
|
||||
parameters = {
|
||||
"target_file": "some_artifact",
|
||||
"undeploy_force": False,
|
||||
"undeploy": "some_artifact",
|
||||
"source": "some_artifact_on_master",
|
||||
}
|
||||
jboss_conf = {"cli_path": "somewhere", "controller": "some_controller"}
|
||||
|
||||
def list_deployments(jboss_config):
|
||||
return ["some_other_artifact"]
|
||||
|
||||
def file_get_managed(
|
||||
name,
|
||||
template,
|
||||
source,
|
||||
source_hash,
|
||||
source_hash_name,
|
||||
user,
|
||||
group,
|
||||
mode,
|
||||
attrs,
|
||||
saltenv,
|
||||
context,
|
||||
defaults,
|
||||
skip_verify,
|
||||
kwargs,
|
||||
):
|
||||
return "sfn", "hash", ""
|
||||
|
||||
def file_manage_file(
|
||||
name,
|
||||
sfn,
|
||||
ret,
|
||||
source,
|
||||
source_sum,
|
||||
user,
|
||||
group,
|
||||
mode,
|
||||
attrs,
|
||||
saltenv,
|
||||
backup,
|
||||
makedirs,
|
||||
template,
|
||||
show_diff,
|
||||
contents,
|
||||
dir_mode,
|
||||
):
|
||||
return {"result": True, "changes": False}
|
||||
|
||||
jboss7_undeploy_mock = MagicMock()
|
||||
jboss7_deploy_mock = MagicMock()
|
||||
file_get_managed = MagicMock(side_effect=file_get_managed)
|
||||
file_manage_file = MagicMock(side_effect=file_manage_file)
|
||||
list_deployments_mock = MagicMock(side_effect=list_deployments)
|
||||
with patch.dict(
|
||||
jboss7.__salt__,
|
||||
{
|
||||
"jboss7.undeploy": jboss7_undeploy_mock,
|
||||
"jboss7.deploy": jboss7_deploy_mock,
|
||||
"file.get_managed": file_get_managed,
|
||||
"file.manage_file": file_manage_file,
|
||||
"jboss7.list_deployments": list_deployments_mock,
|
||||
},
|
||||
):
|
||||
# when
|
||||
result = jboss7.deployed(
|
||||
name="unchanged", jboss_config=jboss_conf, salt_source=parameters
|
||||
)
|
||||
|
||||
# then
|
||||
assert not jboss7_undeploy_mock.called
|
||||
assert jboss7_deploy_mock.called
|
||||
|
||||
|
||||
def test_should_redploy_undeploy_force():
|
||||
# given
|
||||
parameters = {
|
||||
"target_file": "some_artifact",
|
||||
"undeploy_force": True,
|
||||
"undeploy": "some_artifact",
|
||||
"source": "some_artifact_on_master",
|
||||
}
|
||||
jboss_conf = {"cli_path": "somewhere", "controller": "some_controller"}
|
||||
|
||||
def list_deployments(jboss_config):
|
||||
return ["some_artifact"]
|
||||
|
||||
def file_get_managed(
|
||||
name,
|
||||
template,
|
||||
source,
|
||||
source_hash,
|
||||
source_hash_name,
|
||||
user,
|
||||
group,
|
||||
mode,
|
||||
attrs,
|
||||
saltenv,
|
||||
context,
|
||||
defaults,
|
||||
skip_verify,
|
||||
kwargs,
|
||||
):
|
||||
return "sfn", "hash", ""
|
||||
|
||||
def file_manage_file(
|
||||
name,
|
||||
sfn,
|
||||
ret,
|
||||
source,
|
||||
source_sum,
|
||||
user,
|
||||
group,
|
||||
mode,
|
||||
attrs,
|
||||
saltenv,
|
||||
backup,
|
||||
makedirs,
|
||||
template,
|
||||
show_diff,
|
||||
contents,
|
||||
dir_mode,
|
||||
):
|
||||
return {"result": True, "changes": False}
|
||||
|
||||
jboss7_undeploy_mock = MagicMock()
|
||||
jboss7_deploy_mock = MagicMock()
|
||||
file_get_managed = MagicMock(side_effect=file_get_managed)
|
||||
file_manage_file = MagicMock(side_effect=file_manage_file)
|
||||
list_deployments_mock = MagicMock(side_effect=list_deployments)
|
||||
with patch.dict(
|
||||
jboss7.__salt__,
|
||||
{
|
||||
"jboss7.undeploy": jboss7_undeploy_mock,
|
||||
"jboss7.deploy": jboss7_deploy_mock,
|
||||
"file.get_managed": file_get_managed,
|
||||
"file.manage_file": file_manage_file,
|
||||
"jboss7.list_deployments": list_deployments_mock,
|
||||
},
|
||||
):
|
||||
# when
|
||||
result = jboss7.deployed(
|
||||
name="unchanged", jboss_config=jboss_conf, salt_source=parameters
|
||||
)
|
||||
|
||||
# then
|
||||
assert jboss7_undeploy_mock.called
|
||||
assert jboss7_deploy_mock.called
|
||||
|
||||
|
||||
def test_should_create_new_datasource_if_not_exists():
|
||||
# given
|
||||
datasource_properties = {"connection-url": "jdbc:/old-connection-url"}
|
||||
ds_status = {"created": False}
|
||||
|
||||
def read_func(jboss_config, name, profile):
|
||||
if ds_status["created"]:
|
||||
return {"success": True, "result": datasource_properties}
|
||||
else:
|
||||
return {"success": False, "err_code": "JBAS014807"}
|
||||
|
||||
def create_func(jboss_config, name, datasource_properties, profile):
|
||||
ds_status["created"] = True
|
||||
return {"success": True}
|
||||
|
||||
read_mock = MagicMock(side_effect=read_func)
|
||||
create_mock = MagicMock(side_effect=create_func)
|
||||
update_mock = MagicMock()
|
||||
with patch.dict(
|
||||
jboss7.__salt__,
|
||||
{
|
||||
"jboss7.read_datasource": read_mock,
|
||||
"jboss7.create_datasource": create_mock,
|
||||
"jboss7.update_datasource": update_mock,
|
||||
},
|
||||
):
|
||||
|
||||
# when
|
||||
result = jboss7.datasource_exists(
|
||||
name="appDS",
|
||||
jboss_config={},
|
||||
datasource_properties=datasource_properties,
|
||||
profile=None,
|
||||
)
|
||||
|
||||
# then
|
||||
create_mock.assert_called_with(
|
||||
name="appDS",
|
||||
jboss_config={},
|
||||
datasource_properties=datasource_properties,
|
||||
profile=None,
|
||||
)
|
||||
|
||||
assert not update_mock.called
|
||||
assert result["comment"] == "Datasource created."
|
||||
|
||||
|
||||
def test_should_update_the_datasource_if_exists():
|
||||
ds_status = {"updated": False}
|
||||
|
||||
def read_func(jboss_config, name, profile):
|
||||
if ds_status["updated"]:
|
||||
return {
|
||||
"success": True,
|
||||
"result": {"connection-url": "jdbc:/new-connection-url"},
|
||||
}
|
||||
else:
|
||||
return {
|
||||
"success": True,
|
||||
"result": {"connection-url": "jdbc:/old-connection-url"},
|
||||
}
|
||||
|
||||
def update_func(jboss_config, name, new_properties, profile):
|
||||
ds_status["updated"] = True
|
||||
return {"success": True}
|
||||
|
||||
read_mock = MagicMock(side_effect=read_func)
|
||||
create_mock = MagicMock()
|
||||
update_mock = MagicMock(side_effect=update_func)
|
||||
with patch.dict(
|
||||
jboss7.__salt__,
|
||||
{
|
||||
"jboss7.read_datasource": read_mock,
|
||||
"jboss7.create_datasource": create_mock,
|
||||
"jboss7.update_datasource": update_mock,
|
||||
},
|
||||
):
|
||||
result = jboss7.datasource_exists(
|
||||
name="appDS",
|
||||
jboss_config={},
|
||||
datasource_properties={"connection-url": "jdbc:/new-connection-url"},
|
||||
profile=None,
|
||||
)
|
||||
|
||||
update_mock.assert_called_with(
|
||||
name="appDS",
|
||||
jboss_config={},
|
||||
new_properties={"connection-url": "jdbc:/new-connection-url"},
|
||||
profile=None,
|
||||
)
|
||||
assert read_mock.called
|
||||
assert result["comment"] == "Datasource updated."
|
||||
|
||||
|
||||
def test_should_recreate_the_datasource_if_specified():
|
||||
read_mock = MagicMock(
|
||||
return_value={
|
||||
"success": True,
|
||||
"result": {"connection-url": "jdbc:/same-connection-url"},
|
||||
}
|
||||
)
|
||||
create_mock = MagicMock(return_value={"success": True})
|
||||
remove_mock = MagicMock(return_value={"success": True})
|
||||
update_mock = MagicMock()
|
||||
with patch.dict(
|
||||
jboss7.__salt__,
|
||||
{
|
||||
"jboss7.read_datasource": read_mock,
|
||||
"jboss7.create_datasource": create_mock,
|
||||
"jboss7.remove_datasource": remove_mock,
|
||||
"jboss7.update_datasource": update_mock,
|
||||
},
|
||||
):
|
||||
result = jboss7.datasource_exists(
|
||||
name="appDS",
|
||||
jboss_config={},
|
||||
datasource_properties={"connection-url": "jdbc:/same-connection-url"},
|
||||
recreate=True,
|
||||
)
|
||||
|
||||
remove_mock.assert_called_with(name="appDS", jboss_config={}, profile=None)
|
||||
create_mock.assert_called_with(
|
||||
name="appDS",
|
||||
jboss_config={},
|
||||
datasource_properties={"connection-url": "jdbc:/same-connection-url"},
|
||||
profile=None,
|
||||
)
|
||||
assert result["changes"]["removed"] == "appDS"
|
||||
assert result["changes"]["created"] == "appDS"
|
||||
|
||||
|
||||
def test_should_inform_if_the_datasource_has_not_changed():
|
||||
read_mock = MagicMock(
|
||||
return_value={
|
||||
"success": True,
|
||||
"result": {"connection-url": "jdbc:/same-connection-url"},
|
||||
}
|
||||
)
|
||||
create_mock = MagicMock()
|
||||
remove_mock = MagicMock()
|
||||
update_mock = MagicMock(return_value={"success": True})
|
||||
|
||||
with patch.dict(
|
||||
jboss7.__salt__,
|
||||
{
|
||||
"jboss7.read_datasource": read_mock,
|
||||
"jboss7.create_datasource": create_mock,
|
||||
"jboss7.remove_datasource": remove_mock,
|
||||
"jboss7.update_datasource": update_mock,
|
||||
},
|
||||
):
|
||||
result = jboss7.datasource_exists(
|
||||
name="appDS",
|
||||
jboss_config={},
|
||||
datasource_properties={"connection-url": "jdbc:/old-connection-url"},
|
||||
)
|
||||
|
||||
update_mock.assert_called_with(
|
||||
name="appDS",
|
||||
jboss_config={},
|
||||
new_properties={"connection-url": "jdbc:/old-connection-url"},
|
||||
profile=None,
|
||||
)
|
||||
assert not create_mock.called
|
||||
assert result["comment"] == "Datasource not changed."
|
||||
|
||||
|
||||
def test_should_create_binding_if_not_exists():
|
||||
# given
|
||||
binding_status = {"created": False}
|
||||
|
||||
def read_func(jboss_config, binding_name, profile):
|
||||
if binding_status["created"]:
|
||||
return {"success": True, "result": {"value": "DEV"}}
|
||||
else:
|
||||
return {"success": False, "err_code": "JBAS014807"}
|
||||
|
||||
def create_func(jboss_config, binding_name, value, profile):
|
||||
binding_status["created"] = True
|
||||
return {"success": True}
|
||||
|
||||
read_mock = MagicMock(side_effect=read_func)
|
||||
create_mock = MagicMock(side_effect=create_func)
|
||||
update_mock = MagicMock()
|
||||
|
||||
with patch.dict(
|
||||
jboss7.__salt__,
|
||||
{
|
||||
"jboss7.read_simple_binding": read_mock,
|
||||
"jboss7.create_simple_binding": create_mock,
|
||||
"jboss7.update_simple_binding": update_mock,
|
||||
},
|
||||
):
|
||||
|
||||
# when
|
||||
result = jboss7.bindings_exist(
|
||||
name="bindings", jboss_config={}, bindings={"env": "DEV"}, profile=None
|
||||
)
|
||||
|
||||
# then
|
||||
create_mock.assert_called_with(
|
||||
jboss_config={}, binding_name="env", value="DEV", profile=None
|
||||
)
|
||||
assert update_mock.call_count == 0
|
||||
assert result["changes"] == {"added": "env:DEV\n"}
|
||||
assert result["comment"] == "Bindings changed."
|
||||
|
||||
|
||||
def test_should_update_bindings_if_exists_and_different():
|
||||
# given
|
||||
binding_status = {"updated": False}
|
||||
|
||||
def read_func(jboss_config, binding_name, profile):
|
||||
if binding_status["updated"]:
|
||||
return {"success": True, "result": {"value": "DEV2"}}
|
||||
else:
|
||||
return {"success": True, "result": {"value": "DEV"}}
|
||||
|
||||
def update_func(jboss_config, binding_name, value, profile):
|
||||
binding_status["updated"] = True
|
||||
return {"success": True}
|
||||
|
||||
read_mock = MagicMock(side_effect=read_func)
|
||||
create_mock = MagicMock()
|
||||
update_mock = MagicMock(side_effect=update_func)
|
||||
|
||||
with patch.dict(
|
||||
jboss7.__salt__,
|
||||
{
|
||||
"jboss7.read_simple_binding": read_mock,
|
||||
"jboss7.create_simple_binding": create_mock,
|
||||
"jboss7.update_simple_binding": update_mock,
|
||||
},
|
||||
):
|
||||
# when
|
||||
result = jboss7.bindings_exist(
|
||||
name="bindings", jboss_config={}, bindings={"env": "DEV2"}, profile=None
|
||||
)
|
||||
|
||||
# then
|
||||
update_mock.assert_called_with(
|
||||
jboss_config={}, binding_name="env", value="DEV2", profile=None
|
||||
)
|
||||
assert create_mock.call_count == 0
|
||||
assert result["changes"] == {"changed": "env:DEV->DEV2\n"}
|
||||
assert result["comment"] == "Bindings changed."
|
||||
|
||||
|
||||
def test_should_not_update_bindings_if_same():
|
||||
# given
|
||||
read_mock = MagicMock(return_value={"success": True, "result": {"value": "DEV2"}})
|
||||
create_mock = MagicMock()
|
||||
update_mock = MagicMock()
|
||||
|
||||
with patch.dict(
|
||||
jboss7.__salt__,
|
||||
{
|
||||
"jboss7.read_simple_binding": read_mock,
|
||||
"jboss7.create_simple_binding": create_mock,
|
||||
"jboss7.update_simple_binding": update_mock,
|
||||
},
|
||||
):
|
||||
# when
|
||||
result = jboss7.bindings_exist(
|
||||
name="bindings", jboss_config={}, bindings={"env": "DEV2"}
|
||||
)
|
||||
|
||||
# then
|
||||
assert create_mock.call_count == 0
|
||||
assert update_mock.call_count == 0
|
||||
assert result["changes"] == {}
|
||||
assert result["comment"] == "Bindings not changed."
|
||||
|
||||
|
||||
def test_should_raise_exception_if_cannot_create_binding():
|
||||
def read_func(jboss_config, binding_name, profile):
|
||||
return {"success": False, "err_code": "JBAS014807"}
|
||||
|
||||
def create_func(jboss_config, binding_name, value, profile):
|
||||
return {"success": False, "failure-description": "Incorrect binding name."}
|
||||
|
||||
read_mock = MagicMock(side_effect=read_func)
|
||||
create_mock = MagicMock(side_effect=create_func)
|
||||
update_mock = MagicMock()
|
||||
|
||||
with patch.dict(
|
||||
jboss7.__salt__,
|
||||
{
|
||||
"jboss7.read_simple_binding": read_mock,
|
||||
"jboss7.create_simple_binding": create_mock,
|
||||
"jboss7.update_simple_binding": update_mock,
|
||||
},
|
||||
):
|
||||
# when
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
jboss7.bindings_exist(
|
||||
name="bindings",
|
||||
jboss_config={},
|
||||
bindings={"env": "DEV2"},
|
||||
profile=None,
|
||||
)
|
||||
assert str(exc.value) == "Incorrect binding name."
|
||||
|
||||
|
||||
def test_should_raise_exception_if_cannot_update_binding():
|
||||
def read_func(jboss_config, binding_name, profile):
|
||||
return {"success": True, "result": {"value": "DEV"}}
|
||||
|
||||
def update_func(jboss_config, binding_name, value, profile):
|
||||
return {"success": False, "failure-description": "Incorrect binding name."}
|
||||
|
||||
read_mock = MagicMock(side_effect=read_func)
|
||||
create_mock = MagicMock()
|
||||
update_mock = MagicMock(side_effect=update_func)
|
||||
|
||||
with patch.dict(
|
||||
jboss7.__salt__,
|
||||
{
|
||||
"jboss7.read_simple_binding": read_mock,
|
||||
"jboss7.create_simple_binding": create_mock,
|
||||
"jboss7.update_simple_binding": update_mock,
|
||||
},
|
||||
):
|
||||
|
||||
# when
|
||||
with pytest.raises(CommandExecutionError) as exc:
|
||||
jboss7.bindings_exist(
|
||||
name="bindings",
|
||||
jboss_config={},
|
||||
bindings={"env": "DEV2"},
|
||||
profile=None,
|
||||
)
|
||||
assert str(exc.value) == "Incorrect binding name."
|
||||
|
||||
|
||||
def test_datasource_exist_create_datasource_good_code():
|
||||
jboss_config = {
|
||||
"cli_path": "/home/ch44d/Desktop/wildfly-18.0.0.Final/bin/jboss-cli.sh",
|
||||
"controller": "127.0.0.1: 9990",
|
||||
"cli_user": "user",
|
||||
"cli_password": "user",
|
||||
}
|
||||
|
||||
datasource_properties = {
|
||||
"driver - name": "h2",
|
||||
"connection - url": "jdbc:sqlserver://127.0.0.1:1433;DatabaseName=test_s2",
|
||||
"jndi - name": (
|
||||
"java:/home/ch44d/Desktop/sqljdbc_7.4/enu/mssql-jdbc-7.4.1.jre8.jar"
|
||||
),
|
||||
"user - name": "user",
|
||||
"password": "user",
|
||||
"use - java - context": True,
|
||||
}
|
||||
|
||||
read_datasource = MagicMock(
|
||||
return_value={"success": False, "err_code": "WFLYCTL0216"}
|
||||
)
|
||||
|
||||
error_msg = "Error: -1"
|
||||
create_datasource = MagicMock(return_value={"success": False, "stdout": error_msg})
|
||||
|
||||
with patch.dict(
|
||||
jboss7.__salt__,
|
||||
{
|
||||
"jboss7.read_datasource": read_datasource,
|
||||
"jboss7.create_datasource": create_datasource,
|
||||
},
|
||||
):
|
||||
ret = jboss7.datasource_exists("SQL", jboss_config, datasource_properties)
|
||||
|
||||
assert "result" in ret
|
||||
assert not ret["result"]
|
||||
assert "comment" in ret
|
||||
assert error_msg in ret["comment"]
|
||||
|
||||
read_datasource.assert_called_once()
|
||||
create_datasource.assert_called_once()
|
||||
|
||||
|
||||
def test_datasource_exist_create_datasource_bad_code():
|
||||
jboss_config = {
|
||||
"cli_path": "/home/ch44d/Desktop/wildfly-18.0.0.Final/bin/jboss-cli.sh",
|
||||
"controller": "127.0.0.1: 9990",
|
||||
"cli_user": "user",
|
||||
"cli_password": "user",
|
||||
}
|
||||
|
||||
datasource_properties = {
|
||||
"driver - name": "h2",
|
||||
"connection - url": "jdbc:sqlserver://127.0.0.1:1433;DatabaseName=test_s2",
|
||||
"jndi - name": (
|
||||
"java:/home/ch44d/Desktop/sqljdbc_7.4/enu/mssql-jdbc-7.4.1.jre8.jar"
|
||||
),
|
||||
"user - name": "user",
|
||||
"password": "user",
|
||||
"use - java - context": True,
|
||||
}
|
||||
|
||||
read_datasource = MagicMock(
|
||||
return_value={
|
||||
"success": False,
|
||||
"err_code": "WFLYCTL0217",
|
||||
"failure-description": "Something happened",
|
||||
}
|
||||
)
|
||||
|
||||
with patch.dict(jboss7.__salt__, {"jboss7.read_datasource": read_datasource}):
|
||||
pytest.raises(
|
||||
CommandExecutionError,
|
||||
jboss7.datasource_exists,
|
||||
"SQL",
|
||||
jboss_config,
|
||||
datasource_properties,
|
||||
)
|
||||
read_datasource.assert_called_once()
|
811
tests/pytests/unit/states/test_kubernetes.py
Normal file
@@ -0,0 +1,811 @@
"""
|
||||
:codeauthor: :email:`Jeff Schroeder <jeffschroeder@computer.org>`
|
||||
|
||||
Test cases for salt.states.kubernetes
|
||||
"""
|
||||
|
||||
import base64
|
||||
from contextlib import contextmanager
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.modules.kubernetesmod as kubernetesmod
|
||||
import salt.states.kubernetes as kubernetes
|
||||
import salt.utils.stringutils
|
||||
from tests.support.mock import MagicMock, patch
|
||||
|
||||
pytestmark = [
|
||||
pytest.mark.skipif(
|
||||
kubernetesmod.HAS_LIBS is False,
|
||||
reason="Kubernetes client lib is not installed.",
|
||||
)
|
||||
]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def configure_loader_modules():
|
||||
return {kubernetes: {"__env__": "base"}}
|
||||
|
||||
|
||||
@contextmanager
|
||||
def mock_func(func_name, return_value, test=False):
|
||||
"""
|
||||
Mock any of the kubernetes state function return values and set
|
||||
the test options.
|
||||
"""
|
||||
name = "kubernetes.{}".format(func_name)
|
||||
mocked = {name: MagicMock(return_value=return_value)}
|
||||
with patch.dict(kubernetes.__salt__, mocked) as patched:
|
||||
with patch.dict(kubernetes.__opts__, {"test": test}):
|
||||
yield patched
|
||||
|
||||
|
||||
def make_configmap(name, namespace="default", data=None):
|
||||
return make_ret_dict(
|
||||
kind="ConfigMap",
|
||||
name=name,
|
||||
namespace=namespace,
|
||||
data=data,
|
||||
)
|
||||
|
||||
|
||||
def make_secret(name, namespace="default", data=None):
|
||||
secret_data = make_ret_dict(
|
||||
kind="Secret",
|
||||
name=name,
|
||||
namespace=namespace,
|
||||
data=data,
|
||||
)
|
||||
# Base64 all of the values just like kubectl does
|
||||
for key, value in secret_data["data"].items():
|
||||
secret_data["data"][key] = base64.b64encode(
|
||||
salt.utils.stringutils.to_bytes(value)
|
||||
)
|
||||
|
||||
return secret_data
|
||||
|
||||
|
||||
def make_node_labels(name="minikube"):
|
||||
return {
|
||||
"kubernetes.io/hostname": name,
|
||||
"beta.kubernetes.io/os": "linux",
|
||||
"beta.kubernetes.io/arch": "amd64",
|
||||
"failure-domain.beta.kubernetes.io/region": "us-west-1",
|
||||
}
|
||||
|
||||
|
||||
def make_node(name="minikube"):
|
||||
node_data = make_ret_dict(kind="Node", name="minikube")
|
||||
node_data.update(
|
||||
{
|
||||
"api_version": "v1",
|
||||
"kind": "Node",
|
||||
"metadata": {
|
||||
"annotations": {"node.alpha.kubernetes.io/ttl": "0"},
|
||||
"labels": make_node_labels(name=name),
|
||||
"name": name,
|
||||
"namespace": None,
|
||||
"link": "/api/v1/nodes/{name}".format(name=name),
|
||||
"uid": "7811b8ae-c1a1-11e7-a55a-0800279fb61e",
|
||||
},
|
||||
"spec": {"external_id": name},
|
||||
"status": {},
|
||||
}
|
||||
)
|
||||
return node_data
|
||||
|
||||
|
||||
def make_namespace(name="default"):
|
||||
namespace_data = make_ret_dict(kind="Namespace", name=name)
|
||||
del namespace_data["data"]
|
||||
namespace_data.update(
|
||||
{
|
||||
"status": {"phase": "Active"},
|
||||
"spec": {"finalizers": ["kubernetes"]},
|
||||
"metadata": {
|
||||
"name": name,
|
||||
"namespace": None,
|
||||
"labels": None,
|
||||
"link": "/api/v1/namespaces/{namespace}".format(
|
||||
namespace=name,
|
||||
),
|
||||
"annotations": None,
|
||||
"uid": "752fceeb-c1a1-11e7-a55a-0800279fb61e",
|
||||
},
|
||||
}
|
||||
)
|
||||
return namespace_data
|
||||
|
||||
|
||||
def make_ret_dict(kind, name, namespace=None, data=None):
|
||||
"""
|
||||
Make a minimal example configmap or secret for using in mocks
|
||||
"""
|
||||
|
||||
assert kind in ("Secret", "ConfigMap", "Namespace", "Node")
|
||||
|
||||
if data is None:
|
||||
data = {}
|
||||
|
||||
link = "/api/v1/namespaces/{namespace}/{kind}s/{name}".format(
|
||||
namespace=namespace,
|
||||
kind=kind.lower(),
|
||||
name=name,
|
||||
)
|
||||
|
||||
return_data = {
|
||||
"kind": kind,
|
||||
"data": data,
|
||||
"api_version": "v1",
|
||||
"metadata": {
|
||||
"name": name,
|
||||
"labels": None,
|
||||
"namespace": namespace,
|
||||
"link": link,
|
||||
"annotations": {"kubernetes.io/change-cause": "salt-call state.apply"},
|
||||
},
|
||||
}
|
||||
return return_data
|
||||
|
||||
|
||||
def test_configmap_present__fail():
|
||||
error = kubernetes.configmap_present(
|
||||
name="testme",
|
||||
data={1: 1},
|
||||
source="salt://beyond/oblivion.jinja",
|
||||
)
|
||||
assert error == {
|
||||
"changes": {},
|
||||
"result": False,
|
||||
"name": "testme",
|
||||
"comment": "'source' cannot be used in combination with 'data'",
|
||||
}
|
||||
|
||||
|
||||
def test_configmap_present__create_test_true():
|
||||
# Create a new configmap with test=True
|
||||
with mock_func("show_configmap", return_value=None, test=True):
|
||||
ret = kubernetes.configmap_present(
|
||||
name="example",
|
||||
data={"example.conf": "# empty config file"},
|
||||
)
|
||||
assert ret == {
|
||||
"comment": "The configmap is going to be created",
|
||||
"changes": {},
|
||||
"name": "example",
|
||||
"result": None,
|
||||
}
|
||||
|
||||
|
||||
def test_configmap_present__create():
|
||||
# Create a new configmap
|
||||
with mock_func("show_configmap", return_value=None):
|
||||
cm = make_configmap(
|
||||
name="test",
|
||||
namespace="default",
|
||||
data={"foo": "bar"},
|
||||
)
|
||||
with mock_func("create_configmap", return_value=cm):
|
||||
actual = kubernetes.configmap_present(
|
||||
name="test",
|
||||
data={"foo": "bar"},
|
||||
)
|
||||
assert actual == {
|
||||
"comment": "",
|
||||
"changes": {"data": {"foo": "bar"}},
|
||||
"name": "test",
|
||||
"result": True,
|
||||
}
|
||||
|
||||
|
||||
def test_configmap_present__create_no_data():
|
||||
# Create a new configmap with no 'data' attribute
|
||||
with mock_func("show_configmap", return_value=None):
|
||||
cm = make_configmap(
|
||||
name="test",
|
||||
namespace="default",
|
||||
)
|
||||
with mock_func("create_configmap", return_value=cm):
|
||||
actual = kubernetes.configmap_present(name="test")
|
||||
assert actual == {
|
||||
"comment": "",
|
||||
"changes": {"data": {}},
|
||||
"name": "test",
|
||||
"result": True,
|
||||
}
|
||||
|
||||
|
||||
def test_configmap_present__replace_test_true():
|
||||
cm = make_configmap(
|
||||
name="settings",
|
||||
namespace="saltstack",
|
||||
data={"foobar.conf": "# Example configuration"},
|
||||
)
|
||||
with mock_func("show_configmap", return_value=cm, test=True):
|
||||
ret = kubernetes.configmap_present(
|
||||
name="settings",
|
||||
namespace="saltstack",
|
||||
data={"foobar.conf": "# Example configuration"},
|
||||
)
|
||||
assert ret == {
|
||||
"comment": "The configmap is going to be replaced",
|
||||
"changes": {},
|
||||
"name": "settings",
|
||||
"result": None,
|
||||
}
|
||||
|
||||
|
||||
def test_configmap_present__replace():
|
||||
cm = make_configmap(name="settings", data={"action": "make=war"})
|
||||
# Replace an existing configmap
|
||||
with mock_func("show_configmap", return_value=cm):
|
||||
new_cm = cm.copy()
|
||||
new_cm.update({"data": {"action": "make=peace"}})
|
||||
with mock_func("replace_configmap", return_value=new_cm):
|
||||
actual = kubernetes.configmap_present(
|
||||
name="settings",
|
||||
data={"action": "make=peace"},
|
||||
)
|
||||
assert actual == {
|
||||
"comment": ("The configmap is already present. Forcing recreation"),
|
||||
"changes": {"data": {"action": "make=peace"}},
|
||||
"name": "settings",
|
||||
"result": True,
|
||||
}
|
||||
|
||||
|
||||
def test_configmap_absent__noop_test_true():
|
||||
# Nothing to delete with test=True
|
||||
with mock_func("show_configmap", return_value=None, test=True):
|
||||
actual = kubernetes.configmap_absent(name="NOT_FOUND")
|
||||
assert actual == {
|
||||
"comment": "The configmap does not exist",
|
||||
"changes": {},
|
||||
"name": "NOT_FOUND",
|
||||
"result": None,
|
||||
}
|
||||
|
||||
|
||||
def test_configmap_absent__test_true():
|
||||
# Configmap exists with test=True
|
||||
cm = make_configmap(name="deleteme", namespace="default")
|
||||
with mock_func("show_configmap", return_value=cm, test=True):
|
||||
actual = kubernetes.configmap_absent(name="deleteme")
|
||||
assert actual == {
|
||||
"comment": "The configmap is going to be deleted",
|
||||
"changes": {},
|
||||
"name": "deleteme",
|
||||
"result": None,
|
||||
}
|
||||
|
||||
|
||||
def test_configmap_absent__noop():
|
||||
# Nothing to delete
|
||||
with mock_func("show_configmap", return_value=None):
|
||||
actual = kubernetes.configmap_absent(name="NOT_FOUND")
|
||||
assert actual == {
|
||||
"comment": "The configmap does not exist",
|
||||
"changes": {},
|
||||
"name": "NOT_FOUND",
|
||||
"result": True,
|
||||
}
|
||||
|
||||
|
||||
def test_configmap_absent():
|
||||
# Configmap exists, delete it!
|
||||
cm = make_configmap(name="deleteme", namespace="default")
|
||||
with mock_func("show_configmap", return_value=cm):
|
||||
# The return from this module isn't used in the state
|
||||
with mock_func("delete_configmap", return_value={}):
|
||||
actual = kubernetes.configmap_absent(name="deleteme")
|
||||
assert actual == {
|
||||
"comment": "ConfigMap deleted",
|
||||
"changes": {
|
||||
"kubernetes.configmap": {
|
||||
"new": "absent",
|
||||
"old": "present",
|
||||
},
|
||||
},
|
||||
"name": "deleteme",
|
||||
"result": True,
|
||||
}
|
||||
|
||||
|
||||
def test_secret_present__fail():
|
||||
actual = kubernetes.secret_present(
|
||||
name="sekret",
|
||||
data={"password": "monk3y"},
|
||||
source="salt://nope.jinja",
|
||||
)
|
||||
assert actual == {
|
||||
"changes": {},
|
||||
"result": False,
|
||||
"name": "sekret",
|
||||
"comment": "'source' cannot be used in combination with 'data'",
|
||||
}
|
||||
|
||||
|
||||
def test_secret_present__exists_test_true():
|
||||
secret = make_secret(name="sekret")
|
||||
new_secret = secret.copy()
|
||||
new_secret.update({"data": {"password": "uncle"}})
|
||||
# Secret exists already and needs replacing with test=True
|
||||
with mock_func("show_secret", return_value=secret):
|
||||
with mock_func("replace_secret", return_value=new_secret, test=True):
|
||||
actual = kubernetes.secret_present(
|
||||
name="sekret",
|
||||
data={"password": "uncle"},
|
||||
)
|
||||
assert actual == {
|
||||
"changes": {},
|
||||
"result": None,
|
||||
"name": "sekret",
|
||||
"comment": "The secret is going to be replaced",
|
||||
}
|
||||
|
||||
|
||||
def test_secret_present__exists():
|
||||
# Secret exists and gets replaced
|
||||
secret = make_secret(name="sekret", data={"password": "booyah"})
|
||||
with mock_func("show_secret", return_value=secret):
|
||||
with mock_func("replace_secret", return_value=secret):
|
||||
actual = kubernetes.secret_present(
|
||||
name="sekret",
|
||||
data={"password": "booyah"},
|
||||
)
|
||||
assert actual == {
|
||||
"changes": {"data": ["password"]},
|
||||
"result": True,
|
||||
"name": "sekret",
|
||||
"comment": "The secret is already present. Forcing recreation",
|
||||
}
|
||||
|
||||
|
||||
def test_secret_present__create():
|
||||
# Secret exists and gets replaced
|
||||
secret = make_secret(name="sekret", data={"password": "booyah"})
|
||||
with mock_func("show_secret", return_value=None):
|
||||
with mock_func("create_secret", return_value=secret):
|
||||
actual = kubernetes.secret_present(
|
||||
name="sekret",
|
||||
data={"password": "booyah"},
|
||||
)
|
||||
assert actual == {
|
||||
"changes": {"data": ["password"]},
|
||||
"result": True,
|
||||
"name": "sekret",
|
||||
"comment": "",
|
||||
}
|
||||
|
||||
|
||||
def test_secret_present__create_no_data():
|
||||
# Secret exists and gets replaced
|
||||
secret = make_secret(name="sekret")
|
||||
with mock_func("show_secret", return_value=None):
|
||||
with mock_func("create_secret", return_value=secret):
|
||||
actual = kubernetes.secret_present(name="sekret")
|
||||
assert actual == {
|
||||
"changes": {"data": []},
|
||||
"result": True,
|
||||
"name": "sekret",
|
||||
"comment": "",
|
||||
}
|
||||
|
||||
|
||||
def test_secret_present__create_test_true():
|
||||
# Secret exists and gets replaced with test=True
|
||||
secret = make_secret(name="sekret")
|
||||
with mock_func("show_secret", return_value=None):
|
||||
with mock_func("create_secret", return_value=secret, test=True):
|
||||
actual = kubernetes.secret_present(name="sekret")
|
||||
assert actual == {
|
||||
"changes": {},
|
||||
"result": None,
|
||||
"name": "sekret",
|
||||
"comment": "The secret is going to be created",
|
||||
}
|
||||
|
||||
|
||||
def test_secret_absent__noop_test_true():
|
||||
with mock_func("show_secret", return_value=None, test=True):
|
||||
actual = kubernetes.secret_absent(name="sekret")
|
||||
assert actual == {
|
||||
"changes": {},
|
||||
"result": None,
|
||||
"name": "sekret",
|
||||
"comment": "The secret does not exist",
|
||||
}
|
||||
|
||||
|
||||
def test_secret_absent__noop():
|
||||
with mock_func("show_secret", return_value=None):
|
||||
actual = kubernetes.secret_absent(name="passwords")
|
||||
assert actual == {
|
||||
"changes": {},
|
||||
"result": True,
|
||||
"name": "passwords",
|
||||
"comment": "The secret does not exist",
|
||||
}
|
||||
|
||||
|
||||
def test_secret_absent__delete_test_true():
|
||||
secret = make_secret(name="credentials", data={"redis": "letmein"})
|
||||
with mock_func("show_secret", return_value=secret):
|
||||
with mock_func("delete_secret", return_value=secret, test=True):
|
||||
actual = kubernetes.secret_absent(name="credentials")
|
||||
assert actual == {
|
||||
"changes": {},
|
||||
"result": None,
|
||||
"name": "credentials",
|
||||
"comment": "The secret is going to be deleted",
|
||||
}
|
||||
|
||||
|
||||
def test_secret_absent__delete():
|
||||
secret = make_secret(name="foobar", data={"redis": "letmein"})
|
||||
deleted = {
|
||||
"status": None,
|
||||
"kind": "Secret",
|
||||
"code": None,
|
||||
"reason": None,
|
||||
"details": None,
|
||||
"message": None,
|
||||
"api_version": "v1",
|
||||
"metadata": {
|
||||
"link": "/api/v1/namespaces/default/secrets/foobar",
|
||||
"resource_version": "30292",
|
||||
},
|
||||
}
|
||||
with mock_func("show_secret", return_value=secret):
|
||||
with mock_func("delete_secret", return_value=deleted):
|
||||
actual = kubernetes.secret_absent(name="foobar")
|
||||
assert actual == {
|
||||
"changes": {
|
||||
"kubernetes.secret": {"new": "absent", "old": "present"},
|
||||
},
|
||||
"result": True,
|
||||
"name": "foobar",
|
||||
"comment": "Secret deleted",
|
||||
}
|
||||
|
||||
|
||||
def test_node_label_present__add_test_true():
|
||||
labels = make_node_labels()
|
||||
with mock_func("node_labels", return_value=labels, test=True):
|
||||
actual = kubernetes.node_label_present(
|
||||
name="com.zoo-animal",
|
||||
node="minikube",
|
||||
value="monkey",
|
||||
)
|
||||
assert actual == {
|
||||
"changes": {},
|
||||
"result": None,
|
||||
"name": "com.zoo-animal",
|
||||
"comment": "The label is going to be set",
|
||||
}
|
||||
|
||||
|
||||
def test_node_label_present__add():
|
||||
node_data = make_node()
|
||||
# Remove some of the defaults to make it simpler
|
||||
node_data["metadata"]["labels"] = {
|
||||
"beta.kubernetes.io/os": "linux",
|
||||
}
|
||||
labels = node_data["metadata"]["labels"]
|
||||
|
||||
with mock_func("node_labels", return_value=labels):
|
||||
with mock_func("node_add_label", return_value=node_data):
|
||||
actual = kubernetes.node_label_present(
|
||||
name="failure-domain.beta.kubernetes.io/zone",
|
||||
node="minikube",
|
||||
value="us-central1-a",
|
||||
)
|
||||
assert actual == {
|
||||
"comment": "",
|
||||
"changes": {
|
||||
"minikube.failure-domain.beta.kubernetes.io/zone": {
|
||||
"new": {
|
||||
"failure-domain.beta.kubernetes.io/zone": ("us-central1-a"),
|
||||
"beta.kubernetes.io/os": "linux",
|
||||
},
|
||||
"old": {"beta.kubernetes.io/os": "linux"},
|
||||
},
|
||||
},
|
||||
"name": "failure-domain.beta.kubernetes.io/zone",
|
||||
"result": True,
|
||||
}
|
||||
|
||||
|
||||
def test_node_label_present__already_set():
|
||||
node_data = make_node()
|
||||
labels = node_data["metadata"]["labels"]
|
||||
with mock_func("node_labels", return_value=labels):
|
||||
with mock_func("node_add_label", return_value=node_data):
|
||||
actual = kubernetes.node_label_present(
|
||||
name="failure-domain.beta.kubernetes.io/region",
|
||||
node="minikube",
|
||||
value="us-west-1",
|
||||
)
|
||||
assert actual == {
|
||||
"changes": {},
|
||||
"result": True,
|
||||
"name": "failure-domain.beta.kubernetes.io/region",
|
||||
"comment": ("The label is already set and has the specified value"),
|
||||
}
|
||||
|
||||
|
||||
def test_node_label_present__update_test_true():
|
||||
node_data = make_node()
|
||||
labels = node_data["metadata"]["labels"]
|
||||
with mock_func("node_labels", return_value=labels):
|
||||
with mock_func("node_add_label", return_value=node_data, test=True):
|
||||
actual = kubernetes.node_label_present(
|
||||
name="failure-domain.beta.kubernetes.io/region",
|
||||
node="minikube",
|
||||
value="us-east-1",
|
||||
)
|
||||
assert actual == {
|
||||
"changes": {},
|
||||
"result": None,
|
||||
"name": "failure-domain.beta.kubernetes.io/region",
|
||||
"comment": "The label is going to be updated",
|
||||
}
|
||||
|
||||
|
||||
def test_node_label_present__update():
|
||||
node_data = make_node()
|
||||
# Remove some of the defaults to make it simpler
|
||||
node_data["metadata"]["labels"] = {
|
||||
"failure-domain.beta.kubernetes.io/region": "us-west-1",
|
||||
}
|
||||
labels = node_data["metadata"]["labels"]
|
||||
with mock_func("node_labels", return_value=labels):
|
||||
with mock_func("node_add_label", return_value=node_data):
|
||||
actual = kubernetes.node_label_present(
|
||||
name="failure-domain.beta.kubernetes.io/region",
|
||||
node="minikube",
|
||||
value="us-east-1",
|
||||
)
|
||||
assert actual == {
|
||||
"changes": {
|
||||
"minikube.failure-domain.beta.kubernetes.io/region": {
|
||||
"new": {
|
||||
"failure-domain.beta.kubernetes.io/region": ("us-east-1")
|
||||
},
|
||||
"old": {
|
||||
"failure-domain.beta.kubernetes.io/region": ("us-west-1")
|
||||
},
|
||||
}
|
||||
},
|
||||
"result": True,
|
||||
"name": "failure-domain.beta.kubernetes.io/region",
|
||||
"comment": "The label is already set, changing the value",
|
||||
}
|
||||
|
||||
|
||||
def test_node_label_absent__noop_test_true():
|
||||
labels = make_node_labels()
|
||||
with mock_func("node_labels", return_value=labels, test=True):
|
||||
actual = kubernetes.node_label_absent(
|
||||
name="non-existent-label",
|
||||
node="minikube",
|
||||
)
|
||||
assert actual == {
|
||||
"changes": {},
|
||||
"result": None,
|
||||
"name": "non-existent-label",
|
||||
"comment": "The label does not exist",
|
||||
}
|
||||
|
||||
|
||||
def test_node_label_absent__noop():
|
||||
labels = make_node_labels()
|
||||
with mock_func("node_labels", return_value=labels):
|
||||
actual = kubernetes.node_label_absent(
|
||||
name="non-existent-label",
|
||||
node="minikube",
|
||||
)
|
||||
assert actual == {
|
||||
"changes": {},
|
||||
"result": True,
|
||||
"name": "non-existent-label",
|
||||
"comment": "The label does not exist",
|
||||
}
|
||||
|
||||
|
||||
def test_node_label_absent__delete_test_true():
|
||||
labels = make_node_labels()
|
||||
with mock_func("node_labels", return_value=labels, test=True):
|
||||
actual = kubernetes.node_label_absent(
|
||||
name="failure-domain.beta.kubernetes.io/region",
|
||||
node="minikube",
|
||||
)
|
||||
assert actual == {
|
||||
"changes": {},
|
||||
"result": None,
|
||||
"name": "failure-domain.beta.kubernetes.io/region",
|
||||
"comment": "The label is going to be deleted",
|
||||
}
|
||||
|
||||
|
||||
def test_node_label_absent__delete():
|
||||
node_data = make_node()
|
||||
labels = node_data["metadata"]["labels"].copy()
|
||||
|
||||
node_data["metadata"]["labels"].pop("failure-domain.beta.kubernetes.io/region")
|
||||
|
||||
with mock_func("node_labels", return_value=labels):
|
||||
with mock_func("node_remove_label", return_value=node_data):
|
||||
actual = kubernetes.node_label_absent(
|
||||
name="failure-domain.beta.kubernetes.io/region",
|
||||
node="minikube",
|
||||
)
|
||||
assert actual == {
|
||||
"result": True,
|
||||
"changes": {
|
||||
"kubernetes.node_label": {
|
||||
"new": "absent",
|
||||
"old": "present",
|
||||
}
|
||||
},
|
||||
"comment": "Label removed from node",
|
||||
"name": "failure-domain.beta.kubernetes.io/region",
|
||||
}
|
||||
|
||||
|
||||
def test_namespace_present__create_test_true():
|
||||
with mock_func("show_namespace", return_value=None, test=True):
|
||||
actual = kubernetes.namespace_present(name="saltstack")
|
||||
assert actual == {
|
||||
"changes": {},
|
||||
"result": None,
|
||||
"name": "saltstack",
|
||||
"comment": "The namespace is going to be created",
|
||||
}
|
||||
|
||||
|
||||
def test_namespace_present__create():
|
||||
namespace_data = make_namespace(name="saltstack")
|
||||
with mock_func("show_namespace", return_value=None):
|
||||
with mock_func("create_namespace", return_value=namespace_data):
|
||||
actual = kubernetes.namespace_present(name="saltstack")
|
||||
assert actual == {
|
||||
"changes": {"namespace": {"new": namespace_data, "old": {}}},
|
||||
"result": True,
|
||||
"name": "saltstack",
|
||||
"comment": "",
|
||||
}
|
||||
|
||||
|
||||
def test_namespace_present__noop_test_true():
|
||||
namespace_data = make_namespace(name="saltstack")
|
||||
with mock_func("show_namespace", return_value=namespace_data, test=True):
|
||||
actual = kubernetes.namespace_present(name="saltstack")
|
||||
assert actual == {
|
||||
"changes": {},
|
||||
"result": None,
|
||||
"name": "saltstack",
|
||||
"comment": "The namespace already exists",
|
||||
}
|
||||
|
||||
|
||||
def test_namespace_present__noop():
|
||||
namespace_data = make_namespace(name="saltstack")
|
||||
with mock_func("show_namespace", return_value=namespace_data):
|
||||
actual = kubernetes.namespace_present(name="saltstack")
|
||||
assert actual == {
|
||||
"changes": {},
|
||||
"result": True,
|
||||
"name": "saltstack",
|
||||
"comment": "The namespace already exists",
|
||||
}
|
||||
|
||||
|
||||
def test_namespace_absent__noop_test_true():
|
||||
with mock_func("show_namespace", return_value=None, test=True):
|
||||
actual = kubernetes.namespace_absent(name="salt")
|
||||
assert actual == {
|
||||
"changes": {},
|
||||
"result": None,
|
||||
"name": "salt",
|
||||
"comment": "The namespace does not exist",
|
||||
}
|
||||
|
||||
|
||||
def test_namespace_absent__noop():
|
||||
with mock_func("show_namespace", return_value=None):
|
||||
actual = kubernetes.namespace_absent(name="salt")
|
||||
assert actual == {
|
||||
"changes": {},
|
||||
"result": True,
|
||||
"name": "salt",
|
||||
"comment": "The namespace does not exist",
|
||||
}
|
||||
|
||||
|
||||
def test_namespace_absent__delete_test_true():
|
||||
namespace_data = make_namespace(name="salt")
|
||||
with mock_func("show_namespace", return_value=namespace_data, test=True):
|
||||
actual = kubernetes.namespace_absent(name="salt")
|
||||
assert actual == {
|
||||
"changes": {},
|
||||
"result": None,
|
||||
"name": "salt",
|
||||
"comment": "The namespace is going to be deleted",
|
||||
}
|
||||
|
||||
|
||||
def test_namespace_absent__delete_code_200():
|
||||
namespace_data = make_namespace(name="salt")
|
||||
deleted = namespace_data.copy()
|
||||
deleted["code"] = 200
|
||||
deleted.update({"code": 200, "message": None})
|
||||
with mock_func("show_namespace", return_value=namespace_data):
|
||||
with mock_func("delete_namespace", return_value=deleted):
|
||||
actual = kubernetes.namespace_absent(name="salt")
|
||||
assert actual == {
|
||||
"changes": {
|
||||
"kubernetes.namespace": {"new": "absent", "old": "present"}
|
||||
},
|
||||
"result": True,
|
||||
"name": "salt",
|
||||
"comment": "Terminating",
|
||||
}
|
||||
|
||||
|
||||
def test_namespace_absent__delete_status_terminating():
|
||||
namespace_data = make_namespace(name="salt")
|
||||
deleted = namespace_data.copy()
|
||||
deleted.update(
|
||||
{
|
||||
"code": None,
|
||||
"status": "Terminating namespace",
|
||||
"message": "Terminating this shizzzle yo",
|
||||
}
|
||||
)
|
||||
with mock_func("show_namespace", return_value=namespace_data):
|
||||
with mock_func("delete_namespace", return_value=deleted):
|
||||
actual = kubernetes.namespace_absent(name="salt")
|
||||
assert actual == {
|
||||
"changes": {
|
||||
"kubernetes.namespace": {"new": "absent", "old": "present"}
|
||||
},
|
||||
"result": True,
|
||||
"name": "salt",
|
||||
"comment": "Terminating this shizzzle yo",
|
||||
}
|
||||
|
||||
|
||||
def test_namespace_absent__delete_status_phase_terminating():
|
||||
# This is what kubernetes 1.8.0 looks like when deleting namespaces
|
||||
namespace_data = make_namespace(name="salt")
|
||||
deleted = namespace_data.copy()
|
||||
deleted.update({"code": None, "message": None, "status": {"phase": "Terminating"}})
|
||||
with mock_func("show_namespace", return_value=namespace_data):
|
||||
with mock_func("delete_namespace", return_value=deleted):
|
||||
actual = kubernetes.namespace_absent(name="salt")
|
||||
assert actual == {
|
||||
"changes": {
|
||||
"kubernetes.namespace": {"new": "absent", "old": "present"}
|
||||
},
|
||||
"result": True,
|
||||
"name": "salt",
|
||||
"comment": "Terminating",
|
||||
}
|
||||
|
||||
|
||||
def test_namespace_absent__delete_error():
|
||||
namespace_data = make_namespace(name="salt")
|
||||
deleted = namespace_data.copy()
|
||||
deleted.update({"code": 418, "message": "I' a teapot!", "status": None})
|
||||
with mock_func("show_namespace", return_value=namespace_data):
|
||||
with mock_func("delete_namespace", return_value=deleted):
|
||||
actual = kubernetes.namespace_absent(name="salt")
|
||||
assert actual == {
|
||||
"changes": {},
|
||||
"result": False,
|
||||
"name": "salt",
|
||||
"comment": "Something went wrong, response: {}".format(
|
||||
deleted,
|
||||
),
|
||||
}
|
@@ -7,6 +7,8 @@ import pytest
import salt.states.selinux as selinux
from tests.support.mock import MagicMock, patch

pytestmark = [pytest.mark.skip_unless_on_linux]


@pytest.fixture
def configure_loader_modules():
@@ -123,8 +123,8 @@ def test_present_invalid_gid_change():
    )
    dunder_salt = {
        "user.info": mock_info,
        "file.group_to_gid": MagicMock(side_effect=["foo"]),
        "file.gid_to_group": MagicMock(side_effect=[5000, 5000]),
        "file.group_to_gid": MagicMock(return_value="foo"),
        "file.gid_to_group": MagicMock(return_value=5000),
    }
    with patch.dict(user.__grains__, {"kernel": "Linux"}), patch.dict(
        user.__salt__, dunder_salt

@@ -148,8 +148,8 @@ def test_present_invalid_uid_gid_change():
    )
    dunder_salt = {
        "user.info": mock_info,
        "file.group_to_gid": MagicMock(side_effect=["foo"]),
        "file.gid_to_group": MagicMock(side_effect=[5000, 5000]),
        "file.group_to_gid": MagicMock(return_value="foo"),
        "file.gid_to_group": MagicMock(return_value=5000),
    }
    with patch.dict(user.__grains__, {"kernel": "Linux"}), patch.dict(
        user.__salt__, dunder_salt

@@ -179,7 +179,7 @@ def test_present_uid_gid_change():
    # get the before/after for the changes dict, and one last time to
    # confirm that no changes still need to be made.
    mock_info = MagicMock(side_effect=[before, before, after, after])
    mock_group_to_gid = MagicMock(side_effect=[5000, 5001])
    mock_group_to_gid = MagicMock(side_effect=[5000, 5000, 5001, 5001])
    mock_gid_to_group = MagicMock(
        side_effect=["othergroup", "foo", "othergroup", "othergroup"]
    )

@@ -254,12 +254,11 @@ def test_changes():
        "file.gid_to_group": MagicMock(side_effect=[5000, 5000]),
    }

    def mock_exists(*args):
        return True

    with patch.dict(user.__grains__, {"kernel": "Linux"}), patch.dict(
        user.__salt__, dunder_salt
    ), patch.dict(user.__opts__, {"test": False}), patch("os.path.isdir", mock_exists):
    ), patch.dict(user.__opts__, {"test": False}), patch(
        "os.path.isdir", MagicMock(return_value=True)
    ):
        ret = user._changes("foo", maxdays=999999, inactdays=0, warndays=7)
        assert ret == {
            "maxdays": 999999,

@@ -459,3 +458,43 @@ def test_present_password_unlock():
    else:
        unlock_password.assert_called_once()
        unlock_account.assert_not_called()


@pytest.mark.parametrize(
    "current,wanted,remove,return_value,expected",
    [
        (["grp1"], ["grp1"], False, MagicMock(return_value={"gid": 100}), False),
        (
            ["grp1"],
            ["grp1", "grp2"],
            False,
            MagicMock(side_effect=[{"gid": 100}, {"gid": 200}]),
            True,
        ),
        (
            ["grp1"],
            ["grp1", "grp2"],
            False,
            MagicMock(side_effect=[{"gid": 100}, {"gid": 100}]),
            False,
        ),
        (
            ["grp1", "grp2"],
            ["grp1"],
            True,
            MagicMock(side_effect=[{"gid": 100}, {"gid": 200}]),
            True,
        ),
        (
            ["grp1", "grp2"],
            ["grp1"],
            True,
            MagicMock(side_effect=[{"gid": 100}, {"gid": 100}]),
            False,
        ),
    ],
)
def test__group_changes(current, wanted, remove, return_value, expected):
    with patch.dict(user.__salt__, {"group.info": return_value}):
        ret = user._group_changes(current, wanted, remove)
        assert ret == expected
@@ -217,6 +217,38 @@ def test_machine_value_present(empty_reg_pol_mach):
    assert result == expected


def test_machine_value_present_similar_names(empty_reg_pol_mach):
    """
    Test value.present in Machine policy
    """
    lgpo_reg.value_present(
        name="MyValueTwo",
        key="SOFTWARE\\MyKey1",
        v_data="1",
        v_type="REG_DWORD",
    )
    lgpo_reg.value_present(
        name="MyValue",
        key="SOFTWARE\\MyKey1",
        v_data="1",
        v_type="REG_DWORD",
    )
    expected = {
        "SOFTWARE\\MyKey1": {
            "MyValue": {
                "type": "REG_DWORD",
                "data": 1,
            },
            "MyValueTwo": {
                "type": "REG_DWORD",
                "data": 1,
            },
        },
    }
    result = win_lgpo_reg.read_reg_pol(policy_class="Machine")
    assert result == expected


def test_machine_value_present_enforce(reg_pol_mach):
    """
    Issue #64222

@@ -614,6 +646,40 @@ def test_user_value_present(empty_reg_pol_user):
    assert result == expected


def test_user_value_present_similar_names(empty_reg_pol_user):
    """
    Test value.present in User policy
    """
    lgpo_reg.value_present(
        name="MyValueTwo",
        key="SOFTWARE\\MyKey1",
        v_data="1",
        v_type="REG_DWORD",
        policy_class="User",
    )
    lgpo_reg.value_present(
        name="MyValue",
        key="SOFTWARE\\MyKey1",
        v_data="1",
        v_type="REG_DWORD",
        policy_class="User",
    )
    expected = {
        "SOFTWARE\\MyKey1": {
            "MyValue": {
                "type": "REG_DWORD",
                "data": 1,
            },
            "MyValueTwo": {
                "type": "REG_DWORD",
                "data": 1,
            },
        },
    }
    result = win_lgpo_reg.read_reg_pol(policy_class="User")
    assert result == expected


def test_user_value_present_existing_change(reg_pol_user):
    """
    Test value.present with existing incorrect value in User policy
|
@@ -1,19 +1,122 @@
import io

import pytest

import salt.state
from salt import template
from salt.config import minion_config
from tests.support.mock import MagicMock, patch


def test_compile_template_str_mkstemp_cleanup():
@pytest.fixture
def render_dict():
    return {
        "jinja": "fake_jinja_func",
        "json": "fake_json_func",
        "mako": "fake_make_func",
    }


def test_compile_template_str_mkstemp_cleanup(minion_opts):
    minion_opts["file_client"] = "local"
    with patch("os.unlink", MagicMock()) as unlinked:
        _config = minion_config(None)
        _config["file_client"] = "local"
        _state = salt.state.State(_config)
        assert template.compile_template_str(
        _state = salt.state.State(minion_opts)
        ret = template.compile_template_str(
            "{'val':'test'}",
            _state.rend,
            _state.opts["renderer"],
            _state.opts["renderer_blacklist"],
            _state.opts["renderer_whitelist"],
        ) == {"val": "test"}
        )
        assert ret == {"val": "test"}
        unlinked.assert_called_once()


def test_compile_template_bad_type():
    """
    Test to ensure that unsupported types cannot be passed to the template compiler
    """
    ret = template.compile_template(["1", "2", "3"], None, None, None, None)
    assert ret == {}


def test_compile_template_preserves_windows_newlines():
    """
    Test to ensure that a file with Windows newlines, when rendered by a
    template renderer, does not eat the CR character.
    """

    def _get_rend(renderer, value):
        """
        We need a new MagicMock each time since we're dealing with StringIO
        objects which are read like files.
        """
        return {renderer: MagicMock(return_value=io.StringIO(value))}

    input_data_windows = "foo\r\nbar\r\nbaz\r\n"
    input_data_non_windows = input_data_windows.replace("\r\n", "\n")
    renderer = "test"
    blacklist = whitelist = []

    ret = template.compile_template(
        ":string:",
        _get_rend(renderer, input_data_non_windows),
        renderer,
        blacklist,
        whitelist,
        input_data=input_data_windows,
    ).read()
    # Even though the mocked renderer returned a string without the windows
    # newlines, the compiled template should still have them.
    assert ret == input_data_windows

    # Now test that we aren't adding them in unnecessarily.
    ret = template.compile_template(
        ":string:",
        _get_rend(renderer, input_data_non_windows),
        renderer,
        blacklist,
        whitelist,
        input_data=input_data_non_windows,
    ).read()
    assert ret == input_data_non_windows

    # Finally, ensure that we're not unnecessarily replacing the \n with
    # \r\n in the event that the renderer returned a string with the
    # windows newlines intact.
    ret = template.compile_template(
        ":string:",
        _get_rend(renderer, input_data_windows),
        renderer,
        blacklist,
        whitelist,
        input_data=input_data_windows,
    ).read()
    assert ret == input_data_windows


def test_check_render_pipe_str(render_dict):
    """
    Check that all renderers specified in the pipe string are available.
    """
    ret = template.check_render_pipe_str("jinja|json", render_dict, None, None)
    assert ("fake_jinja_func", "") in ret
    assert ("fake_json_func", "") in ret
    assert ("OBVIOUSLY_NOT_HERE", "") not in ret


def test_check_renderer_blacklisting(render_dict):
    """
    Check that all renderers specified in the pipe string are available.
    """
    ret = template.check_render_pipe_str("jinja|json", render_dict, ["jinja"], None)
    assert ret == [("fake_json_func", "")]
    ret = template.check_render_pipe_str("jinja|json", render_dict, None, ["jinja"])
    assert ret == [("fake_jinja_func", "")]
    ret = template.check_render_pipe_str(
        "jinja|json", render_dict, ["jinja"], ["jinja"]
    )
    assert ret == []
    ret = template.check_render_pipe_str(
        "jinja|json", render_dict, ["jinja"], ["jinja", "json"]
    )
    assert ret == [("fake_json_func", "")]
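The two tests above only assert that check_render_pipe_str resolves "jinja|json" into (renderer, argline) pairs while honoring the blacklist and whitelist; actually feeding data through the resolved pipe happens elsewhere in salt/template.py. A rough, self-contained illustration of how such a pipe behaves (the fake renderers and the apply_pipe helper below are stand-ins, not Salt's real renderer signatures):

import io


def fake_jinja(buf, argline=""):
    # pretend to render Jinja by substituting the one known variable
    return io.StringIO(buf.read().replace("{{ val }}", "test"))


def fake_yaml(buf, argline=""):
    # pretend to load YAML into a dict
    key, _, value = buf.read().partition(":")
    return {key.strip(): value.strip()}


def apply_pipe(data, pipe):
    """Feed each renderer's output into the next one, like 'jinja|yaml'."""
    result = io.StringIO(data)
    for render, argline in pipe:
        result = render(result, argline=argline)
    return result


assert apply_pipe("val: {{ val }}", [(fake_jinja, ""), (fake_yaml, "")]) == {"val": "test"}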
0 tests/pytests/unit/utils/templates/__init__.py Normal file
9 tests/pytests/unit/utils/templates/conftest.py Normal file
@@ -0,0 +1,9 @@
import pytest


@pytest.fixture
def render_context():
    return {
        "opts": {"cachedir": "/D", "__cli": "salt"},
        "saltenv": None,
    }
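This render_context fixture is picked up automatically by the sibling renderer test modules in this directory; pytest injects it by argument name. A minimal hypothetical consumer (illustrative only, not part of the change set) would look like:

def test_render_context_shape(render_context):
    # names mirror the fixture above; this test is illustrative only
    assert render_context["opts"]["__cli"] == "salt"
    assert render_context["saltenv"] is None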
47 tests/pytests/unit/utils/templates/test_cheetah.py Normal file
@@ -0,0 +1,47 @@
import pytest
from salt.utils.templates import render_cheetah_tmpl

pytest.importorskip("Cheetah")


def test_render_sanity(render_context):
    tmpl = """OK"""
    res = render_cheetah_tmpl(tmpl, render_context)
    assert res == "OK"


def test_render_evaluate(render_context):
    tmpl = """<%="OK"%>"""
    res = render_cheetah_tmpl(tmpl, render_context)
    assert res == "OK"


def test_render_evaluate_xml(render_context):
    tmpl = """
<% if 1: %>
OK
<% pass %>
"""
    res = render_cheetah_tmpl(tmpl, render_context)
    stripped = res.strip()
    assert stripped == "OK"


def test_render_evaluate_text(render_context):
    tmpl = """
#if 1
OK
#end if
"""

    res = render_cheetah_tmpl(tmpl, render_context)
    stripped = res.strip()
    assert stripped == "OK"


def test_render_variable(render_context):
    tmpl = """$var"""

    render_context["var"] = "OK"
    res = render_cheetah_tmpl(tmpl, render_context)
    assert res.strip() == "OK"
36 tests/pytests/unit/utils/templates/test_genshi.py Normal file
@@ -0,0 +1,36 @@
import pytest
from salt.utils.templates import render_genshi_tmpl

pytest.importorskip("genshi")


def test_render_sanity(render_context):
    tmpl = """<RU>OK</RU>"""
    res = render_genshi_tmpl(tmpl, render_context)
    assert res == "<RU>OK</RU>"


def test_render_evaluate(render_context):
    tmpl = """<RU>${ "OK" }</RU>"""
    res = render_genshi_tmpl(tmpl, render_context)
    assert res == "<RU>OK</RU>"


def test_render_evaluate_condition(render_context):
    tmpl = """<RU xmlns:py="http://genshi.edgewall.org/" py:if="1">OK</RU>"""
    res = render_genshi_tmpl(tmpl, render_context)
    assert res == "<RU>OK</RU>"


def test_render_variable(render_context):
    tmpl = """<RU>$var</RU>"""
    render_context["var"] = "OK"
    res = render_genshi_tmpl(tmpl, render_context)
    assert res == "<RU>OK</RU>"


def test_render_variable_replace(render_context):
    tmpl = """<RU xmlns:py="http://genshi.edgewall.org/" py:content="var">not ok</RU>"""
    render_context["var"] = "OK"
    res = render_genshi_tmpl(tmpl, render_context)
    assert res == "<RU>OK</RU>"
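For context, render_genshi_tmpl defaults to Genshi's XML (markup) template mode, which is what the tests above exercise; a rough equivalent using Genshi directly rather than Salt's wrapper (assuming genshi is installed, and not the exact code path inside salt.utils.templates) is:

from genshi.template import MarkupTemplate

stream = MarkupTemplate('<RU xmlns:py="http://genshi.edgewall.org/" py:if="1">OK</RU>').generate(var="OK")
# Genshi strips the py: namespace on output, leaving only the conditional result.
assert stream.render("xml") == "<RU>OK</RU>"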
@@ -1,41 +1,17 @@
"""
Tests for salt.utils.templates
"""
import os
import re

from collections import OrderedDict
import pytest
from salt.exceptions import SaltRenderError
from salt.utils.templates import render_jinja_tmpl


@pytest.fixture
def minion_opts(tmp_path, minion_opts):
    minion_opts.update(
        {
            "cachedir": str(tmp_path / "jinja-template-cache"),
            "file_buffer_size": 1048576,
            "file_client": "local",
            "file_ignore_regex": None,
            "file_ignore_glob": None,
            "file_roots": {"test": [str(tmp_path / "templates")]},
            "pillar_roots": {"test": [str(tmp_path / "templates")]},
            "fileserver_backend": ["roots"],
            "hash_type": "md5",
            "extension_modules": os.path.join(
                os.path.dirname(os.path.abspath(__file__)), "extmods"
            ),
        }
    )
    return minion_opts
from tests.support.mock import patch


@pytest.fixture
def local_salt():
    return {}


def test_jinja_undefined_error_context(minion_opts, local_salt):
def test_undefined_error_context(render_context):
    """
    Test that jinja provides both the line number on which the error occurred
    in the Jinja template, and also several lines of context around the error
@@ -63,5 +39,64 @@ def test_jinja_undefined_error_context(minion_opts, local_salt):
    with pytest.raises(SaltRenderError, match=match_regex):
        render_jinja_tmpl(
            jinja_code,
            dict(opts=minion_opts, saltenv="test", salt=local_salt),
            render_context,
        )


def test_render_sanity(render_context):
    tmpl = """OK"""
    res = render_jinja_tmpl(tmpl, render_context)
    assert res == "OK"


def test_render_evaluate(render_context):
    tmpl = """{{ "OK" }}"""
    res = render_jinja_tmpl(tmpl, render_context)
    assert res == "OK"


def test_render_evaluate_multi(render_context):
    tmpl = """{% if 1 -%}OK{%- endif %}"""
    res = render_jinja_tmpl(tmpl, render_context)
    assert res == "OK"


def test_render_variable(render_context):
    tmpl = """{{ var }}"""
    render_context["var"] = "OK"
    res = render_jinja_tmpl(tmpl, render_context)
    assert res == "OK"


def test_render_tojson_sorted(render_context):
    templ = """thing: {{ var|tojson(sort_keys=True) }}"""
    expected = """thing: {"x": "xxx", "y": "yyy", "z": "zzz"}"""

    with patch.dict(render_context, {"var": {"z": "zzz", "y": "yyy", "x": "xxx"}}):
        res = render_jinja_tmpl(templ, render_context)

    assert res == expected


def test_render_tojson_unsorted(render_context):
    templ = """thing: {{ var|tojson(sort_keys=False) }}"""
    expected = """thing: {"z": "zzz", "x": "xxx", "y": "yyy"}"""

    # Values must be added to the dict in the expected order. This is
    # only necessary for older Pythons that don't remember dict order.
    d = OrderedDict()
    d["z"] = "zzz"
    d["x"] = "xxx"
    d["y"] = "yyy"

    with patch.dict(render_context, {"var": d}):
        res = render_jinja_tmpl(templ, render_context)

    assert res == expected


def test_render_cve_2021_25283(render_context):
    tmpl = """{{ [].__class__ }}"""
    render_context["var"] = "OK"
    with pytest.raises(SaltRenderError):
        res = render_jinja_tmpl(tmpl, render_context)
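The CVE-2021-25283 test checks that dunder attributes such as __class__ never resolve during rendering. The underlying mechanism is Jinja's sandboxed environment; a standalone sketch of that behaviour with plain Jinja (not Salt's wrapper, which surfaces the failure as a SaltRenderError as asserted above):

import pytest
from jinja2.exceptions import SecurityError
from jinja2.sandbox import SandboxedEnvironment

env = SandboxedEnvironment()
with pytest.raises(SecurityError):
    # the sandbox refuses to resolve unsafe attributes like __class__
    env.from_string("{{ [].__class__ }}").render()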
34 tests/pytests/unit/utils/templates/test_mako.py Normal file
@@ -0,0 +1,34 @@
import pytest
from salt.utils.templates import render_mako_tmpl

pytest.importorskip("mako")


def test_render_mako_sanity(render_context):
    tmpl = """OK"""
    res = render_mako_tmpl(tmpl, render_context)
    assert res == "OK"


def test_render_mako_evaluate(render_context):
    tmpl = """${ "OK" }"""
    res = render_mako_tmpl(tmpl, render_context)
    assert res == "OK"


def test_render_mako_evaluate_multi(render_context):
    tmpl = """
% if 1:
OK
% endif
"""
    res = render_mako_tmpl(tmpl, render_context)
    stripped = res.strip()
    assert stripped == "OK"


def test_render_mako_variable(render_context):
    tmpl = """${ var }"""
    render_context["var"] = "OK"
    res = render_mako_tmpl(tmpl, render_context)
    assert res == "OK"
29 tests/pytests/unit/utils/templates/test_wempy.py Normal file
@@ -0,0 +1,29 @@
import pytest
from salt.utils.templates import render_wempy_tmpl

pytest.importorskip("wemplate")


def test_render_wempy_sanity(render_context):
    tmpl = """OK"""
    res = render_wempy_tmpl(tmpl, render_context)
    assert res == "OK"


def test_render_wempy_evaluate(render_context):
    tmpl = """{{="OK"}}"""
    res = render_wempy_tmpl(tmpl, render_context)
    assert res == "OK"


def test_render_wempy_evaluate_multi(render_context):
    tmpl = """{{if 1:}}OK{{pass}}"""
    res = render_wempy_tmpl(tmpl, render_context)
    assert res == "OK"


def test_render_wempy_variable(render_context):
    tmpl = """{{=var}}"""
    render_context["var"] = "OK"
    res = render_wempy_tmpl(tmpl, render_context)
    assert res == "OK"
217 tests/pytests/unit/utils/templates/test_wrap_tmpl_func.py Normal file
|
@ -0,0 +1,217 @@
|
|||
"""
|
||||
Unit tests for salt.utils.templates.py
|
||||
"""
|
||||
import logging
|
||||
from pathlib import PurePath, PurePosixPath
|
||||
|
||||
import pytest
|
||||
|
||||
from salt.utils.templates import wrap_tmpl_func, generate_sls_context
|
||||
from tests.support.mock import patch
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MockRender:
|
||||
def __call__(self, tplstr, context, tmplpath=None):
|
||||
self.tplstr = tplstr
|
||||
self.context = context
|
||||
self.tmplpath = tmplpath
|
||||
return tplstr
|
||||
|
||||
|
||||
def _test_generated_sls_context(tmplpath, sls, **expected):
|
||||
"""Generic SLS Context Test"""
|
||||
# DeNormalize tmplpath
|
||||
tmplpath = str(PurePath(PurePosixPath(tmplpath)))
|
||||
if tmplpath.startswith("\\"):
|
||||
tmplpath = "C:{}".format(tmplpath)
|
||||
expected["tplpath"] = tmplpath
|
||||
actual = generate_sls_context(tmplpath, sls)
|
||||
assert {key: actual[key] for key in expected if key in actual} == actual
|
||||
|
||||
|
||||
def test_sls_context_call(tmp_path):
|
||||
"""Check that generate_sls_context is called with proper parameters"""
|
||||
sls = "foo.bar"
|
||||
slsfile = tmp_path / "foo" / "bar.sls"
|
||||
slsfile.parent.mkdir()
|
||||
slsfile.write_text("{{ slspath }}")
|
||||
context = {"opts": {}, "saltenv": "base", "sls": sls}
|
||||
render = MockRender()
|
||||
with patch("salt.utils.templates.generate_sls_context") as generate_sls_context:
|
||||
wrapped = wrap_tmpl_func(render)
|
||||
res = wrapped(str(slsfile), context=context, tmplpath=str(slsfile))
|
||||
generate_sls_context.assert_called_with(str(slsfile), sls)
|
||||
|
||||
|
||||
def test_sls_context_no_call(tmp_path):
|
||||
"""Check that generate_sls_context is not called if sls is not set"""
|
||||
sls = "foo.bar"
|
||||
slsfile = tmp_path / "foo" / "bar.sls"
|
||||
slsfile.parent.mkdir()
|
||||
slsfile.write_text("{{ slspath }}")
|
||||
context = {"opts": {}, "saltenv": "base"}
|
||||
render = MockRender()
|
||||
with patch("salt.utils.templates.generate_sls_context") as generate_sls_context:
|
||||
wrapped = wrap_tmpl_func(render)
|
||||
res = wrapped(str(slsfile), context=context, tmplpath=str(slsfile))
|
||||
generate_sls_context.assert_not_called()
|
||||
|
||||
|
||||
def test_generate_sls_context__top_level():
|
||||
"""generate_sls_context - top_level Use case"""
|
||||
_test_generated_sls_context(
|
||||
"/tmp/boo.sls",
|
||||
"boo",
|
||||
tplfile="boo.sls",
|
||||
tpldir=".",
|
||||
tpldot="",
|
||||
slsdotpath="",
|
||||
slscolonpath="",
|
||||
sls_path="",
|
||||
slspath="",
|
||||
)
|
||||
|
||||
|
||||
def test_generate_sls_context__one_level_init_implicit():
|
||||
"""generate_sls_context - Basic one level with implicit init.sls"""
|
||||
_test_generated_sls_context(
|
||||
"/tmp/foo/init.sls",
|
||||
"foo",
|
||||
tplfile="foo/init.sls",
|
||||
tpldir="foo",
|
||||
tpldot="foo",
|
||||
slsdotpath="foo",
|
||||
slscolonpath="foo",
|
||||
sls_path="foo",
|
||||
slspath="foo",
|
||||
)
|
||||
|
||||
|
||||
def test_generate_sls_context__one_level_init_explicit():
|
||||
"""generate_sls_context - Basic one level with explicit init.sls"""
|
||||
_test_generated_sls_context(
|
||||
"/tmp/foo/init.sls",
|
||||
"foo.init",
|
||||
tplfile="foo/init.sls",
|
||||
tpldir="foo",
|
||||
tpldot="foo",
|
||||
slsdotpath="foo",
|
||||
slscolonpath="foo",
|
||||
sls_path="foo",
|
||||
slspath="foo",
|
||||
)
|
||||
|
||||
|
||||
def test_generate_sls_context__one_level():
|
||||
"""generate_sls_context - Basic one level with name"""
|
||||
_test_generated_sls_context(
|
||||
"/tmp/foo/boo.sls",
|
||||
"foo.boo",
|
||||
tplfile="foo/boo.sls",
|
||||
tpldir="foo",
|
||||
tpldot="foo",
|
||||
slsdotpath="foo",
|
||||
slscolonpath="foo",
|
||||
sls_path="foo",
|
||||
slspath="foo",
|
||||
)
|
||||
|
||||
|
||||
def test_generate_sls_context__one_level_repeating():
|
||||
"""generate_sls_context - Basic one level with name same as dir
|
||||
|
||||
(Issue #56410)
|
||||
"""
|
||||
_test_generated_sls_context(
|
||||
"/tmp/foo/foo.sls",
|
||||
"foo.foo",
|
||||
tplfile="foo/foo.sls",
|
||||
tpldir="foo",
|
||||
tpldot="foo",
|
||||
slsdotpath="foo",
|
||||
slscolonpath="foo",
|
||||
sls_path="foo",
|
||||
slspath="foo",
|
||||
)
|
||||
|
||||
|
||||
def test_generate_sls_context__two_level_init_implicit():
|
||||
"""generate_sls_context - Basic two level with implicit init.sls"""
|
||||
_test_generated_sls_context(
|
||||
"/tmp/foo/bar/init.sls",
|
||||
"foo.bar",
|
||||
tplfile="foo/bar/init.sls",
|
||||
tpldir="foo/bar",
|
||||
tpldot="foo.bar",
|
||||
slsdotpath="foo.bar",
|
||||
slscolonpath="foo:bar",
|
||||
sls_path="foo_bar",
|
||||
slspath="foo/bar",
|
||||
)
|
||||
|
||||
|
||||
def test_generate_sls_context__two_level_init_explicit():
|
||||
"""generate_sls_context - Basic two level with explicit init.sls"""
|
||||
_test_generated_sls_context(
|
||||
"/tmp/foo/bar/init.sls",
|
||||
"foo.bar.init",
|
||||
tplfile="foo/bar/init.sls",
|
||||
tpldir="foo/bar",
|
||||
tpldot="foo.bar",
|
||||
slsdotpath="foo.bar",
|
||||
slscolonpath="foo:bar",
|
||||
sls_path="foo_bar",
|
||||
slspath="foo/bar",
|
||||
)
|
||||
|
||||
|
||||
def test_generate_sls_context__two_level():
|
||||
"""generate_sls_context - Basic two level with name"""
|
||||
_test_generated_sls_context(
|
||||
"/tmp/foo/bar/boo.sls",
|
||||
"foo.bar.boo",
|
||||
tplfile="foo/bar/boo.sls",
|
||||
tpldir="foo/bar",
|
||||
tpldot="foo.bar",
|
||||
slsdotpath="foo.bar",
|
||||
slscolonpath="foo:bar",
|
||||
sls_path="foo_bar",
|
||||
slspath="foo/bar",
|
||||
)
|
||||
|
||||
|
||||
def test_generate_sls_context__two_level_repeating():
|
||||
"""generate_sls_context - Basic two level with name same as dir
|
||||
|
||||
(Issue #56410)
|
||||
"""
|
||||
_test_generated_sls_context(
|
||||
"/tmp/foo/foo/foo.sls",
|
||||
"foo.foo.foo",
|
||||
tplfile="foo/foo/foo.sls",
|
||||
tpldir="foo/foo",
|
||||
tpldot="foo.foo",
|
||||
slsdotpath="foo.foo",
|
||||
slscolonpath="foo:foo",
|
||||
sls_path="foo_foo",
|
||||
slspath="foo/foo",
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.skip_on_windows
|
||||
def test_generate_sls_context__backslash_in_path():
|
||||
"""generate_sls_context - Handle backslash in path on non-windows"""
|
||||
_test_generated_sls_context(
|
||||
"/tmp/foo/foo\\foo.sls",
|
||||
"foo.foo\\foo",
|
||||
tplfile="foo/foo\\foo.sls",
|
||||
tpldir="foo",
|
||||
tpldot="foo",
|
||||
slsdotpath="foo",
|
||||
slscolonpath="foo",
|
||||
sls_path="foo",
|
||||
slspath="foo",
|
||||
)
|
|
@@ -6,10 +6,12 @@
"""

import io
import time
from datetime import datetime, timedelta

import requests

from salt.utils.aws import get_metadata
import salt.utils.aws as aws
from tests.support.mock import MagicMock, patch


@@ -19,7 +21,7 @@ def test_get_metadata_imdsv1():
    response.reason = "OK"
    response.raw = io.BytesIO(b"""test""")
    with patch("requests.get", return_value=response):
        result = get_metadata("/")
        result = aws.get_metadata("/")
    assert result.text == "test"


@@ -48,5 +50,76 @@ def test_get_metadata_imdsv2():
    with patch("requests.get", MagicMock(side_effect=handle_get_mock)), patch(
        "requests.put", return_value=put_response
    ):
        result = get_metadata("/")
        result = aws.get_metadata("/")
    assert result.text == "test"


def test_assumed_creds_not_updating_dictionary_while_iterating():
    mock_cache = {
        "expired": {
            "Expiration": time.mktime(datetime.utcnow().timetuple()),
        },
        "not_expired_1": {
            "Expiration": time.mktime(
                (datetime.utcnow() + timedelta(days=1)).timetuple()
            ),
            "AccessKeyId": "mock_AccessKeyId",
            "SecretAccessKey": "mock_SecretAccessKey",
            "SessionToken": "mock_SessionToken",
        },
        "not_expired_2": {
            "Expiration": time.mktime(
                (datetime.utcnow() + timedelta(seconds=300)).timetuple()
            ),
        },
    }
    with patch.dict(aws.__AssumeCache__, mock_cache):
        ret = aws.assumed_creds({}, "not_expired_1")
        assert "expired" not in aws.__AssumeCache__
        assert ret == ("mock_AccessKeyId", "mock_SecretAccessKey", "mock_SessionToken")


def test_assumed_creds_deletes_expired_key():
    mock_cache = {
        "expired": {
            "Expiration": time.mktime(datetime.utcnow().timetuple()),
        },
        "not_expired_1": {
            "Expiration": time.mktime(
                (datetime.utcnow() + timedelta(days=1)).timetuple()
            ),
            "AccessKeyId": "mock_AccessKeyId",
            "SecretAccessKey": "mock_SecretAccessKey",
            "SessionToken": "mock_SessionToken",
        },
        "not_expired_2": {
            "Expiration": time.mktime(
                (datetime.utcnow() + timedelta(seconds=300)).timetuple()
            ),
        },
    }
    creds_dict = {
        "AccessKeyId": "mock_AccessKeyId",
        "SecretAccessKey": "mock_SecretAccessKey",
        "SessionToken": "mock_SessionToken",
    }
    response_mock = MagicMock()
    response_mock.status_code = 200
    response_mock.json.return_value = {
        "AssumeRoleResponse": {
            "AssumeRoleResult": {
                "Credentials": creds_dict,
            },
        },
    }
    with patch.dict(aws.__AssumeCache__, mock_cache):
        with patch.object(aws, "sig4", return_value=({}, "fakeurl.com")):
            with patch("requests.request", return_value=response_mock):
                ret = aws.assumed_creds({}, "expired")
                assert "expired" in aws.__AssumeCache__
                assert aws.__AssumeCache__["expired"] == creds_dict
                assert ret == (
                    "mock_AccessKeyId",
                    "mock_SecretAccessKey",
                    "mock_SessionToken",
                )
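Both tests above exercise the fix that stops assumed_creds from mutating the module-level __AssumeCache__ while iterating over it. The iteration-safe pattern, sketched standalone (the helper below is illustrative, not the exact code in salt/utils/aws.py):

import time
from datetime import datetime, timedelta


def sweep_expired(cache):
    # iterate over a snapshot of the keys so deleting entries does not
    # change the dictionary that is being iterated
    now = time.mktime(datetime.utcnow().timetuple())
    for key in list(cache):
        if cache[key].get("Expiration", 0) <= now:
            del cache[key]


cache = {
    "expired": {"Expiration": time.mktime(datetime.utcnow().timetuple())},
    "fresh": {
        "Expiration": time.mktime((datetime.utcnow() + timedelta(days=1)).timetuple())
    },
}
sweep_expired(cache)
assert "expired" not in cache and "fresh" in cache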
28 tests/pytests/unit/utils/test_mako.py Normal file
@@ -0,0 +1,28 @@
import pytest

from tests.support.mock import Mock, call, patch

pytest.importorskip("mako")

# This import needs to be after the above importorskip so that no ImportError
# is raised if Mako is not installed
from salt.utils.mako import SaltMakoTemplateLookup


def test_mako_template_lookup(minion_opts):
    """
    The shutdown method can be called without raising an exception when the
    file_client does not have a destroy method
    """
    # Test SaltCacheLoader creating and destroying the file client created
    file_client = Mock()
    with patch("salt.fileclient.get_file_client", return_value=file_client):
        loader = SaltMakoTemplateLookup(minion_opts)
        assert loader._file_client is None
        assert loader.file_client() is file_client
        assert loader._file_client is file_client
        try:
            loader.destroy()
        except AttributeError:
            pytest.fail("Regression when calling SaltMakoTemplateLookup.destroy()")
        assert file_client.mock_calls == [call.destroy()]
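The regression this test guards against came from destroy() reaching for the private file-client attribute and assuming the client always exposes a destroy() method. A hedged sketch of the defensive shape such a teardown can take (attribute names follow the test; this is not the verbatim implementation in salt/utils/mako.py):

class LookupSketch:
    def __init__(self):
        self._file_client = None

    def destroy(self):
        client = self._file_client
        self._file_client = None
        # only call destroy() when the client actually provides it
        if client is not None and hasattr(client, "destroy"):
            client.destroy()


lookup = LookupSketch()
lookup.destroy()  # no client attached yet: must not raise

lookup._file_client = object()  # a client without a destroy() method
lookup.destroy()  # still must not raise AttributeError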
@ -1,745 +0,0 @@
|
|||
# pylint: disable=unused-argument
|
||||
|
||||
|
||||
import salt.states.jboss7 as jboss7
|
||||
from salt.exceptions import CommandExecutionError
|
||||
from tests.support.mixins import LoaderModuleMockMixin
|
||||
from tests.support.mock import MagicMock, patch
|
||||
from tests.support.unit import TestCase
|
||||
|
||||
|
||||
class JBoss7StateTestCase(TestCase, LoaderModuleMockMixin):
|
||||
def setup_loader_modules(self):
|
||||
return {
|
||||
jboss7: {
|
||||
"__salt__": {
|
||||
"jboss7.read_datasource": MagicMock(),
|
||||
"jboss7.create_datasource": MagicMock(),
|
||||
"jboss7.update_datasource": MagicMock(),
|
||||
"jboss7.remove_datasource": MagicMock(),
|
||||
"jboss7.read_simple_binding": MagicMock(),
|
||||
"jboss7.create_simple_binding": MagicMock(),
|
||||
"jboss7.update_simple_binding": MagicMock(),
|
||||
"jboss7.undeploy": MagicMock(),
|
||||
"jboss7.deploy": MagicMock,
|
||||
"file.get_managed": MagicMock,
|
||||
"file.manage_file": MagicMock,
|
||||
"jboss7.list_deployments": MagicMock,
|
||||
},
|
||||
"__env__": "base",
|
||||
}
|
||||
}
|
||||
|
||||
def test_should_not_redeploy_unchanged(self):
|
||||
# given
|
||||
parameters = {
|
||||
"target_file": "some_artifact",
|
||||
"undeploy_force": False,
|
||||
"undeploy": "some_artifact",
|
||||
"source": "some_artifact_on_master",
|
||||
}
|
||||
jboss_conf = {"cli_path": "somewhere", "controller": "some_controller"}
|
||||
|
||||
def list_deployments(jboss_config):
|
||||
return ["some_artifact"]
|
||||
|
||||
def file_get_managed(
|
||||
name,
|
||||
template,
|
||||
source,
|
||||
source_hash,
|
||||
source_hash_name,
|
||||
user,
|
||||
group,
|
||||
mode,
|
||||
attrs,
|
||||
saltenv,
|
||||
context,
|
||||
defaults,
|
||||
skip_verify,
|
||||
kwargs,
|
||||
):
|
||||
return "sfn", "hash", ""
|
||||
|
||||
def file_manage_file(
|
||||
name,
|
||||
sfn,
|
||||
ret,
|
||||
source,
|
||||
source_sum,
|
||||
user,
|
||||
group,
|
||||
mode,
|
||||
attrs,
|
||||
saltenv,
|
||||
backup,
|
||||
makedirs,
|
||||
template,
|
||||
show_diff,
|
||||
contents,
|
||||
dir_mode,
|
||||
):
|
||||
return {"result": True, "changes": False}
|
||||
|
||||
jboss7_undeploy_mock = MagicMock()
|
||||
jboss7_deploy_mock = MagicMock()
|
||||
file_get_managed = MagicMock(side_effect=file_get_managed)
|
||||
file_manage_file = MagicMock(side_effect=file_manage_file)
|
||||
list_deployments_mock = MagicMock(side_effect=list_deployments)
|
||||
with patch.dict(
|
||||
jboss7.__salt__,
|
||||
{
|
||||
"jboss7.undeploy": jboss7_undeploy_mock,
|
||||
"jboss7.deploy": jboss7_deploy_mock,
|
||||
"file.get_managed": file_get_managed,
|
||||
"file.manage_file": file_manage_file,
|
||||
"jboss7.list_deployments": list_deployments_mock,
|
||||
},
|
||||
):
|
||||
# when
|
||||
result = jboss7.deployed(
|
||||
name="unchanged", jboss_config=jboss_conf, salt_source=parameters
|
||||
)
|
||||
|
||||
# then
|
||||
self.assertFalse(jboss7_undeploy_mock.called)
|
||||
self.assertFalse(jboss7_deploy_mock.called)
|
||||
|
||||
def test_should_redeploy_changed(self):
|
||||
# given
|
||||
parameters = {
|
||||
"target_file": "some_artifact",
|
||||
"undeploy_force": False,
|
||||
"undeploy": "some_artifact",
|
||||
"source": "some_artifact_on_master",
|
||||
}
|
||||
jboss_conf = {"cli_path": "somewhere", "controller": "some_controller"}
|
||||
|
||||
def list_deployments(jboss_config):
|
||||
return ["some_artifact"]
|
||||
|
||||
def file_get_managed(
|
||||
name,
|
||||
template,
|
||||
source,
|
||||
source_hash,
|
||||
source_hash_name,
|
||||
user,
|
||||
group,
|
||||
mode,
|
||||
attrs,
|
||||
saltenv,
|
||||
context,
|
||||
defaults,
|
||||
skip_verify,
|
||||
kwargs,
|
||||
):
|
||||
return "sfn", "hash", ""
|
||||
|
||||
def file_manage_file(
|
||||
name,
|
||||
sfn,
|
||||
ret,
|
||||
source,
|
||||
source_sum,
|
||||
user,
|
||||
group,
|
||||
mode,
|
||||
attrs,
|
||||
saltenv,
|
||||
backup,
|
||||
makedirs,
|
||||
template,
|
||||
show_diff,
|
||||
contents,
|
||||
dir_mode,
|
||||
):
|
||||
return {"result": True, "changes": True}
|
||||
|
||||
jboss7_undeploy_mock = MagicMock()
|
||||
jboss7_deploy_mock = MagicMock()
|
||||
file_get_managed = MagicMock(side_effect=file_get_managed)
|
||||
file_manage_file = MagicMock(side_effect=file_manage_file)
|
||||
list_deployments_mock = MagicMock(side_effect=list_deployments)
|
||||
with patch.dict(
|
||||
jboss7.__salt__,
|
||||
{
|
||||
"jboss7.undeploy": jboss7_undeploy_mock,
|
||||
"jboss7.deploy": jboss7_deploy_mock,
|
||||
"file.get_managed": file_get_managed,
|
||||
"file.manage_file": file_manage_file,
|
||||
"jboss7.list_deployments": list_deployments_mock,
|
||||
},
|
||||
):
|
||||
# when
|
||||
result = jboss7.deployed(
|
||||
name="unchanged", jboss_config=jboss_conf, salt_source=parameters
|
||||
)
|
||||
|
||||
# then
|
||||
self.assertTrue(jboss7_undeploy_mock.called)
|
||||
self.assertTrue(jboss7_deploy_mock.called)
|
||||
|
||||
def test_should_deploy_different_artifact(self):
|
||||
# given
|
||||
parameters = {
|
||||
"target_file": "some_artifact",
|
||||
"undeploy_force": False,
|
||||
"undeploy": "some_artifact",
|
||||
"source": "some_artifact_on_master",
|
||||
}
|
||||
jboss_conf = {"cli_path": "somewhere", "controller": "some_controller"}
|
||||
|
||||
def list_deployments(jboss_config):
|
||||
return ["some_other_artifact"]
|
||||
|
||||
def file_get_managed(
|
||||
name,
|
||||
template,
|
||||
source,
|
||||
source_hash,
|
||||
source_hash_name,
|
||||
user,
|
||||
group,
|
||||
mode,
|
||||
attrs,
|
||||
saltenv,
|
||||
context,
|
||||
defaults,
|
||||
skip_verify,
|
||||
kwargs,
|
||||
):
|
||||
return "sfn", "hash", ""
|
||||
|
||||
def file_manage_file(
|
||||
name,
|
||||
sfn,
|
||||
ret,
|
||||
source,
|
||||
source_sum,
|
||||
user,
|
||||
group,
|
||||
mode,
|
||||
attrs,
|
||||
saltenv,
|
||||
backup,
|
||||
makedirs,
|
||||
template,
|
||||
show_diff,
|
||||
contents,
|
||||
dir_mode,
|
||||
):
|
||||
return {"result": True, "changes": False}
|
||||
|
||||
jboss7_undeploy_mock = MagicMock()
|
||||
jboss7_deploy_mock = MagicMock()
|
||||
file_get_managed = MagicMock(side_effect=file_get_managed)
|
||||
file_manage_file = MagicMock(side_effect=file_manage_file)
|
||||
list_deployments_mock = MagicMock(side_effect=list_deployments)
|
||||
with patch.dict(
|
||||
jboss7.__salt__,
|
||||
{
|
||||
"jboss7.undeploy": jboss7_undeploy_mock,
|
||||
"jboss7.deploy": jboss7_deploy_mock,
|
||||
"file.get_managed": file_get_managed,
|
||||
"file.manage_file": file_manage_file,
|
||||
"jboss7.list_deployments": list_deployments_mock,
|
||||
},
|
||||
):
|
||||
# when
|
||||
result = jboss7.deployed(
|
||||
name="unchanged", jboss_config=jboss_conf, salt_source=parameters
|
||||
)
|
||||
|
||||
# then
|
||||
self.assertFalse(jboss7_undeploy_mock.called)
|
||||
self.assertTrue(jboss7_deploy_mock.called)
|
||||
|
||||
def test_should_redploy_undeploy_force(self):
|
||||
# given
|
||||
parameters = {
|
||||
"target_file": "some_artifact",
|
||||
"undeploy_force": True,
|
||||
"undeploy": "some_artifact",
|
||||
"source": "some_artifact_on_master",
|
||||
}
|
||||
jboss_conf = {"cli_path": "somewhere", "controller": "some_controller"}
|
||||
|
||||
def list_deployments(jboss_config):
|
||||
return ["some_artifact"]
|
||||
|
||||
def file_get_managed(
|
||||
name,
|
||||
template,
|
||||
source,
|
||||
source_hash,
|
||||
source_hash_name,
|
||||
user,
|
||||
group,
|
||||
mode,
|
||||
attrs,
|
||||
saltenv,
|
||||
context,
|
||||
defaults,
|
||||
skip_verify,
|
||||
kwargs,
|
||||
):
|
||||
return "sfn", "hash", ""
|
||||
|
||||
def file_manage_file(
|
||||
name,
|
||||
sfn,
|
||||
ret,
|
||||
source,
|
||||
source_sum,
|
||||
user,
|
||||
group,
|
||||
mode,
|
||||
attrs,
|
||||
saltenv,
|
||||
backup,
|
||||
makedirs,
|
||||
template,
|
||||
show_diff,
|
||||
contents,
|
||||
dir_mode,
|
||||
):
|
||||
return {"result": True, "changes": False}
|
||||
|
||||
jboss7_undeploy_mock = MagicMock()
|
||||
jboss7_deploy_mock = MagicMock()
|
||||
file_get_managed = MagicMock(side_effect=file_get_managed)
|
||||
file_manage_file = MagicMock(side_effect=file_manage_file)
|
||||
list_deployments_mock = MagicMock(side_effect=list_deployments)
|
||||
with patch.dict(
|
||||
jboss7.__salt__,
|
||||
{
|
||||
"jboss7.undeploy": jboss7_undeploy_mock,
|
||||
"jboss7.deploy": jboss7_deploy_mock,
|
||||
"file.get_managed": file_get_managed,
|
||||
"file.manage_file": file_manage_file,
|
||||
"jboss7.list_deployments": list_deployments_mock,
|
||||
},
|
||||
):
|
||||
# when
|
||||
result = jboss7.deployed(
|
||||
name="unchanged", jboss_config=jboss_conf, salt_source=parameters
|
||||
)
|
||||
|
||||
# then
|
||||
self.assertTrue(jboss7_undeploy_mock.called)
|
||||
self.assertTrue(jboss7_deploy_mock.called)
|
||||
|
||||
def test_should_create_new_datasource_if_not_exists(self):
|
||||
# given
|
||||
datasource_properties = {"connection-url": "jdbc:/old-connection-url"}
|
||||
ds_status = {"created": False}
|
||||
|
||||
def read_func(jboss_config, name, profile):
|
||||
if ds_status["created"]:
|
||||
return {"success": True, "result": datasource_properties}
|
||||
else:
|
||||
return {"success": False, "err_code": "JBAS014807"}
|
||||
|
||||
def create_func(jboss_config, name, datasource_properties, profile):
|
||||
ds_status["created"] = True
|
||||
return {"success": True}
|
||||
|
||||
read_mock = MagicMock(side_effect=read_func)
|
||||
create_mock = MagicMock(side_effect=create_func)
|
||||
update_mock = MagicMock()
|
||||
with patch.dict(
|
||||
jboss7.__salt__,
|
||||
{
|
||||
"jboss7.read_datasource": read_mock,
|
||||
"jboss7.create_datasource": create_mock,
|
||||
"jboss7.update_datasource": update_mock,
|
||||
},
|
||||
):
|
||||
|
||||
# when
|
||||
result = jboss7.datasource_exists(
|
||||
name="appDS",
|
||||
jboss_config={},
|
||||
datasource_properties=datasource_properties,
|
||||
profile=None,
|
||||
)
|
||||
|
||||
# then
|
||||
create_mock.assert_called_with(
|
||||
name="appDS",
|
||||
jboss_config={},
|
||||
datasource_properties=datasource_properties,
|
||||
profile=None,
|
||||
)
|
||||
|
||||
self.assertFalse(update_mock.called)
|
||||
self.assertEqual(result["comment"], "Datasource created.")
|
||||
|
||||
def test_should_update_the_datasource_if_exists(self):
|
||||
ds_status = {"updated": False}
|
||||
|
||||
def read_func(jboss_config, name, profile):
|
||||
if ds_status["updated"]:
|
||||
return {
|
||||
"success": True,
|
||||
"result": {"connection-url": "jdbc:/new-connection-url"},
|
||||
}
|
||||
else:
|
||||
return {
|
||||
"success": True,
|
||||
"result": {"connection-url": "jdbc:/old-connection-url"},
|
||||
}
|
||||
|
||||
def update_func(jboss_config, name, new_properties, profile):
|
||||
ds_status["updated"] = True
|
||||
return {"success": True}
|
||||
|
||||
read_mock = MagicMock(side_effect=read_func)
|
||||
create_mock = MagicMock()
|
||||
update_mock = MagicMock(side_effect=update_func)
|
||||
with patch.dict(
|
||||
jboss7.__salt__,
|
||||
{
|
||||
"jboss7.read_datasource": read_mock,
|
||||
"jboss7.create_datasource": create_mock,
|
||||
"jboss7.update_datasource": update_mock,
|
||||
},
|
||||
):
|
||||
result = jboss7.datasource_exists(
|
||||
name="appDS",
|
||||
jboss_config={},
|
||||
datasource_properties={"connection-url": "jdbc:/new-connection-url"},
|
||||
profile=None,
|
||||
)
|
||||
|
||||
update_mock.assert_called_with(
|
||||
name="appDS",
|
||||
jboss_config={},
|
||||
new_properties={"connection-url": "jdbc:/new-connection-url"},
|
||||
profile=None,
|
||||
)
|
||||
self.assertTrue(read_mock.called)
|
||||
self.assertEqual(result["comment"], "Datasource updated.")
|
||||
|
||||
def test_should_recreate_the_datasource_if_specified(self):
|
||||
read_mock = MagicMock(
|
||||
return_value={
|
||||
"success": True,
|
||||
"result": {"connection-url": "jdbc:/same-connection-url"},
|
||||
}
|
||||
)
|
||||
create_mock = MagicMock(return_value={"success": True})
|
||||
remove_mock = MagicMock(return_value={"success": True})
|
||||
update_mock = MagicMock()
|
||||
with patch.dict(
|
||||
jboss7.__salt__,
|
||||
{
|
||||
"jboss7.read_datasource": read_mock,
|
||||
"jboss7.create_datasource": create_mock,
|
||||
"jboss7.remove_datasource": remove_mock,
|
||||
"jboss7.update_datasource": update_mock,
|
||||
},
|
||||
):
|
||||
result = jboss7.datasource_exists(
|
||||
name="appDS",
|
||||
jboss_config={},
|
||||
datasource_properties={"connection-url": "jdbc:/same-connection-url"},
|
||||
recreate=True,
|
||||
)
|
||||
|
||||
remove_mock.assert_called_with(name="appDS", jboss_config={}, profile=None)
|
||||
create_mock.assert_called_with(
|
||||
name="appDS",
|
||||
jboss_config={},
|
||||
datasource_properties={"connection-url": "jdbc:/same-connection-url"},
|
||||
profile=None,
|
||||
)
|
||||
self.assertEqual(result["changes"]["removed"], "appDS")
|
||||
self.assertEqual(result["changes"]["created"], "appDS")
|
||||
|
||||
def test_should_inform_if_the_datasource_has_not_changed(self):
|
||||
read_mock = MagicMock(
|
||||
return_value={
|
||||
"success": True,
|
||||
"result": {"connection-url": "jdbc:/same-connection-url"},
|
||||
}
|
||||
)
|
||||
create_mock = MagicMock()
|
||||
remove_mock = MagicMock()
|
||||
update_mock = MagicMock(return_value={"success": True})
|
||||
|
||||
with patch.dict(
|
||||
jboss7.__salt__,
|
||||
{
|
||||
"jboss7.read_datasource": read_mock,
|
||||
"jboss7.create_datasource": create_mock,
|
||||
"jboss7.remove_datasource": remove_mock,
|
||||
"jboss7.update_datasource": update_mock,
|
||||
},
|
||||
):
|
||||
result = jboss7.datasource_exists(
|
||||
name="appDS",
|
||||
jboss_config={},
|
||||
datasource_properties={"connection-url": "jdbc:/old-connection-url"},
|
||||
)
|
||||
|
||||
update_mock.assert_called_with(
|
||||
name="appDS",
|
||||
jboss_config={},
|
||||
new_properties={"connection-url": "jdbc:/old-connection-url"},
|
||||
profile=None,
|
||||
)
|
||||
self.assertFalse(create_mock.called)
|
||||
self.assertEqual(result["comment"], "Datasource not changed.")
|
||||
|
||||
def test_should_create_binding_if_not_exists(self):
|
||||
# given
|
||||
binding_status = {"created": False}
|
||||
|
||||
def read_func(jboss_config, binding_name, profile):
|
||||
if binding_status["created"]:
|
||||
return {"success": True, "result": {"value": "DEV"}}
|
||||
else:
|
||||
return {"success": False, "err_code": "JBAS014807"}
|
||||
|
||||
def create_func(jboss_config, binding_name, value, profile):
|
||||
binding_status["created"] = True
|
||||
return {"success": True}
|
||||
|
||||
read_mock = MagicMock(side_effect=read_func)
|
||||
create_mock = MagicMock(side_effect=create_func)
|
||||
update_mock = MagicMock()
|
||||
|
||||
with patch.dict(
|
||||
jboss7.__salt__,
|
||||
{
|
||||
"jboss7.read_simple_binding": read_mock,
|
||||
"jboss7.create_simple_binding": create_mock,
|
||||
"jboss7.update_simple_binding": update_mock,
|
||||
},
|
||||
):
|
||||
|
||||
# when
|
||||
result = jboss7.bindings_exist(
|
||||
name="bindings", jboss_config={}, bindings={"env": "DEV"}, profile=None
|
||||
)
|
||||
|
||||
# then
|
||||
create_mock.assert_called_with(
|
||||
jboss_config={}, binding_name="env", value="DEV", profile=None
|
||||
)
|
||||
self.assertEqual(update_mock.call_count, 0)
|
||||
self.assertEqual(result["changes"], {"added": "env:DEV\n"})
|
||||
self.assertEqual(result["comment"], "Bindings changed.")
|
||||
|
||||
def test_should_update_bindings_if_exists_and_different(self):
|
||||
# given
|
||||
binding_status = {"updated": False}
|
||||
|
||||
def read_func(jboss_config, binding_name, profile):
|
||||
if binding_status["updated"]:
|
||||
return {"success": True, "result": {"value": "DEV2"}}
|
||||
else:
|
||||
return {"success": True, "result": {"value": "DEV"}}
|
||||
|
||||
def update_func(jboss_config, binding_name, value, profile):
|
||||
binding_status["updated"] = True
|
||||
return {"success": True}
|
||||
|
||||
read_mock = MagicMock(side_effect=read_func)
|
||||
create_mock = MagicMock()
|
||||
update_mock = MagicMock(side_effect=update_func)
|
||||
|
||||
with patch.dict(
|
||||
jboss7.__salt__,
|
||||
{
|
||||
"jboss7.read_simple_binding": read_mock,
|
||||
"jboss7.create_simple_binding": create_mock,
|
||||
"jboss7.update_simple_binding": update_mock,
|
||||
},
|
||||
):
|
||||
# when
|
||||
result = jboss7.bindings_exist(
|
||||
name="bindings", jboss_config={}, bindings={"env": "DEV2"}, profile=None
|
||||
)
|
||||
|
||||
# then
|
||||
update_mock.assert_called_with(
|
||||
jboss_config={}, binding_name="env", value="DEV2", profile=None
|
||||
)
|
||||
self.assertEqual(create_mock.call_count, 0)
|
||||
self.assertEqual(result["changes"], {"changed": "env:DEV->DEV2\n"})
|
||||
self.assertEqual(result["comment"], "Bindings changed.")
|
||||
|
||||
def test_should_not_update_bindings_if_same(self):
|
||||
# given
|
||||
read_mock = MagicMock(
|
||||
return_value={"success": True, "result": {"value": "DEV2"}}
|
||||
)
|
||||
create_mock = MagicMock()
|
||||
update_mock = MagicMock()
|
||||
|
||||
with patch.dict(
|
||||
jboss7.__salt__,
|
||||
{
|
||||
"jboss7.read_simple_binding": read_mock,
|
||||
"jboss7.create_simple_binding": create_mock,
|
||||
"jboss7.update_simple_binding": update_mock,
|
||||
},
|
||||
):
|
||||
# when
|
||||
result = jboss7.bindings_exist(
|
||||
name="bindings", jboss_config={}, bindings={"env": "DEV2"}
|
||||
)
|
||||
|
||||
# then
|
||||
self.assertEqual(create_mock.call_count, 0)
|
||||
self.assertEqual(update_mock.call_count, 0)
|
||||
self.assertEqual(result["changes"], {})
|
||||
self.assertEqual(result["comment"], "Bindings not changed.")
|
||||
|
||||
def test_should_raise_exception_if_cannot_create_binding(self):
|
||||
def read_func(jboss_config, binding_name, profile):
|
||||
return {"success": False, "err_code": "JBAS014807"}
|
||||
|
||||
def create_func(jboss_config, binding_name, value, profile):
|
||||
return {"success": False, "failure-description": "Incorrect binding name."}
|
||||
|
||||
read_mock = MagicMock(side_effect=read_func)
|
||||
create_mock = MagicMock(side_effect=create_func)
|
||||
update_mock = MagicMock()
|
||||
|
||||
with patch.dict(
|
||||
jboss7.__salt__,
|
||||
{
|
||||
"jboss7.read_simple_binding": read_mock,
|
||||
"jboss7.create_simple_binding": create_mock,
|
||||
"jboss7.update_simple_binding": update_mock,
|
||||
},
|
||||
):
|
||||
# when
|
||||
try:
|
||||
jboss7.bindings_exist(
|
||||
name="bindings",
|
||||
jboss_config={},
|
||||
bindings={"env": "DEV2"},
|
||||
profile=None,
|
||||
)
|
||||
self.fail("An exception should be thrown")
|
||||
except CommandExecutionError as e:
|
||||
self.assertEqual(str(e), "Incorrect binding name.")
|
||||
|
||||
def test_should_raise_exception_if_cannot_update_binding(self):
|
||||
def read_func(jboss_config, binding_name, profile):
|
||||
return {"success": True, "result": {"value": "DEV"}}
|
||||
|
||||
def update_func(jboss_config, binding_name, value, profile):
|
||||
return {"success": False, "failure-description": "Incorrect binding name."}
|
||||
|
||||
read_mock = MagicMock(side_effect=read_func)
|
||||
create_mock = MagicMock()
|
||||
update_mock = MagicMock(side_effect=update_func)
|
||||
|
||||
with patch.dict(
|
||||
jboss7.__salt__,
|
||||
{
|
||||
"jboss7.read_simple_binding": read_mock,
|
||||
"jboss7.create_simple_binding": create_mock,
|
||||
"jboss7.update_simple_binding": update_mock,
|
||||
},
|
||||
):
|
||||
|
||||
# when
|
||||
try:
|
||||
jboss7.bindings_exist(
|
||||
name="bindings",
|
||||
jboss_config={},
|
||||
bindings={"env": "!@#!///some weird value"},
|
||||
profile=None,
|
||||
)
|
||||
self.fail("An exception should be thrown")
|
||||
except CommandExecutionError as e:
|
||||
self.assertEqual(str(e), "Incorrect binding name.")
|
||||
|
||||
def test_datasource_exist_create_datasource_good_code(self):
|
||||
jboss_config = {
|
||||
"cli_path": "/home/ch44d/Desktop/wildfly-18.0.0.Final/bin/jboss-cli.sh",
|
||||
"controller": "127.0.0.1: 9990",
|
||||
"cli_user": "user",
|
||||
"cli_password": "user",
|
||||
}
|
||||
|
||||
datasource_properties = {
|
||||
"driver - name": "h2",
|
||||
"connection - url": "jdbc:sqlserver://127.0.0.1:1433;DatabaseName=test_s2",
|
||||
"jndi - name": (
|
||||
"java:/home/ch44d/Desktop/sqljdbc_7.4/enu/mssql-jdbc-7.4.1.jre8.jar"
|
||||
),
|
||||
"user - name": "user",
|
||||
"password": "user",
|
||||
"use - java - context": True,
|
||||
}
|
||||
|
||||
read_datasource = MagicMock(
|
||||
return_value={"success": False, "err_code": "WFLYCTL0216"}
|
||||
)
|
||||
|
||||
error_msg = "Error: -1"
|
||||
create_datasource = MagicMock(
|
||||
return_value={"success": False, "stdout": error_msg}
|
||||
)
|
||||
|
||||
with patch.dict(
|
||||
jboss7.__salt__,
|
||||
{
|
||||
"jboss7.read_datasource": read_datasource,
|
||||
"jboss7.create_datasource": create_datasource,
|
||||
},
|
||||
):
|
||||
ret = jboss7.datasource_exists("SQL", jboss_config, datasource_properties)
|
||||
|
||||
self.assertTrue("result" in ret)
|
||||
self.assertFalse(ret["result"])
|
||||
self.assertTrue("comment" in ret)
|
||||
self.assertTrue(error_msg in ret["comment"])
|
||||
|
||||
read_datasource.assert_called_once()
|
||||
create_datasource.assert_called_once()
|
||||
|
||||
def test_datasource_exist_create_datasource_bad_code(self):
|
||||
jboss_config = {
|
||||
"cli_path": "/home/ch44d/Desktop/wildfly-18.0.0.Final/bin/jboss-cli.sh",
|
||||
"controller": "127.0.0.1: 9990",
|
||||
"cli_user": "user",
|
||||
"cli_password": "user",
|
||||
}
|
||||
|
||||
datasource_properties = {
|
||||
"driver - name": "h2",
|
||||
"connection - url": "jdbc:sqlserver://127.0.0.1:1433;DatabaseName=test_s2",
|
||||
"jndi - name": (
|
||||
"java:/home/ch44d/Desktop/sqljdbc_7.4/enu/mssql-jdbc-7.4.1.jre8.jar"
|
||||
),
|
||||
"user - name": "user",
|
||||
"password": "user",
|
||||
"use - java - context": True,
|
||||
}
|
||||
|
||||
read_datasource = MagicMock(
|
||||
return_value={
|
||||
"success": False,
|
||||
"err_code": "WFLYCTL0217",
|
||||
"failure-description": "Something happened",
|
||||
}
|
||||
)
|
||||
|
||||
with patch.dict(jboss7.__salt__, {"jboss7.read_datasource": read_datasource}):
|
||||
self.assertRaises(
|
||||
CommandExecutionError,
|
||||
jboss7.datasource_exists,
|
||||
"SQL",
|
||||
jboss_config,
|
||||
datasource_properties,
|
||||
)
|
||||
read_datasource.assert_called_once()
|
|
@ -1,897 +0,0 @@
|
|||
"""
|
||||
:codeauthor: :email:`Jeff Schroeder <jeffschroeder@computer.org>`
|
||||
"""
|
||||
|
||||
import base64
|
||||
from contextlib import contextmanager
|
||||
|
||||
import pytest
|
||||
|
||||
import salt.modules.kubernetesmod as kubernetesmod
|
||||
import salt.states.kubernetes as kubernetes
|
||||
import salt.utils.stringutils
|
||||
from tests.support.mixins import LoaderModuleMockMixin
|
||||
from tests.support.mock import MagicMock, patch
|
||||
from tests.support.unit import TestCase
|
||||
|
||||
|
||||
@pytest.mark.skipif(
|
||||
kubernetesmod.HAS_LIBS is False,
|
||||
reason="Probably Kubernetes client lib is not installed. Skipping test_kubernetes.py",
|
||||
)
|
||||
class KubernetesTestCase(TestCase, LoaderModuleMockMixin):
|
||||
"""
|
||||
Test cases for salt.states.kubernetes
|
||||
"""
|
||||
|
||||
def setup_loader_modules(self):
|
||||
return {kubernetes: {"__env__": "base"}}
|
||||
|
||||
@contextmanager
|
||||
def mock_func(self, func_name, return_value, test=False):
|
||||
"""
|
||||
Mock any of the kubernetes state function return values and set
|
||||
the test options.
|
||||
"""
|
||||
name = "kubernetes.{}".format(func_name)
|
||||
mocked = {name: MagicMock(return_value=return_value)}
|
||||
with patch.dict(kubernetes.__salt__, mocked) as patched:
|
||||
with patch.dict(kubernetes.__opts__, {"test": test}):
|
||||
yield patched
|
||||
|
||||
def make_configmap(self, name, namespace="default", data=None):
|
||||
return self.make_ret_dict(
|
||||
kind="ConfigMap",
|
||||
name=name,
|
||||
namespace=namespace,
|
||||
data=data,
|
||||
)
|
||||
|
||||
def make_secret(self, name, namespace="default", data=None):
|
||||
secret_data = self.make_ret_dict(
|
||||
kind="Secret",
|
||||
name=name,
|
||||
namespace=namespace,
|
||||
data=data,
|
||||
)
|
||||
# Base64 all of the values just like kubectl does
|
||||
for key, value in secret_data["data"].items():
|
||||
secret_data["data"][key] = base64.b64encode(
|
||||
salt.utils.stringutils.to_bytes(value)
|
||||
)
|
||||
|
||||
return secret_data
|
||||
|
||||
def make_node_labels(self, name="minikube"):
|
||||
return {
|
||||
"kubernetes.io/hostname": name,
|
||||
"beta.kubernetes.io/os": "linux",
|
||||
"beta.kubernetes.io/arch": "amd64",
|
||||
"failure-domain.beta.kubernetes.io/region": "us-west-1",
|
||||
}
|
||||
|
||||
def make_node(self, name="minikube"):
|
||||
node_data = self.make_ret_dict(kind="Node", name="minikube")
|
||||
node_data.update(
|
||||
{
|
||||
"api_version": "v1",
|
||||
"kind": "Node",
|
||||
"metadata": {
|
||||
"annotations": {"node.alpha.kubernetes.io/ttl": "0"},
|
||||
"labels": self.make_node_labels(name=name),
|
||||
"name": name,
|
||||
"namespace": None,
|
||||
"self_link": "/api/v1/nodes/{name}".format(name=name),
|
||||
"uid": "7811b8ae-c1a1-11e7-a55a-0800279fb61e",
|
||||
},
|
||||
"spec": {"external_id": name},
|
||||
"status": {},
|
||||
}
|
||||
)
|
||||
return node_data
|
||||
|
||||
def make_namespace(self, name="default"):
|
||||
namespace_data = self.make_ret_dict(kind="Namespace", name=name)
|
||||
del namespace_data["data"]
|
||||
namespace_data.update(
|
||||
{
|
||||
"status": {"phase": "Active"},
|
||||
"spec": {"finalizers": ["kubernetes"]},
|
||||
"metadata": {
|
||||
"name": name,
|
||||
"namespace": None,
|
||||
"labels": None,
|
||||
"self_link": "/api/v1/namespaces/{namespace}".format(
|
||||
namespace=name,
|
||||
),
|
||||
"annotations": None,
|
||||
"uid": "752fceeb-c1a1-11e7-a55a-0800279fb61e",
|
||||
},
|
||||
}
|
||||
)
|
||||
return namespace_data
|
||||
|
||||
def make_ret_dict(self, kind, name, namespace=None, data=None):
|
||||
"""
|
||||
Make a minimal example configmap or secret for using in mocks
|
||||
"""
|
||||
|
||||
assert kind in ("Secret", "ConfigMap", "Namespace", "Node")
|
||||
|
||||
if data is None:
|
||||
data = {}
|
||||
|
||||
self_link = "/api/v1/namespaces/{namespace}/{kind}s/{name}".format(
|
||||
namespace=namespace,
|
||||
kind=kind.lower(),
|
||||
name=name,
|
||||
)
|
||||
|
||||
return_data = {
|
||||
"kind": kind,
|
||||
"data": data,
|
||||
"api_version": "v1",
|
||||
"metadata": {
|
||||
"name": name,
|
||||
"labels": None,
|
||||
"namespace": namespace,
|
||||
"self_link": self_link,
|
||||
"annotations": {"kubernetes.io/change-cause": "salt-call state.apply"},
|
||||
},
|
||||
}
|
||||
return return_data
|
||||
|
||||
def test_configmap_present__fail(self):
|
||||
error = kubernetes.configmap_present(
|
||||
name="testme",
|
||||
data={1: 1},
|
||||
source="salt://beyond/oblivion.jinja",
|
||||
)
|
||||
self.assertDictEqual(
|
||||
{
|
||||
"changes": {},
|
||||
"result": False,
|
||||
"name": "testme",
|
||||
"comment": "'source' cannot be used in combination with 'data'",
|
||||
},
|
||||
error,
|
||||
)
|
||||
|
||||
def test_configmap_present__create_test_true(self):
|
||||
# Create a new configmap with test=True
|
||||
with self.mock_func("show_configmap", return_value=None, test=True):
|
||||
ret = kubernetes.configmap_present(
|
||||
name="example",
|
||||
data={"example.conf": "# empty config file"},
|
||||
)
|
||||
self.assertDictEqual(
|
||||
{
|
||||
"comment": "The configmap is going to be created",
|
||||
"changes": {},
|
||||
"name": "example",
|
||||
"result": None,
|
||||
},
|
||||
ret,
|
||||
)
|
||||
|
||||
def test_configmap_present__create(self):
|
||||
# Create a new configmap
|
||||
with self.mock_func("show_configmap", return_value=None):
|
||||
cm = self.make_configmap(
|
||||
name="test",
|
||||
namespace="default",
|
||||
data={"foo": "bar"},
|
||||
)
|
||||
with self.mock_func("create_configmap", return_value=cm):
|
||||
actual = kubernetes.configmap_present(
|
||||
name="test",
|
||||
data={"foo": "bar"},
|
||||
)
|
||||
self.assertDictEqual(
|
||||
{
|
||||
"comment": "",
|
||||
"changes": {"data": {"foo": "bar"}},
|
||||
"name": "test",
|
||||
"result": True,
|
||||
},
|
||||
actual,
|
||||
)
|
||||
|
||||
def test_configmap_present__create_no_data(self):
|
||||
# Create a new configmap with no 'data' attribute
|
||||
with self.mock_func("show_configmap", return_value=None):
|
||||
cm = self.make_configmap(
|
||||
name="test",
|
||||
namespace="default",
|
||||
)
|
||||
with self.mock_func("create_configmap", return_value=cm):
|
||||
actual = kubernetes.configmap_present(name="test")
|
||||
self.assertDictEqual(
|
||||
{
|
||||
"comment": "",
|
||||
"changes": {"data": {}},
|
||||
"name": "test",
|
||||
"result": True,
|
||||
},
|
||||
actual,
|
||||
)
|
||||
|
||||
def test_configmap_present__replace_test_true(self):
|
||||
cm = self.make_configmap(
|
||||
name="settings",
|
||||
namespace="saltstack",
|
||||
data={"foobar.conf": "# Example configuration"},
|
||||
)
|
||||
with self.mock_func("show_configmap", return_value=cm, test=True):
|
||||
ret = kubernetes.configmap_present(
|
||||
name="settings",
|
||||
namespace="saltstack",
|
||||
data={"foobar.conf": "# Example configuration"},
|
||||
)
|
||||
self.assertDictEqual(
|
||||
{
|
||||
"comment": "The configmap is going to be replaced",
|
||||
"changes": {},
|
||||
"name": "settings",
|
||||
"result": None,
|
||||
},
|
||||
ret,
|
||||
)
|
||||
|
||||
def test_configmap_present__replace(self):
|
||||
cm = self.make_configmap(name="settings", data={"action": "make=war"})
|
||||
# Replace an existing configmap
|
||||
with self.mock_func("show_configmap", return_value=cm):
|
||||
new_cm = cm.copy()
|
||||
new_cm.update({"data": {"action": "make=peace"}})
|
||||
with self.mock_func("replace_configmap", return_value=new_cm):
|
||||
actual = kubernetes.configmap_present(
|
||||
name="settings",
|
||||
data={"action": "make=peace"},
|
||||
)
|
||||
self.assertDictEqual(
|
||||
{
|
||||
"comment": (
|
||||
"The configmap is already present. Forcing recreation"
|
||||
),
|
||||
"changes": {"data": {"action": "make=peace"}},
|
||||
"name": "settings",
|
||||
"result": True,
|
||||
},
|
||||
actual,
|
||||
)
|
||||
|
||||
def test_configmap_absent__noop_test_true(self):
|
||||
# Nothing to delete with test=True
|
||||
with self.mock_func("show_configmap", return_value=None, test=True):
|
||||
actual = kubernetes.configmap_absent(name="NOT_FOUND")
|
||||
self.assertDictEqual(
|
||||
{
|
||||
"comment": "The configmap does not exist",
|
||||
"changes": {},
|
||||
"name": "NOT_FOUND",
|
||||
"result": None,
|
||||
},
|
||||
actual,
|
||||
)
|
||||
|
||||
def test_configmap_absent__test_true(self):
|
||||
# Configmap exists with test=True
|
||||
cm = self.make_configmap(name="deleteme", namespace="default")
|
||||
with self.mock_func("show_configmap", return_value=cm, test=True):
|
||||
actual = kubernetes.configmap_absent(name="deleteme")
|
||||
self.assertDictEqual(
|
||||
{
|
||||
"comment": "The configmap is going to be deleted",
|
||||
"changes": {},
|
||||
"name": "deleteme",
|
||||
"result": None,
|
||||
},
|
||||
actual,
|
||||
)
|
||||
|
||||
def test_configmap_absent__noop(self):
|
||||
# Nothing to delete
|
||||
with self.mock_func("show_configmap", return_value=None):
|
||||
actual = kubernetes.configmap_absent(name="NOT_FOUND")
|
||||
self.assertDictEqual(
|
||||
{
|
||||
"comment": "The configmap does not exist",
|
||||
"changes": {},
|
||||
"name": "NOT_FOUND",
|
||||
"result": True,
|
||||
},
|
||||
actual,
|
||||
)
|
||||
|
||||
def test_configmap_absent(self):
|
||||
# Configmap exists, delete it!
|
||||
cm = self.make_configmap(name="deleteme", namespace="default")
|
||||
with self.mock_func("show_configmap", return_value=cm):
|
||||
            # The return from this module isn't used in the state
            with self.mock_func("delete_configmap", return_value={}):
                actual = kubernetes.configmap_absent(name="deleteme")
                self.assertDictEqual(
                    {
                        "comment": "ConfigMap deleted",
                        "changes": {
                            "kubernetes.configmap": {
                                "new": "absent",
                                "old": "present",
                            },
                        },
                        "name": "deleteme",
                        "result": True,
                    },
                    actual,
                )

    def test_secret_present__fail(self):
        actual = kubernetes.secret_present(
            name="sekret",
            data={"password": "monk3y"},
            source="salt://nope.jinja",
        )
        self.assertDictEqual(
            {
                "changes": {},
                "result": False,
                "name": "sekret",
                "comment": "'source' cannot be used in combination with 'data'",
            },
            actual,
        )

    def test_secret_present__exists_test_true(self):
        secret = self.make_secret(name="sekret")
        new_secret = secret.copy()
        new_secret.update({"data": {"password": "uncle"}})
        # Secret exists already and needs replacing with test=True
        with self.mock_func("show_secret", return_value=secret):
            with self.mock_func("replace_secret", return_value=new_secret, test=True):
                actual = kubernetes.secret_present(
                    name="sekret",
                    data={"password": "uncle"},
                )
                self.assertDictEqual(
                    {
                        "changes": {},
                        "result": None,
                        "name": "sekret",
                        "comment": "The secret is going to be replaced",
                    },
                    actual,
                )

    def test_secret_present__exists(self):
        # Secret exists and gets replaced
        secret = self.make_secret(name="sekret", data={"password": "booyah"})
        with self.mock_func("show_secret", return_value=secret):
            with self.mock_func("replace_secret", return_value=secret):
                actual = kubernetes.secret_present(
                    name="sekret",
                    data={"password": "booyah"},
                )
                self.assertDictEqual(
                    {
                        "changes": {"data": ["password"]},
                        "result": True,
                        "name": "sekret",
                        "comment": "The secret is already present. Forcing recreation",
                    },
                    actual,
                )

    def test_secret_present__create(self):
        # Secret exists and gets replaced
        secret = self.make_secret(name="sekret", data={"password": "booyah"})
        with self.mock_func("show_secret", return_value=None):
            with self.mock_func("create_secret", return_value=secret):
                actual = kubernetes.secret_present(
                    name="sekret",
                    data={"password": "booyah"},
                )
                self.assertDictEqual(
                    {
                        "changes": {"data": ["password"]},
                        "result": True,
                        "name": "sekret",
                        "comment": "",
                    },
                    actual,
                )

    def test_secret_present__create_no_data(self):
        # Secret exists and gets replaced
        secret = self.make_secret(name="sekret")
        with self.mock_func("show_secret", return_value=None):
            with self.mock_func("create_secret", return_value=secret):
                actual = kubernetes.secret_present(name="sekret")
                self.assertDictEqual(
                    {
                        "changes": {"data": []},
                        "result": True,
                        "name": "sekret",
                        "comment": "",
                    },
                    actual,
                )

    def test_secret_present__create_test_true(self):
        # Secret exists and gets replaced with test=True
        secret = self.make_secret(name="sekret")
        with self.mock_func("show_secret", return_value=None):
            with self.mock_func("create_secret", return_value=secret, test=True):
                actual = kubernetes.secret_present(name="sekret")
                self.assertDictEqual(
                    {
                        "changes": {},
                        "result": None,
                        "name": "sekret",
                        "comment": "The secret is going to be created",
                    },
                    actual,
                )

    def test_secret_absent__noop_test_true(self):
        with self.mock_func("show_secret", return_value=None, test=True):
            actual = kubernetes.secret_absent(name="sekret")
            self.assertDictEqual(
                {
                    "changes": {},
                    "result": None,
                    "name": "sekret",
                    "comment": "The secret does not exist",
                },
                actual,
            )

    def test_secret_absent__noop(self):
        with self.mock_func("show_secret", return_value=None):
            actual = kubernetes.secret_absent(name="passwords")
            self.assertDictEqual(
                {
                    "changes": {},
                    "result": True,
                    "name": "passwords",
                    "comment": "The secret does not exist",
                },
                actual,
            )

    def test_secret_absent__delete_test_true(self):
        secret = self.make_secret(name="credentials", data={"redis": "letmein"})
        with self.mock_func("show_secret", return_value=secret):
            with self.mock_func("delete_secret", return_value=secret, test=True):
                actual = kubernetes.secret_absent(name="credentials")
                self.assertDictEqual(
                    {
                        "changes": {},
                        "result": None,
                        "name": "credentials",
                        "comment": "The secret is going to be deleted",
                    },
                    actual,
                )

    def test_secret_absent__delete(self):
        secret = self.make_secret(name="foobar", data={"redis": "letmein"})
        deleted = {
            "status": None,
            "kind": "Secret",
            "code": None,
            "reason": None,
            "details": None,
            "message": None,
            "api_version": "v1",
            "metadata": {
                "self_link": "/api/v1/namespaces/default/secrets/foobar",
                "resource_version": "30292",
            },
        }
        with self.mock_func("show_secret", return_value=secret):
            with self.mock_func("delete_secret", return_value=deleted):
                actual = kubernetes.secret_absent(name="foobar")
                self.assertDictEqual(
                    {
                        "changes": {
                            "kubernetes.secret": {"new": "absent", "old": "present"},
                        },
                        "result": True,
                        "name": "foobar",
                        "comment": "Secret deleted",
                    },
                    actual,
                )

    def test_node_label_present__add_test_true(self):
        labels = self.make_node_labels()
        with self.mock_func("node_labels", return_value=labels, test=True):
            actual = kubernetes.node_label_present(
                name="com.zoo-animal",
                node="minikube",
                value="monkey",
            )
            self.assertDictEqual(
                {
                    "changes": {},
                    "result": None,
                    "name": "com.zoo-animal",
                    "comment": "The label is going to be set",
                },
                actual,
            )

    def test_node_label_present__add(self):
        node_data = self.make_node()
        # Remove some of the defaults to make it simpler
        node_data["metadata"]["labels"] = {
            "beta.kubernetes.io/os": "linux",
        }
        labels = node_data["metadata"]["labels"]

        with self.mock_func("node_labels", return_value=labels):
            with self.mock_func("node_add_label", return_value=node_data):
                actual = kubernetes.node_label_present(
                    name="failure-domain.beta.kubernetes.io/zone",
                    node="minikube",
                    value="us-central1-a",
                )
                self.assertDictEqual(
                    {
                        "comment": "",
                        "changes": {
                            "minikube.failure-domain.beta.kubernetes.io/zone": {
                                "new": {
                                    "failure-domain.beta.kubernetes.io/zone": (
                                        "us-central1-a"
                                    ),
                                    "beta.kubernetes.io/os": "linux",
                                },
                                "old": {"beta.kubernetes.io/os": "linux"},
                            },
                        },
                        "name": "failure-domain.beta.kubernetes.io/zone",
                        "result": True,
                    },
                    actual,
                )

    def test_node_label_present__already_set(self):
        node_data = self.make_node()
        labels = node_data["metadata"]["labels"]
        with self.mock_func("node_labels", return_value=labels):
            with self.mock_func("node_add_label", return_value=node_data):
                actual = kubernetes.node_label_present(
                    name="failure-domain.beta.kubernetes.io/region",
                    node="minikube",
                    value="us-west-1",
                )
                self.assertDictEqual(
                    {
                        "changes": {},
                        "result": True,
                        "name": "failure-domain.beta.kubernetes.io/region",
                        "comment": (
                            "The label is already set and has the specified value"
                        ),
                    },
                    actual,
                )

    def test_node_label_present__update_test_true(self):
        node_data = self.make_node()
        labels = node_data["metadata"]["labels"]
        with self.mock_func("node_labels", return_value=labels):
            with self.mock_func("node_add_label", return_value=node_data, test=True):
                actual = kubernetes.node_label_present(
                    name="failure-domain.beta.kubernetes.io/region",
                    node="minikube",
                    value="us-east-1",
                )
                self.assertDictEqual(
                    {
                        "changes": {},
                        "result": None,
                        "name": "failure-domain.beta.kubernetes.io/region",
                        "comment": "The label is going to be updated",
                    },
                    actual,
                )

    def test_node_label_present__update(self):
        node_data = self.make_node()
        # Remove some of the defaults to make it simpler
        node_data["metadata"]["labels"] = {
            "failure-domain.beta.kubernetes.io/region": "us-west-1",
        }
        labels = node_data["metadata"]["labels"]
        with self.mock_func("node_labels", return_value=labels):
            with self.mock_func("node_add_label", return_value=node_data):
                actual = kubernetes.node_label_present(
                    name="failure-domain.beta.kubernetes.io/region",
                    node="minikube",
                    value="us-east-1",
                )
                self.assertDictEqual(
                    {
                        "changes": {
                            "minikube.failure-domain.beta.kubernetes.io/region": {
                                "new": {
                                    "failure-domain.beta.kubernetes.io/region": (
                                        "us-east-1"
                                    )
                                },
                                "old": {
                                    "failure-domain.beta.kubernetes.io/region": (
                                        "us-west-1"
                                    )
                                },
                            }
                        },
                        "result": True,
                        "name": "failure-domain.beta.kubernetes.io/region",
                        "comment": "The label is already set, changing the value",
                    },
                    actual,
                )

    def test_node_label_absent__noop_test_true(self):
        labels = self.make_node_labels()
        with self.mock_func("node_labels", return_value=labels, test=True):
            actual = kubernetes.node_label_absent(
                name="non-existent-label",
                node="minikube",
            )
            self.assertDictEqual(
                {
                    "changes": {},
                    "result": None,
                    "name": "non-existent-label",
                    "comment": "The label does not exist",
                },
                actual,
            )

    def test_node_label_absent__noop(self):
        labels = self.make_node_labels()
        with self.mock_func("node_labels", return_value=labels):
            actual = kubernetes.node_label_absent(
                name="non-existent-label",
                node="minikube",
            )
            self.assertDictEqual(
                {
                    "changes": {},
                    "result": True,
                    "name": "non-existent-label",
                    "comment": "The label does not exist",
                },
                actual,
            )

    def test_node_label_absent__delete_test_true(self):
        labels = self.make_node_labels()
        with self.mock_func("node_labels", return_value=labels, test=True):
            actual = kubernetes.node_label_absent(
                name="failure-domain.beta.kubernetes.io/region",
                node="minikube",
            )
            self.assertDictEqual(
                {
                    "changes": {},
                    "result": None,
                    "name": "failure-domain.beta.kubernetes.io/region",
                    "comment": "The label is going to be deleted",
                },
                actual,
            )

    def test_node_label_absent__delete(self):
        node_data = self.make_node()
        labels = node_data["metadata"]["labels"].copy()

        node_data["metadata"]["labels"].pop("failure-domain.beta.kubernetes.io/region")

        with self.mock_func("node_labels", return_value=labels):
            with self.mock_func("node_remove_label", return_value=node_data):
                actual = kubernetes.node_label_absent(
                    name="failure-domain.beta.kubernetes.io/region",
                    node="minikube",
                )
                self.assertDictEqual(
                    {
                        "result": True,
                        "changes": {
                            "kubernetes.node_label": {
                                "new": "absent",
                                "old": "present",
                            }
                        },
                        "comment": "Label removed from node",
                        "name": "failure-domain.beta.kubernetes.io/region",
                    },
                    actual,
                )

    def test_namespace_present__create_test_true(self):
        with self.mock_func("show_namespace", return_value=None, test=True):
            actual = kubernetes.namespace_present(name="saltstack")
            self.assertDictEqual(
                {
                    "changes": {},
                    "result": None,
                    "name": "saltstack",
                    "comment": "The namespace is going to be created",
                },
                actual,
            )

    def test_namespace_present__create(self):
        namespace_data = self.make_namespace(name="saltstack")
        with self.mock_func("show_namespace", return_value=None):
            with self.mock_func("create_namespace", return_value=namespace_data):
                actual = kubernetes.namespace_present(name="saltstack")
                self.assertDictEqual(
                    {
                        "changes": {"namespace": {"new": namespace_data, "old": {}}},
                        "result": True,
                        "name": "saltstack",
                        "comment": "",
                    },
                    actual,
                )

    def test_namespace_present__noop_test_true(self):
        namespace_data = self.make_namespace(name="saltstack")
        with self.mock_func("show_namespace", return_value=namespace_data, test=True):
            actual = kubernetes.namespace_present(name="saltstack")
            self.assertDictEqual(
                {
                    "changes": {},
                    "result": None,
                    "name": "saltstack",
                    "comment": "The namespace already exists",
                },
                actual,
            )

    def test_namespace_present__noop(self):
        namespace_data = self.make_namespace(name="saltstack")
        with self.mock_func("show_namespace", return_value=namespace_data):
            actual = kubernetes.namespace_present(name="saltstack")
            self.assertDictEqual(
                {
                    "changes": {},
                    "result": True,
                    "name": "saltstack",
                    "comment": "The namespace already exists",
                },
                actual,
            )

    def test_namespace_absent__noop_test_true(self):
        with self.mock_func("show_namespace", return_value=None, test=True):
            actual = kubernetes.namespace_absent(name="salt")
            self.assertDictEqual(
                {
                    "changes": {},
                    "result": None,
                    "name": "salt",
                    "comment": "The namespace does not exist",
                },
                actual,
            )

    def test_namespace_absent__noop(self):
        with self.mock_func("show_namespace", return_value=None):
            actual = kubernetes.namespace_absent(name="salt")
            self.assertDictEqual(
                {
                    "changes": {},
                    "result": True,
                    "name": "salt",
                    "comment": "The namespace does not exist",
                },
                actual,
            )

    def test_namespace_absent__delete_test_true(self):
        namespace_data = self.make_namespace(name="salt")
        with self.mock_func("show_namespace", return_value=namespace_data, test=True):
            actual = kubernetes.namespace_absent(name="salt")
            self.assertDictEqual(
                {
                    "changes": {},
                    "result": None,
                    "name": "salt",
                    "comment": "The namespace is going to be deleted",
                },
                actual,
            )

    def test_namespace_absent__delete_code_200(self):
        namespace_data = self.make_namespace(name="salt")
        deleted = namespace_data.copy()
        deleted["code"] = 200
        deleted.update({"code": 200, "message": None})
        with self.mock_func("show_namespace", return_value=namespace_data):
            with self.mock_func("delete_namespace", return_value=deleted):
                actual = kubernetes.namespace_absent(name="salt")
                self.assertDictEqual(
                    {
                        "changes": {
                            "kubernetes.namespace": {"new": "absent", "old": "present"}
                        },
                        "result": True,
                        "name": "salt",
                        "comment": "Terminating",
                    },
                    actual,
                )

    def test_namespace_absent__delete_status_terminating(self):
        namespace_data = self.make_namespace(name="salt")
        deleted = namespace_data.copy()
        deleted.update(
            {
                "code": None,
                "status": "Terminating namespace",
                "message": "Terminating this shizzzle yo",
            }
        )
        with self.mock_func("show_namespace", return_value=namespace_data):
            with self.mock_func("delete_namespace", return_value=deleted):
                actual = kubernetes.namespace_absent(name="salt")
                self.assertDictEqual(
                    {
                        "changes": {
                            "kubernetes.namespace": {"new": "absent", "old": "present"}
                        },
                        "result": True,
                        "name": "salt",
                        "comment": "Terminating this shizzzle yo",
                    },
                    actual,
                )

    def test_namespace_absent__delete_status_phase_terminating(self):
        # This is what kubernetes 1.8.0 looks like when deleting namespaces
        namespace_data = self.make_namespace(name="salt")
        deleted = namespace_data.copy()
        deleted.update(
            {"code": None, "message": None, "status": {"phase": "Terminating"}}
        )
        with self.mock_func("show_namespace", return_value=namespace_data):
            with self.mock_func("delete_namespace", return_value=deleted):
                actual = kubernetes.namespace_absent(name="salt")
                self.assertDictEqual(
                    {
                        "changes": {
                            "kubernetes.namespace": {"new": "absent", "old": "present"}
                        },
                        "result": True,
                        "name": "salt",
                        "comment": "Terminating",
                    },
                    actual,
                )

    def test_namespace_absent__delete_error(self):
        namespace_data = self.make_namespace(name="salt")
        deleted = namespace_data.copy()
        deleted.update({"code": 418, "message": "I' a teapot!", "status": None})
        with self.mock_func("show_namespace", return_value=namespace_data):
            with self.mock_func("delete_namespace", return_value=deleted):
                actual = kubernetes.namespace_absent(name="salt")
                self.assertDictEqual(
                    {
                        "changes": {},
                        "result": False,
                        "name": "salt",
                        "comment": "Something went wrong, response: {}".format(
                            deleted,
                        ),
                    },
                    actual,
                )
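The unittest-style cases above rely on a mock_func helper defined earlier in the file. For illustration only, a rough pytest-style sketch of the same namespace_absent noop check is shown below; the module path (salt.states.kubernetes) and the direct patching of the __salt__ and __opts__ dunders are assumptions made for this sketch, not details taken from this diff.

from unittest.mock import MagicMock, patch

import salt.states.kubernetes as kubernetes


def test_namespace_absent_noop():
    # Assumed wiring: the state looks up execution functions in __salt__ and
    # the test flag in __opts__, so both dunders are patched in for the call.
    show_namespace = MagicMock(return_value=None)
    with patch.object(
        kubernetes,
        "__salt__",
        {"kubernetes.show_namespace": show_namespace},
        create=True,
    ), patch.object(kubernetes, "__opts__", {"test": False}, create=True):
        ret = kubernetes.namespace_absent(name="salt")
    assert ret["result"] is True
    assert ret["changes"] == {}
    assert ret["comment"] == "The namespace does not exist"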
@ -1,110 +0,0 @@
"""
    :codeauthor: :email: `Mike Place <mp@saltstack.com>`
"""


import io

from salt import template
from tests.support.mock import MagicMock
from tests.support.unit import TestCase


class TemplateTestCase(TestCase):

    render_dict = {
        "jinja": "fake_jinja_func",
        "json": "fake_json_func",
        "mako": "fake_make_func",
    }

    def test_compile_template_bad_type(self):
        """
        Test to ensure that unsupported types cannot be passed to the template compiler
        """
        ret = template.compile_template(["1", "2", "3"], None, None, None, None)
        self.assertDictEqual(ret, {})

    def test_compile_template_preserves_windows_newlines(self):
        """
        Test to ensure that a file with Windows newlines, when rendered by a
        template renderer, does not eat the CR character.
        """

        def _get_rend(renderer, value):
            """
            We need a new MagicMock each time since we're dealing with StringIO
            objects which are read like files.
            """
            return {renderer: MagicMock(return_value=io.StringIO(value))}

        input_data_windows = "foo\r\nbar\r\nbaz\r\n"
        input_data_non_windows = input_data_windows.replace("\r\n", "\n")
        renderer = "test"
        blacklist = whitelist = []

        ret = template.compile_template(
            ":string:",
            _get_rend(renderer, input_data_non_windows),
            renderer,
            blacklist,
            whitelist,
            input_data=input_data_windows,
        ).read()
        # Even though the mocked renderer returned a string without the windows
        # newlines, the compiled template should still have them.
        self.assertEqual(ret, input_data_windows)

        # Now test that we aren't adding them in unnecessarily.
        ret = template.compile_template(
            ":string:",
            _get_rend(renderer, input_data_non_windows),
            renderer,
            blacklist,
            whitelist,
            input_data=input_data_non_windows,
        ).read()
        self.assertEqual(ret, input_data_non_windows)

        # Finally, ensure that we're not unnecessarily replacing the \n with
        # \r\n in the event that the renderer returned a string with the
        # windows newlines intact.
        ret = template.compile_template(
            ":string:",
            _get_rend(renderer, input_data_windows),
            renderer,
            blacklist,
            whitelist,
            input_data=input_data_windows,
        ).read()
        self.assertEqual(ret, input_data_windows)

    def test_check_render_pipe_str(self):
        """
        Check that all renderers specified in the pipe string are available.
        """
        ret = template.check_render_pipe_str("jinja|json", self.render_dict, None, None)
        self.assertIn(("fake_jinja_func", ""), ret)
        self.assertIn(("fake_json_func", ""), ret)
        self.assertNotIn(("OBVIOUSLY_NOT_HERE", ""), ret)

    def test_check_renderer_blacklisting(self):
        """
        Check that all renderers specified in the pipe string are available.
        """
        ret = template.check_render_pipe_str(
            "jinja|json", self.render_dict, ["jinja"], None
        )
        self.assertListEqual([("fake_json_func", "")], ret)
        ret = template.check_render_pipe_str(
            "jinja|json", self.render_dict, None, ["jinja"]
        )
        self.assertListEqual([("fake_jinja_func", "")], ret)
        ret = template.check_render_pipe_str(
            "jinja|json", self.render_dict, ["jinja"], ["jinja"]
        )
        self.assertListEqual([], ret)
        ret = template.check_render_pipe_str(
            "jinja|json", self.render_dict, ["jinja"], ["jinja", "json"]
        )
        self.assertListEqual([("fake_json_func", "")], ret)
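As a quick reference for the behaviour the two tests above pin down, the sketch below shows how a renderer pipe string resolves against a renderer mapping; the renderer values are the same placeholder strings used in render_dict, not real renderer functions.

from salt import template

renderers = {"jinja": "fake_jinja_func", "json": "fake_json_func"}

pipe = template.check_render_pipe_str("jinja|json", renderers, None, None)
# pipe contains ("fake_jinja_func", "") and ("fake_json_func", ""): one entry
# per renderer named in the pipe string, each paired with its (empty) argline.

pipe = template.check_render_pipe_str("jinja|json", renderers, ["jinja"], None)
# With "jinja" blacklisted, only [("fake_json_func", "")] remains.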
@ -1,440 +0,0 @@
"""
Unit tests for salt.utils.templates.py
"""
import logging
import os
import sys
from collections import OrderedDict
from pathlib import PurePath, PurePosixPath

import pytest

import salt.utils.files
import salt.utils.templates
from tests.support.helpers import with_tempdir
from tests.support.mock import patch
from tests.support.unit import TestCase

try:
    import Cheetah as _

    HAS_CHEETAH = True
except ImportError:
    HAS_CHEETAH = False

log = logging.getLogger(__name__)


class RenderTestCase(TestCase):
    def setUp(self):
        # Default context for salt.utils.templates.render_*_tmpl to work
        self.context = {
            "opts": {"cachedir": "/D", "__cli": "salt"},
            "saltenv": None,
        }

    ### Tests for Jinja (whitespace-friendly)
    def test_render_jinja_sanity(self):
        tmpl = """OK"""
        res = salt.utils.templates.render_jinja_tmpl(tmpl, dict(self.context))
        self.assertEqual(res, "OK")

    def test_render_jinja_evaluate(self):
        tmpl = """{{ "OK" }}"""
        res = salt.utils.templates.render_jinja_tmpl(tmpl, dict(self.context))
        self.assertEqual(res, "OK")

    def test_render_jinja_evaluate_multi(self):
        tmpl = """{% if 1 -%}OK{%- endif %}"""
        res = salt.utils.templates.render_jinja_tmpl(tmpl, dict(self.context))
        self.assertEqual(res, "OK")

    def test_render_jinja_variable(self):
        tmpl = """{{ var }}"""

        ctx = dict(self.context)
        ctx["var"] = "OK"
        res = salt.utils.templates.render_jinja_tmpl(tmpl, ctx)
        self.assertEqual(res, "OK")

    def test_render_jinja_tojson_sorted(self):
        templ = """thing: {{ var|tojson(sort_keys=True) }}"""
        expected = """thing: {"x": "xxx", "y": "yyy", "z": "zzz"}"""

        with patch.dict(self.context, {"var": {"z": "zzz", "y": "yyy", "x": "xxx"}}):
            res = salt.utils.templates.render_jinja_tmpl(templ, self.context)

        assert res == expected

    def test_render_jinja_tojson_unsorted(self):
        templ = """thing: {{ var|tojson(sort_keys=False) }}"""
        expected = """thing: {"z": "zzz", "x": "xxx", "y": "yyy"}"""

        # Values must be added to the dict in the expected order. This is
        # only necessary for older Pythons that don't remember dict order.
        d = OrderedDict()
        d["z"] = "zzz"
        d["x"] = "xxx"
        d["y"] = "yyy"

        with patch.dict(self.context, {"var": d}):
            res = salt.utils.templates.render_jinja_tmpl(templ, self.context)

        assert res == expected

    ### Tests for mako template
    def test_render_mako_sanity(self):
        tmpl = """OK"""
        res = salt.utils.templates.render_mako_tmpl(tmpl, dict(self.context))
        self.assertEqual(res, "OK")

    def test_render_mako_evaluate(self):
        tmpl = """${ "OK" }"""
        res = salt.utils.templates.render_mako_tmpl(tmpl, dict(self.context))
        self.assertEqual(res, "OK")

    def test_render_mako_evaluate_multi(self):
        tmpl = """
        % if 1:
        OK
        % endif
        """
        res = salt.utils.templates.render_mako_tmpl(tmpl, dict(self.context))
        stripped = res.strip()
        self.assertEqual(stripped, "OK")

    def test_render_mako_variable(self):
        tmpl = """${ var }"""

        ctx = dict(self.context)
        ctx["var"] = "OK"
        res = salt.utils.templates.render_mako_tmpl(tmpl, ctx)
        self.assertEqual(res, "OK")

    ### Tests for wempy template
    @pytest.mark.skipif(
        sys.version_info > (3,),
        reason="The wempy module is currently unsupported under Python3",
    )
    def test_render_wempy_sanity(self):
        tmpl = """OK"""
        res = salt.utils.templates.render_wempy_tmpl(tmpl, dict(self.context))
        self.assertEqual(res, "OK")

    @pytest.mark.skipif(
        sys.version_info > (3,),
        reason="The wempy module is currently unsupported under Python3",
    )
    def test_render_wempy_evaluate(self):
        tmpl = """{{="OK"}}"""
        res = salt.utils.templates.render_wempy_tmpl(tmpl, dict(self.context))
        self.assertEqual(res, "OK")

    @pytest.mark.skipif(
        sys.version_info > (3,),
        reason="The wempy module is currently unsupported under Python3",
    )
    def test_render_wempy_evaluate_multi(self):
        tmpl = """{{if 1:}}OK{{pass}}"""
        res = salt.utils.templates.render_wempy_tmpl(tmpl, dict(self.context))
        self.assertEqual(res, "OK")

    @pytest.mark.skipif(
        sys.version_info > (3,),
        reason="The wempy module is currently unsupported under Python3",
    )
    def test_render_wempy_variable(self):
        tmpl = """{{=var}}"""

        ctx = dict(self.context)
        ctx["var"] = "OK"
        res = salt.utils.templates.render_wempy_tmpl(tmpl, ctx)
        self.assertEqual(res, "OK")

    ### Tests for genshi template (xml-based)
    def test_render_genshi_sanity(self):
        tmpl = """<RU>OK</RU>"""
        res = salt.utils.templates.render_genshi_tmpl(tmpl, dict(self.context))
        self.assertEqual(res, "<RU>OK</RU>")

    def test_render_genshi_evaluate(self):
        tmpl = """<RU>${ "OK" }</RU>"""
        res = salt.utils.templates.render_genshi_tmpl(tmpl, dict(self.context))
        self.assertEqual(res, "<RU>OK</RU>")

    def test_render_genshi_evaluate_condition(self):
        tmpl = """<RU xmlns:py="http://genshi.edgewall.org/" py:if="1">OK</RU>"""
        res = salt.utils.templates.render_genshi_tmpl(tmpl, dict(self.context))
        self.assertEqual(res, "<RU>OK</RU>")

    def test_render_genshi_variable(self):
        tmpl = """<RU>$var</RU>"""

        ctx = dict(self.context)
        ctx["var"] = "OK"
        res = salt.utils.templates.render_genshi_tmpl(tmpl, ctx)
        self.assertEqual(res, "<RU>OK</RU>")

    def test_render_genshi_variable_replace(self):
        tmpl = """<RU xmlns:py="http://genshi.edgewall.org/" py:content="var">not ok</RU>"""

        ctx = dict(self.context)
        ctx["var"] = "OK"
        res = salt.utils.templates.render_genshi_tmpl(tmpl, ctx)
        self.assertEqual(res, "<RU>OK</RU>")

    ### Tests for cheetah template (line-oriented and xml-friendly)
    @pytest.mark.skipif(not HAS_CHEETAH, reason="The Cheetah Python module is missing.")
    def test_render_cheetah_sanity(self):
        tmpl = """OK"""
        res = salt.utils.templates.render_cheetah_tmpl(tmpl, dict(self.context))
        self.assertEqual(res, "OK")

    @pytest.mark.skipif(not HAS_CHEETAH, reason="The Cheetah Python module is missing.")
    def test_render_cheetah_evaluate(self):
        tmpl = """<%="OK"%>"""
        res = salt.utils.templates.render_cheetah_tmpl(tmpl, dict(self.context))
        self.assertEqual(res, "OK")

    @pytest.mark.skipif(not HAS_CHEETAH, reason="The Cheetah Python module is missing.")
    def test_render_cheetah_evaluate_xml(self):
        tmpl = """
        <% if 1: %>
        OK
        <% pass %>
        """
        res = salt.utils.templates.render_cheetah_tmpl(tmpl, dict(self.context))
        stripped = res.strip()
        self.assertEqual(stripped, "OK")

    @pytest.mark.skipif(not HAS_CHEETAH, reason="The Cheetah Python module is missing.")
    def test_render_cheetah_evaluate_text(self):
        tmpl = """
        #if 1
        OK
        #end if
        """

        res = salt.utils.templates.render_cheetah_tmpl(tmpl, dict(self.context))
        stripped = res.strip()
        self.assertEqual(stripped, "OK")

    @pytest.mark.skipif(not HAS_CHEETAH, reason="The Cheetah Python module is missing.")
    def test_render_cheetah_variable(self):
        tmpl = """$var"""

        ctx = dict(self.context)
        ctx["var"] = "OK"
        res = salt.utils.templates.render_cheetah_tmpl(tmpl, ctx)
        self.assertEqual(res.strip(), "OK")

    def test_render_jinja_cve_2021_25283(self):
        tmpl = """{{ [].__class__ }}"""
        ctx = dict(self.context)
        ctx["var"] = "OK"
        with pytest.raises(salt.exceptions.SaltRenderError):
            res = salt.utils.templates.render_jinja_tmpl(tmpl, ctx)


class MockRender:
    def __call__(self, tplstr, context, tmplpath=None):
        self.tplstr = tplstr
        self.context = context
        self.tmplpath = tmplpath
        return tplstr


class WrapRenderTestCase(TestCase):
    def assertDictContainsAll(self, actual, **expected):
        """Make sure dictionary contains at least all expected values"""
        actual = {key: actual[key] for key in expected if key in actual}
        self.assertEqual(expected, actual)

    def _test_generated_sls_context(self, tmplpath, sls, **expected):
        """Generic SLS Context Test"""
        # DeNormalize tmplpath
        tmplpath = str(PurePath(PurePosixPath(tmplpath)))
        if tmplpath.startswith("\\"):
            tmplpath = "C:{}".format(tmplpath)
        expected["tplpath"] = tmplpath
        actual = salt.utils.templates.generate_sls_context(tmplpath, sls)
        self.assertDictContainsAll(actual, **expected)

    @patch("salt.utils.templates.generate_sls_context")
    @with_tempdir()
    def test_sls_context_call(self, tempdir, generate_sls_context):
        """Check that generate_sls_context is called with proper parameters"""
        sls = "foo.bar"
        tmplpath = "/tmp/foo/bar.sls"

        slsfile = os.path.join(tempdir, "foo")
        with salt.utils.files.fopen(slsfile, "w") as fp:
            fp.write("{{ slspath }}")
        context = {"opts": {}, "saltenv": "base", "sls": sls}
        render = MockRender()
        wrapped = salt.utils.templates.wrap_tmpl_func(render)
        res = wrapped(slsfile, context=context, tmplpath=tmplpath)
        generate_sls_context.assert_called_with(tmplpath, sls)

    @patch("salt.utils.templates.generate_sls_context")
    @with_tempdir()
    def test_sls_context_no_call(self, tempdir, generate_sls_context):
        """Check that generate_sls_context is not called if sls is not set"""
        sls = "foo.bar"
        tmplpath = "/tmp/foo/bar.sls"

        slsfile = os.path.join(tempdir, "foo")
        with salt.utils.files.fopen(slsfile, "w") as fp:
            fp.write("{{ slspath }}")
        context = {"opts": {}, "saltenv": "base"}
        render = MockRender()
        wrapped = salt.utils.templates.wrap_tmpl_func(render)
        res = wrapped(slsfile, context=context, tmplpath=tmplpath)
        generate_sls_context.assert_not_called()

    def test_generate_sls_context__top_level(self):
        """generate_sls_context - top_level Use case"""
        self._test_generated_sls_context(
            "/tmp/boo.sls",
            "boo",
            tplfile="boo.sls",
            tpldir=".",
            tpldot="",
            slsdotpath="",
            slscolonpath="",
            sls_path="",
            slspath="",
        )

    def test_generate_sls_context__one_level_init_implicit(self):
        """generate_sls_context - Basic one level with implicit init.sls"""
        self._test_generated_sls_context(
            "/tmp/foo/init.sls",
            "foo",
            tplfile="foo/init.sls",
            tpldir="foo",
            tpldot="foo",
            slsdotpath="foo",
            slscolonpath="foo",
            sls_path="foo",
            slspath="foo",
        )

    def test_generate_sls_context__one_level_init_explicit(self):
        """generate_sls_context - Basic one level with explicit init.sls"""
        self._test_generated_sls_context(
            "/tmp/foo/init.sls",
            "foo.init",
            tplfile="foo/init.sls",
            tpldir="foo",
            tpldot="foo",
            slsdotpath="foo",
            slscolonpath="foo",
            sls_path="foo",
            slspath="foo",
        )

    def test_generate_sls_context__one_level(self):
        """generate_sls_context - Basic one level with name"""
        self._test_generated_sls_context(
            "/tmp/foo/boo.sls",
            "foo.boo",
            tplfile="foo/boo.sls",
            tpldir="foo",
            tpldot="foo",
            slsdotpath="foo",
            slscolonpath="foo",
            sls_path="foo",
            slspath="foo",
        )

    def test_generate_sls_context__one_level_repeating(self):
        """generate_sls_context - Basic one level with name same as dir

        (Issue #56410)
        """
        self._test_generated_sls_context(
            "/tmp/foo/foo.sls",
            "foo.foo",
            tplfile="foo/foo.sls",
            tpldir="foo",
            tpldot="foo",
            slsdotpath="foo",
            slscolonpath="foo",
            sls_path="foo",
            slspath="foo",
        )

    def test_generate_sls_context__two_level_init_implicit(self):
        """generate_sls_context - Basic two level with implicit init.sls"""
        self._test_generated_sls_context(
            "/tmp/foo/bar/init.sls",
            "foo.bar",
            tplfile="foo/bar/init.sls",
            tpldir="foo/bar",
            tpldot="foo.bar",
            slsdotpath="foo.bar",
            slscolonpath="foo:bar",
            sls_path="foo_bar",
            slspath="foo/bar",
        )

    def test_generate_sls_context__two_level_init_explicit(self):
        """generate_sls_context - Basic two level with explicit init.sls"""
        self._test_generated_sls_context(
            "/tmp/foo/bar/init.sls",
            "foo.bar.init",
            tplfile="foo/bar/init.sls",
            tpldir="foo/bar",
            tpldot="foo.bar",
            slsdotpath="foo.bar",
            slscolonpath="foo:bar",
            sls_path="foo_bar",
            slspath="foo/bar",
        )

    def test_generate_sls_context__two_level(self):
        """generate_sls_context - Basic two level with name"""
        self._test_generated_sls_context(
            "/tmp/foo/bar/boo.sls",
            "foo.bar.boo",
            tplfile="foo/bar/boo.sls",
            tpldir="foo/bar",
            tpldot="foo.bar",
            slsdotpath="foo.bar",
            slscolonpath="foo:bar",
            sls_path="foo_bar",
            slspath="foo/bar",
        )

    def test_generate_sls_context__two_level_repeating(self):
        """generate_sls_context - Basic two level with name same as dir

        (Issue #56410)
        """
        self._test_generated_sls_context(
            "/tmp/foo/foo/foo.sls",
            "foo.foo.foo",
            tplfile="foo/foo/foo.sls",
            tpldir="foo/foo",
            tpldot="foo.foo",
            slsdotpath="foo.foo",
            slscolonpath="foo:foo",
            sls_path="foo_foo",
            slspath="foo/foo",
        )

    @pytest.mark.skip_on_windows
    def test_generate_sls_context__backslash_in_path(self):
        """generate_sls_context - Handle backslash in path on non-windows"""
        self._test_generated_sls_context(
            "/tmp/foo/foo\\foo.sls",
            "foo.foo\\foo",
            tplfile="foo/foo\\foo.sls",
            tpldir="foo",
            tpldot="foo",
            slsdotpath="foo",
            slscolonpath="foo",
            sls_path="foo",
            slspath="foo",
        )
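To make the assertions above easier to scan, the sketch below restates the mapping checked by test_generate_sls_context__two_level; it assumes salt.utils.templates is importable exactly as in the file above and adds nothing beyond what the test already asserts.

import salt.utils.templates

ctx = salt.utils.templates.generate_sls_context("/tmp/foo/bar/boo.sls", "foo.bar.boo")
# Per test_generate_sls_context__two_level, the returned context includes:
#   tplfile="foo/bar/boo.sls", tpldir="foo/bar", tpldot="foo.bar",
#   slsdotpath="foo.bar", slscolonpath="foo:bar", sls_path="foo_bar",
#   slspath="foo/bar", tplpath="/tmp/foo/bar/boo.sls"
print(ctx["slspath"])  # -> "foo/bar"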