Mirror of https://github.com/saltstack/salt.git
fixes saltstack/salt#61549: allow roll-up of duplicate IDs with different names
parent 9c5b4f2737
commit 7e1c2baa65

10 changed files with 590 additions and 4 deletions
changelog/61549.added (new file)
@@ -0,0 +1 @@
Add feature to allow roll-up of duplicate IDs with different names in highstate output

@@ -643,6 +643,10 @@
# as a percent of total actions will be shown for each state run.
#state_output_pct: False

# The state_compress_ids setting aggregates information about states which have
# multiple "names" under the same state ID in the highstate output.
#state_compress_ids: False

# Automatically aggregate all states that have support for mod_aggregate by
# setting to 'True'. Or pass a list of state module names to automatically
# aggregate just those types.

@@ -709,6 +709,10 @@
# as a percent of total actions will be shown for each state run.
#state_output_pct: False

# The state_compress_ids setting aggregates information about states which have
# multiple "names" under the same state ID in the highstate output.
#state_compress_ids: False

# Fingerprint of the master public key to validate the identity of your Salt master
# before the initial key exchange. The master fingerprint can be found by running
# "salt-key -f master.pub" on the Salt master.

@@ -510,6 +510,10 @@
# as a percent of total actions will be shown for each state run.
#state_output_pct: False

# The state_compress_ids setting aggregates information about states which have
# multiple "names" under the same state ID in the highstate output.
#state_compress_ids: False

# Fingerprint of the master public key to validate the identity of your Salt master
# before the initial key exchange. The master fingerprint can be found by running
# "salt-key -F master" on the Salt master.

@@ -578,6 +578,10 @@ syndic_user: salt
# as a percent of total actions will be shown for each state run.
#state_output_pct: False

# The state_compress_ids setting aggregates information about states which have
# multiple "names" under the same state ID in the highstate output.
#state_compress_ids: False

# Automatically aggregate all states that have support for mod_aggregate by
# setting to 'True'. Or pass a list of state module names to automatically
# aggregate just those types.

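Each shipped configuration file gains the same commented-out default shown above. As a rough sketch (not part of the commit), one way to confirm a hand-edited minion config is picked up is to load it through salt.config; the path below is only the conventional default, and the key may be absent on older releases, hence the guarded lookup.

# Sketch: load a minion config and check the new option (assumes a local Salt
# install; /etc/salt/minion is only the conventional path).
import salt.config

opts = salt.config.minion_config("/etc/salt/minion")
print(opts.get("state_compress_ids", False))  # False unless enabled in the file
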
@@ -2568,7 +2568,7 @@ will be shown for each state run.
.. conf_master:: state_output_pct

``state_output_pct``
------------------------
--------------------

Default: ``False``

@@ -2579,6 +2579,20 @@ as a percent of total actions will be shown for each state run.

    state_output_pct: False

.. conf_master:: state_compress_ids

``state_compress_ids``
----------------------

Default: ``False``

The ``state_compress_ids`` setting aggregates information about states which
have multiple "names" under the same state ID in the highstate output.

.. code-block:: yaml

    state_compress_ids: False

.. conf_master:: state_aggregate

``state_aggregate``

@@ -2301,7 +2301,7 @@ will be shown for each state run.
.. conf_minion:: state_output_pct

``state_output_pct``
------------------------
--------------------

Default: ``False``

@@ -2312,6 +2312,20 @@ as a percent of total actions will be shown for each state run.

    state_output_pct: False

.. conf_minion:: state_compress_ids

``state_compress_ids``
----------------------

Default: ``False``

The ``state_compress_ids`` setting aggregates information about states which
have multiple "names" under the same state ID in the highstate output.

.. code-block:: yaml

    state_compress_ids: False

.. conf_minion:: autoload_dynamic_modules

``autoload_dynamic_modules``

@@ -376,6 +376,9 @@ VALID_OPTS = immutabletypes.freeze(
        "state_output_profile": bool,
        # Tells the highstate outputter whether success and failure percents will be shown for each state run
        "state_output_pct": bool,
        # Tells the highstate outputter to aggregate information about states which
        # have multiple "names" under the same state ID in the highstate output.
        "state_compress_ids": bool,
        # When true, states run in the order defined in an SLS file, unless requisites re-order them
        "state_auto_order": bool,
        # Fire events as state chunks are processed by the state compiler

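VALID_OPTS only declares the expected type for the new key; a minimal sketch (not part of the commit) of what that registration looks like from the outside:

# Sketch: the frozen VALID_OPTS mapping exposes the declared type of the key.
import salt.config

assert salt.config.VALID_OPTS["state_compress_ids"] is bool
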
@@ -60,6 +60,12 @@ state_output_pct:
    Set `state_output_pct` to `True` in order to add "Success %" and "Failure %"
    to the "Summary" section at the end of the highstate output.

state_compress_ids:
    Set `state_compress_ids` to `True` to aggregate information about states
    which have multiple "names" under the same state ID in the highstate output.
    This is useful in combination with the `terse_id` value set in the
    `state_output` option when states are using the `names` state parameter.

Example usage:

If ``state_output: filter`` is set in the configuration file:
@@ -118,6 +124,7 @@ Example output with no special settings in configuration files:
"""


import collections
import logging
import pprint
import re

@@ -131,6 +138,135 @@ import salt.utils.stringutils
log = logging.getLogger(__name__)


def _compress_ids(data):
    """
    Function to take incoming raw state data and roll IDs with multiple names
    into a single state block for reporting purposes. This functionality is most
    useful for any "_id" state_output options, such as ``terse_id``.

    The following example state has one ID and four names.

    .. code-block:: yaml

        mix-matched results:
          cmd.run:
            - names:
              - "true"
              - "false"
              - "/bin/true"
              - "/bin/false"

    With ``state_output: terse_id`` set, this can create many lines of output
    which are not unique enough to be worth the screen real estate they occupy.

    .. code-block:: text

        19:10:10.969049 [ 8.546 ms] cmd.run Changed Name: mix-matched results
        19:10:10.977998 [ 8.606 ms] cmd.run Failed Name: mix-matched results
        19:10:10.987116 [ 7.618 ms] cmd.run Changed Name: mix-matched results
        19:10:10.995172 [ 9.344 ms] cmd.run Failed Name: mix-matched results

    Enabling ``state_compress_ids: True`` consolidates the state data by ID and
    result (e.g. success or failure). The earliest start time is chosen for
    display, duration is aggregated, and the total number of names is shown in
    parentheses to the right of the ID.

    .. code-block:: text

        19:10:46.283323 [ 16.236 ms] cmd.run Changed Name: mix-matched results (2)
        19:10:46.292181 [ 16.255 ms] cmd.run Failed Name: mix-matched results (2)

    A better real-world use case would be passing dozens of files and
    directories to the ``names`` parameter of the ``file.absent`` state. The
    number of lines consolidated in that case would be substantial.
    """
    if not isinstance(data, dict):
        return data

    compressed = {}

    # any failures to compress result in passing the original data
    # to the highstate outputter without modification
    try:
        for host, hostdata in data.items():
            compressed[host] = {}
            # count the number of unique IDs. use sls name and result in the key
            # so differences can be shown separately in the output
            id_count = collections.Counter(
                [
                    "_".join(
                        map(str, [info["__id__"], info["__sls__"], info["result"]])
                    )
                    for tname, info in hostdata.items()
                ]
            )
            for tname, info in hostdata.items():
                _id = "_".join(
                    map(str, [info["__id__"], info["__sls__"], info["result"]])
                )
                # state does not need to be compressed
                if id_count[_id] == 1:
                    compressed[host][tname] = info
                    continue

                # replace name to create a single key by sls and result
                comps = tname.split("_|-")
                comps[2] = "_".join(
                    map(
                        str,
                        [
                            "state_compressed",
                            info["__sls__"],
                            info["__id__"],
                            info["result"],
                        ],
                    )
                )
                comps[1] = "{} ({})".format(info["__id__"], id_count[_id])
                tname = "_|-".join(comps)

                # store the first entry as-is
                if tname not in compressed[host]:
                    compressed[host][tname] = info
                    continue

                # subsequent entries for compression will use the lowest
                # __run_num__ value, the sum of the duration, and the earliest
                # start time found
                compressed[host][tname]["__run_num__"] = min(
                    info["__run_num__"], compressed[host][tname]["__run_num__"]
                )
                compressed[host][tname]["duration"] = round(
                    sum([info["duration"], compressed[host][tname]["duration"]]), 3
                )
                compressed[host][tname]["start_time"] = sorted(
                    [info["start_time"], compressed[host][tname]["start_time"]]
                )[0]

                # changes are turned into a dict of changes keyed by name
                if compressed[host][tname].get("changes") and info.get("changes"):
                    if not compressed[host][tname]["changes"].get("compressed changes"):
                        compressed[host][tname]["changes"] = {
                            "compressed changes": {
                                compressed[host][tname]["name"]: compressed[host][
                                    tname
                                ]["changes"]
                            }
                        }
                    compressed[host][tname]["changes"]["compressed changes"].update(
                        {info["name"]: info["changes"]}
                    )
                elif info.get("changes"):
                    compressed[host][tname]["changes"] = {
                        "compressed changes": {info["name"]: info["changes"]}
                    }
    except Exception:  # pylint: disable=broad-except
        log.warning("Unable to compress state output by ID! Returning output normally.")
        return data

    return compressed


def output(data, **kwargs):  # pylint: disable=unused-argument
    """
    The HighState Outputter is only meant to be used with the state.highstate

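For reference, here is a minimal sketch (not part of the commit) of what ``_compress_ids`` does to a two-name payload. It imports the outputter module directly, the same way the unit tests later in this commit do; the host, SLS name, and file paths are made up.

import salt.output.highstate as highstate

# Hypothetical highstate return: one ID ("cleanup") with two names.
data = {
    "local": {
        "file_|-cleanup_|-/tmp/a_|-absent": {
            "__id__": "cleanup",
            "__run_num__": 0,
            "__sls__": "demo",
            "changes": {"removed": "/tmp/a"},
            "comment": "Removed file /tmp/a",
            "duration": 1.2,
            "name": "/tmp/a",
            "result": True,
            "start_time": "10:00:00.000001",
        },
        "file_|-cleanup_|-/tmp/b_|-absent": {
            "__id__": "cleanup",
            "__run_num__": 1,
            "__sls__": "demo",
            "changes": {"removed": "/tmp/b"},
            "comment": "Removed file /tmp/b",
            "duration": 0.8,
            "name": "/tmp/b",
            "result": True,
            "start_time": "10:00:00.100000",
        },
    }
}

compressed = highstate._compress_ids(data)
key = "file_|-cleanup (2)_|-state_compressed_demo_cleanup_True_|-absent"

# Both names collapse into one block: lowest __run_num__, summed duration,
# earliest start_time, and changes re-keyed by name.
assert list(compressed["local"]) == [key]
assert compressed["local"][key]["duration"] == 2.0
assert compressed["local"][key]["changes"] == {
    "compressed changes": {
        "/tmp/a": {"removed": "/tmp/a"},
        "/tmp/b": {"removed": "/tmp/b"},
    }
}
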
@@ -170,6 +306,10 @@ def output(data, **kwargs):  # pylint: disable=unused-argument
    if orchestrator_output:
        del data["retcode"]

    # pre-process data if state_compress_ids is set
    if __opts__.get("state_compress_ids", False):
        data = _compress_ids(data)

    indent_level = kwargs.get("indent_level", 1)
    ret = [
        _format_host(host, hostdata, indent_level=indent_level)[0]

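The pre-processing is gated on the option, and ``_compress_ids`` itself leaves data with unique IDs untouched, so enabling the option cannot change the output for states that do not use ``names``. A small sketch (not part of the commit) with a made-up single-entry payload:

import salt.output.highstate as highstate

unique = {
    "local": {
        "test_|-lonely_|-lonely_|-succeed_without_changes": {
            "__id__": "lonely",
            "__run_num__": 0,
            "__sls__": "demo",
            "changes": {},
            "comment": "Success!",
            "duration": 0.5,
            "name": "lonely",
            "result": True,
            "start_time": "10:00:00.000000",
        }
    }
}

# A single occurrence of the ID takes the id_count == 1 path and is kept as-is.
assert highstate._compress_ids(unique) == unique
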
@@ -246,7 +386,19 @@ def _format_host(host, data, indent_level=1):
            ret = data[tname]
            # Increment result counts
            rcounts.setdefault(ret["result"], 0)
            rcounts[ret["result"]] += 1

            # unpack state compression counts
            compressed_count = 1
            if (
                __opts__.get("state_compress_ids", False)
                and "_|-state_compressed_" in tname
            ):
                _, _id, _, _ = tname.split("_|-")
                count_match = re.search(r"\((\d+)\)$", _id)
                if count_match:
                    compressed_count = int(count_match.group(1))

            rcounts[ret["result"]] += compressed_count
            rduration = ret.get("duration", 0)
            try:
                rdurations.append(float(rduration))

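The number of rolled-up names is recovered from the rewritten tag name rather than carried as separate state, which is what the regex above does. A standalone sketch (not part of the commit) of that parsing, using a tag shaped like the unit-test data later in this commit:

import re

# Tag name shaped like the compressed keys built by _compress_ids.
tname = (
    "cmd_|-mix-matched results (2)"
    "_|-state_compressed_compress_test_mix-matched results_False_|-run"
)

compressed_count = 1
if "_|-state_compressed_" in tname:
    _, _id, _, _ = tname.split("_|-")  # _id == "mix-matched results (2)"
    count_match = re.search(r"\((\d+)\)$", _id)  # trailing "(N)" is the name count
    if count_match:
        compressed_count = int(count_match.group(1))

assert compressed_count == 2
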
@@ -269,7 +421,11 @@ def _format_host(host, data, indent_level=1):
                nchanges += 1
            else:
                schanged, ctext = _format_changes(ret["changes"])
                nchanges += 1 if schanged else 0
                # if compressed, the changes are keyed by name
                if schanged and compressed_count > 1:
                    nchanges += len(ret["changes"].get("compressed changes", {})) or 1
                else:
                    nchanges += 1 if schanged else 0

            # Skip this state if it was successful & diff output was requested
            if (

@@ -1,6 +1,10 @@
import copy

import pytest
import salt.config
import salt.output.highstate as highstate
from salt.utils.odict import OrderedDict
from tests.support.mock import patch


@pytest.fixture

@@ -164,3 +168,381 @@ def test_pct_summary_output():
    assert "Failure %: 0.0" in actual_output
    assert "Total states run: 1" in actual_output
    assert " file2" in actual_output


def test__compress_ids():
    """
    Tests for expected data return for _compress_ids
    and proper formatting using the state_compress_ids option
    """
    # Stop using OrderedDict once we drop Py3.5 support
    data = OrderedDict()
    # raw data entering the outputter
data["local"] = {
|
||||
"cmd_|-mix-matched results_|-/bin/false_|-run": {
|
||||
"__id__": "mix-matched results",
|
||||
"__run_num__": 7,
|
||||
"__sls__": "compress_test",
|
||||
"changes": {"pid": 6554, "retcode": 1, "stderr": "", "stdout": ""},
|
||||
"comment": "Command " '"/bin/false" ' "run",
|
||||
"duration": 8.57,
|
||||
"name": "/bin/false",
|
||||
"result": False,
|
||||
"start_time": "15:38:22.666578",
|
||||
},
|
||||
"cmd_|-mix-matched results_|-/bin/true_|-run": {
|
||||
"__id__": "mix-matched results",
|
||||
"__run_num__": 6,
|
||||
"__sls__": "compress_test",
|
||||
"changes": {"pid": 6553, "retcode": 0, "stderr": "", "stdout": ""},
|
||||
"comment": "Command " '"/bin/true" ' "run",
|
||||
"duration": 7.728,
|
||||
"name": "/bin/true",
|
||||
"result": True,
|
||||
"start_time": "15:38:22.658452",
|
||||
},
|
||||
"cmd_|-mix-matched results_|-false_|-run": {
|
||||
"__id__": "mix-matched results",
|
||||
"__run_num__": 5,
|
||||
"__sls__": "compress_test",
|
||||
"changes": {"pid": 6552, "retcode": 1, "stderr": "", "stdout": ""},
|
||||
"comment": "Command " '"false" run',
|
||||
"duration": 7.832,
|
||||
"name": "false",
|
||||
"result": False,
|
||||
"start_time": "15:38:22.650225",
|
||||
},
|
||||
"cmd_|-mix-matched results_|-true_|-run": {
|
||||
"__id__": "mix-matched results",
|
||||
"__run_num__": 4,
|
||||
"__sls__": "compress_test",
|
||||
"changes": {"pid": 6551, "retcode": 0, "stderr": "", "stdout": ""},
|
||||
"comment": "Command " '"true" run',
|
||||
"duration": 8.538,
|
||||
"name": "true",
|
||||
"result": True,
|
||||
"start_time": "15:38:22.641293",
|
||||
},
|
||||
"file_|-one clean one changes_|-/tmp/changes_|-managed": {
|
||||
"__id__": "one clean one changes",
|
||||
"__run_num__": 13,
|
||||
"__sls__": "compress_test",
|
||||
"changes": {"diff": "New file"},
|
||||
"comment": "File /tmp/changes updated",
|
||||
"duration": 3.17,
|
||||
"name": "/tmp/changes",
|
||||
"result": True,
|
||||
"start_time": "15:38:22.703770",
|
||||
},
|
||||
"file_|-one clean one changes_|-/tmp/clean_|-managed": {
|
||||
"__id__": "one clean one changes",
|
||||
"__run_num__": 12,
|
||||
"__sls__": "compress_test",
|
||||
"changes": {},
|
||||
"comment": "File /tmp/clean is in the correct state",
|
||||
"duration": 20.123,
|
||||
"name": "/tmp/clean",
|
||||
"result": True,
|
||||
"start_time": "15:38:22.683450",
|
||||
},
|
||||
"test_|-succeed clean_|-bar_|-succeed_without_changes": {
|
||||
"__id__": "succeed clean",
|
||||
"__run_num__": 11,
|
||||
"__sls__": "compress_test",
|
||||
"changes": {},
|
||||
"comment": "Success!",
|
||||
"duration": 0.759,
|
||||
"name": "bar",
|
||||
"result": True,
|
||||
"start_time": "15:38:22.678512",
|
||||
},
|
||||
"test_|-succeed clean_|-foo_|-succeed_without_changes": {
|
||||
"__id__": "succeed clean",
|
||||
"__run_num__": 10,
|
||||
"__sls__": "compress_test",
|
||||
"changes": {},
|
||||
"comment": "Success!",
|
||||
"duration": 0.676,
|
||||
"name": "foo",
|
||||
"result": True,
|
||||
"start_time": "15:38:22.677678",
|
||||
},
|
||||
"test_|-succeed clean_|-hello_|-succeed_without_changes": {
|
||||
"__id__": "succeed clean",
|
||||
"__run_num__": 8,
|
||||
"__sls__": "compress_test",
|
||||
"changes": {},
|
||||
"comment": "Success!",
|
||||
"duration": 1.071,
|
||||
"name": "hello",
|
||||
"result": True,
|
||||
"start_time": "15:38:22.675588",
|
||||
},
|
||||
"test_|-succeed clean_|-world_|-succeed_without_changes": {
|
||||
"__id__": "succeed clean",
|
||||
"__run_num__": 9,
|
||||
"__sls__": "compress_test",
|
||||
"changes": {},
|
||||
"comment": "Success!",
|
||||
"duration": 0.693,
|
||||
"name": "world",
|
||||
"result": True,
|
||||
"start_time": "15:38:22.676826",
|
||||
},
|
||||
"test_|-succeed with changes_|-bar_|-succeed_with_changes": {
|
||||
"__id__": "succeed with changes",
|
||||
"__run_num__": 3,
|
||||
"__sls__": "compress_test",
|
||||
"changes": {
|
||||
"testing": {
|
||||
"new": "Something pretended to change",
|
||||
"old": "Unchanged",
|
||||
}
|
||||
},
|
||||
"comment": "Success!",
|
||||
"duration": 0.829,
|
||||
"name": "bar",
|
||||
"result": True,
|
||||
"start_time": "15:38:22.639625",
|
||||
},
|
||||
"test_|-succeed with changes_|-foo_|-succeed_with_changes": {
|
||||
"__id__": "succeed with changes",
|
||||
"__run_num__": 2,
|
||||
"__sls__": "compress_test",
|
||||
"changes": {
|
||||
"testing": {
|
||||
"new": "Something pretended to change",
|
||||
"old": "Unchanged",
|
||||
}
|
||||
},
|
||||
"comment": "Success!",
|
||||
"duration": 0.739,
|
||||
"name": "foo",
|
||||
"result": True,
|
||||
"start_time": "15:38:22.638724",
|
||||
},
|
||||
"test_|-succeed with changes_|-hello_|-succeed_with_changes": {
|
||||
"__id__": "succeed with changes",
|
||||
"__run_num__": 0,
|
||||
"__sls__": "compress_test",
|
||||
"changes": {
|
||||
"testing": {
|
||||
"new": "Something pretended to change",
|
||||
"old": "Unchanged",
|
||||
}
|
||||
},
|
||||
"comment": "Success!",
|
||||
"duration": 0.812,
|
||||
"name": "hello",
|
||||
"result": True,
|
||||
"start_time": "15:38:22.636883",
|
||||
},
|
||||
"test_|-succeed with changes_|-world_|-succeed_with_changes": {
|
||||
"__id__": "succeed with changes",
|
||||
"__run_num__": 1,
|
||||
"__sls__": "compress_test",
|
||||
"changes": {
|
||||
"testing": {
|
||||
"new": "Something pretended to change",
|
||||
"old": "Unchanged",
|
||||
}
|
||||
},
|
||||
"comment": "Success!",
|
||||
"duration": 0.694,
|
||||
"name": "world",
|
||||
"result": True,
|
||||
"start_time": "15:38:22.637872",
|
||||
},
|
||||
"test_|-single clean_|-single_|-succeed_without_changes": {
|
||||
"__id__": "single clean",
|
||||
"__run_num__": 14,
|
||||
"__sls__": "compress_test",
|
||||
"changes": {},
|
||||
"comment": "Success!",
|
||||
"duration": 0.693,
|
||||
"name": "single",
|
||||
"result": True,
|
||||
"start_time": "15:38:22.676827",
|
||||
},
|
||||
}
|
||||
    # Stop using OrderedDict once we drop Py3.5 support
    expected_output = OrderedDict()
    # expected compressed raw data for outputter
    expected_output["local"] = {
        "cmd_|-mix-matched results (2)_|-state_compressed_compress_test_mix-matched results_False_|-run": {
            "__id__": "mix-matched results",
            "__run_num__": 5,
            "__sls__": "compress_test",
            "changes": {
                "compressed changes": {
                    "/bin/false": {
                        "pid": 6554,
                        "retcode": 1,
                        "stderr": "",
                        "stdout": "",
                    },
                    "false": {
                        "pid": 6552,
                        "retcode": 1,
                        "stderr": "",
                        "stdout": "",
                    },
                }
            },
            "comment": "Command " '"/bin/false" ' "run",
            "duration": 16.402,
            "name": "/bin/false",
            "result": False,
            "start_time": "15:38:22.650225",
        },
        "cmd_|-mix-matched results (2)_|-state_compressed_compress_test_mix-matched results_True_|-run": {
            "__id__": "mix-matched results",
            "__run_num__": 4,
            "__sls__": "compress_test",
            "changes": {
                "compressed changes": {
                    "/bin/true": {
                        "pid": 6553,
                        "retcode": 0,
                        "stderr": "",
                        "stdout": "",
                    },
                    "true": {"pid": 6551, "retcode": 0, "stderr": "", "stdout": ""},
                }
            },
            "comment": "Command " '"/bin/true" ' "run",
            "duration": 16.266,
            "name": "/bin/true",
            "result": True,
            "start_time": "15:38:22.641293",
        },
||||
"file_|-one clean one changes (2)_|-state_compressed_compress_test_one clean one changes_True_|-managed": {
|
||||
"__id__": "one clean one changes",
|
||||
"__run_num__": 12,
|
||||
"__sls__": "compress_test",
|
||||
"changes": {"diff": "New file"},
|
||||
"comment": "File /tmp/changes updated",
|
||||
"duration": 23.293,
|
||||
"name": "/tmp/changes",
|
||||
"result": True,
|
||||
"start_time": "15:38:22.683450",
|
||||
},
|
||||
"test_|-succeed clean (4)_|-state_compressed_compress_test_succeed clean_True_|-succeed_without_changes": {
|
||||
"__id__": "succeed clean",
|
||||
"__run_num__": 8,
|
||||
"__sls__": "compress_test",
|
||||
"changes": {},
|
||||
"comment": "Success!",
|
||||
"duration": 3.199,
|
||||
"name": "bar",
|
||||
"result": True,
|
||||
"start_time": "15:38:22.675588",
|
||||
},
|
||||
"test_|-succeed with changes (4)_|-state_compressed_compress_test_succeed with changes_True_|-succeed_with_changes": {
|
||||
"__id__": "succeed with changes",
|
||||
"__run_num__": 0,
|
||||
"__sls__": "compress_test",
|
||||
"changes": {
|
||||
"compressed changes": {
|
||||
"bar": {
|
||||
"testing": {
|
||||
"new": "Something pretended to change",
|
||||
"old": "Unchanged",
|
||||
}
|
||||
},
|
||||
"foo": {
|
||||
"testing": {
|
||||
"new": "Something pretended to change",
|
||||
"old": "Unchanged",
|
||||
}
|
||||
},
|
||||
"hello": {
|
||||
"testing": {
|
||||
"new": "Something pretended to change",
|
||||
"old": "Unchanged",
|
||||
}
|
||||
},
|
||||
"world": {
|
||||
"testing": {
|
||||
"new": "Something pretended to change",
|
||||
"old": "Unchanged",
|
||||
}
|
||||
},
|
||||
}
|
||||
},
|
||||
"comment": "Success!",
|
||||
"duration": 3.074,
|
||||
"name": "bar",
|
||||
"result": True,
|
||||
"start_time": "15:38:22.636883",
|
||||
},
|
||||
"test_|-single clean_|-single_|-succeed_without_changes": {
|
||||
"__id__": "single clean",
|
||||
"__run_num__": 14,
|
||||
"__sls__": "compress_test",
|
||||
"changes": {},
|
||||
"comment": "Success!",
|
||||
"duration": 0.693,
|
||||
"name": "single",
|
||||
"result": True,
|
||||
"start_time": "15:38:22.676827",
|
||||
},
|
||||
}
|
||||
    actual_output = highstate._compress_ids(data)

    # return properly compressed data
    assert actual_output == expected_output

    # check output text for formatting
    opts = copy.deepcopy(highstate.__opts__)
    opts["state_compress_ids"] = True
    with patch("salt.output.highstate.__opts__", opts, create=True):
        actual_output = highstate.output(data)
        assert " ID: succeed with changes (4)" in actual_output
        assert (
            " Name: state_compressed_compress_test_succeed with changes_True"
            in actual_output
        )
        assert " compressed changes:" in actual_output
        assert " ID: mix-matched results (2)" in actual_output
        assert (
            " Name: state_compressed_compress_test_mix-matched results_True"
            in actual_output
        )
        assert (
            " Name: state_compressed_compress_test_mix-matched results_False"
            in actual_output
        )
        assert " ID: succeed clean (4)" in actual_output
        assert (
            " Name: state_compressed_compress_test_succeed clean_True"
            in actual_output
        )
        assert " ID: one clean one changes (2)" in actual_output
        assert (
            " Name: state_compressed_compress_test_one clean one changes_True"
            in actual_output
        )
        assert " diff:" in actual_output
        assert "Succeeded: 13 (changed=9)" in actual_output
        assert "Failed: 2" in actual_output
        assert "Success %: 86.67" in actual_output
        assert "Failure %: 13.33" in actual_output
        assert "Total states run: 15" in actual_output

    # pop out a __run_num__ to break the data
    data["local"]["cmd_|-mix-matched results_|-/bin/false_|-run"].pop("__run_num__")
    actual_output = highstate._compress_ids(data)

    # expecting return of original data to let the outputter figure it out
    assert actual_output == data


def test__compress_ids_not_dict():
    """
    Simple test for returning original malformed data
    to let the outputter figure it out.
    """
    data = ["malformed"]
    actual_output = highstate._compress_ids(data)
    assert actual_output == data