Enhance openscap module: add xccdf_eval call

This commit is contained in:
Michael Calmer 2021-03-08 16:40:59 +01:00 committed by Megan Wilhite
parent 2244d93fb4
commit d92ba4c8e2
3 changed files with 333 additions and 0 deletions

1
changelog/59756.added Normal file
View file

@ -0,0 +1 @@
Add a new ``openscap.xccdf_eval`` module function supporting additional ``oscap xccdf eval`` parameters

View file

@ -4,6 +4,7 @@ Module for OpenSCAP Management
"""
import os.path
import shlex
import shutil
import tempfile
@ -55,6 +56,103 @@ _OSCAP_EXIT_CODES_MAP = {
}
def xccdf_eval(xccdffile, ovalfiles=None, **kwargs):
    """
    Run ``oscap xccdf eval`` commands on minions.

    It uses cp.push_dir to upload the generated files to the salt master
    in the master's minion files cachedir
    (defaults to ``/var/cache/salt/master/minions/minion-id/files``)

    It needs ``file_recv`` set to ``True`` in the master configuration file.

    xccdffile
        the path to the xccdf file to evaluate

    ovalfiles
        additional oval definition files

    profile
        the name of Profile to be evaluated

    rule
        the name of a single rule to be evaluated

    oval_results
        save OVAL results as well (True or False)

    results
        write XCCDF Results into given file

    report
        write HTML report into given file

    fetch_remote_resources
        download remote content referenced by XCCDF (True or False)

    remediate
        automatically execute XCCDF fix elements for failed rules.
        Use of this option is always at your own risk. (True or False)

    CLI Example:

    .. code-block:: bash

        salt '*' openscap.xccdf_eval /usr/share/openscap/scap-yast2sec-xccdf.xml profile=Default
    """
    success = True
    error = None
    upload_dir = None
    returncode = None
    if not ovalfiles:
        ovalfiles = []

    # Build the oscap command line; kwargs that are not recognized
    # options are deliberately ignored.
    cmd_opts = ["oscap", "xccdf", "eval"]
    if kwargs.get("oval_results"):
        cmd_opts.append("--oval-results")
    if "results" in kwargs:
        cmd_opts.append("--results")
        cmd_opts.append(kwargs["results"])
    if "report" in kwargs:
        cmd_opts.append("--report")
        cmd_opts.append(kwargs["report"])
    if "profile" in kwargs:
        cmd_opts.append("--profile")
        cmd_opts.append(kwargs["profile"])
    if "rule" in kwargs:
        cmd_opts.append("--rule")
        cmd_opts.append(kwargs["rule"])
    if kwargs.get("fetch_remote_resources"):
        cmd_opts.append("--fetch-remote-resources")
    if kwargs.get("remediate"):
        cmd_opts.append("--remediate")
    cmd_opts.append(xccdffile)
    cmd_opts.extend(ovalfiles)

    # Validate all input files up front so we fail before spawning oscap.
    if not os.path.exists(xccdffile):
        success = False
        error = "XCCDF File '{}' does not exist".format(xccdffile)
    for ofile in ovalfiles:
        if success and not os.path.exists(ofile):
            success = False
            error = "Oval File '{}' does not exist".format(ofile)

    if success:
        # Run oscap inside a scratch directory so every generated artifact
        # (results, reports, oval results) lands in one place we can push.
        tempdir = tempfile.mkdtemp()
        proc = Popen(cmd_opts, stdout=PIPE, stderr=PIPE, cwd=tempdir)
        (_, error) = proc.communicate()
        if isinstance(error, bytes):
            # Popen without text mode yields bytes on Python 3; normalize
            # so callers always receive a str error message.
            error = error.decode("utf-8", errors="replace")
        returncode = proc.returncode
        # An unexpected exit code (e.g. a negative value when oscap is
        # killed by a signal) previously raised KeyError; treat it as a
        # failure instead of crashing the module call.
        success = _OSCAP_EXIT_CODES_MAP.get(returncode, False)
        if not success and not error:
            error = "Unknown oscap exit code: {}".format(returncode)
        if success:
            # Ship everything oscap produced back to the salt master.
            __salt__["cp.push_dir"](tempdir)
            upload_dir = tempdir
        shutil.rmtree(tempdir, ignore_errors=True)

    return dict(
        success=success, upload_dir=upload_dir, error=error, returncode=returncode
    )
def xccdf(params):
"""
Run ``oscap xccdf`` commands on minions.

View file

@ -21,6 +21,7 @@ class OpenscapTestCase(TestCase):
"salt.modules.openscap.tempfile.mkdtemp",
Mock(return_value=self.random_temp_dir),
),
patch("salt.modules.openscap.os.path.exists", Mock(return_value=True)),
]
for patcher in patchers:
self.apply_patch(patcher)
@ -211,3 +212,236 @@ class OpenscapTestCase(TestCase):
"returncode": None,
},
)
def test_new_openscap_xccdf_eval_success(self):
    """A clean scan (exit 0) pushes the scratch dir and reports success."""
    popen_mock = MagicMock(
        return_value=Mock(**{"returncode": 0, "communicate.return_value": ("", "")})
    )
    with patch("salt.modules.openscap.Popen", popen_mock):
        result = openscap.xccdf_eval(
            self.policy_file,
            profile="Default",
            oval_results=True,
            results="results.xml",
            report="report.html",
        )
        # Command line is assembled option-by-option, file last.
        expected_cmd = ["oscap", "xccdf", "eval", "--oval-results"]
        expected_cmd += ["--results", "results.xml"]
        expected_cmd += ["--report", "report.html"]
        expected_cmd += ["--profile", "Default"]
        expected_cmd.append(self.policy_file)
        self.assertEqual(
            result,
            {
                "upload_dir": self.random_temp_dir,
                "error": "",
                "success": True,
                "returncode": 0,
            },
        )
        self.assertEqual(openscap.tempfile.mkdtemp.call_count, 1)
        openscap.Popen.assert_called_once_with(
            expected_cmd,
            cwd=openscap.tempfile.mkdtemp.return_value,
            stderr=subprocess.PIPE,
            stdout=subprocess.PIPE,
        )
        openscap.__salt__["cp.push_dir"].assert_called_once_with(
            self.random_temp_dir
        )
        self.assertEqual(openscap.shutil.rmtree.call_count, 1)
def test_new_openscap_xccdf_eval_success_with_extra_ovalfiles(self):
    """Extra OVAL definition files are appended after the XCCDF file."""
    popen_mock = MagicMock(
        return_value=Mock(**{"returncode": 0, "communicate.return_value": ("", "")})
    )
    oval_files = ["/usr/share/xml/another-oval.xml", "/usr/share/xml/oval.xml"]
    with patch("salt.modules.openscap.Popen", popen_mock):
        result = openscap.xccdf_eval(
            self.policy_file,
            oval_files,
            profile="Default",
            oval_results=True,
            results="results.xml",
            report="report.html",
        )
        # Oval files must come last, in the order they were supplied.
        expected_cmd = [
            "oscap",
            "xccdf",
            "eval",
            "--oval-results",
            "--results",
            "results.xml",
            "--report",
            "report.html",
            "--profile",
            "Default",
            self.policy_file,
        ] + oval_files
        self.assertEqual(
            result,
            {
                "upload_dir": self.random_temp_dir,
                "error": "",
                "success": True,
                "returncode": 0,
            },
        )
        self.assertEqual(openscap.tempfile.mkdtemp.call_count, 1)
        openscap.Popen.assert_called_once_with(
            expected_cmd,
            cwd=openscap.tempfile.mkdtemp.return_value,
            stderr=subprocess.PIPE,
            stdout=subprocess.PIPE,
        )
        openscap.__salt__["cp.push_dir"].assert_called_once_with(
            self.random_temp_dir
        )
        self.assertEqual(openscap.shutil.rmtree.call_count, 1)
def test_new_openscap_xccdf_eval_success_with_failing_rules(self):
    """Exit code 2 (failed rules) is still a successful scan; stderr is surfaced."""
    popen_mock = MagicMock(
        return_value=Mock(
            **{"returncode": 2, "communicate.return_value": ("", "some error")}
        )
    )
    with patch("salt.modules.openscap.Popen", popen_mock):
        result = openscap.xccdf_eval(
            self.policy_file,
            profile="Default",
            oval_results=True,
            results="results.xml",
            report="report.html",
        )
        expected_cmd = ["oscap", "xccdf", "eval", "--oval-results"]
        expected_cmd += ["--results", "results.xml"]
        expected_cmd += ["--report", "report.html"]
        expected_cmd += ["--profile", "Default"]
        expected_cmd.append(self.policy_file)
        self.assertEqual(
            result,
            {
                "upload_dir": self.random_temp_dir,
                "error": "some error",
                "success": True,
                "returncode": 2,
            },
        )
        self.assertEqual(openscap.tempfile.mkdtemp.call_count, 1)
        openscap.Popen.assert_called_once_with(
            expected_cmd,
            cwd=openscap.tempfile.mkdtemp.return_value,
            stderr=subprocess.PIPE,
            stdout=subprocess.PIPE,
        )
        openscap.__salt__["cp.push_dir"].assert_called_once_with(
            self.random_temp_dir
        )
        self.assertEqual(openscap.shutil.rmtree.call_count, 1)
def test_new_openscap_xccdf_eval_success_ignore_unknown_params(self):
    """Unrecognized keyword arguments must not leak into the oscap command."""
    popen_mock = MagicMock(
        return_value=Mock(
            **{"returncode": 2, "communicate.return_value": ("", "some error")}
        )
    )
    with patch("salt.modules.openscap.Popen", popen_mock):
        # ``param`` is not a supported option and should be dropped silently.
        result = openscap.xccdf_eval(
            "/policy/file",
            param="Default",
            profile="Default",
            oval_results=True,
            results="results.xml",
            report="report.html",
        )
        self.assertEqual(
            result,
            {
                "upload_dir": self.random_temp_dir,
                "error": "some error",
                "success": True,
                "returncode": 2,
            },
        )
        expected_cmd = ["oscap", "xccdf", "eval", "--oval-results"]
        expected_cmd += ["--results", "results.xml"]
        expected_cmd += ["--report", "report.html"]
        expected_cmd += ["--profile", "Default"]
        expected_cmd.append("/policy/file")
        openscap.Popen.assert_called_once_with(
            expected_cmd,
            cwd=openscap.tempfile.mkdtemp.return_value,
            stderr=subprocess.PIPE,
            stdout=subprocess.PIPE,
        )
def test_new_openscap_xccdf_eval_evaluation_error(self):
    """Exit code 1 (evaluation error) fails the call and skips the upload."""
    popen_mock = MagicMock(
        return_value=Mock(
            **{
                "returncode": 1,
                "communicate.return_value": ("", "evaluation error"),
            }
        )
    )
    with patch("salt.modules.openscap.Popen", popen_mock):
        result = openscap.xccdf_eval(
            self.policy_file,
            profile="Default",
            oval_results=True,
            results="results.xml",
            report="report.html",
        )
        # No upload_dir: nothing is pushed to the master on failure.
        self.assertEqual(
            result,
            {
                "upload_dir": None,
                "error": "evaluation error",
                "success": False,
                "returncode": 1,
            },
        )