Mirror of https://github.com/saltstack/salt.git (synced 2025-04-17 10:10:20 +00:00)

CVE fix

parent f89ef9204f
commit e0cdb80b55

7 changed files with 241 additions and 91 deletions
@@ -568,11 +568,6 @@ class Fileserver:
         saltenv = salt.utils.stringutils.to_unicode(saltenv)
         back = self.backends(back)
         kwargs = {}
-        fnd = {"path": "", "rel": ""}
-        if os.path.isabs(path):
-            return fnd
-        if "../" in path:
-            return fnd
         if salt.utils.url.is_escaped(path):
             # don't attempt to find URL query arguments in the path
             path = salt.utils.url.unescape(path)
@@ -588,6 +583,10 @@ class Fileserver:
                 args = comp.split("=", 1)
                 kwargs[args[0]] = args[1]

+        fnd = {"path": "", "rel": ""}
+        if os.path.isabs(path) or "../" in path:
+            return fnd
+
         if "env" in kwargs:
             # "env" is not supported; Use "saltenv".
             kwargs.pop("env")

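The core of these two Fileserver.find_file hunks is ordering: the absolute-path and "../" checks used to run on the raw request, and now run only after the URL has been unescaped and the query arguments split off, so a crafted request can no longer hide a traversal from a check that only saw the still-encoded path. Below is a minimal, self-contained sketch of that ordering problem; it uses plain percent-encoding and made-up helper names rather than Salt's own salt://| escaping, purely to illustrate why the check has to come last.

from urllib.parse import unquote

def find_file_unsafe(path):
    # Pre-fix ordering (simplified): the traversal check inspects the raw,
    # still-encoded request, and the path is only decoded afterwards.
    if "../" in path:
        return None
    return unquote(path)  # "..%2Fsecret" only becomes "../secret" after the check

def find_file_safe(path):
    # Post-fix ordering (simplified): decode first, validate the final path last.
    path = unquote(path)
    if "../" in path:
        return None
    return path

assert find_file_unsafe("..%2Fsecret") == "../secret"  # slips through
assert find_file_safe("..%2Fsecret") is None           # rejected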
@@ -27,6 +27,7 @@ import salt.utils.hashutils
 import salt.utils.path
 import salt.utils.platform
 import salt.utils.stringutils
+import salt.utils.verify
 import salt.utils.versions

 log = logging.getLogger(__name__)
@@ -98,6 +99,11 @@ def find_file(path, saltenv="base", **kwargs):
         if saltenv == "__env__":
             root = root.replace("__env__", actual_saltenv)
         full = os.path.join(root, path)
+
+        # Refuse to serve file that is not under the root.
+        if not salt.utils.verify.clean_path(root, full, subdir=True):
+            continue
+
         if os.path.isfile(full) and not salt.fileserver.is_file_ignored(__opts__, full):
             fnd["path"] = full
             fnd["rel"] = path
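This find_file hunk, and the serve_file hunk right after it, gate the resolved path with salt.utils.verify.clean_path before the roots backend touches the filesystem. As a rough, hypothetical approximation of what such a containment check does (this is not Salt's actual clean_path implementation), one can resolve both paths and require the candidate to stay under the root:

import os

def is_under_root(root, candidate):
    # Approximate containment check in the spirit of clean_path(root, path, subdir=True):
    # resolve both paths, then require the candidate to live inside the root.
    root_real = os.path.realpath(root)
    cand_real = os.path.realpath(candidate)
    try:
        return os.path.commonpath([root_real, cand_real]) == root_real
    except ValueError:
        # Raised e.g. when the two paths are on different Windows drives.
        return False

print(is_under_root("/srv/salt", "/srv/salt/web/app.sls"))    # True
print(is_under_root("/srv/salt", "/srv/salt/../etc/passwd"))  # False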
@@ -128,6 +134,26 @@ def serve_file(load, fnd):
     ret["dest"] = fnd["rel"]
     gzip = load.get("gzip", None)
     fpath = os.path.normpath(fnd["path"])
+
+    actual_saltenv = saltenv = load["saltenv"]
+    if saltenv not in __opts__["file_roots"]:
+        if "__env__" in __opts__["file_roots"]:
+            log.debug(
+                "salt environment '%s' maps to __env__ file_roots directory", saltenv
+            )
+            saltenv = "__env__"
+        else:
+            return fnd
+    file_in_root = False
+    for root in __opts__["file_roots"][saltenv]:
+        if saltenv == "__env__":
+            root = root.replace("__env__", actual_saltenv)
+        # Refuse to serve file that is not under the root.
+        if salt.utils.verify.clean_path(root, fpath, subdir=True):
+            file_in_root = True
+    if not file_in_root:
+        return ret
+
     with salt.utils.files.fopen(fpath, "rb") as fp_:
         fp_.seek(load["loc"])
         data = fp_.read(__opts__["file_buffer_size"])

@@ -1036,7 +1036,10 @@ class MWorker(salt.utils.process.SignalHandlingProcess):
         """
         key = payload["enc"]
         load = payload["load"]
-        ret = {"aes": self._handle_aes, "clear": self._handle_clear}[key](load)
+        if key == "aes":
+            ret = self.handle_aes(load)
+        else:
+            ret = self.handle_clear(load)
         raise salt.ext.tornado.gen.Return(ret)

     def _post_stats(self, start, cmd):
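The MWorker hunk replaces a dict-lookup dispatch with an explicit branch. With the lookup, a payload whose "enc" value is neither "aes" nor "clear" raises a KeyError inside the worker; with the branch, anything that is not "aes" falls through to the clear handler. A small standalone sketch of that behavioral difference, using toy handlers rather than Salt's real ones:

handlers = {"aes": lambda load: ("aes", load), "clear": lambda load: ("clear", load)}

def dispatch_dict(key, load):
    # Old shape: an unexpected key blows up before any handler runs.
    return handlers[key](load)

def dispatch_branch(key, load):
    # New shape: only "aes" is special; everything else is treated as clear.
    if key == "aes":
        return handlers["aes"](load)
    return handlers["clear"](load)

print(dispatch_branch("bogus", {}))    # ('clear', {})
try:
    dispatch_dict("bogus", {})
except KeyError as exc:
    print("unhandled enc value:", exc)  # unhandled enc value: 'bogus'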
@@ -1738,10 +1741,16 @@ class AESFuncs(TransportMethods):
             self.mminion.returners[fstr](load["jid"], load["load"])

         # Register the syndic
+
+        # We are creating a path using user suplied input. Use the
+        # clean_path to prevent a directory traversal.
+        root = os.path.join(self.opts["cachedir"], "syndics")
         syndic_cache_path = os.path.join(
             self.opts["cachedir"], "syndics", load["id"]
         )
-        if not os.path.exists(syndic_cache_path):
+        if salt.utils.verify.clean_path(
+            root, syndic_cache_path
+        ) and not os.path.exists(syndic_cache_path):
             path_name = os.path.split(syndic_cache_path)[0]
             if not os.path.exists(path_name):
                 os.makedirs(path_name)

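The comment added in this AESFuncs hunk spells out the risk: syndic_cache_path is built from the caller-supplied load["id"], and os.path.join will happily let a crafted id walk back out of the syndics directory, which is why the path is now vetted with clean_path before anything is created on disk. A quick illustration with made-up paths:

import os

cachedir = "/var/cache/salt/master"
root = os.path.join(cachedir, "syndics")

benign = os.path.join(root, "syndic01")
hostile = os.path.join(root, "../../../../../tmp/evil")  # attacker-chosen "id"

print(os.path.normpath(benign))   # /var/cache/salt/master/syndics/syndic01
print(os.path.normpath(hostile))  # /tmp/evil -- escapes the cache directory entirely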
@@ -53,6 +53,11 @@ def tmp_state_tree(tmp_path, testfile, unicode_filename, unicode_dirname):
     return dirname


+@pytest.fixture(autouse=True)
+def testfilepath(tmp_state_tree, testfile):
+    return tmp_state_tree / testfile.name
+
+
 @pytest.fixture
 def configure_loader_modules(tmp_state_tree, temp_salt_master):
     opts = temp_salt_master.config.copy()
@@ -75,17 +80,17 @@ def test_find_file(tmp_state_tree):
     assert full_path_to_file == ret["path"]


-def test_serve_file(testfile):
+def test_serve_file(testfilepath):
     with patch.dict(roots.__opts__, {"file_buffer_size": 262144}):
         load = {
             "saltenv": "base",
-            "path": str(testfile),
+            "path": str(testfilepath),
             "loc": 0,
         }
-        fnd = {"path": str(testfile), "rel": "testfile"}
+        fnd = {"path": str(testfilepath), "rel": "testfile"}
         ret = roots.serve_file(load, fnd)

-        with salt.utils.files.fopen(str(testfile), "rb") as fp_:
+        with salt.utils.files.fopen(str(testfilepath), "rb") as fp_:
             data = fp_.read()

         assert ret == {"data": data, "dest": "testfile"}
@@ -236,7 +241,7 @@ def test_update_mtime_map():
     # between Python releases.
     lines_written = sorted(mtime_map_mock.write_calls())
     expected = sorted(
-        salt.utils.stringutils.to_bytes("{key}:{val}\n".format(key=key, val=val))
+        salt.utils.stringutils.to_bytes(f"{key}:{val}\n")
         for key, val in new_mtime_map.items()
     )
     assert lines_written == expected, lines_written
@@ -277,3 +282,33 @@ def test_update_mtime_map_unicode_error(tmp_path):
         },
         "backend": "roots",
     }
+
+
+def test_find_file_not_in_root(tmp_state_tree):
+    """
+    Fileroots should never 'find' a file that is outside of it's root.
+    """
+    badfile = pathlib.Path(tmp_state_tree).parent / "bar"
+    badfile.write_text("Bad file")
+    badpath = f"../bar"
+    ret = roots.find_file(badpath)
+    assert ret == {"path": "", "rel": ""}
+    badpath = f"{tmp_state_tree / '..' / 'bar'}"
+    ret = roots.find_file(badpath)
+    assert ret == {"path": "", "rel": ""}
+
+
+def test_serve_file_not_in_root(tmp_state_tree):
+    """
+    Fileroots should never 'serve' a file that is outside of it's root.
+    """
+    badfile = pathlib.Path(tmp_state_tree).parent / "bar"
+    badfile.write_text("Bad file")
+    badpath = f"../bar"
+    load = {"path": "salt://|..\\bar", "saltenv": "base", "loc": 0}
+    fnd = {
+        "path": f"{tmp_state_tree / '..' / 'bar'}",
+        "rel": f"{pathlib.Path('..') / 'bar'}",
+    }
+    ret = roots.serve_file(load, fnd)
+    assert ret == {"data": "", "dest": "../bar"}

tests/pytests/unit/test_fileserver.py (new file, 127 lines)
@@ -0,0 +1,127 @@
+"""
+"""
+
+
+import datetime
+import os
+import time
+
+import salt.fileserver
+import salt.utils.files
+
+
+def test_diff_with_diffent_keys():
+    """
+    Test that different maps are indeed reported different
+    """
+    map1 = {"file1": 1234}
+    map2 = {"file2": 1234}
+    assert salt.fileserver.diff_mtime_map(map1, map2) is True
+
+
+def test_diff_with_diffent_values():
+    """
+    Test that different maps are indeed reported different
+    """
+    map1 = {"file1": 12345}
+    map2 = {"file1": 1234}
+    assert salt.fileserver.diff_mtime_map(map1, map2) is True
+
+
+def test_whitelist():
+    opts = {
+        "fileserver_backend": ["roots", "git", "s3fs", "hgfs", "svn"],
+        "extension_modules": "",
+    }
+    fs = salt.fileserver.Fileserver(opts)
+    assert sorted(fs.servers.whitelist) == sorted(
+        ["git", "gitfs", "hg", "hgfs", "svn", "svnfs", "roots", "s3fs"]
+    ), fs.servers.whitelist
+
+
+def test_future_file_list_cache_file_ignored(tmp_path):
+    opts = {
+        "fileserver_backend": ["roots"],
+        "cachedir": tmp_path,
+        "extension_modules": "",
+    }
+
+    back_cachedir = os.path.join(tmp_path, "file_lists/roots")
+    os.makedirs(os.path.join(back_cachedir))
+
+    # Touch a couple files
+    for filename in ("base.p", "foo.txt"):
+        with salt.utils.files.fopen(os.path.join(back_cachedir, filename), "wb") as _f:
+            if filename == "base.p":
+                _f.write(b"\x80")
+
+    # Set modification time to file list cache file to 1 year in the future
+    now = datetime.datetime.utcnow()
+    future = now + datetime.timedelta(days=365)
+    mod_time = time.mktime(future.timetuple())
+    os.utime(os.path.join(back_cachedir, "base.p"), (mod_time, mod_time))
+
+    list_cache = os.path.join(back_cachedir, "base.p")
+    w_lock = os.path.join(back_cachedir, ".base.w")
+    ret = salt.fileserver.check_file_list_cache(opts, "files", list_cache, w_lock)
+    assert (
+        ret[1] is True
+    ), "Cache file list cache file is not refreshed when future modification time"
+
+
+def test_file_server_url_escape(tmp_path):
+    (tmp_path / "srv").mkdir()
+    (tmp_path / "srv" / "salt").mkdir()
+    (tmp_path / "foo").mkdir()
+    (tmp_path / "foo" / "bar").write_text("Bad file")
+    fileroot = str(tmp_path / "srv" / "salt")
+    badfile = str(tmp_path / "foo" / "bar")
+    opts = {
+        "fileserver_backend": ["roots"],
+        "extension_modules": "",
+        "optimization_order": [
+            0,
+        ],
+        "file_roots": {
+            "base": [fileroot],
+        },
+        "file_ignore_regex": "",
+        "file_ignore_glob": "",
+    }
+    fs = salt.fileserver.Fileserver(opts)
+    ret = fs.find_file(
+        "salt://|..\\..\\..\\foo/bar",
+        "base",
+    )
+    assert ret == {"path": "", "rel": ""}
+
+
+def test_file_server_serve_url_escape(tmp_path):
+    (tmp_path / "srv").mkdir()
+    (tmp_path / "srv" / "salt").mkdir()
+    (tmp_path / "foo").mkdir()
+    (tmp_path / "foo" / "bar").write_text("Bad file")
+    fileroot = str(tmp_path / "srv" / "salt")
+    badfile = str(tmp_path / "foo" / "bar")
+    opts = {
+        "fileserver_backend": ["roots"],
+        "extension_modules": "",
+        "optimization_order": [
+            0,
+        ],
+        "file_roots": {
+            "base": [fileroot],
+        },
+        "file_ignore_regex": "",
+        "file_ignore_glob": "",
+        "file_buffer_size": 2048,
+    }
+    fs = salt.fileserver.Fileserver(opts)
+    ret = fs.serve_file(
+        {
+            "path": "salt://|..\\..\\..\\foo/bar",
+            "saltenv": "base",
+            "loc": 0,
+        }
+    )
+    assert ret == {"data": "", "dest": ""}

@@ -1,3 +1,4 @@
+import pathlib
 import time

 import pytest
@@ -160,3 +161,35 @@ def test_when_syndic_return_processes_load_then_correct_values_should_be_returne
     with patch.object(encrypted_requests, "_return", autospec=True) as fake_return:
         encrypted_requests._syndic_return(payload)
         fake_return.assert_called_with(expected_return)
+
+
+def test_syndic_return_cache_dir_creation(encrypted_requests):
+    """master's cachedir for a syndic will be created by AESFuncs._syndic_return method"""
+    cachedir = pathlib.Path(encrypted_requests.opts["cachedir"])
+    assert not (cachedir / "syndics").exists()
+    encrypted_requests._syndic_return(
+        {
+            "id": "mamajama",
+            "jid": "",
+            "return": {},
+        }
+    )
+    assert (cachedir / "syndics").exists()
+    assert (cachedir / "syndics" / "mamajama").exists()
+
+
+def test_syndic_return_cache_dir_creation_traversal(encrypted_requests):
+    """
+    master's AESFuncs._syndic_return method cachdir creation is not vulnerable to a directory traversal
+    """
+    cachedir = pathlib.Path(encrypted_requests.opts["cachedir"])
+    assert not (cachedir / "syndics").exists()
+    encrypted_requests._syndic_return(
+        {
+            "id": "../mamajama",
+            "jid": "",
+            "return": {},
+        }
+    )
+    assert not (cachedir / "syndics").exists()
+    assert not (cachedir / "mamajama").exists()

@@ -1,79 +0,0 @@
-"""
-    :codeauthor: Joao Mesquita <jmesquita@sangoma.com>
-"""
-
-
-import datetime
-import os
-import time
-
-import salt.utils.files
-from salt import fileserver
-from tests.support.helpers import with_tempdir
-from tests.support.mixins import LoaderModuleMockMixin
-from tests.support.unit import TestCase
-
-
-class MapDiffTestCase(TestCase):
-    def test_diff_with_diffent_keys(self):
-        """
-        Test that different maps are indeed reported different
-        """
-        map1 = {"file1": 1234}
-        map2 = {"file2": 1234}
-        assert fileserver.diff_mtime_map(map1, map2) is True
-
-    def test_diff_with_diffent_values(self):
-        """
-        Test that different maps are indeed reported different
-        """
-        map1 = {"file1": 12345}
-        map2 = {"file1": 1234}
-        assert fileserver.diff_mtime_map(map1, map2) is True
-
-
-class VCSBackendWhitelistCase(TestCase, LoaderModuleMockMixin):
-    def setup_loader_modules(self):
-        return {fileserver: {}}
-
-    def test_whitelist(self):
-        opts = {
-            "fileserver_backend": ["roots", "git", "s3fs", "hgfs", "svn"],
-            "extension_modules": "",
-        }
-        fs = fileserver.Fileserver(opts)
-        assert sorted(fs.servers.whitelist) == sorted(
-            ["git", "gitfs", "hg", "hgfs", "svn", "svnfs", "roots", "s3fs"]
-        ), fs.servers.whitelist
-
-    @with_tempdir()
-    def test_future_file_list_cache_file_ignored(self, cachedir):
-        opts = {
-            "fileserver_backend": ["roots"],
-            "cachedir": cachedir,
-            "extension_modules": "",
-        }
-
-        back_cachedir = os.path.join(cachedir, "file_lists/roots")
-        os.makedirs(os.path.join(back_cachedir))
-
-        # Touch a couple files
-        for filename in ("base.p", "foo.txt"):
-            with salt.utils.files.fopen(
-                os.path.join(back_cachedir, filename), "wb"
-            ) as _f:
-                if filename == "base.p":
-                    _f.write(b"\x80")
-
-        # Set modification time to file list cache file to 1 year in the future
-        now = datetime.datetime.utcnow()
-        future = now + datetime.timedelta(days=365)
-        mod_time = time.mktime(future.timetuple())
-        os.utime(os.path.join(back_cachedir, "base.p"), (mod_time, mod_time))
-
-        list_cache = os.path.join(back_cachedir, "base.p")
-        w_lock = os.path.join(back_cachedir, ".base.w")
-        ret = fileserver.check_file_list_cache(opts, "files", list_cache, w_lock)
-        assert (
-            ret[1] is True
-        ), "Cache file list cache file is not refreshed when future modification time"