Merge branch '2018.3' into lgpo_boolean_element_fix

Commit: 5a98fb9751
15 changed files with 302 additions and 38 deletions

@@ -91,8 +91,12 @@ def install(gems,  # pylint: disable=C0103
         Doesn't play nice with multiple gems at once
     :param rdoc: boolean : False
         Generate RDoc documentation for the gem(s).
+        For rubygems > 3 this is interpreted as the --no-document arg and the
+        ri option will then be ignored
     :param ri: boolean : False
         Generate RI documentation for the gem(s).
+        For rubygems > 3 this is interpreted as the --no-document arg and the
+        rdoc option will then be ignored
     :param pre_releases: boolean : False
         Include pre-releases in the available versions
     :param proxy: string : None

@@ -119,12 +123,18 @@ def install(gems,  # pylint: disable=C0103
     options = []
     if version:
         options.extend(['--version', version])
-    if not rdoc:
-        options.append('--no-rdoc')
-    if not ri:
-        options.append('--no-ri')
-    if pre_releases:
-        options.append('--pre')
+    if _has_rubygems_3(ruby=ruby, runas=runas, gem_bin=gem_bin):
+        if not rdoc or not ri:
+            options.append('--no-document')
+        if pre_releases:
+            options.append('--prerelease')
+    else:
+        if not rdoc:
+            options.append('--no-rdoc')
+        if not ri:
+            options.append('--no-ri')
+        if pre_releases:
+            options.append('--pre')
     if proxy:
         options.extend(['-p', proxy])
     if source:

@@ -224,6 +234,45 @@ def update_system(version='', ruby=None, runas=None, gem_bin=None):
                runas=runas)


+def version(ruby=None, runas=None, gem_bin=None):
+    '''
+    Print out the version of gem
+
+    :param gem_bin: string : None
+        Full path to ``gem`` binary to use.
+    :param ruby: string : None
+        If RVM or rbenv are installed, the ruby version and gemset to use.
+        Ignored if ``gem_bin`` is specified.
+    :param runas: string : None
+        The user to run gem as.
+
+    CLI Example:
+
+    .. code-block:: bash
+
+        salt '*' gem.version
+    '''
+    cmd = ['--version']
+    stdout = _gem(cmd,
+                  ruby,
+                  gem_bin=gem_bin,
+                  runas=runas)
+    ret = {}
+    for line in salt.utils.itertools.split(stdout, '\n'):
+        match = re.match(r'[.0-9]+', line)
+        if match:
+            ret = line
+            break
+    return ret
+
+
+def _has_rubygems_3(ruby=None, runas=None, gem_bin=None):
+    match = re.match(r'^3\..*', version(ruby=ruby, runas=runas, gem_bin=gem_bin))
+    if match:
+        return True
+    return False
+
+
 def list_(prefix='', ruby=None, runas=None, gem_bin=None):
     '''
     List locally installed gems.

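Note: to make the rubygems-version switch above concrete, here is a small illustrative sketch (not part of the diff; the helper name _doc_flags and the direct subprocess call are assumptions for the example). It assumes `gem --version` prints a bare version string, mirrors the regex used by _has_rubygems_3, and reproduces the option selection from install():

    import re
    import subprocess

    def _doc_flags(gem_version, rdoc=False, ri=False, pre_releases=False):
        # rubygems >= 3 dropped --no-rdoc/--no-ri in favour of --no-document,
        # and the diff switches to the long-form --prerelease flag there.
        options = []
        if re.match(r'^3\..*', gem_version):
            if not rdoc or not ri:
                options.append('--no-document')
            if pre_releases:
                options.append('--prerelease')
        else:
            if not rdoc:
                options.append('--no-rdoc')
            if not ri:
                options.append('--no-ri')
            if pre_releases:
                options.append('--pre')
        return options

    # Example: detect the local rubygems version and build the doc/pre-release options.
    gem_version = subprocess.check_output(['gem', '--version']).decode().strip()
    print(_doc_flags(gem_version, pre_releases=True))
    # 3.x -> ['--no-document', '--prerelease']; older -> ['--no-rdoc', '--no-ri', '--pre']
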
@@ -5083,7 +5083,7 @@ def _findOptionValueAdvAudit(option):
             field_names = _get_audit_defaults('fieldnames')
             # If the file doesn't exist anywhere, create it with default
             # fieldnames
-            __salt__['file.mkdir'](os.path.dirname(f_audit))
+            __salt__['file.makedirs'](f_audit)
             __salt__['file.write'](f_audit, ','.join(field_names))

     audit_settings = {}

@@ -5187,7 +5187,7 @@ def _set_audit_file_data(option, value):
             # Copy the temporary csv file over the existing audit.csv in both
             # locations if a value was written
             __salt__['file.copy'](f_temp.name, f_audit, remove_existing=True)
-            __salt__['file.mkdir'](os.path.dirname(f_audit_gpo))
+            __salt__['file.makedirs'](f_audit_gpo)
             __salt__['file.copy'](f_temp.name, f_audit_gpo, remove_existing=True)
     finally:
         f_temp.close()

@@ -5605,7 +5605,7 @@ def _getDataFromRegPolData(search_string, policy_data, return_value_name=False):
                     )
                 ].split(encoded_semicolon)
                 if len(pol_entry) >= 2:
-                    valueName = pol_entry[1]
+                    valueName = pol_entry[1].decode('utf-16-le').rstrip(chr(0))
                 if len(pol_entry) >= 5:
                     value = pol_entry[4]
                     if vtype == 'REG_DWORD' or vtype == 'REG_QWORD':

@@ -5923,18 +5923,18 @@ def _processValueItem(element, reg_key, reg_valuename, policy, parent_element,
                                        ']'.encode('utf-16-le')])
             if 'expandable' in element.attrib:
                 this_vtype = 'REG_EXPAND_SZ'
-            if 'explicitValue' in element.attrib and element.attrib['explicitValue'].lower() == 'true':
+            if element.attrib.get('explicitValue', 'false').lower() == 'true':
                 if this_element_value is not None:
-                    element_valuenames = this_element_value.keys()
-                    element_values = this_element_value.values()
-            if 'valuePrefix' in element.attrib:
+                    element_valuenames = [str(k) for k in this_element_value.keys()]
+                    element_values = [str(v) for v in this_element_value.values()]
+            elif 'valuePrefix' in element.attrib:
                 # if the valuePrefix attribute exists, the valuenames are <prefix><number>
                 # most prefixes attributes are empty in the admx files, so the valuenames
                 # end up being just numbers
                 if element.attrib['valuePrefix'] != '':
                     if this_element_value is not None:
-                        element_valuenames = ['{0}{1}'.format(element.attrib['valuePrefix'],
-                                                              k) for k in element_valuenames]
+                        element_valuenames = ['{0}{1}'.format(
+                            element.attrib['valuePrefix'], k) for k in element_valuenames]
             else:
                 # if there is no valuePrefix attribute, the valuename is the value
                 if element_values is not None:

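Note: the _getDataFromRegPolData change above returns a decoded value name instead of raw UTF-16-LE bytes. A minimal sketch of that decoding step, using a made-up value name rather than real Registry.pol data:

    # Registry.pol stores strings as UTF-16-LE with a terminating NUL character.
    raw_value_name = 'EnableSomePolicy'.encode('utf-16-le') + chr(0).encode('utf-16-le')

    # Old behaviour: the raw bytes were handed back to the caller.
    print(raw_value_name)        # b'E\x00n\x00a\x00b\x00l\x00e\x00...'

    # New behaviour: decode and strip the trailing NUL, yielding a plain string.
    value_name = raw_value_name.decode('utf-16-le').rstrip(chr(0))
    print(value_name)            # 'EnableSomePolicy'
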
@@ -781,10 +781,12 @@ def _check_directory_win(name,
     if not os.path.isdir(name):
         changes = {name: {'directory': 'new'}}
     else:
-        # Check owner
+        # Check owner by SID
         if win_owner is not None:
-            owner = salt.utils.win_dacl.get_owner(name)
-            if not owner.lower() == win_owner.lower():
+            current_owner = salt.utils.win_dacl.get_owner(name)
+            current_owner_sid = salt.utils.win_functions.get_sid_from_name(current_owner)
+            expected_owner_sid = salt.utils.win_functions.get_sid_from_name(win_owner)
+            if not current_owner_sid == expected_owner_sid:
                 changes['owner'] = win_owner

         # Check perms

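Note: the _check_directory_win change above compares the directory owner by SID rather than by account name, because the same account can be reported under different spellings (for example with and without a domain prefix). A hypothetical sketch of the idea; the name-to-SID table below is invented for illustration, while the real code resolves SIDs through salt.utils.win_functions.get_sid_from_name:

    # Invented lookup standing in for real name-to-SID resolution on Windows.
    NAME_TO_SID = {
        'administrator': 'S-1-5-21-1111111111-2222222222-3333333333-500',
        'example\\administrator': 'S-1-5-21-1111111111-2222222222-3333333333-500',
    }

    def get_sid_from_name(name):
        return NAME_TO_SID[name.lower()]

    current_owner = 'EXAMPLE\\Administrator'   # as reported for the existing directory
    win_owner = 'Administrator'                # as requested in the state

    # Comparing the display names flags a change that is not really there...
    print(current_owner.lower() == win_owner.lower())                        # False
    # ...while comparing canonical SIDs recognises the same account.
    print(get_sid_from_name(current_owner) == get_sid_from_name(win_owner))  # True
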
@@ -1394,8 +1394,12 @@ def _remotes_on(port, which_end):
     Return a set of ip addrs active tcp connections
     '''
     port = int(port)
-    ret = set()

+    ret = _netlink_tool_remote_on(port, which_end)
+    if ret is not None:
+        return ret
+
+    ret = set()
     proc_available = False
     for statf in ['/proc/net/tcp', '/proc/net/tcp6']:
         if os.path.isfile(statf):

@@ -1446,6 +1450,51 @@ def _parse_tcp_line(line):
     return ret


+def _netlink_tool_remote_on(port, which_end):
+    '''
+    Returns set of ipv4 host addresses of remote established connections
+    on local or remote tcp port.
+
+    Parses output of shell 'ss' to get connections
+
+    [root@salt-master ~]# ss -ant
+    State      Recv-Q Send-Q Local Address:Port   Peer Address:Port
+    LISTEN     0      511    *:80                 *:*
+    LISTEN     0      128    *:22                 *:*
+    ESTAB      0      0      127.0.0.1:56726      127.0.0.1:4505
+    '''
+    remotes = set()
+    valid = False
+    try:
+        data = subprocess.check_output(['ss', '-ant'])  # pylint: disable=minimum-python-version
+    except subprocess.CalledProcessError:
+        log.error('Failed ss')
+        raise
+    except OSError:     # not command "No such file or directory"
+        return None
+
+    lines = salt.utils.stringutils.to_str(data).split('\n')
+    for line in lines:
+        if 'Address:Port' in line:    # ss tools may not be valid
+            valid = True
+            continue
+        elif 'ESTAB' not in line:
+            continue
+        chunks = line.split()
+        local_host, local_port = chunks[3].split(':', 1)
+        remote_host, remote_port = chunks[4].split(':', 1)
+
+        if which_end == 'remote_port' and int(remote_port) != port:
+            continue
+        if which_end == 'local_port' and int(local_port) != port:
+            continue
+        remotes.add(remote_host)
+
+    if valid is False:
+        remotes = None
+    return remotes
+
+
 def _sunos_remotes_on(port, which_end):
     '''
     SunOS specific helper function.

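Note: to make the parsing in _netlink_tool_remote_on concrete, the following standalone sketch applies the same column logic to the sample `ss -ant` output quoted in the docstring above (the sample data comes from the diff; the function here is a simplified stand-in and not the Salt helper itself, e.g. it drops the header-validity bookkeeping):

    SAMPLE = '''State      Recv-Q Send-Q Local Address:Port   Peer Address:Port
    LISTEN     0      511    *:80                 *:*
    LISTEN     0      128    *:22                 *:*
    ESTAB      0      0      127.0.0.1:56726      127.0.0.1:4505'''

    def remotes_on(ss_output, port, which_end='remote_port'):
        remotes = set()
        for line in ss_output.splitlines():
            # Skip the header row and anything that is not an established socket.
            if 'Address:Port' in line or 'ESTAB' not in line:
                continue
            chunks = line.split()
            local_host, local_port = chunks[3].split(':', 1)
            remote_host, remote_port = chunks[4].split(':', 1)
            if which_end == 'remote_port' and int(remote_port) != port:
                continue
            if which_end == 'local_port' and int(local_port) != port:
                continue
            remotes.add(remote_host)
        return remotes

    print(remotes_on(SAMPLE, 4505))   # {'127.0.0.1'}
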
@@ -160,9 +160,10 @@ class Schedule(object):
         if remove_hidden:
             _schedule = copy.deepcopy(schedule)
             for job in _schedule:
-                for item in _schedule[job]:
-                    if item.startswith('_'):
-                        del schedule[job][item]
+                if isinstance(_schedule[job], dict):
+                    for item in _schedule[job]:
+                        if item.startswith('_'):
+                            del schedule[job][item]
         return schedule

     def _check_max_running(self, func, data, opts):

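Note: the isinstance() guard added above matters because the schedule dictionary may carry non-dict entries such as a top-level 'enabled': True flag; this is exactly the shape exercised by the new test_get_schedule integration test later in this diff. A minimal illustration of the failure the guard avoids ('_next_fire_time' is just an example of a hidden, underscore-prefixed key):

    import copy

    schedule = {
        'enabled': True,   # bool entry, not a job definition
        'job1': {'function': 'test.ping', 'seconds': 60, '_next_fire_time': None},
    }

    _schedule = copy.deepcopy(schedule)
    for job in _schedule:
        # Without the isinstance check, iterating _schedule['enabled'] raises
        # TypeError: 'bool' object is not iterable.
        if isinstance(_schedule[job], dict):
            for item in _schedule[job]:
                if item.startswith('_'):
                    # Hidden (underscore-prefixed) keys are stripped from the result.
                    del schedule[job][item]

    print(schedule)
    # {'enabled': True, 'job1': {'function': 'test.ping', 'seconds': 60}}
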
@@ -220,6 +220,7 @@ salt/utils/schedule.py:
   - integration.scheduler.test_postpone
   - integration.scheduler.test_skip
   - integration.scheduler.test_maxrunning
+  - integration.scheduler.test_helpers

 salt/utils/vt.py:
   - integration.cli.test_custom_module

@@ -183,7 +183,7 @@ class TestDaemon(object):
     '''
     Set up the master and minion daemons, and run related cases
     '''
-    MINIONS_CONNECT_TIMEOUT = MINIONS_SYNC_TIMEOUT = 300
+    MINIONS_CONNECT_TIMEOUT = MINIONS_SYNC_TIMEOUT = 500

     def __init__(self, parser):
         self.parser = parser

@@ -17,7 +17,7 @@ class BatchTest(ShellCase):
     Integration tests for the salt.cli.batch module
     '''
     if salt.utils.platform.is_windows():
-        run_timeout = 90
+        run_timeout = 180
     else:
         run_timeout = 30

@@ -123,25 +123,25 @@ class WinLgpoTest(ModuleCase):
         ret = self.run_function('lgpo.set_computer_policy',
                                 (policy_name, policy_config))
         log.debug('lgpo set_computer_policy ret == %s', ret)
+        cmd = ['lgpo.exe',
+               '/parse',
+               '/m',
+               r'c:\Windows\System32\GroupPolicy\Machine\Registry.pol']
         if assert_true:
             self.assertTrue(ret)
-            lgpo_output = self.run_function(
-                'cmd.run',
-                (),
-                cmd='lgpo.exe /parse /m c:\\Windows\\System32\\GroupPolicy\\Machine\\Registry.pol')
+            lgpo_output = self.run_function('cmd.run', (), cmd=' '.join(cmd))
             # validate that the lgpo output doesn't say the format is invalid
             self.assertIsNone(
-                re.search(
-                    r'Invalid file format\.',
-                    lgpo_output,
-                    re.IGNORECASE), 'Failed validating Registry.pol file format')
+                re.search(r'Invalid file format\.', lgpo_output, re.IGNORECASE),
+                msg='Failed validating Registry.pol file format')
             # validate that the regexes we expect are in the output
             for expected_regex in expected_regexes:
-                match = re.search(
-                    expected_regex,
-                    lgpo_output,
-                    re.IGNORECASE)
-                self.assertIsNotNone(match, 'Failed validating policy "{0}" configuration, regex "{1}" not found in lgpo output'.format(policy_name, expected_regex))
+                match = re.search(expected_regex, lgpo_output, re.IGNORECASE)
+                self.assertIsNotNone(
+                    match,
+                    msg='Failed validating policy "{0}" configuration, regex '
+                        '"{1}" not found in lgpo output:\n{2}'
+                        ''.format(policy_name, expected_regex, lgpo_output))
         else:
             # expecting it to fail
             self.assertNotEqual(ret, True)

@@ -255,6 +255,35 @@ class WinLgpoTest(ModuleCase):
             'Not Configured',
             [r'; Source file: c:\\windows\\system32\\grouppolicy\\machine\\registry.pol[\s]*; PARSING COMPLETED.'])

+    @destructiveTest
+    def test_set_computer_policy_Pol_HardenedPaths(self):
+        # Disable Pol_HardenedPaths
+        log.debug('Attempting to disable Pol_HardenedPaths')
+        self._testComputerAdmxPolicy(
+            'Pol_HardenedPaths',
+            'Disabled',
+            [r'Computer[\s]*Software\\policies\\Microsoft\\Windows\\NetworkProvider\\HardenedPaths[\s]*\*[\s]*DELETEALLVALUES'])
+        # Configure Pol_HardenedPaths
+        log.debug('Attempting to configure Pol_HardenedPaths')
+        self._testComputerAdmxPolicy(
+            'Pol_HardenedPaths',
+            {
+                'Hardened UNC Paths': {
+                    r'\\*\NETLOGON': 'RequireMutualAuthentication=1, RequireIntegrity=1',
+                    r'\\*\SYSVOL': 'RequireMutualAuthentication=1, RequireIntegrity=1'
+                }
+            },
+            [
+                r'Computer[\s]*Software\\policies\\Microsoft\\Windows\\NetworkProvider\\HardenedPaths[\s]*\\\\\*\\NETLOGON[\s]*SZ:RequireMutualAuthentication=1, RequireIntegrity=1[\s]*',
+                r'Computer[\s]*Software\\policies\\Microsoft\\Windows\\NetworkProvider\\HardenedPaths[\s]*\\\\\*\\SYSVOL[\s]*SZ:RequireMutualAuthentication=1, RequireIntegrity=1[\s]*',
+            ])
+        # Not Configure Pol_HardenedPaths
+        log.debug('Attempting to set Pol_HardenedPaths to Not Configured')
+        self._testComputerAdmxPolicy(
+            'Pol_HardenedPaths',
+            'Not Configured',
+            [r'; Source file: c:\\windows\\system32\\grouppolicy\\machine\\registry.pol[\s]*; PARSING COMPLETED.'])
+
     @destructiveTest
     def test_set_computer_policy_WindowsUpdate(self):
         '''

tests/integration/scheduler/test_helpers.py (new file, 68 lines)

@@ -0,0 +1,68 @@
+# -*- coding: utf-8 -*-
+
+# Import Python libs
+from __future__ import absolute_import
+import copy
+import logging
+import os
+
+# Import Salt Testing libs
+from tests.support.case import ModuleCase
+from tests.support.mixins import SaltReturnAssertsMixin
+
+# Import Salt Testing Libs
+from tests.support.mock import MagicMock, patch
+import tests.integration as integration
+
+# Import Salt libs
+import salt.utils.schedule
+import salt.utils.platform
+
+from salt.modules.test import ping as ping
+
+log = logging.getLogger(__name__)
+ROOT_DIR = os.path.join(integration.TMP, 'schedule-unit-tests')
+SOCK_DIR = os.path.join(ROOT_DIR, 'test-socks')
+
+DEFAULT_CONFIG = salt.config.minion_config(None)
+DEFAULT_CONFIG['conf_dir'] = ROOT_DIR
+DEFAULT_CONFIG['root_dir'] = ROOT_DIR
+DEFAULT_CONFIG['sock_dir'] = SOCK_DIR
+DEFAULT_CONFIG['pki_dir'] = os.path.join(ROOT_DIR, 'pki')
+DEFAULT_CONFIG['cachedir'] = os.path.join(ROOT_DIR, 'cache')
+
+
+class SchedulerHelpersTest(ModuleCase, SaltReturnAssertsMixin):
+    '''
+    Test scheduler helper functions
+    '''
+    def setUp(self):
+        with patch('salt.utils.schedule.clean_proc_dir', MagicMock(return_value=None)):
+            functions = {'test.ping': ping}
+            self.schedule = salt.utils.schedule.Schedule(copy.deepcopy(DEFAULT_CONFIG), functions, returners={})
+        self.schedule.opts['loop_interval'] = 1
+
+    def tearDown(self):
+        self.schedule.reset()
+
+    def test_get_schedule(self):
+        '''
+        verify that the _get_schedule function works
+        when remove_hidden is True and schedule data
+        contains enabled key
+        '''
+        job_name = 'test_get_schedule'
+        job = {
+            'schedule': {
+                'enabled': True,
+                job_name: {
+                    'function': 'test.ping',
+                    'seconds': 60
+                }
+            }
+        }
+        # Add the job to the scheduler
+        self.schedule.opts.update(job)
+
+        ret = self.schedule._get_schedule(remove_hidden=True)
+        self.assertEqual(job['schedule'], ret)

@@ -889,6 +889,31 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
         self.assertFalse(os.path.exists(straydir))
         self.assertTrue(os.path.isdir(name))

+    def test_directory_is_idempotent(self):
+        '''
+        Ensure the file.directory state produces no changes when rerun.
+        '''
+        name = os.path.join(TMP, 'a_dir_twice')
+
+        if IS_WINDOWS:
+            username = os.environ.get('USERNAME', 'Administrators')
+            domain = os.environ.get('USERDOMAIN', '')
+            fullname = '{0}\\{1}'.format(domain, username)
+
+            ret = self.run_state('file.directory', name=name, win_owner=fullname)
+        else:
+            ret = self.run_state('file.directory', name=name)
+
+        self.assertSaltTrueReturn(ret)
+
+        if IS_WINDOWS:
+            ret = self.run_state('file.directory', name=name, win_owner=username)
+        else:
+            ret = self.run_state('file.directory', name=name)
+
+        self.assertSaltTrueReturn(ret)
+        self.assertSaltStateChangesEqual(ret, {})
+
     @with_tempdir()
     def test_directory_clean_exclude(self, base_dir):
         '''

@@ -73,12 +73,29 @@ class TestGemModule(TestCase, LoaderModuleMockMixin):
                 runas=None
             )

+    def test_install_pre_rubygems_3(self):
+        mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
+        with patch.dict(gem.__salt__,
+                        {'rvm.is_installed': MagicMock(return_value=False),
+                         'rbenv.is_installed': MagicMock(return_value=False),
+                         'cmd.run_all': mock}),\
+                patch.object(
+                    gem, '_has_rubygems_3', MagicMock(return_value=True)):
+            gem.install('rails', pre_releases=True)
+            mock.assert_called_once_with(
+                ['gem', 'install', 'rails', '--no-document', '--prerelease'],
+                runas=None,
+                python_shell=False
+            )
+
     def test_install_pre(self):
         mock = MagicMock(return_value={'retcode': 0, 'stdout': ''})
         with patch.dict(gem.__salt__,
                         {'rvm.is_installed': MagicMock(return_value=False),
                          'rbenv.is_installed': MagicMock(return_value=False),
-                         'cmd.run_all': mock}):
+                         'cmd.run_all': mock}),\
+                patch.object(
+                    gem, '_has_rubygems_3', MagicMock(return_value=False)):
             gem.install('rails', pre_releases=True)
             mock.assert_called_once_with(
                 ['gem', 'install', 'rails', '--no-rdoc', '--no-ri', '--pre'],

@@ -148,6 +148,7 @@ class BadTestModuleNamesTestCase(TestCase):
             'integration.scheduler.test_postpone',
             'integration.scheduler.test_skip',
             'integration.scheduler.test_maxrunning',
+            'integration.scheduler.test_helpers',
             'integration.shell.test_spm',
             'integration.shell.test_cp',
             'integration.shell.test_syndic',

@@ -9,9 +9,11 @@

 # Import Python libs
 from __future__ import absolute_import, unicode_literals, print_function
+import os

 # Import Salt Testing libs
 from tests.support.unit import TestCase
+from tests.support.paths import BASE_FILES

 # Import salt libs
 from salt.utils.filebuffer import BufferedReader, InvalidFileMode

@@ -30,3 +32,22 @@ class TestFileBuffer(TestCase):

         with self.assertRaises(InvalidFileMode):
             BufferedReader('/tmp/foo', mode='wb')
+
+    def test_issue_51309(self):
+        '''
+        https://github.com/saltstack/salt/issues/51309
+        '''
+        file_name = os.path.join(BASE_FILES, 'grail', 'scene33')
+
+        def find_value(text):
+            stripped_text = text.strip()
+            try:
+                with BufferedReader(file_name) as breader:
+                    for chunk in breader:
+                        if stripped_text in chunk:
+                            return True
+                return False
+            except (IOError, OSError):
+                return False
+
+        self.assertTrue(find_value('We have the Holy Hand Grenade'))

@@ -72,6 +72,7 @@ integration.runners.test_salt
 integration.scheduler.test_eval
 integration.scheduler.test_postpone
 integration.scheduler.test_skip
+integration.scheduler.test_helpers
 integration.sdb.test_env
 integration.shell.test_arguments
 integration.shell.test_auth