Merge branch 'master' into mysql_win

Commit: fbadbcfe62
44 changed files with 1798 additions and 451 deletions
@@ -11,6 +11,6 @@ runTestSuite(
    nox_passthrough_opts: '--ssh-tests',
    python_version: 'py2',
    testrun_timeout: 7,
    use_spot_instances: true)
    use_spot_instances: false)

// vim: ft=groovy
@@ -350,7 +350,7 @@
# the autosign_file and the auto_accept setting.
#autoreject_file: /etc/salt/autoreject.conf

# If the autosign_grains_dir is specified, incoming keys from minons with grain
# If the autosign_grains_dir is specified, incoming keys from minions with grain
# values matching those defined in files in this directory will be accepted
# automatically. This is insecure. Minions need to be configured to send the grains.
#autosign_grains_dir: /etc/salt/autosign_grains
@@ -548,6 +548,11 @@
#  - edit.vim
#  - hyper
#
# List of grains to pass in start event when minion starts up:
#start_event_grains:
#  - machine_id
#  - uuid
#
# Top file to execute if startup_states is 'top':
#top_file: ''
@@ -302,7 +302,7 @@ syndic_user: salt
# the autosign_file and the auto_accept setting.
#autoreject_file: /etc/salt/autoreject.conf

# If the autosign_grains_dir is specified, incoming keys from minons with grain
# If the autosign_grains_dir is specified, incoming keys from minions with grain
# values matching those defined in files in this directory will be accepted
# automatically. This is insecure. Minions need to be configured to send the grains.
#autosign_grains_dir: /etc/salt/autosign_grains
@@ -255,8 +255,8 @@ on_saltstack = 'SALT_ON_SALTSTACK' in os.environ
project = 'Salt'
repo_primary_branch = 'master'  # This is the default branch on GitHub for the Salt project
version = salt.version.__version__
latest_release = '2019.2.2'  # latest release
previous_release = '2018.3.4'  # latest release from previous branch
latest_release = '2019.2.3'  # latest release
previous_release = '2018.3.5'  # latest release from previous branch
previous_release_dir = '2018.3'  # path on web server for previous branch
next_release = ''  # next release
next_release_dir = ''  # path on web server for next release branch
@@ -2081,6 +2081,21 @@ List of states to run when the minion starts up if ``startup_states`` is set to
      - edit.vim
      - hyper

.. conf_minion:: start_event_grains

``start_event_grains``
----------------------

Default: ``[]``

List of grains to pass in start event when minion starts up.

.. code-block:: yaml

    start_event_grains:
      - machine_id
      - uuid

.. conf_minion:: top_file

``top_file``
@@ -27,6 +27,24 @@ Salt engines are configured under an ``engines`` top-level section in your Salt
          port: 5959
          proto: tcp

.. versionadded:: Neon

Multiple copies of a particular Salt engine can be configured by including the ``engine_module`` parameter in the engine configuration.

.. code-block:: yaml

    engines:
      - production_logstash:
          host: production_log.my_network.com
          port: 5959
          proto: tcp
          engine_module: logstash
      - develop_logstash:
          host: develop_log.my_network.com
          port: 5959
          proto: tcp
          engine_module: logstash

Salt engines must be in the Salt path, or you can add the ``engines_dirs`` option in your Salt master configuration with a list of directories under which Salt attempts to find Salt engines. This option should be formatted as a list of directories to search, such as:

.. code-block:: yaml
@@ -369,12 +369,122 @@ Also, slot parsing is now supported inside of nested state data structures (dict
        - "DO NOT OVERRIDE"
      ignore_if_missing: True

- The :py:func:`file.symlink <salt.states.file.symlink>` state was
  fixed to remove existing file system entries other than files,
  directories and symbolic links properly.

- The ``onchanges`` and ``prereq`` :ref:`requisites <requisites>` now behave
  properly in test mode.

State Changes
=============

- Added new :py:func:`ssh_auth.manage <salt.states.ssh_auth.manage>` state to
  ensure only the specified ssh keys are present for the specified user.

- Added new :py:func:`saltutil <salt.states.saltutil>` state to use instead of
  ``module.run`` to more easily handle change.

- Added new `onfail_all` requisite form to allow for AND logic when adding
  onfail states.

Module Changes
==============

- The :py:func:`debian_ip <salt.modules.debian_ip>` module used by the
  :py:func:`network.managed <salt.states.network.managed>` state has been
  heavily refactored. The order that options appear in inet/inet6 blocks may
  produce cosmetic changes. Many options without an 'ipvX' prefix will now be
  shared between inet and inet6 blocks. The options ``enable_ipv4`` and
  ``enabled_ipv6`` will now fully remove relevant inet/inet6 blocks. Overriding
  options by prefixing them with 'ipvX' will now work with most options (i.e.
  ``dns`` can be overridden by ``ipv4dns`` or ``ipv6dns``). The ``proto`` option
  is now required.

- Added new :py:func:`boto_ssm <salt.modules.boto_ssm>` module to set and query
  secrets in AWS SSM parameters.

- Added new :py:func:`flatpak <salt.modules.flatpak>` module to work with flatpak packages.

- The :py:func:`file.set_selinux_context <salt.modules.file.set_selinux_context>`
  module now supports persistent changes with ``persist=True`` by calling the
  :py:func:`selinux.fcontext_add_policy <salt.modules.selinux.fcontext_add_policy>` module.

- The :py:func:`file.remove <salt.modules.file.remove>` module was
  fixed to remove file system entries other than files, directories
  and symbolic links properly.

- The :py:func:`yumpkg <salt.modules.yumpkg>` module has been updated to support
  VMWare's Photon OS, which uses tdnf (a C implementation of dnf).

- The :py:func:`chocolatey.bootstrap <salt.modules.chocolatey.bootstrap>` function
  has been updated to support offline installation.

- The :py:func:`chocolatey.unbootstrap <salt.modules.chocolatey.unbootstrap>` function
  has been added to uninstall Chocolatey.

Enhancements to Engines
=======================

Multiple copies of a particular Salt engine can be configured by including
the ``engine_module`` parameter in the engine configuration.

.. code-block:: yaml

    engines:
      - production_logstash:
          host: production_log.my_network.com
          port: 5959
          proto: tcp
          engine_module: logstash
      - develop_logstash:
          host: develop_log.my_network.com
          port: 5959
          proto: tcp
          engine_module: logstash

Runner Changes
==============

- The :py:func:`saltutil.sync_auth <salt.runners.saltutil.sync_auth>` function
  has been added to sync loadable auth modules. :py:func:`saltutil.sync_all <salt.runners.saltutil.sync_all>`
  will also include these modules.

Util Changes
============

- The :py:func:`win_dotnet <salt.utils.win_dotnet>` Salt util has been added to
  make it easier to detect the versions of .NET installed on the system. It includes
  the following functions:

  - :py:func:`versions <salt.utils.win_dotnet.versions>`
  - :py:func:`versions_list <salt.utils.win_dotnet.versions_list>`
  - :py:func:`versions_details <salt.utils.win_dotnet.versions_details>`
  - :py:func:`version_at_least <salt.utils.win_dotnet.version_at_least>`

Serializer Changes
==================

- The configparser serializer and deserializer functions can now be made to preserve
  case of item names by passing 'preserve_case=True' in the options parameter of the function.

  .. note::
      This is a parameter consumed only by the salt.serializer.configparser serialize and
      deserialize functions and not the low-level configparser python object.

  For example, in a file.serialize state:

  .. code-block:: yaml

      some.ini:
        - file.serialize:
            - formatter: configparser
            - merge_if_exists: True
            - deserializer_opts:
              - preserve_case: True
            - serializer_opts:
              - preserve_case: True

Enhancements to Engines
=======================
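An editorial aside on the ``preserve_case`` option under Serializer Changes above: Python's stock ``configparser`` lowercases option names through its ``optionxform`` hook, which is presumably what the serializer flag toggles internally (an assumption, not confirmed by the notes). A minimal sketch of that underlying mechanism:

.. code-block:: python

    # Sketch: why a preserve_case flag is needed at all. Stock configparser
    # lowercases option names on read; overriding optionxform with str keeps
    # the original case.
    import configparser

    ini_text = "[Section]\nMixedCaseKey = value\n"

    default = configparser.ConfigParser()
    default.read_string(ini_text)
    print(list(default['Section']))      # ['mixedcasekey'] - case lost

    preserving = configparser.ConfigParser()
    preserving.optionxform = str         # identity transform keeps case
    preserving.read_string(ini_text)
    print(list(preserving['Section']))   # ['MixedCaseKey'] - case kept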
@@ -1,3 +1,4 @@
mock >= 3.0.0
# PyTest
pytest >=4.6.6,<4.7  # PyTest 4.6.x are the last Py2 and Py3 releases
pytest-salt >= 2019.12.27
@@ -1249,6 +1249,7 @@ DEFAULT_MINION_OPTS = immutabletypes.freeze({
    'state_top_saltenv': None,
    'startup_states': '',
    'sls_list': [],
    'start_event_grains': [],
    'top_file': '',
    'thoriumenv': None,
    'thorium_top': 'top.sls',
@@ -46,10 +46,22 @@ def start_engines(opts, proc_mgr, proxy=None):
            engine, engine_opts = next(iter(engine.items()))
        else:
            engine_opts = None
        fun = '{0}.start'.format(engine)
        engine_name = None
        if engine_opts is not None and 'engine_module' in engine_opts:
            fun = '{0}.start'.format(engine_opts['engine_module'])
            engine_name = engine
            del engine_opts['engine_module']
        else:
            fun = '{0}.start'.format(engine)
        if fun in engines:
            start_func = engines[fun]
            name = '{0}.Engine({1})'.format(__name__, start_func.__module__)
            if engine_name:
                name = '{0}.Engine({1}-{2})'.format(__name__,
                                                    start_func.__module__,
                                                    engine_name)
            else:
                name = '{0}.Engine({1})'.format(__name__,
                                                start_func.__module__)
            log.info('Starting Engine %s', name)
            proc_mgr.add_process(
                Engine,
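The branch added above resolves which loader function to start from ``engine_module`` when it is present, so several configured engine entries can share one implementation. A standalone sketch of that resolution rule (a hypothetical helper, not the actual loader code):

.. code-block:: python

    # Sketch of the engine_module lookup: the configured key becomes a label,
    # while engine_module picks the implementation whose start() runs.
    def resolve_engine(engine, engine_opts):
        '''Return (loader_function_name, instance_label) for one engine entry.'''
        if engine_opts is not None and 'engine_module' in engine_opts:
            # Named copy: start <engine_module>.start, label it with the key.
            fun = '{0}.start'.format(engine_opts.pop('engine_module'))
            return fun, engine
        # Plain entry: the key itself names the module to start.
        return '{0}.start'.format(engine), None

    print(resolve_engine('production_logstash', {'engine_module': 'logstash'}))
    # ('logstash.start', 'production_logstash')
    print(resolve_engine('logstash', {'host': 'log.example.com'}))
    # ('logstash.start', None)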
@@ -688,6 +688,17 @@ def grain_funcs(opts, proxy=None):
    return ret


def _format_cached_grains(cached_grains):
    """
    Returns cached grains with fixed types, like tuples.
    """
    if cached_grains.get('osrelease_info'):
        osrelease_info = cached_grains['osrelease_info']
        if isinstance(osrelease_info, list):
            cached_grains['osrelease_info'] = tuple(osrelease_info)
    return cached_grains


def _load_cached_grains(opts, cfn):
    '''
    Returns the grains cached in cfn, or None if the cache is too old or is

@@ -720,7 +731,7 @@ def _load_cached_grains(opts, cfn):
            log.debug('Cached grains are empty, cache might be corrupted. Refreshing.')
            return None

        return cached_grains
        return _format_cached_grains(cached_grains)
    except (IOError, OSError):
        return None
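The helper added above exists because cached grains come back from deserialization with tuples turned into lists, so code comparing ``osrelease_info`` against a tuple would silently mismatch. A quick illustration:

.. code-block:: python

    # Why _format_cached_grains matters: a cache round-trip turns the
    # osrelease_info tuple into a list, and list != tuple in comparisons.
    cached = {'osrelease_info': [18, 4]}        # as deserialized from cache
    print(cached['osrelease_info'] == (18, 4))  # False - type mismatch

    if isinstance(cached.get('osrelease_info'), list):
        cached['osrelease_info'] = tuple(cached['osrelease_info'])
    print(cached['osrelease_info'] == (18, 4))  # True after normalizing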
@@ -484,6 +484,9 @@ def get_multiprocessing_logging_queue():
    global __MP_LOGGING_QUEUE
    from salt.utils.platform import is_darwin

    if __MP_LOGGING_QUEUE is not None:
        return __MP_LOGGING_QUEUE

    if __MP_IN_MAINPROCESS is False:
        # We're not in the MainProcess, return! No Queue shall be instantiated
        return __MP_LOGGING_QUEUE
@@ -1450,6 +1450,11 @@ class Minion(MinionBase):
            else:
                return

        if self.opts['start_event_grains']:
            grains_to_add = dict(
                [(k, v) for k, v in six.iteritems(self.opts.get('grains', {})) if k in self.opts['start_event_grains']])
            load['grains'] = grains_to_add

        if sync:
            try:
                self._send_req_sync(load, timeout)
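The block above filters the minion's grains down to the configured ``start_event_grains`` before attaching them to the start event payload. The same selection in a self-contained sketch (sample grain data is made up):

.. code-block:: python

    # Sketch of the start_event_grains filter: keep only the configured grain
    # keys when building the start event load.
    opts = {
        'start_event_grains': ['machine_id', 'uuid'],
        'grains': {'machine_id': 'abc123', 'uuid': 'de-ad-be-ef', 'os': 'Ubuntu'},
    }
    load = {'cmd': '_minion_event'}

    if opts['start_event_grains']:
        load['grains'] = {k: v for k, v in opts.get('grains', {}).items()
                          if k in opts['start_event_grains']}

    print(load['grains'])   # {'machine_id': 'abc123', 'uuid': 'de-ad-be-ef'}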
@@ -76,6 +76,7 @@ except ImportError:
# pylint: enable=import-error

APT_LISTS_PATH = "/var/lib/apt/lists"
PKG_ARCH_SEPARATOR = ':'

# Source format for urllib fallback on PPA handling
LP_SRC_FORMAT = 'deb http://ppa.launchpad.net/{0}/{1}/ubuntu {2} main'

@@ -188,6 +189,43 @@ def _warn_software_properties(repo):
    log.warning('Best guess at ppa format: %s', repo)


def normalize_name(name):
    '''
    Strips the architecture from the specified package name, if necessary.

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.normalize_name zsh:amd64
    '''
    try:
        name, arch = name.rsplit(PKG_ARCH_SEPARATOR, 1)
    except ValueError:
        return name
    return name


def parse_arch(name):
    '''
    Parse name and architecture from the specified package name.

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.parse_arch zsh:amd64
    '''
    try:
        _name, _arch = name.rsplit(PKG_ARCH_SEPARATOR, 1)
    except ValueError:
        _name, _arch = name, None
    return {
        'name': _name,
        'arch': _arch
    }


def latest_version(*names, **kwargs):
    '''
    Return the latest version of the named package available for upgrade or

@@ -2294,6 +2332,8 @@ def mod_repo(repo, saltenv='base', **kwargs):

    if 'disabled' in kwargs:
        kwargs['disabled'] = salt.utils.data.is_true(kwargs['disabled'])
    elif 'enabled' in kwargs:
        kwargs['disabled'] = not salt.utils.data.is_true(kwargs['enabled'])

    kw_type = kwargs.get('type')
    kw_dist = kwargs.get('dist')
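Both helpers above split on the ``:`` separator from the right, so only a trailing ``:arch`` suffix is treated as an architecture. A small usage sketch of that splitting rule in isolation (mirrors the code above, run outside Salt):

.. code-block:: python

    # Sketch of the rsplit rule used by normalize_name/parse_arch above.
    PKG_ARCH_SEPARATOR = ':'

    def parse_arch(name):
        try:
            _name, _arch = name.rsplit(PKG_ARCH_SEPARATOR, 1)
        except ValueError:
            _name, _arch = name, None
        return {'name': _name, 'arch': _arch}

    print(parse_arch('zsh:amd64'))   # {'name': 'zsh', 'arch': 'amd64'}
    print(parse_arch('zsh'))         # {'name': 'zsh', 'arch': None}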
@@ -3775,6 +3775,10 @@ def remove(path):
    .. code-block:: bash

        salt '*' file.remove /tmp/foo

    .. versionchanged:: Neon
        The method now works on all types of file system entries, not just
        files, directories and symlinks.
    '''
    path = os.path.expanduser(path)

@@ -3782,7 +3786,7 @@ def remove(path):
        raise SaltInvocationError('File path must be absolute: {0}'.format(path))

    try:
        if os.path.isfile(path) or os.path.islink(path):
        if os.path.islink(path) or (os.path.exists(path) and not os.path.isdir(path)):
            os.remove(path)
            return True
        elif os.path.isdir(path):
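The rewritten condition matters for entries such as sockets and FIFOs: ``os.path.isfile()`` is ``False`` for them while ``os.path.exists()`` is ``True``, so the old check fell through without removing anything. A small POSIX-only demonstration:

.. code-block:: python

    # Demonstrates why the new check catches FIFOs that isfile() misses.
    import os
    import tempfile

    fifo = os.path.join(tempfile.mkdtemp(), 'test_fifo')
    os.mkfifo(fifo)                      # POSIX-only: create a named pipe

    print(os.path.isfile(fifo))         # False - old check would skip it
    print(os.path.exists(fifo) and not os.path.isdir(fifo))  # True - new check
    os.remove(fifo)                      # os.remove() handles it fine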
@@ -311,21 +311,32 @@ def format_pkg_list(packages, versions_as_list, attr):
    '''
    ret = copy.deepcopy(packages)
    if attr:
        ret_attr = {}
        requested_attr = {'epoch', 'version', 'release', 'arch', 'install_date', 'install_date_time_t'}

        if attr != 'all':
            requested_attr &= set(attr + ['version'])
            requested_attr &= set(attr + ['version'] + ['arch'])

        for name in ret:
            if 'pkg.parse_arch' in __salt__:
                _parse_arch = __salt__['pkg.parse_arch'](name)
            else:
                _parse_arch = {'name': name, 'arch': None}
            _name = _parse_arch['name']
            _arch = _parse_arch['arch']

            versions = []
            pkgname = None
            for all_attr in ret[name]:
                filtered_attr = {}
                for key in requested_attr:
                    if all_attr[key]:
                    if key in all_attr:
                        filtered_attr[key] = all_attr[key]
                versions.append(filtered_attr)
            ret[name] = versions
        return ret
                if _name and filtered_attr.get('arch', None) == _arch:
                    pkgname = _name
            ret_attr.setdefault(pkgname or name, []).extend(versions)
        return ret_attr

    for name in ret:
        ret[name] = [format_version(d['epoch'], d['version'], d['release'])
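The reworked loop above re-keys arch-qualified package names onto the bare name when the platform module provides ``pkg.parse_arch``. A simplified, runnable sketch of that grouping (the stand-in ``parse_arch`` and the data shapes are illustrative, not real ``pkg.list_pkgs`` output):

.. code-block:: python

    # Sketch of the arch-aware re-keying: 'zsh.x86_64' collapses onto 'zsh'
    # once arch is a tracked attribute and parse_arch recognizes the suffix.
    def parse_arch(name, arches=('x86_64', 'noarch')):
        # Stand-in for __salt__['pkg.parse_arch'] ('.' separator assumed).
        _name, _, _arch = name.rpartition('.')
        if _arch in arches:
            return {'name': _name, 'arch': _arch}
        return {'name': name, 'arch': None}

    packages = {'zsh.x86_64': [{'version': '5.7.1', 'arch': 'x86_64'}]}
    ret_attr = {}
    for name, versions in packages.items():
        parsed = parse_arch(name)
        pkgname = None
        for attrs in versions:
            if parsed['name'] and attrs.get('arch') == parsed['arch']:
                pkgname = parsed['name']
        ret_attr.setdefault(pkgname or name, []).extend(versions)

    print(ret_attr)   # {'zsh': [{'version': '5.7.1', 'arch': 'x86_64'}]}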
@@ -3151,6 +3151,7 @@ def datadir_init(name,
                           password=password,
                           encoding=encoding,
                           locale=locale,
                           checksums=checksums,
                           runas=runas)
    return ret['retcode'] == 0
@@ -478,7 +478,7 @@ def _get_reg_software(include_components=True,
    # https://github.com/aws/amazon-ssm-agent/blob/master/agent/plugins/inventory/gatherers/application/dataProvider_windows.go
    reg_software = {}

    def skip_component(hive, key, sub_key, use_32bit):
    def skip_component(hive, key, sub_key, use_32bit_registry):
        '''
        'SystemComponent' must be either absent or present with a value of 0,
        because this value is usually set on programs that have been installed

@@ -493,16 +493,16 @@ def _get_reg_software(include_components=True,
                hive=hive,
                key='{0}\\{1}'.format(key, sub_key),
                vname='SystemComponent',
                use_32bit_registry=use_32bit):
                use_32bit_registry=use_32bit_registry):
            if __utils__['reg.read_value'](
                    hive=hive,
                    key='{0}\\{1}'.format(key, sub_key),
                    vname='SystemComponent',
                    use_32bit_registry=use_32bit)['vdata'] > 0:
                    use_32bit_registry=use_32bit_registry)['vdata'] > 0:
                return True
        return False

    def skip_win_installer(hive, key, sub_key, use_32bit):
    def skip_win_installer(hive, key, sub_key, use_32bit_registry):
        '''
        'WindowsInstaller' must be either absent or present with a value of 0.
        If the value is set to 1, then the application is included in the list

@@ -517,21 +517,21 @@ def _get_reg_software(include_components=True,
                hive=hive,
                key='{0}\\{1}'.format(key, sub_key),
                vname='WindowsInstaller',
                use_32bit_registry=use_32bit):
                use_32bit_registry=use_32bit_registry):
            if __utils__['reg.read_value'](
                    hive=hive,
                    key='{0}\\{1}'.format(key, sub_key),
                    vname='WindowsInstaller',
                    use_32bit_registry=use_32bit)['vdata'] > 0:
                    use_32bit_registry=use_32bit_registry)['vdata'] > 0:
                squid = salt.utils.win_functions.guid_to_squid(sub_key)
                if not __utils__['reg.key_exists'](
                        hive='HKLM',
                        key=products_key.format(squid),
                        use_32bit_registry=use_32bit):
                        use_32bit_registry=use_32bit_registry):
                    return True
        return False

    def skip_uninstall_string(hive, key, sub_key, use_32bit):
    def skip_uninstall_string(hive, key, sub_key, use_32bit_registry):
        '''
        'UninstallString' must be present, because it stores the command line
        that gets executed by Add/Remove programs, when the user tries to

@@ -544,11 +544,11 @@ def _get_reg_software(include_components=True,
                hive=hive,
                key='{0}\\{1}'.format(key, sub_key),
                vname='UninstallString',
                use_32bit_registry=use_32bit):
                use_32bit_registry=use_32bit_registry):
            return True
        return False

    def skip_release_type(hive, key, sub_key, use_32bit):
    def skip_release_type(hive, key, sub_key, use_32bit_registry):
        '''
        'ReleaseType' must either be absent or if present must not have a
        value set to 'Security Update', 'Update Rollup', or 'Hotfix', because

@@ -566,16 +566,16 @@ def _get_reg_software(include_components=True,
                hive=hive,
                key='{0}\\{1}'.format(key, sub_key),
                vname='ReleaseType',
                use_32bit_registry=use_32bit):
                use_32bit_registry=use_32bit_registry):
            if __utils__['reg.read_value'](
                    hive=hive,
                    key='{0}\\{1}'.format(key, sub_key),
                    vname='ReleaseType',
                    use_32bit_registry=use_32bit)['vdata'] in skip_types:
                    use_32bit_registry=use_32bit_registry)['vdata'] in skip_types:
                return True
        return False

    def skip_parent_key(hive, key, sub_key, use_32bit):
    def skip_parent_key(hive, key, sub_key, use_32bit_registry):
        '''
        'ParentKeyName' must NOT be present, because that indicates it's an
        update to the parent program.

@@ -587,12 +587,12 @@ def _get_reg_software(include_components=True,
                hive=hive,
                key='{0}\\{1}'.format(key, sub_key),
                vname='ParentKeyName',
                use_32bit_registry=use_32bit):
                use_32bit_registry=use_32bit_registry):
            return True

        return False

    def add_software(hive, key, sub_key, use_32bit):
    def add_software(hive, key, sub_key, use_32bit_registry):
        '''
        'DisplayName' must be present with a valid value, as this is reflected
        as the software name returned by pkg.list_pkgs. Also, its value must

@@ -603,7 +603,7 @@ def _get_reg_software(include_components=True,
            hive=hive,
            key='{0}\\{1}'.format(key, sub_key),
            vname='DisplayName',
            use_32bit_registry=use_32bit)
            use_32bit_registry=use_32bit_registry)

        if (not d_name_regdata['success'] or
                d_name_regdata['vtype'] not in ['REG_SZ', 'REG_EXPAND_SZ'] or

@@ -619,7 +619,7 @@ def _get_reg_software(include_components=True,
            hive=hive,
            key='{0}\\{1}'.format(key, sub_key),
            vname='DisplayVersion',
            use_32bit_registry=use_32bit)
            use_32bit_registry=use_32bit_registry)

        d_vers = 'Not Found'
        if (d_vers_regdata['success'] and

@@ -635,9 +635,8 @@ def _get_reg_software(include_components=True,
    # HKLM Uninstall 64 bit
    kwargs = {'hive': 'HKLM',
              'key': 'Software\\Microsoft\\Windows\\CurrentVersion\\Uninstall',
              'use_32bit': False}
    for sub_key in __utils__['reg.list_keys'](hive=kwargs['hive'],
                                              key=kwargs['key']):
              'use_32bit_registry': False}
    for sub_key in __utils__['reg.list_keys'](**kwargs):
        kwargs['sub_key'] = sub_key
        if skip_component(**kwargs):
            continue

@@ -652,10 +651,9 @@ def _get_reg_software(include_components=True,
        add_software(**kwargs)

    # HKLM Uninstall 32 bit
    kwargs['use_32bit'] = True
    for sub_key in __utils__['reg.list_keys'](hive=kwargs['hive'],
                                              key=kwargs['key'],
                                              use_32bit_registry=kwargs['use_32bit']):
    kwargs['use_32bit_registry'] = True
    kwargs.pop('sub_key', False)
    for sub_key in __utils__['reg.list_keys'](**kwargs):
        kwargs['sub_key'] = sub_key
        if skip_component(**kwargs):
            continue

@@ -672,10 +670,10 @@ def _get_reg_software(include_components=True,
    # HKLM Uninstall 64 bit
    kwargs = {'hive': 'HKLM',
              'key': 'Software\\Classes\\Installer\\Products',
              'use_32bit': False}
              'use_32bit_registry': False}
    userdata_key = 'Software\\Microsoft\\Windows\\CurrentVersion\\Installer\\' \
                   'UserData\\S-1-5-18\\Products'
    for sub_key in __utils__['reg.list_keys'](hive=kwargs['hive'], key=kwargs['key']):
    for sub_key in __utils__['reg.list_keys'](**kwargs):
        # If the key does not exist in userdata, skip it
        if not __utils__['reg.key_exists'](
                hive=kwargs['hive'],

@@ -699,72 +697,73 @@ def _get_reg_software(include_components=True,
    for user_guid in __utils__['reg.list_keys'](hive=hive_hku):
        kwargs = {'hive': hive_hku,
                  'key': uninstall_key.format(user_guid),
                  'use_32bit': False}
        for sub_key in __utils__['reg.list_keys'](hive=kwargs['hive'],
                                                  key=kwargs['key']):
            kwargs['sub_key'] = sub_key
            if skip_component(**kwargs):
                continue
            if skip_win_installer(**kwargs):
                continue
            if skip_uninstall_string(**kwargs):
                continue
            if skip_release_type(**kwargs):
                continue
            if skip_parent_key(**kwargs):
                continue
            add_software(**kwargs)

        # While we have the user guid, we're gong to check userdata in HKLM
        for sub_key in __utils__['reg.list_keys'](hive=hive_hku,
                                                  key=product_key.format(user_guid)):
            kwargs = {'hive': 'HKLM',
                      'key': user_data_key.format(user_guid, sub_key),
                      'sub_key': 'InstallProperties',
                      'use_32bit': False}
            if __utils__['reg.key_exists'](hive=kwargs['hive'],
                                           key=kwargs['key']):
                  'use_32bit_registry': False}
        if __utils__['reg.key_exists'](**kwargs):
            for sub_key in __utils__['reg.list_keys'](**kwargs):
                kwargs['sub_key'] = sub_key
                if skip_component(**kwargs):
                    continue
                if skip_win_installer(**kwargs):
                    continue
                if skip_uninstall_string(**kwargs):
                    continue
                if skip_release_type(**kwargs):
                    continue
                if skip_parent_key(**kwargs):
                    continue
                add_software(**kwargs)

        # While we have the user guid, we're gong to check userdata in HKLM
        kwargs = {'hive': hive_hku,
                  'key': product_key.format(user_guid),
                  'use_32bit_registry': False}
        if __utils__['reg.key_exists'](**kwargs):
            for sub_key in __utils__['reg.list_keys'](**kwargs):
                kwargs = {'hive': 'HKLM',
                          'key': user_data_key.format(user_guid, sub_key),
                          'use_32bit_registry': False}
                if __utils__['reg.key_exists'](**kwargs):
                    kwargs['sub_key'] = 'InstallProperties'
                    if skip_component(**kwargs):
                        continue
                    add_software(**kwargs)

    # Uninstall for each user on the system (HKU), 32 bit
    for user_guid in __utils__['reg.list_keys'](hive=hive_hku,
                                                use_32bit_registry=True):
        kwargs = {'hive': hive_hku,
                  'key': uninstall_key.format(user_guid),
                  'use_32bit': True}
        for sub_key in __utils__['reg.list_keys'](hive=kwargs['hive'],
                                                  key=kwargs['key'],
                                                  use_32bit_registry=kwargs['use_32bit']):
            kwargs['sub_key'] = sub_key
            if skip_component(**kwargs):
                continue
            if skip_win_installer(**kwargs):
                continue
            if skip_uninstall_string(**kwargs):
                continue
            if skip_release_type(**kwargs):
                continue
            if skip_parent_key(**kwargs):
                continue
            add_software(**kwargs)

        # While we have the user guid, we're gong to check userdata in HKLM
        for sub_key_2 in __utils__['reg.list_keys'](hive=hive_hku,
                                                    key=product_key.format(user_guid),
                                                    use_32bit_registry=True):
            kwargs = {'hive': 'HKLM',
                      'key': user_data_key.format(user_guid, sub_key_2),
                      'sub_key': 'InstallProperties',
                      'use_32bit': True}
            if __utils__['reg.key_exists'](hive=kwargs['hive'],
                                           key=kwargs['key'],
                                           use_32bit_registry=kwargs['use_32bit']):
                      'use_32bit_registry': True}
        if __utils__['reg.key_exists'](**kwargs):
            for sub_key in __utils__['reg.list_keys'](**kwargs):
                kwargs['sub_key'] = sub_key
                if skip_component(**kwargs):
                    continue
                if skip_win_installer(**kwargs):
                    continue
                if skip_uninstall_string(**kwargs):
                    continue
                if skip_release_type(**kwargs):
                    continue
                if skip_parent_key(**kwargs):
                    continue
                add_software(**kwargs)

        kwargs = {'hive': hive_hku,
                  'key': product_key.format(user_guid),
                  'use_32bit_registry': True}
        if __utils__['reg.key_exists'](**kwargs):
            # While we have the user guid, we're going to check userdata in HKLM
            for sub_key_2 in __utils__['reg.list_keys'](**kwargs):
                kwargs = {'hive': 'HKLM',
                          'key': user_data_key.format(user_guid, sub_key_2),
                          'use_32bit_registry': True}
                if __utils__['reg.key_exists'](**kwargs):
                    kwargs['sub_key'] = 'InstallProperties'
                    if skip_component(**kwargs):
                        continue
                    add_software(**kwargs)

    return reg_software
@@ -126,7 +126,7 @@ def _fix_ctx(m2_ctx, issuer=None):

def _new_extension(name, value, critical=0, issuer=None, _pyfree=1):
    '''
    Create new X509_Extension, This is required because M2Crypto
    Create new X509_Extension, this is required because M2Crypto
    doesn't support getting the publickeyidentifier from the issuer
    to create the authoritykeyidentifier extension.
    '''

@@ -595,7 +595,7 @@ def read_certificate(certificate):

def read_certificates(glob_path):
    '''
    Returns a dict containing details of a all certificates matching a glob
    Returns a dict containing details of all certificates matching a glob

    glob_path:
        A path to certificates to be read and returned.

@@ -658,8 +658,8 @@ def read_crl(crl):

    :depends: - OpenSSL command line tool

    csl:
        A path or PEM encoded string containing the CSL to read.
    crl:
        A path or PEM encoded string containing the CRL to read.

    CLI Example:

@@ -754,17 +754,17 @@ def write_pem(text, path, overwrite=True, pem_type=None):
        PEM string input to be written out.

    path:
        Path of the file to write the pem out to.
        Path of the file to write the PEM out to.

    overwrite:
        If True(default), write_pem will overwrite the entire pem file.
        If ``True`` (default), write_pem will overwrite the entire PEM file.
        Set False to preserve existing private keys and dh params that may
        exist in the pem file.
        exist in the PEM file.

    pem_type:
        The PEM type to be saved, for example ``CERTIFICATE`` or
        ``PUBLIC KEY``. Adding this will allow the function to take
        input that may contain multiple pem types.
        input that may contain multiple PEM types.

    CLI Example:

@@ -818,10 +818,10 @@ def create_private_key(path=None,
        Length of the private key in bits. Default 2048

    passphrase:
        Passphrase for encryting the private key
        Passphrase for encrypting the private key

    cipher:
        Cipher for encrypting the private key. Has no effect if passhprase is None.
        Cipher for encrypting the private key. Has no effect if passphrase is None.

    verbose:
        Provide visual feedback on stdout. Default True

@@ -878,7 +878,7 @@ def create_crl( # pylint: disable=too-many-arguments,too-many-locals
    :depends: - PyOpenSSL Python module

    path:
        Path to write the crl to.
        Path to write the CRL to.

    text:
        If ``True``, return the PEM text without writing to a file.

@@ -886,14 +886,14 @@ def create_crl( # pylint: disable=too-many-arguments,too-many-locals

    signing_private_key:
        A path or string of the private key in PEM format that will be used
        to sign this crl. This is required.
        to sign the CRL. This is required.

    signing_private_key_passphrase:
        Passphrase to decrypt the private key.

    signing_cert:
        A certificate matching the private key that will be used to sign
        this crl. This is required.
        the CRL. This is required.

    revoked:
        A list of dicts containing all the certificates to revoke. Each dict

@@ -1127,9 +1127,9 @@ def create_certificate(
        Default ``False``.

    overwrite:
        If True(default), create_certificate will overwrite the entire pem
        If ``True`` (default), create_certificate will overwrite the entire PEM
        file. Set False to preserve existing private keys and dh params that
        may exist in the pem file.
        may exist in the PEM file.

    kwargs:
        Any of the properties below can be included as additional

@@ -1139,7 +1139,7 @@ def create_certificate(
        Request a remotely signed certificate from ca_server. For this to
        work, a ``signing_policy`` must be specified, and that same policy
        must be configured on the ca_server. See ``signing_policy`` for
        details. Also the salt master must permit peers to call the
        details. Also, the salt master must permit peers to call the
        ``sign_remote_certificate`` function.

    Example:

@@ -1200,7 +1200,7 @@ def create_certificate(

    public_key:
        The public key to be included in this certificate. This can be sourced
        from a public key, certificate, csr or private key. If a private key
        from a public key, certificate, CSR or private key. If a private key
        is used, the matching public key from the private key will be
        generated before any processing is done. This means you can request a
        certificate from a remote CA using a private key file as your

@@ -1264,7 +1264,7 @@ def create_certificate(
        X509v3 Subject Alternative Name

    crlDistributionPoints:
        X509v3 CRL distribution points
        X509v3 CRL Distribution Points

    issuingDistributionPoint:
        X509v3 Issuing Distribution Point

@@ -1324,7 +1324,7 @@ def create_certificate(
    signing_policy:
        A signing policy that should be used to create this certificate.
        Signing policies should be defined in the minion configuration, or in
        a minion pillar. It should be a yaml formatted list of arguments
        a minion pillar. It should be a YAML formatted list of arguments
        which will override any arguments passed to this function. If the
        ``minions`` key is included in the signing policy, only minions
        matching that pattern (see match.glob and match.compound) will be

@@ -1385,11 +1385,11 @@ def create_certificate(
            passphrase=kwargs['public_key_passphrase'])).replace('\n', '')

    # Remove system entries in kwargs
    # Including listen_in and preqreuired because they are not included
    # Including listen_in and prerequired because they are not included
    # in STATE_INTERNAL_KEYWORDS
    # for salt 2014.7.2
    for ignore in list(_STATE_INTERNAL_KEYWORDS) + \
            ['listen_in', 'preqrequired', '__prerequired__']:
            ['listen_in', 'prerequired', '__prerequired__']:
        kwargs.pop(ignore, None)
    # TODO: Make timeout configurable in Neon
    certs = __salt__['publish.publish'](

@@ -1712,7 +1712,7 @@ def verify_private_key(private_key, public_key, passphrase=None):

    public_key:
        The public key to verify, can be a string or path to a PEM formatted
        certificate, csr, or another private key.
        certificate, CSR, or another private key.

    passphrase:
        Passphrase to decrypt the private key.

@@ -1739,7 +1739,7 @@ def verify_signature(certificate, signing_pub_key=None,

    signing_pub_key:
        The public key to verify, can be a string or path to a PEM formatted
        certificate, csr, or private key.
        certificate, CSR, or private key.

    signing_pub_key_passphrase:
        Passphrase to the signing_pub_key if it is an encrypted private key.
@@ -66,6 +66,8 @@ log = logging.getLogger(__name__)

__HOLD_PATTERN = r'[\w+]+(?:[.-][^-]+)*'

PKG_ARCH_SEPARATOR = '.'

# Define the module's virtual name
__virtualname__ = 'pkg'

@@ -429,7 +431,7 @@ def normalize_name(name):
        salt '*' pkg.normalize_name zsh.x86_64
    '''
    try:
        arch = name.rsplit('.', 1)[-1]
        arch = name.rsplit(PKG_ARCH_SEPARATOR, 1)[-1]
        if arch not in salt.utils.pkg.rpm.ARCHES + ('noarch',):
            return name
    except ValueError:

@@ -440,6 +442,30 @@ def normalize_name(name):
    return name


def parse_arch(name):
    '''
    Parse name and architecture from the specified package name.

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.parse_arch zsh.x86_64
    '''
    _name, _arch = None, None
    try:
        _name, _arch = name.rsplit(PKG_ARCH_SEPARATOR, 1)
    except ValueError:
        pass
    if _arch not in salt.utils.pkg.rpm.ARCHES + ('noarch',):
        _name = name
        _arch = None
    return {
        'name': _name,
        'arch': _arch
    }


def latest_version(*names, **kwargs):
    '''
    Return the latest version of the named package available for upgrade or

@@ -676,8 +702,8 @@ def list_pkgs(versions_as_list=False, **kwargs):
        if pkginfo is not None:
            # see rpm version string rules available at https://goo.gl/UGKPNd
            pkgver = pkginfo.version
            epoch = ''
            release = ''
            epoch = None
            release = None
            if ':' in pkgver:
                epoch, pkgver = pkgver.split(":", 1)
            if '-' in pkgver:

@@ -2733,7 +2759,12 @@ def del_repo(repo, basedir=None, **kwargs):  # pylint: disable=W0613
                del filerepos[stanza]['comments']
            content += '\n[{0}]'.format(stanza)
            for line in filerepos[stanza]:
                content += '\n{0}={1}'.format(line, filerepos[stanza][line])
                # A whitespace is needed at the begining of the new line in order
                # to avoid breaking multiple line values allowed on repo files.
                value = filerepos[stanza][line]
                if isinstance(value, six.string_types) and '\n' in value:
                    value = '\n '.join(value.split('\n'))
                content += '\n{0}={1}'.format(line, value)
            content += '\n{0}\n'.format(comments)

        with salt.utils.files.fopen(repofile, 'w') as fileout:

@@ -2868,11 +2899,14 @@ def mod_repo(repo, basedir=None, **kwargs):
            )
        content += '[{0}]\n'.format(stanza)
        for line in six.iterkeys(filerepos[stanza]):
            # A whitespace is needed at the begining of the new line in order
            # to avoid breaking multiple line values allowed on repo files.
            value = filerepos[stanza][line]
            if isinstance(value, six.string_types) and '\n' in value:
                value = '\n '.join(value.split('\n'))
            content += '{0}={1}\n'.format(
                line,
                filerepos[stanza][line]
                if not isinstance(filerepos[stanza][line], bool)
                else _bool_to_str(filerepos[stanza][line])
                value if not isinstance(value, bool) else _bool_to_str(value)
            )
        content += comments + '\n'

@@ -3180,12 +3214,18 @@ def _get_patches(installed_only=False):
    for line in salt.utils.itertools.split(ret, os.linesep):
        inst, advisory_id, sev, pkg = re.match(r'([i|\s]) ([^\s]+) +([^\s]+) +([^\s]+)',
                                               line).groups()
        if inst != 'i' and installed_only:
            continue
        patches[advisory_id] = {
            'installed': True if inst == 'i' else False,
            'summary': pkg
        }
        if advisory_id not in patches:
            patches[advisory_id] = {
                'installed': True if inst == 'i' else False,
                'summary': [pkg]
            }
        else:
            patches[advisory_id]['summary'].append(pkg)
            if inst != 'i':
                patches[advisory_id]['installed'] = False

    if installed_only:
        patches = {k: v for k, v in patches.items() if v['installed']}
    return patches
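The ``_get_patches`` rework above accumulates every package listed under one advisory instead of overwriting the entry on each row. A runnable sketch of that aggregation over fabricated ``yum updateinfo``-style rows (advisory and package names are made up):

.. code-block:: python

    import re

    # Sketch of the advisory aggregation above, over made-up updateinfo rows.
    ret = """\
    i SLSA-2019-1 important kernel-4.18.0-80.el8.x86_64
      SLSA-2019-1 important kernel-headers-4.18.0-80.el8.x86_64"""

    patches = {}
    for line in ret.splitlines():
        inst, advisory_id, sev, pkg = re.match(
            r'\s*([i|\s]) ([^\s]+) +([^\s]+) +([^\s]+)', line).groups()
        if advisory_id not in patches:
            patches[advisory_id] = {'installed': inst == 'i', 'summary': [pkg]}
        else:
            patches[advisory_id]['summary'].append(pkg)
            if inst != 'i':
                # One uninstalled member marks the whole advisory uninstalled.
                patches[advisory_id]['installed'] = False

    print(patches)
    # {'SLSA-2019-1': {'installed': False, 'summary': [<both kernel pkgs>]}}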
@@ -52,6 +52,7 @@ ZYPP_HOME = '/etc/zypp'
LOCKS = '{0}/locks'.format(ZYPP_HOME)
REPOS = '{0}/repos.d'.format(ZYPP_HOME)
DEFAULT_PRIORITY = 99
PKG_ARCH_SEPARATOR = '.'

# Define the module's virtual name
__virtualname__ = 'pkg'

@@ -591,6 +592,30 @@ def info_available(*names, **kwargs):
    return ret


def parse_arch(name):
    '''
    Parse name and architecture from the specified package name.

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.parse_arch zsh.x86_64
    '''
    _name, _arch = None, None
    try:
        _name, _arch = name.rsplit(PKG_ARCH_SEPARATOR, 1)
    except ValueError:
        pass
    if _arch not in salt.utils.pkg.rpm.ARCHES + ('noarch',):
        _name = name
        _arch = None
    return {
        'name': _name,
        'arch': _arch
    }


def latest_version(*names, **kwargs):
    '''
    Return the latest version of the named package available for upgrade or

@@ -761,8 +786,8 @@ def list_pkgs(versions_as_list=False, **kwargs):
        if pkginfo:
            # see rpm version string rules available at https://goo.gl/UGKPNd
            pkgver = pkginfo.version
            epoch = ''
            release = ''
            epoch = None
            release = None
            if ':' in pkgver:
                epoch, pkgver = pkgver.split(":", 1)
            if '-' in pkgver:
@@ -1609,10 +1609,10 @@ def symlink(
    Create a symbolic link (symlink, soft link)

    If the file already exists and is a symlink pointing to any location other
    than the specified target, the symlink will be replaced. If the symlink is
    a regular file or directory then the state will return False. If the
    regular file or directory is desired to be replaced with a symlink pass
    force: True, if it is to be renamed, pass a backupname.
    than the specified target, the symlink will be replaced. If an entry with
    the same name exists then the state will return False. If the existing
    entry is desired to be replaced with a symlink pass force: True, if it is
    to be renamed, pass a backupname.

    name
        The location of the symlink to create

@@ -1623,10 +1623,14 @@ def symlink(
    force
        If the name of the symlink exists and is not a symlink and
        force is set to False, the state will fail. If force is set to
        True, the file or directory in the way of the symlink file
        True, the existing entry in the way of the symlink file
        will be deleted to make room for the symlink, unless
        backupname is set, when it will be renamed

        .. versionchanged:: Neon
            Force will now remove all types of existing file system entries,
            not just files, directories and symlinks.

    backupname
        If the name of the symlink exists and is not a symlink, it will be
        renamed to the backupname. If the backupname already

@@ -1845,8 +1849,8 @@ def symlink(
                          '{1}:{2}'.format(name, user, group))
            return ret

    elif os.path.isfile(name) or os.path.isdir(name):
        # It is not a link, but a file or dir
    elif os.path.exists(name):
        # It is not a link, but a file, dir, socket, FIFO etc.
        if backupname is not None:
            if not os.path.isabs(backupname):
                if backupname == os.path.basename(backupname):

@@ -1883,14 +1887,12 @@ def symlink(
                __salt__['file.remove'](name)
        else:
            # Otherwise throw an error
            if os.path.isfile(name):
                return _error(ret,
                              ('File exists where the symlink {0} should be'
                               .format(name)))
            else:
                return _error(ret, ((
                    'Directory exists where the symlink {0} should be'
                ).format(name)))
            fs_entry_type = 'File' if os.path.isfile(name) else \
                'Directory' if os.path.isdir(name) else \
                'File system entry'
            return _error(ret,
                          ('{0} exists where the symlink {1} should be'
                           .format(fs_entry_type, name)))

    if not os.path.exists(name):
        # The link is not present, make it

@@ -5186,15 +5188,9 @@ def comment(name, regex, char='#', backup='.bak'):

    comment_regex = char + unanchor_regex

    # Check if the line is already commented
    if __salt__['file.search'](name, comment_regex, multiline=True):
        commented = True
    else:
        commented = False

    # Make sure the pattern appears in the file before continuing
    if commented or not __salt__['file.search'](name, regex, multiline=True):
        if __salt__['file.search'](name, unanchor_regex, multiline=True):
    if not __salt__['file.search'](name, regex, multiline=True):
        if __salt__['file.search'](name, comment_regex, multiline=True):
            ret['comment'] = 'Pattern already commented'
            ret['result'] = True
            return ret

@@ -5292,18 +5288,18 @@ def uncomment(name, regex, char='#', backup='.bak'):

    # Make sure the pattern appears in the file
    if __salt__['file.search'](
            name,
            '{0}[ \t]*{1}'.format(char, regex.lstrip('^')),
            multiline=True):
        # Line exists and is commented
        pass
    elif __salt__['file.search'](
            name,
            '^[ \t]*{0}'.format(regex.lstrip('^')),
            multiline=True):
        ret['comment'] = 'Pattern already uncommented'
        ret['result'] = True
        return ret
    elif __salt__['file.search'](
            name,
            '{0}[ \t]*{1}'.format(char, regex.lstrip('^')),
            multiline=True):
        # Line exists and is commented
        pass
    else:
        return _error(ret, '{0}: Pattern not found'.format(regex))

@@ -5798,19 +5794,23 @@ def prepend(name,

    if makedirs is True:
        dirname = os.path.dirname(name)
        if not __salt__['file.directory_exists'](dirname):
            try:
                _makedirs(name=name)
            except CommandExecutionError as exc:
                return _error(ret, 'Drive {0} is not mapped'.format(exc.message))
        if __opts__['test']:
            ret['comment'] = 'Directory {0} is set to be updated'.format(dirname)
            ret['result'] = None
        else:
            if not __salt__['file.directory_exists'](dirname):
                try:
                    _makedirs(name=name)
                except CommandExecutionError as exc:
                    return _error(ret, 'Drive {0} is not mapped'.format(exc.message))

        check_res, check_msg, check_changes = _check_directory_win(dirname) \
            if salt.utils.platform.is_windows() \
            else _check_directory(dirname)
            check_res, check_msg, check_changes = _check_directory_win(dirname) \
                if salt.utils.platform.is_windows() \
                else _check_directory(dirname)

        if not check_res:
            ret['changes'] = check_changes
            return _error(ret, check_msg)
            if not check_res:
                ret['changes'] = check_changes
                return _error(ret, check_msg)

    check_res, check_msg = _check_file(name)
    if not check_res:
@@ -6,7 +6,7 @@ Manage X509 Certificates

:depends: M2Crypto

This module can enable managing a complete PKI infrastructure including creating private keys, CA's,
This module can enable managing a complete PKI infrastructure including creating private keys, CAs,
certificates and CRLs. It includes the ability to generate a private key on a server, and have the
corresponding public key sent to a remote CA to create a CA signed certificate. This can be done in
a secure manner, where private keys are always generated locally and never moved across the network.

@@ -94,7 +94,7 @@ the mine where it can be easily retrieved by other minions.

The signing policy defines properties that override any property requested or included in a CRL. It also
can define a restricted list of minons which are allowed to remotely invoke this signing policy.
can define a restricted list of minions which are allowed to remotely invoke this signing policy.

/srv/salt/signing_policies.conf

@@ -117,7 +117,7 @@ can define a restricted list of minons which are allowed to remotely invoke this

This state will instruct all minions to trust certificates signed by our new CA.
Using jinja to strip newlines from the text avoids dealing with newlines in the rendered yaml,
Using Jinja to strip newlines from the text avoids dealing with newlines in the rendered YAML,
and the :mod:`sign_remote_certificate <salt.states.x509.sign_remote_certificate>` state will
handle properly formatting the text before writing the output.

@@ -266,8 +266,8 @@ def private_key_managed(name,
        Cipher for encrypting the private key.

    new:
        Always create a new key. Defaults to False.
        Combining new with :mod:`prereq <salt.states.requsities.preqreq>`, or when used as part of a `managed_private_key` can allow key rotation whenever a new certificiate is generated.
        Always create a new key. Defaults to ``False``.
        Combining new with :mod:`prereq <salt.states.requsities.preqreq>`, or when used as part of a `managed_private_key` can allow key rotation whenever a new certificate is generated.

    overwrite:
        Overwrite an existing private key if the provided passphrase cannot decrypt it.

@@ -283,7 +283,7 @@ def private_key_managed(name,

    Example:

    The jinja templating in this example ensures a private key is generated if the file doesn't exist
    The JINJA templating in this example ensures a private key is generated if the file doesn't exist
    and that a new private key is generated whenever the certificate that uses it is to be renewed.

    .. code-block:: jinja

@@ -382,7 +382,7 @@ def certificate_managed(name,
        Manages the private key corresponding to the certificate. All of the
        arguments supported by :py:func:`x509.private_key_managed
        <salt.states.x509.private_key_managed>` are supported. If `name` is not
        speicified or is the same as the name of the certificate, the private
        specified or is the same as the name of the certificate, the private
        key and certificate will be written together in the same file.

    append_certs:

@@ -595,14 +595,14 @@ def crl_managed(name,
        Path to the certificate

    signing_private_key
        The private key that will be used to sign this crl. This is
        The private key that will be used to sign the CRL. This is
        usually your CA's private key.

    signing_private_key_passphrase
        Passphrase to decrypt the private key.

    signing_cert
        The certificate of the authority that will be used to sign this crl.
        The certificate of the authority that will be used to sign the CRL.
        This is usually your CA's certificate.

    revoked

@@ -618,8 +618,8 @@ def crl_managed(name,
        of pyOpenSSL less than 0.14.

    days_remaining : 30
        The crl should be automatically recreated if there are less than
        ``days_remaining`` days until the crl expires. Set to 0 to disable
        The CRL should be automatically recreated if there are less than
        ``days_remaining`` days until the CRL expires. Set to 0 to disable
        automatic renewal.

    include_expired : False
@@ -1,8 +1,5 @@
# -*- coding: utf-8 -*-
'''
:codeauthor: Pedro Algarvio (pedro@algarvio.me)


salt.syspaths
~~~~~~~~~~~~~

@@ -21,9 +18,17 @@
from __future__ import absolute_import, print_function, unicode_literals
import sys
import os.path
import logging

__PLATFORM = sys.platform.lower()

typo_warning = True
log = logging.getLogger(__name__)
EXPECTED_VARIABLES = ('ROOT_DIR', 'CONFIG_DIR', 'CACHE_DIR', 'SOCK_DIR',
                      'SRV_ROOT_DIR', 'BASE_FILE_ROOTS_DIR', 'HOME_DIR',
                      'BASE_PILLAR_ROOTS_DIR', 'BASE_THORIUM_ROOTS_DIR',
                      'BASE_MASTER_ROOTS_DIR', 'LOGS_DIR', 'PIDFILE_DIR',
                      'SPM_PARENT_PATH', 'SPM_FORMULA_PATH', 'SPM_PILLAR_PATH',
                      'SPM_REACTOR_PATH', 'SHARE_DIR')

try:
    # Let's try loading the system paths from the generated module at

@@ -32,14 +37,19 @@ try:
except ImportError:
    import types
    __generated_syspaths = types.ModuleType(str('salt._syspaths'))  # future lint: blacklisted-function
    for key in ('ROOT_DIR', 'CONFIG_DIR', 'CACHE_DIR', 'SOCK_DIR',
                'SRV_ROOT_DIR', 'BASE_FILE_ROOTS_DIR', 'HOME_DIR',
                'BASE_PILLAR_ROOTS_DIR', 'BASE_THORIUM_ROOTS_DIR',
                'BASE_MASTER_ROOTS_DIR', 'LOGS_DIR', 'PIDFILE_DIR',
                'SPM_PARENT_PATH', 'SPM_FORMULA_PATH',
                'SPM_PILLAR_PATH', 'SPM_REACTOR_PATH', 'SHARE_DIR'):
    for key in EXPECTED_VARIABLES:
        setattr(__generated_syspaths, key, None)

else:
    for key in EXPECTED_VARIABLES:
        if hasattr(__generated_syspaths, key):
            continue
        else:
            if typo_warning:
                log.warning('Possible Typo?')
                log.warning('To dissolve this warning add `[variable] = None` to _syspaths.py')
            typo_warning = False
            log.warning('Variable %s is missing, value set to None', key)
            setattr(__generated_syspaths, key, None)  # missing variables defaulted to None

# Let's find out the path of this module
if 'SETUP_DIRNAME' in globals():
@@ -1454,7 +1454,7 @@ def _parse_tcp_line(line):

def _netlink_tool_remote_on(port, which_end):
    '''
    Returns set of ipv4 host addresses of remote established connections
    Returns set of IPv4/IPv6 host addresses of remote established connections
    on local or remote tcp port.

    Parses output of shell 'ss' to get connections

@@ -1464,6 +1464,7 @@ def _netlink_tool_remote_on(port, which_end):
        LISTEN 0 511 *:80 *:*
        LISTEN 0 128 *:22 *:*
        ESTAB 0 0 127.0.0.1:56726 127.0.0.1:4505
        ESTAB 0 0 [::ffff:127.0.0.1]:41323 [::ffff:127.0.0.1]:4505
    '''
    remotes = set()
    valid = False

@@ -1486,7 +1487,7 @@ def _netlink_tool_remote_on(port, which_end):
        chunks = line.split()
        remote_host, remote_port = chunks[4].rsplit(':', 1)

        remotes.add(remote_host)
        remotes.add(remote_host.strip("[]"))

    if valid is False:
        remotes = None
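The one-line change above handles the bracketed host form that ``ss`` prints for IPv6 and IPv4-mapped addresses; ``str.strip('[]')`` removes the brackets and is a no-op for plain IPv4 hosts:

.. code-block:: python

    # The bracket strip normalizes IPv6 output of ss and is harmless for IPv4.
    for addr in ('127.0.0.1:56726', '[::ffff:127.0.0.1]:41323'):
        remote_host, remote_port = addr.rsplit(':', 1)
        print(remote_host.strip("[]"))
    # 127.0.0.1
    # ::ffff:127.0.0.1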
@@ -39,7 +39,6 @@ try:
     import win32gui
     import win32api
     import win32con
-    import pywintypes
     HAS_WINDOWS_MODULES = True
 except ImportError:
     HAS_WINDOWS_MODULES = False
@@ -198,7 +197,7 @@ def key_exists(hive, key, use_32bit_registry=False):
     try:
         handle = win32api.RegOpenKeyEx(hkey, local_key, 0, access_mask)
         return True
-    except pywintypes.error as exc:
+    except win32api.error as exc:
         if exc.winerror == 2:
             return False
         raise
@@ -231,7 +230,9 @@ def value_exists(hive, key, vname, use_32bit_registry=False):
 
         .. code-block:: python
 
             import salt.utils.win_reg
-            winreg.key_exists(hive='HKLM', key='SOFTWARE\\Microsoft')
+            winreg.value_exists(hive='HKLM',
+                                key='SOFTWARE\\Microsoft\\Windows\\CurrentVersion',
+                                vname='CommonFilesDir')
     '''
     local_hive = _to_unicode(hive)
     local_key = _to_unicode(key)
@@ -246,7 +247,7 @@ def value_exists(hive, key, vname, use_32bit_registry=False):
 
     try:
         handle = win32api.RegOpenKeyEx(hkey, local_key, 0, access_mask)
-    except pywintypes.error as exc:
+    except win32api.error as exc:
         if exc.winerror == 2:
             # The key containing the value/data pair does not exist
             return False
@@ -257,7 +258,7 @@ def value_exists(hive, key, vname, use_32bit_registry=False):
         _, _ = win32api.RegQueryValueEx(handle, local_vname)
         # value/data pair exists
         return True
-    except pywintypes.error as exc:
+    except win32api.error as exc:
         if exc.winerror == 2 and vname is None:
             # value/data pair exists but is empty
             return True
@@ -349,9 +350,12 @@ def list_keys(hive, key=None, use_32bit_registry=False):
             else:
                 subkeys.append(subkey)
 
-    except Exception:  # pylint: disable=broad-except
-        log.debug(r'Cannot find key: %s\%s', hive, key, exc_info=True)
-        return False, r'Cannot find key: {0}\{1}'.format(hive, key)
+    except win32api.error as exc:
+        if exc.winerror == 2:
+            log.debug(r'Cannot find key: %s\%s', hive, key, exc_info=True)
+            return False, r'Cannot find key: {0}\{1}'.format(hive, key)
+        raise
 
     finally:
         if handle:
             handle.Close()
@@ -359,10 +363,14 @@ def list_keys(hive, key=None, use_32bit_registry=False):
     return subkeys
 
 
-def list_values(hive, key=None, use_32bit_registry=False, include_default=True):
+def list_values(hive, key=None, use_32bit_registry=False):
     '''
     Enumerates the values in a registry key or hive.
 
+    .. note::
+        The ``(Default)`` value will only be returned if it is set, otherwise it
+        will not be returned in the list of values.
+
     Args:
 
         hive (str):
@@ -382,9 +390,6 @@ def list_values(hive, key=None, use_32bit_registry=False):
             Accesses the 32bit portion of the registry on 64 bit installations.
             On 32bit machines this is ignored.
 
-        include_default (bool):
-            Toggle whether to include the '(Default)' value.
-
     Returns:
         list: A list of values under the hive or key.
 
@@ -429,9 +434,13 @@ def list_values(hive, key=None, use_32bit_registry=False):
             else:
                 value['vdata'] = vdata
             values.append(value)
-    except Exception as exc:  # pylint: disable=broad-except
-        log.debug(r'Cannot find key: %s\%s', hive, key, exc_info=True)
-        return False, r'Cannot find key: {0}\{1}'.format(hive, key)
+
+    except win32api.error as exc:
+        if exc.winerror == 2:
+            log.debug(r'Cannot find key: %s\%s', hive, key)
+            return False, r'Cannot find key: {0}\{1}'.format(hive, key)
+        raise
+
     finally:
         if handle:
             handle.Close()
@@ -535,23 +544,28 @@ def read_value(hive, key, vname=None, use_32bit_registry=False):
                     ret['vdata'] = vdata
             else:
                 ret['comment'] = 'Empty Value'
-        except Exception as exc:  # pylint: disable=broad-except
+        except win32api.error as exc:
             if exc.winerror == 2 and vname is None:
                 ret['vdata'] = ('(value not set)')
                 ret['vtype'] = 'REG_SZ'
-            else:
+            elif exc.winerror == 2:
                 msg = 'Cannot find {0} in {1}\\{2}' \
                       ''.format(local_vname, local_hive, local_key)
                 log.trace(exc)
                 log.trace(msg)
                 ret['comment'] = msg
                 ret['success'] = False
-    except Exception as exc:  # pylint: disable=broad-except
-        msg = 'Cannot find key: {0}\\{1}'.format(local_hive, local_key)
-        log.trace(exc)
-        log.trace(msg)
-        ret['comment'] = msg
-        ret['success'] = False
+            else:
+                raise
+    except win32api.error as exc:
+        if exc.winerror == 2:
+            msg = 'Cannot find key: {0}\\{1}'.format(local_hive, local_key)
+            log.trace(exc)
+            log.trace(msg)
+            ret['comment'] = msg
+            ret['success'] = False
+        else:
+            raise
     return ret
 
 
@@ -705,15 +719,30 @@ def set_value(hive,
 
     handle = None
     try:
-        handle, _ = win32api.RegCreateKeyEx(hkey, local_key, access_mask,
-                                            Options=create_options)
-        win32api.RegSetValueEx(handle, local_vname, 0, vtype_value, local_vdata)
-        win32api.RegFlushKey(handle)
-        broadcast_change()
-        return True
-    except (win32api.error, SystemError, ValueError, TypeError):  # pylint: disable=E0602
-        log.exception('Encountered error setting registry value')
+        handle, result = win32api.RegCreateKeyEx(hkey, local_key, access_mask,
+                                                 Options=create_options)
+        msg = 'Created new key: %s\\%s' if result == 1 else \
+              'Opened existing key: %s\\%s'
+        log.debug(msg, local_hive, local_key)
+
+        try:
+            win32api.RegSetValueEx(handle, local_vname, 0, vtype_value, local_vdata)
+            win32api.RegFlushKey(handle)
+            broadcast_change()
+            return True
+        except TypeError as exc:
+            log.exception('"vdata" does not match the expected data type.\n%s',
+                          exc)
+            return False
+        except (SystemError, ValueError) as exc:
+            log.exception('Encountered error setting registry value.\n%s', exc)
+            return False
+
+    except win32api.error as exc:
+        log.exception('Error creating/opening key: %s\\%s\n%s', local_hive,
+                      local_key, exc.winerror)
+        return False
+
     finally:
         if handle:
             win32api.RegCloseKey(handle)
@@ -771,7 +800,7 @@ def cast_vdata(vdata=None, vtype='REG_SZ'):
         return [_to_unicode(i) for i in vdata]
     # Make sure REG_QWORD is a 64 bit integer
     elif vtype_value == win32con.REG_QWORD:
-        return vdata if six.PY3 else long(vdata)  # pylint: disable=undefined-variable,incompatible-py3-code
+        return int(vdata) if six.PY3 else long(vdata)  # pylint: disable=undefined-variable,incompatible-py3-code
     # Everything else is int
     else:
         return int(vdata)
@@ -829,13 +858,12 @@ def delete_key_recursive(hive, key, use_32bit_registry=False):
     access_mask = registry.registry_32[use_32bit_registry] | win32con.KEY_ALL_ACCESS
 
     if not key_exists(local_hive, local_key, use_32bit_registry):
+        log.debug('"%s\\%s" not found', hive, key)
         return False
 
     if (len(key) > 1) and (key.count('\\', 1) < registry.subkey_slash_check[hkey]):
         log.error(
-            'Hive:%s Key:%s; key is too close to root, not safe to remove',
-            hive, key
-        )
+            '"%s\\%s" is too close to root, not safe to remove', hive, key)
         return False
 
     # Functions for traversing the registry tree
@@ -849,7 +877,7 @@ def delete_key_recursive(hive, key, use_32bit_registry=False):
             subkey = win32api.RegEnumKey(_key, i)
             yield _to_mbcs(subkey)
             i += 1
-        except Exception:  # pylint: disable=broad-except
+        except win32api.error:
             break
 
     def _traverse_registry_tree(_hkey, _keypath, _ret, _access_mask):
@@ -874,13 +902,21 @@ def delete_key_recursive(hive, key, use_32bit_registry=False):
 
     # Delete all sub_keys
     for sub_key_path in key_list:
+        key_handle = None
         try:
             key_handle = win32api.RegOpenKeyEx(hkey, sub_key_path, 0, access_mask)
-            win32api.RegDeleteKey(key_handle, '')
-            ret['Deleted'].append(r'{0}\{1}'.format(hive, sub_key_path))
-        except WindowsError as exc:  # pylint: disable=E0602
+            try:
+                win32api.RegDeleteKey(key_handle, '')
+                ret['Deleted'].append(r'{0}\{1}'.format(hive, sub_key_path))
+            except WindowsError as exc:  # pylint: disable=undefined-variable
+                log.error(exc, exc_info=True)
+                ret['Failed'].append(r'{0}\{1} {2}'.format(hive, sub_key_path, exc))
+        except win32api.error as exc:
             log.error(exc, exc_info=True)
-            ret['Failed'].append(r'{0}\{1} {2}'.format(hive, sub_key_path, exc))
+            ret['Failed'].append(r'{0}\{1} {2}'.format(hive, sub_key_path, exc.strerror))
+        finally:
+            if key_handle:
+                win32api.CloseHandle(key_handle)
 
     broadcast_change()
@@ -940,16 +976,10 @@ def delete_value(hive, key, vname=None, use_32bit_registry=False):
         win32api.RegDeleteValue(handle, local_vname)
         broadcast_change()
         return True
-    except Exception as exc:  # pylint: disable=broad-except
+    except win32api.error as exc:
         if exc.winerror == 2:
            return None
-        else:
-            log.error(exc, exc_info=True)
-            log.error('Hive: %s', local_hive)
-            log.error('Key: %s', local_key)
-            log.error('ValueName: %s', local_vname)
-            log.error('32bit Reg: %s', use_32bit_registry)
-            return False
+        raise
    finally:
        if handle:
            win32api.RegCloseKey(handle)
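Every handler change in the salt.utils.win_reg hunks above follows one pattern: catch the specific `win32api.error` instead of a blanket `except Exception` or `pywintypes.error`, treat `winerror` 2 (ERROR_FILE_NOT_FOUND, i.e. the key or value simply is not there) as a normal negative result, and re-raise everything else. A hedged sketch of that pattern (Windows-only, requires pywin32; the function name is illustrative, not Salt's API):

    import win32api
    import win32con

    def key_present(root, subkey):
        # Narrow handler: absence is an expected outcome, anything else
        # (permissions, a bad hive handle, ...) propagates to the caller.
        try:
            handle = win32api.RegOpenKeyEx(root, subkey, 0, win32con.KEY_READ)
        except win32api.error as exc:
            if exc.winerror == 2:   # ERROR_FILE_NOT_FOUND: key absent
                return False
            raise
        win32api.RegCloseKey(handle)
        return True

    # e.g. key_present(win32con.HKEY_LOCAL_MACHINE, r'SOFTWARE\Microsoft')
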
@@ -134,6 +134,54 @@ class PkgModuleTest(ModuleCase, SaltReturnAssertsMixin):
             if repo is not None:
                 self.run_function('pkg.del_repo', [repo])
 
+    def test_mod_del_repo_multiline_values(self):
+        '''
+        test modifying and deleting a software repository defined with multiline values
+        '''
+        os_grain = self.run_function('grains.item', ['os'])['os']
+        repo = None
+        try:
+            if os_grain in ['CentOS', 'RedHat']:
+                my_baseurl = 'http://my.fake.repo/foo/bar/\n http://my.fake.repo.alt/foo/bar/'
+                expected_get_repo_baseurl = 'http://my.fake.repo/foo/bar/\nhttp://my.fake.repo.alt/foo/bar/'
+                major_release = int(
+                    self.run_function(
+                        'grains.item',
+                        ['osmajorrelease']
+                    )['osmajorrelease']
+                )
+                repo = 'fakerepo'
+                name = 'Fake repo for RHEL/CentOS/SUSE'
+                baseurl = my_baseurl
+                gpgkey = 'https://my.fake.repo/foo/bar/MY-GPG-KEY.pub'
+                failovermethod = 'priority'
+                gpgcheck = 1
+                enabled = 1
+                ret = self.run_function(
+                    'pkg.mod_repo',
+                    [repo],
+                    name=name,
+                    baseurl=baseurl,
+                    gpgkey=gpgkey,
+                    gpgcheck=gpgcheck,
+                    enabled=enabled,
+                    failovermethod=failovermethod,
+                )
+                # return data from pkg.mod_repo contains the file modified at
+                # the top level, so use next(iter(ret)) to get that key
+                self.assertNotEqual(ret, {})
+                repo_info = ret[next(iter(ret))]
+                self.assertIn(repo, repo_info)
+                self.assertEqual(repo_info[repo]['baseurl'], my_baseurl)
+                ret = self.run_function('pkg.get_repo', [repo])
+                self.assertEqual(ret['baseurl'], expected_get_repo_baseurl)
+                self.run_function('pkg.mod_repo', [repo])
+                ret = self.run_function('pkg.get_repo', [repo])
+                self.assertEqual(ret['baseurl'], expected_get_repo_baseurl)
+        finally:
+            if repo is not None:
+                self.run_function('pkg.del_repo', [repo])
+
     @requires_salt_modules('pkg.owner')
     def test_owner(self):
         '''
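The new integration test above exercises repositories whose `baseurl` spans several lines. The round-trip it asserts amounts to a small normalization: the continuation line may carry a leading space on the way in, but `pkg.get_repo` hands it back stripped. A standalone sketch of just that normalization (illustrative, not the pkg module's code):

    my_baseurl = 'http://my.fake.repo/foo/bar/\n http://my.fake.repo.alt/foo/bar/'
    expected = 'http://my.fake.repo/foo/bar/\nhttp://my.fake.repo.alt/foo/bar/'
    normalized = '\n'.join(part.strip() for part in my_baseurl.splitlines())
    assert normalized == expected
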
@@ -12,8 +12,8 @@ import textwrap
 
 # Import Salt Testing libs
 from tests.support.case import ModuleCase
-from tests.support.unit import skipIf
-from tests.support.helpers import destructiveTest, skip_if_not_root, flaky
+from tests.support.unit import skipIf, SkipTest
+from tests.support.helpers import destructiveTest, skip_if_not_root, flaky, requires_system_grains
 
 # Import Salt libs
 import salt.utils.files
@@ -32,23 +32,33 @@ class SystemModuleTest(ModuleCase):
     '''
     Validate the date/time functions in the system module
     '''
-    fmt_str = "%Y-%m-%d %H:%M:%S"
+    _hwclock_has_compare_ = None
+    _systemd_timesyncd_available_ = None
 
     @classmethod
-    def setUpClass(cls):
+    @requires_system_grains
+    def setUpClass(cls, grains):  # pylint: disable=arguments-differ
+        if grains['kernel'] != 'Linux':
+            raise SkipTest(
+                'Test not applicable to \'{kernel}\' kernel'.format(
+                    **grains
+                )
+            )
+        cls.fmt_str = "%Y-%m-%d %H:%M:%S"
         cls._orig_time = None
         cls._machine_info = True
 
     @classmethod
     def tearDownClass(cls):
         for name in ('fmt_str', '_orig_time', '_machine_info'):
             delattr(cls, name)
 
     def setUp(self):
         super(SystemModuleTest, self).setUp()
-        os_grain = self.run_function('grains.item', ['kernel'])
-        if os_grain['kernel'] not in 'Linux':
-            self.skipTest(
-                'Test not applicable to \'{kernel}\' kernel'.format(
-                    **os_grain
-                )
-            )
-        if self.run_function('service.available', ['systemd-timesyncd']):
+        if self._systemd_timesyncd_available_ is None:
+            SystemModuleTest._systemd_timesyncd_available_ = self.run_function('service.available', ['systemd-timesyncd'])
+        if self._systemd_timesyncd_available_:
             self.run_function('service.stop', ['systemd-timesyncd'])
 
     def tearDown(self):
@@ -58,7 +68,7 @@ class SystemModuleTest(ModuleCase):
         if self._machine_info is not True:
             self._restore_machine_info()
             self._machine_info = True
-        if self.run_function('service.available', ['systemd-timesyncd']):
+        if self._systemd_timesyncd_available_:
             self.run_function('service.start', ['systemd-timesyncd'])
 
     def _save_time(self):
@@ -87,8 +97,12 @@ class SystemModuleTest(ModuleCase):
         systems where it's not present so that we can skip the
         comparison portion of the test.
         '''
-        res = self.run_function('cmd.run_all', cmd='hwclock -h')
-        return res['retcode'] == 0 and res['stdout'].find('--compare') > 0
+        if self._hwclock_has_compare_ is None:
+            res = self.run_function('cmd.run_all', cmd='hwclock -h')
+            SystemModuleTest._hwclock_has_compare_ = (
+                res['retcode'] == 0 and res['stdout'].find('--compare') > 0
+            )
+        return self._hwclock_has_compare_
 
     def _test_hwclock_sync(self):
         '''
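The test_system changes above replace per-test probing (`service.available`, `hwclock -h`) with probe-once, cache-on-the-class memoization. A minimal sketch of the idiom, under the assumption that the probe is expensive and its answer stable for the whole test run (class and method names are illustrative):

    class ProbingTest(object):
        _probe_result = None        # class-level cache shared by all tests

        def _expensive_probe(self):
            return True             # stands in for the run_function(...) calls

        def probe(self):
            # First caller pays the cost; every later test reads the cache.
            if ProbingTest._probe_result is None:
                ProbingTest._probe_result = self._expensive_probe()
            return ProbingTest._probe_result
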
@@ -10,7 +10,7 @@ import time
 
 # Import Salt Testing Libs
 from tests.support.unit import skipIf
-from tests.support.case import ModuleCase, ShellCase
+from tests.support.case import ModuleCase
 from tests.support.helpers import destructiveTest, flaky
 from tests.support.paths import FILES
 
@@ -24,7 +24,7 @@ log = logging.getLogger(__name__)
 @destructiveTest
 @skipIf(not salt.utils.path.which('dockerd'), 'Docker not installed')
 @skipIf(not salt.utils.path.which('vault'), 'Vault not installed')
-class VaultTestCase(ModuleCase, ShellCase):
+class VaultTestCase(ModuleCase):
     '''
     Test vault module
     '''
@@ -7,12 +7,11 @@ Simple Smoke Tests for Connected SSH minions
 from __future__ import absolute_import, print_function, unicode_literals
 
 # Import Salt Testing libs
-from tests.support.case import ModuleCase
-from tests.support.helpers import requires_sshd_server
+from tests.support.case import SSHCase
+from tests.support.helpers import skip_if_not_root, requires_system_grains
 
 
-@requires_sshd_server
-class SSHMasterTestCase(ModuleCase):
+class SSHMasterTestCase(SSHCase):
     '''
     Test ssh master functionality
     '''
@@ -20,13 +19,15 @@ class SSHMasterTestCase(SSHCase):
         '''
         Ensure the proxy can ping
         '''
-        ret = self.run_function('test.ping', minion_tgt='localhost')
+        ret = self.run_function('test.ping')
         self.assertEqual(ret, True)
 
-    def test_service(self):
+    @requires_system_grains
+    @skip_if_not_root
+    def test_service(self, grains):
         service = 'cron'
-        os_family = self.run_function('grains.get', ['os_family'], minion_tgt='localhost')
-        os_release = self.run_function('grains.get', ['osrelease'], minion_tgt='localhost')
+        os_family = grains['os_family']
+        os_release = grains['osrelease']
         if os_family == 'RedHat':
             service = 'crond'
         elif os_family == 'Arch':
@@ -35,29 +36,30 @@ class SSHMasterTestCase(SSHCase):
             service = 'org.ntp.ntpd'
             if int(os_release.split('.')[1]) >= 13:
                 service = 'com.apple.AirPlayXPCHelper'
-        ret = self.run_function('service.get_all', minion_tgt='localhost')
+        ret = self.run_function('service.get_all')
         self.assertIn(service, ret)
-        self.run_function('service.stop', [service], minion_tgt='localhost')
-        ret = self.run_function('service.status', [service], minion_tgt='localhost')
+        self.run_function('service.stop', [service])
+        ret = self.run_function('service.status', [service])
         self.assertFalse(ret)
-        self.run_function('service.start', [service], minion_tgt='localhost')
-        ret = self.run_function('service.status', [service], minion_tgt='localhost')
+        self.run_function('service.start', [service])
+        ret = self.run_function('service.status', [service])
         self.assertTrue(ret)
 
-    def test_grains_items(self):
-        os_family = self.run_function('grains.get', ['os_family'], minion_tgt='localhost')
-        ret = self.run_function('grains.items', minion_tgt='localhost')
+    @requires_system_grains
+    def test_grains_items(self, grains):
+        os_family = grains['os_family']
+        ret = self.run_function('grains.items')
         if os_family == 'MacOS':
             self.assertEqual(ret['kernel'], 'Darwin')
         else:
             self.assertEqual(ret['kernel'], 'Linux')
 
     def test_state_apply(self):
-        ret = self.run_function('state.apply', ['core'], minion_tgt='localhost')
+        ret = self.run_function('state.apply', ['core'])
         for key, value in ret.items():
             self.assertTrue(value['result'])
 
     def test_state_highstate(self):
-        ret = self.run_function('state.highstate', minion_tgt='localhost')
+        ret = self.run_function('state.highstate')
         for key, value in ret.items():
             self.assertTrue(value['result'])
@@ -523,6 +523,30 @@ class AptPkgTestCase(TestCase, LoaderModuleMockMixin):
         self.assert_called_once(refresh_mock)
         refresh_mock.reset_mock()
 
+    def test_mod_repo_enabled(self):
+        '''
+        Checks if a repo is enabled or disabled depending on the passed kwargs.
+        '''
+        with patch.dict(aptpkg.__salt__, {'config.option': MagicMock(), 'no_proxy': MagicMock(return_value=False)}):
+            with patch('salt.modules.aptpkg._check_apt', MagicMock(return_value=True)):
+                with patch('salt.modules.aptpkg.refresh_db', MagicMock(return_value={})):
+                    with patch('salt.utils.data.is_true', MagicMock(return_value=True)) as data_is_true:
+                        with patch('salt.modules.aptpkg.sourceslist', MagicMock(), create=True):
+                            repo = aptpkg.mod_repo('foo', enabled=False)
+                            data_is_true.assert_called_with(False)
+                            # with disabled=True; should call salt.utils.data.is_true True
+                            data_is_true.reset_mock()
+                            repo = aptpkg.mod_repo('foo', disabled=True)
+                            data_is_true.assert_called_with(True)
+                            # with enabled=True; should call salt.utils.data.is_true with False
+                            data_is_true.reset_mock()
+                            repo = aptpkg.mod_repo('foo', enabled=True)
+                            data_is_true.assert_called_with(True)
+                            # with disabled=True; should call salt.utils.data.is_true False
+                            data_is_true.reset_mock()
+                            repo = aptpkg.mod_repo('foo', disabled=False)
+                            data_is_true.assert_called_with(False)
+
     @patch('salt.utils.path.os_walk', MagicMock(return_value=[('test', 'test', 'test')]))
     @patch('os.path.getsize', MagicMock(return_value=123456))
     @patch('os.path.getctime', MagicMock(return_value=1234567890.123456))
@@ -126,6 +126,122 @@ class PkgresTestCase(TestCase, LoaderModuleMockMixin):
         '''
         self.assertIsNone(pkg_resource.sort_pkglist({}))
 
+    def test_format_pkg_list_no_attr(self):
+        '''
+        Test to output format of the package list with no attr parameter.
+        '''
+        packages = {
+            'glibc': [{'version': '2.12', 'epoch': None, 'release': '1.212.el6', 'arch': 'x86_64'}],
+            'glibc.i686': [{'version': '2.12', 'epoch': None, 'release': '1.212.el6', 'arch': 'i686'}],
+            'foobar': [
+                {'version': '1.2.0', 'epoch': '2', 'release': '7', 'arch': 'x86_64'},
+                {'version': '1.2.3', 'epoch': '2', 'release': '27', 'arch': 'x86_64'},
+            ],
+            'foobar.something': [{'version': '1.1', 'epoch': '3', 'release': '23.1', 'arch': 'i686'}],
+            'foobar.': [{'version': '1.1', 'epoch': '3', 'release': '23.1', 'arch': 'i686'}]
+        }
+        expected_pkg_list = {
+            'glibc': '2.12-1.212.el6',
+            'glibc.i686': '2.12-1.212.el6',
+            'foobar': '2:1.2.0-7,2:1.2.3-27',
+            'foobar.something': '3:1.1-23.1',
+            'foobar.': '3:1.1-23.1',
+        }
+        if six.PY3:
+            self.assertCountEqual(pkg_resource.format_pkg_list(packages, False, None), expected_pkg_list)
+        else:
+            self.assertItemsEqual(pkg_resource.format_pkg_list(packages, False, None), expected_pkg_list)
+
+    def test_format_pkg_list_with_attr(self):
+        '''
+        Test to output format of the package list with attr parameter.
+        In this case, any redundant "arch" reference will be removed from the package name since it's
+        included as part of the requested attr.
+        '''
+        NAME_ARCH_MAPPING = {
+            'glibc': {
+                'name': 'glibc',
+                'arch': None
+            },
+            'glibc.i686': {
+                'name': 'glibc',
+                'arch': 'i686'
+            },
+            'foobar': {
+                'name': 'foobar',
+                'arch': None
+            },
+            'foobar.something': {
+                'name': 'foobar.something',
+                'arch': None
+            },
+            'foobar.': {
+                'name': 'foobar.',
+                'arch': None
+            }
+        }
+        packages = {
+            'glibc': [{'version': '2.12', 'epoch': None, 'release': '1.212.el6', 'arch': 'x86_64'}],
+            'glibc.i686': [{'version': '2.12', 'epoch': None, 'release': '1.212.el6', 'arch': 'i686'}],
+            'foobar': [
+                {'version': '1.2.0', 'epoch': '2', 'release': '7', 'arch': 'x86_64'},
+                {'version': '1.2.3', 'epoch': '2', 'release': '27', 'arch': 'x86_64'},
+            ],
+            'foobar.something': [{'version': '1.1', 'epoch': '3', 'release': '23.1', 'arch': 'i686'}],
+            'foobar.': [{'version': '1.1', 'epoch': '3', 'release': '23.1', 'arch': 'i686'}]
+        }
+        expected_pkg_list = {
+            'glibc': [
+                {
+                    'arch': 'x86_64',
+                    'release': '1.212.el6',
+                    'epoch': None,
+                    'version': '2.12'
+                },
+                {
+                    'arch': 'i686',
+                    'release': '1.212.el6',
+                    'epoch': None,
+                    'version': '2.12'
+                }
+            ],
+            'foobar': [
+                {
+                    'arch': 'x86_64',
+                    'release': '7',
+                    'epoch': '2',
+                    'version': '1.2.0'
+                },
+                {
+                    'arch': 'x86_64',
+                    'release': '27',
+                    'epoch': '2',
+                    'version': '1.2.3'
+                }
+            ],
+            'foobar.': [
+                {
+                    'arch': 'i686',
+                    'release': '23.1',
+                    'epoch': '3',
+                    'version': '1.1'
+                }
+            ],
+            'foobar.something': [
+                {
+                    'arch': 'i686',
+                    'release': '23.1',
+                    'epoch': '3',
+                    'version': '1.1'
+                }
+            ]
+        }
+        with patch.dict(pkg_resource.__salt__, {'pkg.parse_arch': NAME_ARCH_MAPPING.get}):
+            if six.PY3:
+                self.assertCountEqual(pkg_resource.format_pkg_list(packages, False, attr=['epoch', 'release']), expected_pkg_list)
+            else:
+                self.assertItemsEqual(pkg_resource.format_pkg_list(packages, False, attr=['epoch', 'release']), expected_pkg_list)
+
     def test_stringify(self):
         '''
         Test to takes a dict of package name/version information
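The no-attr test above pins down `format_pkg_list`'s collapsed string form: each installed version renders as `epoch:version-release` with the epoch omitted when it is None, and multiple installed versions join with commas. A hedged re-derivation of just that formatting rule (not the pkg_resource implementation itself):

    def render(versions):
        # versions: list of dicts with 'version', 'release', 'epoch' keys
        out = []
        for pkg in versions:
            vr = '{0}-{1}'.format(pkg['version'], pkg['release'])
            out.append('{0}:{1}'.format(pkg['epoch'], vr) if pkg['epoch'] else vr)
        return ','.join(out)

    assert render([{'version': '1.2.0', 'epoch': '2', 'release': '7'},
                   {'version': '1.2.3', 'epoch': '2', 'release': '27'}]) == '2:1.2.0-7,2:1.2.3-27'
    assert render([{'version': '2.12', 'epoch': None, 'release': '1.212.el6'}]) == '2.12-1.212.el6'
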
@@ -1466,6 +1466,7 @@ class PostgresTestCase(TestCase, LoaderModuleMockMixin):
             locale=None,
             password='test',
             runas='postgres',
+            checksums=False,
             user='postgres',
         )
         self.assertTrue(ret)
@@ -17,6 +17,10 @@ import salt.modules.pkg_resource as pkg_resource
 import salt.modules.win_pkg as win_pkg
 import salt.utils.data
 import salt.utils.platform
+import salt.utils.win_reg as win_reg
+
+# Import 3rd Party Libs
+from salt.ext import six
 
 
 @skipIf(not salt.utils.platform.is_windows(), "Must be on Windows!")
@@ -57,6 +61,12 @@ class WinPkgInstallTestCase(TestCase, LoaderModuleMockMixin):
                     'pkg_resource.stringify': pkg_resource.stringify,
                     'config.valid_fileproto': config.valid_fileproto,
                 },
+                '__utils__': {
+                    'reg.key_exists': win_reg.key_exists,
+                    'reg.list_keys': win_reg.list_keys,
+                    'reg.read_value': win_reg.read_value,
+                    'reg.value_exists': win_reg.value_exists,
+                },
             },
             pkg_resource: {
                 '__grains__': {
@@ -65,6 +75,16 @@ class WinPkgInstallTestCase(TestCase, LoaderModuleMockMixin):
             },
         }
 
+    def test_pkg__get_reg_software(self):
+        result = win_pkg._get_reg_software()
+        self.assertTrue(isinstance(result, dict))
+        found_python = False
+        search = 'Python 2' if six.PY2 else 'Python 3'
+        for key in result:
+            if search in key:
+                found_python = True
+        self.assertTrue(found_python)
+
     def test_pkg_install_not_found(self):
         '''
         Test pkg.install when the Version is NOT FOUND in the Software
@@ -16,6 +16,7 @@ from tests.support.mock import (
 # Import Salt libs
 from salt.exceptions import CommandExecutionError
 import salt.modules.rpm_lowpkg as rpm
+from salt.ext import six
 import salt.modules.yumpkg as yumpkg
 import salt.modules.pkg_resource as pkg_resource
 
@@ -73,7 +74,8 @@ class YumTestCase(TestCase, LoaderModuleMockMixin):
                     'os_family': 'RedHat',
                     'osmajorrelease': 7,
                 },
-            }
+            },
+            pkg_resource: {}
         }
 
     def test_list_pkgs(self):
@@ -104,7 +106,8 @@ class YumTestCase(TestCase, LoaderModuleMockMixin):
                 patch.dict(yumpkg.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}), \
                 patch.dict(yumpkg.__salt__, {'pkg_resource.add_pkg': _add_data}), \
                 patch.dict(yumpkg.__salt__, {'pkg_resource.format_pkg_list': pkg_resource.format_pkg_list}), \
-                patch.dict(yumpkg.__salt__, {'pkg_resource.stringify': MagicMock()}):
+                patch.dict(yumpkg.__salt__, {'pkg_resource.stringify': MagicMock()}), \
+                patch.dict(pkg_resource.__salt__, {'pkg.parse_arch': yumpkg.parse_arch}):
             pkgs = yumpkg.list_pkgs(versions_as_list=True)
             for pkg_name, pkg_version in {
                 'python-urlgrabber': '3.10-8.el7',
@@ -151,7 +154,8 @@ class YumTestCase(TestCase, LoaderModuleMockMixin):
                 patch.dict(yumpkg.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}), \
                 patch.dict(yumpkg.__salt__, {'pkg_resource.add_pkg': _add_data}), \
                 patch.dict(yumpkg.__salt__, {'pkg_resource.format_pkg_list': pkg_resource.format_pkg_list}), \
-                patch.dict(yumpkg.__salt__, {'pkg_resource.stringify': MagicMock()}):
+                patch.dict(yumpkg.__salt__, {'pkg_resource.stringify': MagicMock()}), \
+                patch.dict(pkg_resource.__salt__, {'pkg.parse_arch': yumpkg.parse_arch}):
             pkgs = yumpkg.list_pkgs(attr=['epoch', 'release', 'arch', 'install_date_time_t'])
             for pkg_name, pkg_attr in {
                 'python-urlgrabber': {
@@ -159,54 +163,63 @@ class YumTestCase(TestCase, LoaderModuleMockMixin):
                     'release': '8.el7',
                     'arch': 'noarch',
                     'install_date_time_t': 1487838471,
+                    'epoch': None
                 },
                 'alsa-lib': {
                     'version': '1.1.1',
                     'release': '1.el7',
                     'arch': 'x86_64',
                     'install_date_time_t': 1487838475,
+                    'epoch': None
                 },
                 'gnupg2': {
                     'version': '2.0.22',
                     'release': '4.el7',
                     'arch': 'x86_64',
                     'install_date_time_t': 1487838477,
+                    'epoch': None
                 },
                 'rpm-python': {
                     'version': '4.11.3',
                     'release': '21.el7',
                     'arch': 'x86_64',
                     'install_date_time_t': 1487838477,
+                    'epoch': None
                 },
                 'pygpgme': {
                     'version': '0.3',
                     'release': '9.el7',
                     'arch': 'x86_64',
                     'install_date_time_t': 1487838478,
+                    'epoch': None
                 },
                 'yum': {
                     'version': '3.4.3',
                     'release': '150.el7.centos',
                     'arch': 'noarch',
                     'install_date_time_t': 1487838479,
+                    'epoch': None
                 },
                 'lzo': {
                     'version': '2.06',
                     'release': '8.el7',
                     'arch': 'x86_64',
                     'install_date_time_t': 1487838479,
+                    'epoch': None
                 },
                 'qrencode-libs': {
                     'version': '3.4.1',
                     'release': '3.el7',
                     'arch': 'x86_64',
                     'install_date_time_t': 1487838480,
+                    'epoch': None
                 },
                 'ustr': {
                     'version': '1.0.4',
                     'release': '16.el7',
                     'arch': 'x86_64',
                     'install_date_time_t': 1487838480,
+                    'epoch': None
                 },
                 'shadow-utils': {
                     'epoch': '2',
@@ -220,22 +233,133 @@ class YumTestCase(TestCase, LoaderModuleMockMixin):
                     'release': '33.el7',
                     'arch': 'x86_64',
                     'install_date_time_t': 1487838484,
+                    'epoch': None
                 },
                 'openssh': {
                     'version': '6.6.1p1',
                     'release': '33.el7_3',
                     'arch': 'x86_64',
                     'install_date_time_t': 1487838485,
+                    'epoch': None
                 },
                 'virt-what': {
                     'version': '1.13',
                     'release': '8.el7',
                     'install_date_time_t': 1487838486,
                     'arch': 'x86_64',
+                    'epoch': None
                 }}.items():
 
             self.assertTrue(pkgs.get(pkg_name))
             self.assertEqual(pkgs[pkg_name], [pkg_attr])
 
+    def test_list_pkgs_with_attr_multiple_versions(self):
+        '''
+        Test packages listing with the attr parameter reporting multiple version installed
+
+        :return:
+        '''
+        def _add_data(data, key, value):
+            data.setdefault(key, []).append(value)
+
+        rpm_out = [
+            'glibc_|-(none)_|-2.12_|-1.212.el6_|-i686_|-(none)_|-1542394210',
+            'glibc_|-(none)_|-2.12_|-1.212.el6_|-x86_64_|-(none)_|-1542394204',
+            'virt-what_|-(none)_|-1.13_|-8.el7_|-x86_64_|-(none)_|-1487838486',
+            'virt-what_|-(none)_|-1.10_|-2.el7_|-x86_64_|-(none)_|-1387838486',
+        ]
+        with patch.dict(yumpkg.__grains__, {'osarch': 'x86_64'}), \
+                patch.dict(yumpkg.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}), \
+                patch.dict(yumpkg.__salt__, {'pkg_resource.add_pkg': _add_data}), \
+                patch.dict(yumpkg.__salt__, {'pkg_resource.format_pkg_list': pkg_resource.format_pkg_list}), \
+                patch.dict(yumpkg.__salt__, {'pkg_resource.stringify': MagicMock()}), \
+                patch.dict(pkg_resource.__salt__, {'pkg.parse_arch': yumpkg.parse_arch}):
+            pkgs = yumpkg.list_pkgs(attr=['epoch', 'release', 'arch', 'install_date_time_t'])
+            expected_pkg_list = {
+                'glibc': [
+                    {
+                        'version': '2.12',
+                        'release': '1.212.el6',
+                        'install_date_time_t': 1542394210,
+                        'arch': 'i686',
+                        'epoch': None
+                    },
+                    {
+                        'version': '2.12',
+                        'release': '1.212.el6',
+                        'install_date_time_t': 1542394204,
+                        'arch': 'x86_64',
+                        'epoch': None
+                    }
+                ],
+                'virt-what': [
+                    {
+                        'version': '1.10',
+                        'release': '2.el7',
+                        'install_date_time_t': 1387838486,
+                        'arch': 'x86_64',
+                        'epoch': None
+                    },
+                    {
+                        'version': '1.13',
+                        'release': '8.el7',
+                        'install_date_time_t': 1487838486,
+                        'arch': 'x86_64',
+                        'epoch': None
+                    }
+                ]
+            }
+            for pkgname, pkginfo in pkgs.items():
+                if six.PY3:
+                    self.assertCountEqual(pkginfo, expected_pkg_list[pkgname])
+                else:
+                    self.assertItemsEqual(pkginfo, expected_pkg_list[pkgname])
+
+    def test_list_patches(self):
+        '''
+        Test patches listing.
+
+        :return:
+        '''
+        yum_out = [
+            'i my-fake-patch-not-installed-1234 recommended spacewalk-usix-2.7.5.2-2.2.noarch',
+            ' my-fake-patch-not-installed-1234 recommended spacewalksd-5.0.26.2-21.2.x86_64',
+            'i my-fake-patch-not-installed-1234 recommended suseRegisterInfo-3.1.1-18.2.x86_64',
+            'i my-fake-patch-installed-1234 recommended my-package-one-1.1-0.1.x86_64',
+            'i my-fake-patch-installed-1234 recommended my-package-two-1.1-0.1.x86_64',
+        ]
+
+        expected_patches = {
+            'my-fake-patch-not-installed-1234': {
+                'installed': False,
+                'summary': [
+                    'spacewalk-usix-2.7.5.2-2.2.noarch',
+                    'spacewalksd-5.0.26.2-21.2.x86_64',
+                    'suseRegisterInfo-3.1.1-18.2.x86_64',
+                ]
+            },
+            'my-fake-patch-installed-1234': {
+                'installed': True,
+                'summary': [
+                    'my-package-one-1.1-0.1.x86_64',
+                    'my-package-two-1.1-0.1.x86_64',
+                ]
+            }
+        }
+
+        with patch.dict(yumpkg.__grains__, {'osarch': 'x86_64'}), \
+                patch.dict(yumpkg.__salt__, {'cmd.run_stdout': MagicMock(return_value=os.linesep.join(yum_out))}):
+            patches = yumpkg.list_patches()
+            self.assertFalse(patches['my-fake-patch-not-installed-1234']['installed'])
+            self.assertTrue(len(patches['my-fake-patch-not-installed-1234']['summary']) == 3)
+            for _patch in expected_patches['my-fake-patch-not-installed-1234']['summary']:
+                self.assertTrue(_patch in patches['my-fake-patch-not-installed-1234']['summary'])
+
+            self.assertTrue(patches['my-fake-patch-installed-1234']['installed'])
+            self.assertTrue(len(patches['my-fake-patch-installed-1234']['summary']) == 2)
+            for _patch in expected_patches['my-fake-patch-installed-1234']['summary']:
+                self.assertTrue(_patch in patches['my-fake-patch-installed-1234']['summary'])
+
     def test_latest_version_with_options(self):
         with patch.object(yumpkg, 'list_pkgs', MagicMock(return_value={})):
 
@@ -58,7 +58,7 @@ class ZypperTestCase(TestCase, LoaderModuleMockMixin):
     '''
 
     def setup_loader_modules(self):
-        return {zypper: {'rpm': None}}
+        return {zypper: {'rpm': None}, pkg_resource: {}}
 
     def setUp(self):
         self.new_repo_config = dict(
@@ -606,7 +606,8 @@ Repository 'DUMMY' not found by its alias, number, or URI.
                 patch.dict(zypper.__grains__, {'osarch': 'x86_64'}), \
                 patch.dict(zypper.__salt__, {'pkg_resource.add_pkg': _add_data}), \
                 patch.dict(zypper.__salt__, {'pkg_resource.format_pkg_list': pkg_resource.format_pkg_list}), \
-                patch.dict(zypper.__salt__, {'pkg_resource.stringify': MagicMock()}):
+                patch.dict(zypper.__salt__, {'pkg_resource.stringify': MagicMock()}), \
+                patch.dict(pkg_resource.__salt__, {'pkg.parse_arch': zypper.parse_arch}):
             pkgs = zypper.list_pkgs(attr=['epoch', 'release', 'arch', 'install_date_time_t'])
             self.assertFalse(pkgs.get('gpg-pubkey', False))
             for pkg_name, pkg_attr in {
@@ -615,58 +616,130 @@ Repository 'DUMMY' not found by its alias, number, or URI.
                     'release': '129.686',
                     'arch': 'noarch',
                     'install_date_time_t': 1498636511,
+                    'epoch': None,
                 }],
                 'yast2-ftp-server': [{
                     'version': '3.1.8',
                     'release': '8.1',
                     'arch': 'x86_64',
                     'install_date_time_t': 1499257798,
+                    'epoch': None,
                 }],
                 'protobuf-java': [{
                     'version': '2.6.1',
                     'release': '3.1.develHead',
                     'install_date_time_t': 1499257756,
                     'arch': 'noarch',
+                    'epoch': None,
                 }],
                 'susemanager-build-keys-web': [{
                     'version': '12.0',
                     'release': '5.1.develHead',
                     'arch': 'noarch',
                     'install_date_time_t': 1498636510,
+                    'epoch': None,
                 }],
                 'apache-commons-cli': [{
                     'version': '1.2',
                     'release': '1.233',
                     'arch': 'noarch',
                     'install_date_time_t': 1498636510,
+                    'epoch': None,
                 }],
                 'kernel-default': [{
                     'version': '4.4.138',
                     'release': '94.39.1',
                     'arch': 'x86_64',
-                    'install_date_time_t': 1529936067
+                    'install_date_time_t': 1529936067,
+                    'epoch': None,
+                },
+                {
+                    'version': '4.4.73',
+                    'release': '5.1',
+                    'arch': 'x86_64',
+                    'install_date_time_t': 1503572639,
+                    'epoch': None,
                 }],
-                'perseus-dummy.i586': [{
+                'perseus-dummy': [{
                     'version': '1.1',
                     'release': '1.1',
                     'arch': 'i586',
                     'install_date_time_t': 1529936062,
+                    'epoch': None,
                 }],
                 'jose4j': [{
                     'arch': 'noarch',
                     'version': '0.4.4',
                     'release': '2.1.develHead',
                     'install_date_time_t': 1499257756,
+                    'epoch': None,
                 }]}.items():
             self.assertTrue(pkgs.get(pkg_name))
             self.assertEqual(pkgs[pkg_name], pkg_attr)
 
+    def test_list_pkgs_with_attr_multiple_versions(self):
+        '''
+        Test packages listing with the attr parameter reporting multiple version installed
+
+        :return:
+        '''
+        def _add_data(data, key, value):
+            data.setdefault(key, []).append(value)
+
+        rpm_out = [
+            'glibc_|-2.12_|-1.212.el6_|-i686_|-_|-1542394210',
+            'glibc_|-2.12_|-1.212.el6_|-x86_64_|-_|-1542394204',
+            'virt-what_|-1.13_|-8.el7_|-x86_64_|-_|-1487838486',
+            'virt-what_|-1.10_|-2.el7_|-x86_64_|-_|-1387838486',
+        ]
+
+        with patch.dict(zypper.__grains__, {'osarch': 'x86_64'}), \
+                patch.dict(zypper.__salt__, {'cmd.run': MagicMock(return_value=os.linesep.join(rpm_out))}), \
+                patch.dict(zypper.__salt__, {'pkg_resource.add_pkg': _add_data}), \
+                patch.dict(zypper.__salt__, {'pkg_resource.format_pkg_list': pkg_resource.format_pkg_list}), \
+                patch.dict(zypper.__salt__, {'pkg_resource.stringify': MagicMock()}), \
+                patch.dict(pkg_resource.__salt__, {'pkg.parse_arch': zypper.parse_arch}):
+            pkgs = zypper.list_pkgs(attr=['epoch', 'release', 'arch', 'install_date_time_t'])
+            expected_pkg_list = {
+                'glibc': [
+                    {
+                        'version': '2.12',
+                        'release': '1.212.el6',
+                        'install_date_time_t': 1542394210,
+                        'arch': 'i686',
+                        'epoch': None
+                    },
+                    {
+                        'version': '2.12',
+                        'release': '1.212.el6',
+                        'install_date_time_t': 1542394204,
+                        'arch': 'x86_64',
+                        'epoch': None
+                    }
+                ],
+                'virt-what': [
+                    {
+                        'version': '1.10',
+                        'release': '2.el7',
+                        'install_date_time_t': 1387838486,
+                        'arch': 'x86_64',
+                        'epoch': None
+                    },
+                    {
+                        'version': '1.13',
+                        'release': '8.el7',
+                        'install_date_time_t': 1487838486,
+                        'arch': 'x86_64',
+                        'epoch': None
+                    }
+                ]
+            }
+            for pkgname, pkginfo in pkgs.items():
+                if six.PY3:
+                    self.assertCountEqual(pkginfo, expected_pkg_list[pkgname])
+                else:
+                    self.assertItemsEqual(pkginfo, expected_pkg_list[pkgname])
+
     def test_list_patches(self):
         '''
         Test advisory patches listing.
@@ -286,7 +286,7 @@ class TestFileState(TestCase, LoaderModuleMockMixin):
                              'user.current': mock_user}),\
                 patch.dict(filestate.__opts__, {'test': False}),\
                 patch.object(os.path, 'isdir', mock_t),\
-                patch.object(os.path, 'exists', mock_f),\
+                patch.object(os.path, 'exists', mock_t),\
                 patch.object(os.path, 'lexists', mock_t),\
                 patch('salt.utils.win_functions.get_sid_from_name', return_value='test-sid'):
             comt = 'Symlink & backup dest exists and Force not set. {0} -> ' \
@@ -307,6 +307,7 @@ class TestFileState(TestCase, LoaderModuleMockMixin):
                              'user.info': mock_empty,
                              'user.current': mock_user}),\
                 patch.dict(filestate.__opts__, {'test': False}),\
+                patch.object(os.path, 'exists', mock_t),\
                 patch.object(os.path, 'isfile', mock_t), \
                 patch.object(os.path, 'isdir', mock_t),\
                 patch('salt.utils.win_functions.get_sid_from_name', return_value='test-sid'):
@@ -327,7 +328,7 @@ class TestFileState(TestCase, LoaderModuleMockMixin):
                              'user.current': mock_user}),\
                 patch.dict(filestate.__opts__, {'test': False}),\
                 patch.object(os.path, 'isdir', mock_t),\
-                patch.object(os.path, 'exists', mock_f),\
+                patch.object(os.path, 'exists', mock_t),\
                 patch.object(os.path, 'isfile', mock_t),\
                 patch('salt.utils.win_functions.get_sid_from_name', return_value='test-sid'):
             comt = 'File exists where the symlink {0} should be'.format(name)
@@ -349,7 +350,7 @@ class TestFileState(TestCase, LoaderModuleMockMixin):
                 patch.dict(filestate.__opts__, {'test': False}),\
-                patch.object(os.path, 'isdir', MagicMock(side_effect=[True, False])),\
-                patch.object(os.path, 'exists', mock_f),\
+                patch.object(os.path, 'isdir', mock_t),\
+                patch.object(os.path, 'exists', mock_t),\
                 patch('salt.utils.win_functions.get_sid_from_name', return_value='test-sid'):
             comt = 'Directory exists where the symlink {0} should be'.format(name)
             ret = return_val({'comment': comt,
@@ -1468,7 +1469,7 @@ class TestFileState(TestCase, LoaderModuleMockMixin):
 
         with patch.object(os.path, 'isabs', mock_t):
             with patch.dict(filestate.__salt__,
-                            {'file.search': MagicMock(side_effect=[True, True, True, False, False])}):
+                            {'file.search': MagicMock(side_effect=[False, True, False, False])}):
                 comt = ('Pattern already commented')
                 ret.update({'comment': comt, 'result': True})
                 self.assertDictEqual(filestate.comment(name, regex), ret)
@@ -1478,7 +1479,7 @@ class TestFileState(TestCase, LoaderModuleMockMixin):
         self.assertDictEqual(filestate.comment(name, regex), ret)
 
         with patch.dict(filestate.__salt__,
-                        {'file.search': MagicMock(side_effect=[False, True, False, True, True]),
+                        {'file.search': MagicMock(side_effect=[True, True, True]),
                          'file.comment': mock_t,
                          'file.comment_line': mock_t}):
             with patch.dict(filestate.__opts__, {'test': True}):
@@ -1516,7 +1517,9 @@ class TestFileState(TestCase, LoaderModuleMockMixin):
 
         mock_t = MagicMock(return_value=True)
         mock_f = MagicMock(return_value=False)
-        mock = MagicMock(side_effect=[True, False, False, False, True, False,
+        mock = MagicMock(side_effect=[False, True,
+                                      False, False,
+                                      True,
                                       True, True])
         with patch.object(os.path, 'isabs', mock_f):
             comt = ('Specified file {0} is not an absolute path'.format(name))
@@ -7,12 +7,14 @@
 from __future__ import absolute_import, unicode_literals, print_function
 
 # Import Salt Testing Libs
-from tests.support.unit import TestCase
+from tests.support.unit import TestCase, skipIf
+import salt.utils.platform
 
 # Import Salt Libs
 import salt.states.win_lgpo as win_lgpo
 
 
+@skipIf(not salt.utils.platform.is_windows(), 'LGPO not applicable')
 class WinSystemTestCase(TestCase):
     '''
     Test cases for the win_lgpo state
tests/unit/test_engines.py (new file, 56 lines)
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+'''
+unit tests for the Salt engines
+'''
+# Import Python libs
+from __future__ import absolute_import, print_function, unicode_literals
+
+# Import Salt Testing Libs
+from tests.support.mixins import LoaderModuleMockMixin
+from tests.support.unit import TestCase
+from tests.support.mock import (
+    patch)
+
+# Import Salt Libs
+import salt.engines as engines
+import salt.config
+import salt.utils.process
+
+# Import 3rd-party libs
+from salt.ext import six
+
+import logging
+log = logging.getLogger(__name__)
+
+
+class EngineTestCase(TestCase, LoaderModuleMockMixin):
+    '''
+    Test cases for salt.engine.sqs_events
+    '''
+
+    def setup_loader_modules(self):
+        return {engines: {}}
+
+    def test_engine_module(self):
+        '''
+        Test
+        '''
+        mock_opts = salt.config.DEFAULT_MINION_OPTS.copy()
+        mock_opts['__role'] = 'minion'
+        mock_opts['engines'] = [{'test_one': {'engine_module': 'test'}},
+                                {'test_two': {'engine_module': 'test'}}]
+
+        process_manager = salt.utils.process.ProcessManager()
+        with patch.dict(engines.__opts__, mock_opts):
+            salt.engines.start_engines(mock_opts, process_manager)
+            process_map = process_manager._process_map
+            count = 0
+            for proc in six.iterkeys(process_map):
+                count += 1
+                fun = process_map[proc]['Process'].fun
+
+                # Ensure function is start from the test engine
+                self.assertEqual(fun, 'test.start')
+
+            # Ensure there were two engine started
+            self.assertEqual(count, len(mock_opts['engines']))
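This new unit test is the counterpart of the `engine_module` feature: two engine entries with distinct names resolve to one module, and `start_engines` spawns one process per entry, each running `test.start`. A minimal opts fragment mirroring the test's mock (the values are the test's own fixtures, not a recommended configuration):

    # Two independently named instances backed by the same engine module.
    mock_opts['engines'] = [
        {'test_one': {'engine_module': 'test'}},   # first instance
        {'test_two': {'engine_module': 'test'}},   # second instance, same module
    ]
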
@@ -1353,3 +1353,31 @@ class LazyLoaderOptimizationOrderTest(TestCase):
         basename = os.path.basename(filename)
         expected = 'lazyloadertest.py' if six.PY3 else 'lazyloadertest.pyc'
         assert basename == expected, basename
+
+
+class LoaderLoadCachedGrainsTest(TestCase):
+    '''
+    Test how the loader works with cached grains
+    '''
+
+    @classmethod
+    def setUpClass(cls):
+        cls.opts = salt.config.minion_config(None)
+        if not os.path.isdir(RUNTIME_VARS.TMP):
+            os.makedirs(RUNTIME_VARS.TMP)
+
+    def setUp(self):
+        self.cache_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP)
+        self.addCleanup(shutil.rmtree, self.cache_dir, ignore_errors=True)
+
+        self.opts['cachedir'] = self.cache_dir
+        self.opts['grains_cache'] = True
+        self.opts['grains'] = salt.loader.grains(self.opts)
+
+    def test_osrelease_info_has_correct_type(self):
+        '''
+        Make sure osrelease_info is tuple after caching
+        '''
+        grains = salt.loader.grains(self.opts)
+        osrelease_info = grains['osrelease_info']
+        assert isinstance(osrelease_info, tuple), osrelease_info
@@ -309,6 +309,38 @@ class MinionTestCase(TestCase, AdaptedConfigurationTestCaseMixin):
         finally:
             minion.destroy()
 
+    def test_when_passed_start_event_grains(self):
+        mock_opts = self.get_config('minion', from_scratch=True)
+        mock_opts['start_event_grains'] = ["os"]
+        io_loop = tornado.ioloop.IOLoop()
+        io_loop.make_current()
+        minion = salt.minion.Minion(mock_opts, io_loop=io_loop)
+        try:
+            minion.tok = MagicMock()
+            minion._send_req_sync = MagicMock()
+            minion._fire_master('Minion has started', 'minion_start')
+            load = minion._send_req_sync.call_args[0][0]
+
+            self.assertTrue('grains' in load)
+            self.assertTrue('os' in load['grains'])
+        finally:
+            minion.destroy()
+
+    def test_when_not_passed_start_event_grains(self):
+        mock_opts = self.get_config('minion', from_scratch=True)
+        io_loop = tornado.ioloop.IOLoop()
+        io_loop.make_current()
+        minion = salt.minion.Minion(mock_opts, io_loop=io_loop)
+        try:
+            minion.tok = MagicMock()
+            minion._send_req_sync = MagicMock()
+            minion._fire_master('Minion has started', 'minion_start')
+            load = minion._send_req_sync.call_args[0][0]
+
+            self.assertTrue('grains' not in load)
+        finally:
+            minion.destroy()
+
     def test_minion_retry_dns_count(self):
         '''
         Tests that the resolve_dns will retry dns look ups for a maximum of
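The two minion tests above fix the contract of `start_event_grains`: when the option lists grains, `_fire_master` folds exactly those grains into the start-event load; when it is unset, the load carries no `grains` key at all. A compact restatement of the asserted behaviour (shapes are illustrative; other keys of the load are elided):

    def check_start_event_load(load, start_event_grains_configured):
        # Mirrors the two assertions in the tests above.
        if start_event_grains_configured:      # e.g. start_event_grains: [os]
            assert 'grains' in load and 'os' in load['grains']
        else:
            assert 'grains' not in load
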
@@ -125,6 +125,14 @@ ESTAB      0      0        127.0.0.1:56726        127.0.0.1:4505
 ESTAB      0      0        ::ffff:1.2.3.4:5678    ::ffff:1.2.3.4:4505
 '''
 
+LINUX_NETLINK_SS_OUTPUT = '''\
+State       Recv-Q Send-Q       Local Address:Port        Peer Address:Port
+TIME-WAIT   0      0                    [::1]:8009                [::1]:40368
+LISTEN      0      128              127.0.0.1:5903             0.0.0.0:*
+ESTAB       0      0       [::ffff:127.0.0.1]:4506   [::ffff:127.0.0.1]:32315
+ESTAB       0      0            192.168.122.1:4506      192.168.122.177:24545
+'''
+
 IPV4_SUBNETS = {True: ('10.10.0.0/24',),
                 False: ('10.10.0.0', '10.10.0.0/33', 'FOO', 9, '0.9.800.1000/24')}
 IPV6_SUBNETS = {True: ('::1/128',),
@@ -487,6 +495,19 @@ class NetworkTestCase(TestCase):
             remotes = network._freebsd_remotes_on('4506', 'remote')
             self.assertEqual(remotes, set(['127.0.0.1']))
 
+    def test_netlink_tool_remote_on_a(self):
+        with patch('salt.utils.platform.is_sunos', lambda: False):
+            with patch('salt.utils.platform.is_linux', lambda: True):
+                with patch('subprocess.check_output',
+                           return_value=LINUX_NETLINK_SS_OUTPUT):
+                    remotes = network._netlink_tool_remote_on('4506', 'local')
+                    self.assertEqual(remotes, set(['192.168.122.177', '::ffff:127.0.0.1']))
+
+    def test_netlink_tool_remote_on_b(self):
+        with patch('subprocess.check_output', return_value=NETLINK_SS):
+            remotes = network._netlink_tool_remote_on('4505', 'remote_port')
+            self.assertEqual(remotes, set(['127.0.0.1', '::ffff:1.2.3.4']))
+
     def test_generate_minion_id_distinct(self):
         '''
         Test if minion IDs are distinct in the pool.
@@ -671,8 +692,3 @@ class NetworkTestCase(TestCase):
         # An exception is raised if unicode is passed to socket.getfqdn
         minion_id = network.generate_minion_id()
         assert minion_id != '', minion_id
-
-    def test_netlink_tool_remote_on(self):
-        with patch('subprocess.check_output', return_value=NETLINK_SS):
-            remotes = network._netlink_tool_remote_on('4505', 'remote_port')
-            self.assertEqual(remotes, set(['127.0.0.1', '::ffff:1.2.3.4']))
(One file's diff was suppressed because it is too large.)