Mirror of https://github.com/saltstack/salt.git, synced 2025-04-17 10:10:20 +00:00
Merge pull request #23675 from basepi/merge-forward-2015.5
[2015.5] Merge forward from 2014.7 to 2015.5
Commit e480f13688
16 changed files with 265 additions and 141 deletions
debian/changelog (vendored, 6 changed lines)
@@ -1,3 +1,9 @@
salt (2014.7.1-1) unstable; urgency=low

* Bump to 2014.7.1

-- Manuel Torrinha <mtorrinha86@gmail.com> Thu, 11 May 2015 15:54:00 -0000

salt (2014.1.0-1) unstable; urgency=low

* New upstream version
@@ -52,8 +52,9 @@ Var MinionName_State
Page custom nsDialogsPage nsDialogsPageLeave
; Instfiles page
!insertmacro MUI_PAGE_INSTFILES

; Finish page
!define MUI_FINISHPAGE_RUN "sc"
!define MUI_FINISHPAGE_RUN "net"
!define MUI_FINISHPAGE_RUN_PARAMETERS "start salt-minion"
!insertmacro MUI_PAGE_FINISH

@@ -246,15 +247,13 @@ Section -Post
WriteRegStr ${PRODUCT_UNINST_ROOT_KEY} "${PRODUCT_UNINST_KEY}" "URLInfoAbout" "${PRODUCT_WEB_SITE}"
WriteRegStr ${PRODUCT_UNINST_ROOT_KEY} "${PRODUCT_UNINST_KEY}" "Publisher" "${PRODUCT_PUBLISHER}"
WriteRegStr HKLM "SYSTEM\CurrentControlSet\services\salt-minion" "DependOnService" "nsi"

ExecWait "nssm.exe install salt-minion $INSTDIR\bin\python.exe $INSTDIR\bin\Scripts\salt-minion -c $INSTDIR\conf -l quiet"
RMDir /R "$INSTDIR\var\cache\salt" ; removing cache from old version

Call updateMinionConfig
SectionEnd

Function .onInstSuccess
Exec "nssm.exe install salt-minion $INSTDIR\bin\python.exe $INSTDIR\bin\Scripts\salt-minion -c $INSTDIR\conf -l quiet"
RMDir /R "$INSTDIR\var\cache\salt" ; removing cache from old version
ExecWait "net start salt-minion"
FunctionEnd

Function un.onUninstSuccess
HideWindow
MessageBox MB_ICONINFORMATION|MB_OK "$(^Name) was successfully removed from your computer." /SD IDOK
@@ -100,20 +100,22 @@ def lowstate_file_refs(chunks, extras=''):
return refs


def salt_refs(data):
def salt_refs(data, ret=None):
'''
Pull salt file references out of the states
'''
proto = 'salt://'
ret = []
if ret is None:
ret = []
if isinstance(data, str):
if data.startswith(proto):
return [data]
if data.startswith(proto) and data not in ret:
ret.append(data)
if isinstance(data, list):
for comp in data:
if isinstance(comp, str):
if comp.startswith(proto):
ret.append(comp)
salt_refs(comp, ret)
if isinstance(data, dict):
for comp in data:
salt_refs(data[comp], ret)
return ret
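The hunk above reworks salt_refs() to thread a single accumulator list through its recursion instead of returning early, so nested lists and dicts are walked and each salt:// reference is collected once. A standalone sketch of the new shape, reconstructed from the lines above (the indentation is assumed, since the diff view lost it):

    def salt_refs(data, ret=None):
        '''
        Pull salt file references out of the states
        '''
        proto = 'salt://'
        if ret is None:
            ret = []
        if isinstance(data, str):
            if data.startswith(proto) and data not in ret:
                ret.append(data)
        if isinstance(data, list):
            for comp in data:
                salt_refs(comp, ret)
        if isinstance(data, dict):
            for comp in data:
                salt_refs(data[comp], ret)
        return ret

Called with a hypothetical chunk such as {'cmd': ['salt://scripts/run.sh', {'watch': 'salt://conf/app.conf'}]}, this returns both references exactly once.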
@@ -155,7 +157,7 @@ def prep_trans_tar(file_client, chunks, file_refs, pillar=None):
if not os.path.isdir(tgt_dir):
os.makedirs(tgt_dir)
shutil.copy(path, tgt)
break
continue
files = file_client.cache_dir(name, saltenv)
if files:
for filename in files:

@@ -171,7 +173,7 @@ def prep_trans_tar(file_client, chunks, file_refs, pillar=None):
if not os.path.isdir(tgt_dir):
os.makedirs(tgt_dir)
shutil.copy(filename, tgt)
break
continue
cwd = os.getcwd()
os.chdir(gendir)
with closing(tarfile.open(trans_tar, 'w:gz')) as tfp:
@@ -800,14 +800,35 @@ def list_nodes(call=None, **kwargs):
return {}
for server in server_list:
server_tmp = conn.server_show(server_list[server]['id'])[server]

private = []
public = []
if 'addresses' not in server_tmp:
server_tmp['addresses'] = {}
for network in server_tmp['addresses'].keys():
for address in server_tmp['addresses'][network]:
if salt.utils.cloud.is_public_ip(address.get('addr', '')):
public.append(address['addr'])
elif ':' in address['addr']:
public.append(address['addr'])
elif '.' in address['addr']:
private.append(address['addr'])

if server_tmp['accessIPv4']:
if salt.utils.cloud.is_public_ip(server_tmp['accessIPv4']):
public.append(server_tmp['accessIPv4'])
else:
private.append(server_tmp['accessIPv4'])
if server_tmp['accessIPv6']:
public.append(server_tmp['accessIPv6'])

ret[server] = {
'id': server_tmp['id'],
'image': server_tmp['image']['id'],
'size': server_tmp['flavor']['id'],
'state': server_tmp['state'],
'private_ips': [addrs['addr'] for addrs in
server_tmp['addresses'].get('private', [])],
'public_ips': [server_tmp['accessIPv4'], server_tmp['accessIPv6']],
'private_ips': public,
'public_ips': private,
}
return ret
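The enlarged list_nodes() hunk above tolerates servers with no 'addresses' field and classifies each address as public or private instead of relying only on accessIPv4/accessIPv6. A rough standalone sketch of that classification; the helper name is made up and the is_public_ip callable is passed in, since the real code runs inline and uses salt.utils.cloud.is_public_ip:

    def classify_addresses(server_tmp, is_public_ip):
        # Split an OpenStack server record's addresses into private/public lists.
        private, public = [], []
        for addresses in server_tmp.get('addresses', {}).values():
            for address in addresses:
                addr = address.get('addr', '')
                if is_public_ip(addr):
                    public.append(addr)
                elif ':' in addr:    # remaining IPv6 addresses are treated as public
                    public.append(addr)
                elif '.' in addr:    # remaining IPv4 addresses are treated as private
                    private.append(addr)
        if server_tmp.get('accessIPv4'):
            bucket = public if is_public_ip(server_tmp['accessIPv4']) else private
            bucket.append(server_tmp['accessIPv4'])
        if server_tmp.get('accessIPv6'):
            public.append(server_tmp['accessIPv6'])
        return private, public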
@@ -218,7 +218,9 @@ def reap_fileserver_cache_dir(cache_base, find_func):
# This will only remove the directory on the second time
# "_reap_cache" is called (which is intentional)
if len(dirs) == 0 and len(files) == 0:
os.rmdir(root)
# only remove if empty directory is older than 60s
if time.time() - os.path.getctime(root) > 60:
os.rmdir(root)
continue
# if not, lets check the files in the directory
for file_ in files:
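The two added lines above make the fileserver cache reaper age-check an empty directory before pruning it, rather than removing it as soon as it is seen empty. A minimal sketch of the new guard; the helper name is illustrative, since the real check runs inline inside the os.walk() loop:

    import os
    import time

    def _remove_if_stale(root, max_age=60):
        # Only prune an empty cache directory once it has existed for more
        # than max_age seconds.
        if time.time() - os.path.getctime(root) > max_age:
            os.rmdir(root)
            return True
        return False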
@@ -1173,10 +1173,16 @@ class LazyLoader(salt.utils.lazy.LazyDict):
end, module_name)
log.warning(msg)
else:
virtual = mod.__virtual__()
if isinstance(virtual, tuple):
error_reason = virtual[1]
virtual = virtual[0]
try:
virtual = mod.__virtual__()
if isinstance(virtual, tuple):
error_reason = virtual[1]
virtual = virtual[0]
except Exception as exc:
log.error('Exception raised when processing __virtual__ function'
' for {0}. Module will not be loaded {1}'.format(
module_name, exc))
virtual = None
# Get the module's virtual name
virtualname = getattr(mod, '__virtualname__', virtual)
if not virtual:
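The hunk above wraps the call to a module's __virtual__() in try/except, so a module whose __virtual__() raises is logged and skipped instead of taking down the loader. The same pattern in isolation; the helper name and the injected logger are assumptions, as the real logic runs inline in LazyLoader:

    def _call_virtual(mod, module_name, log):
        # Resolve __virtual__() defensively: tuples carry an error reason,
        # and any exception disables just this module.
        error_reason = None
        try:
            virtual = mod.__virtual__()
            if isinstance(virtual, tuple):
                error_reason = virtual[1]
                virtual = virtual[0]
        except Exception as exc:
            log.error('Exception raised when processing __virtual__ function'
                      ' for {0}. Module will not be loaded {1}'.format(
                          module_name, exc))
            virtual = None
        return virtual, error_reason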
@@ -795,6 +795,10 @@ class Minion(MinionBase):
' {0}'.format(opts['master']))
if opts['master_shuffle']:
shuffle(opts['master'])
elif isinstance(opts['master'], str):
# We have a string, but a list was what was intended. Convert.
# See issue 23611 for details
opts['master'] = list(opts['master'])
elif opts['__role'] == 'syndic':
log.info('Syndic setting master_syndic to \'{0}\''.format(opts['master']))
@@ -1836,7 +1836,8 @@ def build_network_settings(**settings):

# Write hostname to /etc/hostname
sline = opts['hostname'].split('.', 1)
hostname = '{0}\n' . format(sline[0])
opts['hostname'] = sline[0]
hostname = '{0}\n' . format(opts['hostname'])
current_domainname = current_network_settings['domainname']

# Only write the hostname if it has changed
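The change above stores the short host name back into opts['hostname'] after splitting off the domain part, so the value written to /etc/hostname and the value kept in opts agree. For reference, the one-shot split behaves like this (the FQDN is a made-up example):

    sline = 'saltmaster.example.com'.split('.', 1)
    # sline == ['saltmaster', 'example.com']
    hostname = '{0}\n'.format(sline[0])   # 'saltmaster\n' is what gets written out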
@@ -408,7 +408,7 @@ def refresh_db():
cmd = 'emerge-delta-webrsync -q'
return __salt__['cmd.retcode'](cmd, python_shell=False) == 0
else:
if __salt__['cmd.retcode']('emerge --sync --ask n --quiet',
if __salt__['cmd.retcode']('emerge --ask n --quiet --sync',
python_shell=False) == 0:
return True
# We fall back to "webrsync" if "rsync" fails for some reason

@@ -613,7 +613,7 @@ def install(name=None,
targets.append(target)
else:
targets = pkg_params
cmd = 'emerge --quiet {0} --ask n {1} {2}'.format(bin_opts, emerge_opts, ' '.join(targets))
cmd = 'emerge --ask n --quiet {0} {1} {2}'.format(bin_opts, emerge_opts, ' '.join(targets))

old = list_pkgs()
call = __salt__['cmd.run_all'](cmd,

@@ -673,7 +673,7 @@ def update(pkg, slot=None, fromrepo=None, refresh=False, binhost=None):
bin_opts = ''

old = list_pkgs()
cmd = 'emerge --update --newuse --oneshot --ask n --quiet {0} {1}'.format(bin_opts, full_atom)
cmd = 'emerge --ask n --quiet --update --newuse --oneshot {0} {1}'.format(bin_opts, full_atom)
call = __salt__['cmd.run_all'](cmd,
output_loglevel='trace',
python_shell=False)

@@ -787,7 +787,7 @@ def remove(name=None, slot=None, fromrepo=None, pkgs=None, **kwargs):

if not targets:
return {}
cmd = 'emerge --unmerge --quiet --quiet-unmerge-warn --ask n' \
cmd = 'emerge --ask n --quiet --unmerge --quiet-unmerge-warn ' \
'{0}'.format(' '.join(targets))
__salt__['cmd.run_all'](cmd,
output_loglevel='trace',

@@ -879,7 +879,7 @@ def depclean(name=None, slot=None, fromrepo=None, pkgs=None):
else:
targets = [x for x in pkg_params if x in old]

cmd = 'emerge --depclean --ask n --quiet {0}'.format(' '.join(targets))
cmd = 'emerge --ask n --quiet --depclean {0}'.format(' '.join(targets))
__salt__['cmd.run_all'](cmd,
output_loglevel='trace',
python_shell=False)
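The five emerge-related hunks above all make the same adjustment: '--ask n --quiet' now sits immediately after 'emerge', ahead of the action flags, binary-package options and package atoms. Building the install command with the new ordering looks roughly like this; the option values are invented for illustration:

    bin_opts = '--getbinpkg'   # example value from the binhost handling
    emerge_opts = ''           # example value
    targets = ['app-misc/foo']
    cmd = 'emerge --ask n --quiet {0} {1} {2}'.format(
        bin_opts, emerge_opts, ' '.join(targets))
    # cmd == 'emerge --ask n --quiet --getbinpkg  app-misc/foo'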
@@ -258,7 +258,7 @@ def setval(key, val, destructive=False):
return setvals({key: val}, destructive)


def append(key, val, convert=False):
def append(key, val, convert=False, delimiter=':'):
'''
.. versionadded:: 0.17.0

@@ -276,13 +276,19 @@ def append(key, val, convert=False):
If convert is False and the grain contains non-list contents, an error
is given. Defaults to False.

:param delimiter: The key can be a nested dict key. Use this parameter to
specify the delimiter you use.
You can now append values to a list in nested dictionnary grains. If the
list doesn't exist at this level, it will be created.
.. versionadded:: 2014.7.6

CLI Example:

.. code-block:: bash

salt '*' grains.append key val
'''
grains = get(key, [])
grains = get(key, [], delimiter)
if not isinstance(grains, list) and convert is True:
grains = [grains]
if not isinstance(grains, list):

@@ -290,6 +296,14 @@ def append(key, val, convert=False):
if val in grains:
return 'The val {0} was already in the list {1}'.format(val, key)
grains.append(val)

while delimiter in key:
key, rest = key.rsplit(delimiter, 1)
_grain = get(key, _infinitedict(), delimiter)
if isinstance(_grain, dict):
_grain.update({rest: grains})
grains = _grain

return setval(key, grains)
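With the new delimiter parameter, append() can target a list inside nested dictionary grains: after appending, the while-loop above unwinds the key one delimiter at a time and rebuilds the nested structure before handing it to setval(). A rough plain-dict emulation of the observable behaviour, which skips the convert handling and the other edge cases of the real function:

    def append_nested(all_grains, key, val, delimiter=':'):
        # Walk to the parent dict, creating missing levels, then append to
        # (or create) the list under the last key segment.
        node = all_grains
        parts = key.split(delimiter)
        for part in parts[:-1]:
            if not isinstance(node, dict):
                return all_grains              # give up quietly on non-dict levels
            node = node.setdefault(part, {})
        if not isinstance(node, dict):
            return all_grains
        bucket = node.setdefault(parts[-1], [])
        if not isinstance(bucket, list):
            return 'The key {0} is not a valid list'.format(key)
        if val in bucket:
            return 'The val {0} was already in the list {1}'.format(val, key)
        bucket.append(val)
        return all_grains

    print(append_nested({'a': {'a_list': ['a', 'b', 'c'], 'b': 'bval'}}, 'a:a_list', 'd'))
    # {'a': {'a_list': ['a', 'b', 'c', 'd'], 'b': 'bval'}}, matching test_append_nested_ok
    # in the unit tests further down in this diff.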
@@ -499,6 +513,13 @@ def get_or_set_hash(name,
.. code-block:: bash

salt '*' grains.get_or_set_hash 'django:SECRET_KEY' 50

.. warning::

This function could return strings which may contain characters which are reserved
as directives by the YAML parser, such as strings beginning with `%`. To avoid
issues when using the output of this function in an SLS file containing YAML+Jinja,
surround the call with single quotes.
'''
ret = get(name, None)
@@ -25,7 +25,6 @@ except ImportError:
import os
import locale
from distutils.version import LooseVersion # pylint: disable=E0611
import re

# Import salt libs
import salt.utils
@@ -233,10 +232,6 @@ def list_pkgs(versions_as_list=False, **kwargs):
if key in name_map:
key = name_map[key]
__salt__['pkg_resource.add_pkg'](ret, key, val)
for key, val in _get_msi_software().iteritems():
if key in name_map:
key = name_map[key]
__salt__['pkg_resource.add_pkg'](ret, key, val)

__salt__['pkg_resource.sort_pkglist'](ret)
if not versions_as_list:
@@ -251,9 +246,7 @@ def _search_software(target):
values added to the list passed in
'''
search_results = {}
software = dict(
list(_get_reg_software().items()) +
list(_get_msi_software().items()))
software = dict(_get_reg_software().items())
for key, value in software.items():
if key is not None:
if target.lower() in key.lower():
@@ -261,56 +254,6 @@ def _search_software(target):
return search_results


def _get_msi_software():
'''
Uses powershell to search the msi product databases, returns a
dict keyed on the product name as the key and the version as the
value. If powershell is not available, returns `{}`
'''
win32_products = {}

# Don't use WMI to select from `Win32_product`, that has nasty
# side effects. Use the `WindowsInstaller.Installer` COM object's
# `ProductsEx`. Jumping through powershell because `ProductsEx` is
# a get property that takes 3 arguments, and `win32com` can't call
# that
#
# see https://github.com/saltstack/salt/issues/12550 for detail

# powershell script to fetch (name, version) from COM, and write
# without word-wrapping. Attempting to target minimal powershell
# versions
ps = '''
$msi = New-Object -ComObject WindowsInstaller.Installer;
$msi.GetType().InvokeMember('ProductsEx', 'GetProperty', $null, $msi, ('', 's-1-1-0', 7))
| select @{
name='name';
expression={$_.GetType().InvokeMember('InstallProperty', 'GetProperty', $null, $_, ('ProductName'))}
},
@{
name='version';
expression={$_.GetType().InvokeMember('InstallProperty', 'GetProperty', $null, $_, ('VersionString'))}
}
| Write-host
'''.replace('\n', ' ') # make this a one-liner

ret = __salt__['cmd.run_all'](ps, shell='powershell', python_shell=True)
# sometimes the powershell reflection fails on a single product,
# giving us a non-zero return code AND useful output. Ignore RC
# and just try to process stdout, which should empty if the cmd
# failed.
#
# each line of output looks like:
#
# `@{name=PRD_NAME; version=PRD_VER}`
pattern = r'@{name=(.+); version=(.+)}'
for m in re.finditer(pattern, ret['stdout']):
(prd_name, prd_ver) = m.groups()
win32_products[prd_name] = prd_ver

return win32_products


def _get_reg_software():
'''
This searches the uninstall keys in the registry to find
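The hunk above deletes _get_msi_software() outright, alongside the call sites removed in the earlier hunks, dropping the PowerShell/COM enumeration of MSI products. For reference, this is how its output lines were parsed before removal, reconstructed from the deleted code with a made-up sample line:

    import re

    stdout = '@{name=Example Product; version=1.2.3}'   # shape of one PowerShell output line
    pattern = r'@{name=(.+); version=(.+)}'
    win32_products = {}
    for m in re.finditer(pattern, stdout):
        prd_name, prd_ver = m.groups()
        win32_products[prd_name] = prd_ver
    # win32_products == {'Example Product': '1.2.3'}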
@@ -336,8 +279,7 @@ def _get_reg_software():

#attempt to corral the wild west of the multiple ways to install
#software in windows
reg_entries = dict(list(_get_user_keys().items()) +
list(_get_machine_keys().items()))
reg_entries = dict(_get_machine_keys().items())
for reg_hive, reg_keys in reg_entries.items():
for reg_key in reg_keys:
try:
@@ -356,8 +298,6 @@ def _get_reg_software():
reg_hive,
prd_uninst_key,
'WindowsInstaller')
if windows_installer != 'Not Found' and windows_installer:
continue

prd_name = _get_reg_value(
reg_hive,
@@ -396,39 +336,6 @@ def _get_machine_keys():
return machine_hive_and_keys


def _get_user_keys():
'''
This will return the hive 'const' value and some registry keys where
installed software information has been known to exist for the
HKEY_USERS hive
'''
user_hive_and_keys = {}
user_keys = []
users_hive = win32con.HKEY_USERS
#skip some built in and default users since software information in these
#keys is limited
skip_users = ['.DEFAULT',
'S-1-5-18',
'S-1-5-19',
'S-1-5-20']
sw_uninst_key = "Software\\Microsoft\\Windows\\CurrentVersion\\Uninstall"
reg_handle = win32api.RegOpenKeyEx(
users_hive,
'',
0,
win32con.KEY_READ)
for name, num, blank, time in win32api.RegEnumKeyEx(reg_handle):
#this is some identical key of a sid that contains some software names
#but no detailed information about the software installed for that user
if '_Classes' in name:
break
if name not in skip_users:
usr_sw_uninst_key = "\\".join([name, sw_uninst_key])
user_keys.append(usr_sw_uninst_key)
user_hive_and_keys[users_hive] = user_keys
return user_hive_and_keys


def _get_reg_value(reg_hive, reg_key, value_name=''):
'''
Read one value from Windows registry.
@@ -209,7 +209,8 @@ class Pillar(object):
),
self.rend,
self.opts['renderer'],
self.opts['environment']
self.opts['environment'],
_pillar_rend=True
)
]
else:

@@ -222,7 +223,8 @@ class Pillar(object):
),
self.rend,
self.opts['renderer'],
saltenv=saltenv
saltenv=saltenv,
_pillar_rend=True
)
)
except Exception as exc:

@@ -257,7 +259,8 @@ class Pillar(object):
).get('dest', False),
self.rend,
self.opts['renderer'],
saltenv=saltenv
saltenv=saltenv,
_pillar_rend=True
)
)
except Exception as exc:
@@ -67,7 +67,7 @@ STATE_RUNTIME_KEYWORDS = frozenset([
'fun',
'state',
'check_cmd',
'fail_hard',
'failhard',
'onlyif',
'unless',
'order',
@@ -335,6 +335,7 @@ def _gen_keep_files(name, require):
ret = set()
if os.path.isdir(name):
for root, dirs, files in os.walk(name):
ret.add(name)
for name in files:
ret.add(os.path.join(root, name))
for name in dirs:

@@ -346,7 +347,7 @@ def _gen_keep_files(name, require):
required_files = [comp for comp in require if 'file' in comp]
for comp in required_files:
for low in __lowstate__:
if low['__id__'] == comp['file']:
if low['name'] == comp['file']:
fn = low['name']
if os.path.isdir(comp['file']):
if _is_child(comp['file'], name):
@@ -18,10 +18,12 @@ def __virtual__():
'''
Load this state if this is the salt-master
'''

return ('winrepo'
if 'salt-master' in __grains__.get('roles', [])
else False)
try:
return ('winrepo'
if 'salt-master' in __grains__.get('roles', [])
else False)
except TypeError:
return False


def genrepo(name, force=False, allow_empty=False):
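The try/except added above keeps the winrepo state from failing at load time when the 'in' test against __grains__.get('roles', []) raises TypeError (for example when the grain holds a non-iterable value); in that case the state simply does not load. The guarded check, consolidated into its final form:

    def __virtual__():
        '''
        Load this state if this is the salt-master
        '''
        try:
            return ('winrepo'
                    if 'salt-master' in __grains__.get('roles', [])
                    else False)
        except TypeError:
            return False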
@@ -3,8 +3,14 @@
import copy

# Import Salt Testing libs
from salttesting import TestCase
from salttesting import TestCase, skipIf
from salttesting.helpers import ensure_in_syspath
from salttesting.mock import (
MagicMock,
patch,
NO_MOCK,
NO_MOCK_REASON
)

ensure_in_syspath('../../')
@@ -13,16 +19,24 @@ from salt.exceptions import SaltException
from salt.modules import grains as grainsmod
from salt.utils import dictupdate

grainsmod.__grains__ = {
'os_family': 'MockedOS',
'1': '1',
'2': '2',
grainsmod.__opts__ = {
'conf_file': '/tmp/__salt_test_grains',
'cachedir': '/tmp/__salt_test_grains_cache_dir'
}

grainsmod.__salt__ = {}


@skipIf(NO_MOCK, NO_MOCK_REASON)
class GrainsModuleTestCase(TestCase):

def test_filter_by(self):
grainsmod.__grains__ = {
'os_family': 'MockedOS',
'1': '1',
'2': '2',
}

dict1 = {'A': 'B', 'C': {'D': {'E': 'F', 'G': 'H'}}}
dict2 = {
'default': {
@@ -172,6 +186,141 @@ class GrainsModuleTestCase(TestCase):
)
)

@patch.dict(grainsmod.__salt__, {'saltutil.sync_grains': MagicMock()})
def test_append_not_a_list(self):
# Failing append to an existing string, without convert
grainsmod.__grains__ = {'b': 'bval'}
res = grainsmod.append('b', 'd')
# check the result
self.assertEqual(res, 'The key b is not a valid list')
# check the whole grains
self.assertEqual(grainsmod.__grains__, {'b': 'bval'})

# Failing append to an existing dict
grainsmod.__grains__ = {'b': {'b1': 'bval1'}}
res = grainsmod.append('b', 'd')
# check the result
self.assertEqual(res, 'The key b is not a valid list')
# check the whole grains
self.assertEqual(grainsmod.__grains__, {'b': {'b1': 'bval1'}})

@patch.dict(grainsmod.__salt__, {'saltutil.sync_grains': MagicMock()})
def test_append_already_in_list(self):
# Append an existing value
grainsmod.__grains__ = {'a_list': ['a', 'b', 'c'], 'b': 'bval'}
res = grainsmod.append('a_list', 'b')
# check the result
self.assertEqual(res, 'The val b was already in the list a_list')
# check the whole grains
self.assertEqual(grainsmod.__grains__, {'a_list': ['a', 'b', 'c'], 'b': 'bval'})

@patch.dict(grainsmod.__salt__, {'saltutil.sync_grains': MagicMock()})
def test_append_ok(self):
# Append to an existing list
grainsmod.__grains__ = {'a_list': ['a', 'b', 'c'], 'b': 'bval'}
res = grainsmod.append('a_list', 'd')
# check the result
self.assertEqual(res, {'a_list': ['a', 'b', 'c', 'd']})
# check the whole grains
self.assertEqual(grainsmod.__grains__, {'a_list': ['a', 'b', 'c', 'd'], 'b': 'bval'})

# Append to an non existing list
grainsmod.__grains__ = {'b': 'bval'}
res = grainsmod.append('a_list', 'd')
# check the result
self.assertEqual(res, {'a_list': ['d']})
# check the whole grains
self.assertEqual(grainsmod.__grains__, {'a_list': ['d'], 'b': 'bval'})

# Append to an existing string, with convert
grainsmod.__grains__ = {'b': 'bval'}
res = grainsmod.append('b', 'd', convert=True)
# check the result
self.assertEqual(res, {'b': ['bval', 'd']})
# check the whole grains
self.assertEqual(grainsmod.__grains__, {'b': ['bval', 'd']})

# Append to an existing dict, with convert
grainsmod.__grains__ = {'b': {'b1': 'bval1'}}
res = grainsmod.append('b', 'd', convert=True)
# check the result
self.assertEqual(res, {'b': [{'b1': 'bval1'}, 'd']})
# check the whole grains
self.assertEqual(grainsmod.__grains__, {'b': [{'b1': 'bval1'}, 'd']})

@patch.dict(grainsmod.__salt__, {'saltutil.sync_grains': MagicMock()})
def test_append_nested_not_a_list(self):
# Failing append to an existing string, without convert
grainsmod.__grains__ = {'a': {'b': 'bval'}}
res = grainsmod.append('a:b', 'd')
# check the result
self.assertEqual(res, 'The key a:b is not a valid list')
# check the whole grains
self.assertEqual(grainsmod.__grains__, {'a': {'b': 'bval'}})

# Failing append to an existing dict
grainsmod.__grains__ = {'a': {'b': {'b1': 'bval1'}}}
res = grainsmod.append('a:b', 'd')
# check the result
self.assertEqual(res, 'The key a:b is not a valid list')
# check the whole grains
self.assertEqual(grainsmod.__grains__, {'a': {'b': {'b1': 'bval1'}}})

@patch.dict(grainsmod.__salt__, {'saltutil.sync_grains': MagicMock()})
def test_append_nested_already_in_list(self):
# Append an existing value
grainsmod.__grains__ = {'a': {'a_list': ['a', 'b', 'c'], 'b': 'bval'}}
res = grainsmod.append('a:a_list', 'b')
# check the result
self.assertEqual(res, 'The val b was already in the list a:a_list')
# check the whole grains
self.assertEqual(grainsmod.__grains__, {'a': {'a_list': ['a', 'b', 'c'], 'b': 'bval'}})

@patch.dict(grainsmod.__salt__, {'saltutil.sync_grains': MagicMock()})
def test_append_nested_ok(self):
# Append to an existing list
grainsmod.__grains__ = {'a': {'a_list': ['a', 'b', 'c'], 'b': 'bval'}}
res = grainsmod.append('a:a_list', 'd')
# check the result
self.assertEqual(res, {'a': {'a_list': ['a', 'b', 'c', 'd'], 'b': 'bval'}})
# check the whole grains
self.assertEqual(grainsmod.__grains__, {'a': {'a_list': ['a', 'b', 'c', 'd'], 'b': 'bval'}})

# Append to an non existing list
grainsmod.__grains__ = {'a': {'b': 'bval'}}
res = grainsmod.append('a:a_list', 'd')
# check the result
self.assertEqual(res, {'a': {'a_list': ['d'], 'b': 'bval'}})
# check the whole grains
self.assertEqual(grainsmod.__grains__, {'a': {'a_list': ['d'], 'b': 'bval'}})

# Append to an existing string, with convert
grainsmod.__grains__ = {'a': {'b': 'bval'}}
res = grainsmod.append('a:b', 'd', convert=True)
# check the result
self.assertEqual(res, {'a': {'b': ['bval', 'd']}})
# check the whole grains
self.assertEqual(grainsmod.__grains__, {'a': {'b': ['bval', 'd']}})

# Append to an existing dict, with convert
grainsmod.__grains__ = {'a': {'b': {'b1': 'bval1'}}}
res = grainsmod.append('a:b', 'd', convert=True)
# check the result
self.assertEqual(res, {'a': {'b': [{'b1': 'bval1'}, 'd']}})
# check the whole grains
self.assertEqual(grainsmod.__grains__, {'a': {'b': [{'b1': 'bval1'}, 'd']}})

@patch.dict(grainsmod.__salt__, {'saltutil.sync_grains': MagicMock()})
def test_append_to_an_element_of_a_list(self):
# Append to an element in a list
# It currently fails silently
grainsmod.__grains__ = {'a': ['b', 'c']}
res = grainsmod.append('a:b', 'd')
# check the result
self.assertEqual(res, {'a': ['b', 'c']})
# check the whole grains
self.assertEqual(grainsmod.__grains__, {'a': ['b', 'c']})


if __name__ == '__main__':
from integration import run_tests