Merge pull request #34076 from rallytime/merge-develop
[develop] Merge forward from 2016.3 to develop
commit 3382bc5c28
31 changed files with 699 additions and 115 deletions
@@ -1,5 +1,6 @@
/*custom webhelp*/
var windowheight = $( window ).height();
var windowwidth = $( window ).width();

$( document ).ready(function() {

@@ -183,7 +184,7 @@ function resizeend() {
setTimeout(resizeend, delta);
} else {
timeout = false;
if ($( window ).height() > windowheight || $( window ).height() + 40 < windowheight) {
if ($( window ).height() != windowheight && $(window).width() != windowwidth) {
location.reload(false);
}
}
13 doc/topics/releases/2015.8.11.rst Normal file
@@ -0,0 +1,13 @@
============================
Salt 2015.8.11 Release Notes
============================

Version 2015.8.11 is a bugfix release for :doc:`2015.8.0
</topics/releases/2015.8.0>`.

Returner Changes
================

- Any returner which implements a ``save_load`` function is now required to
  accept a ``minions`` keyword argument. All returners which ship with Salt
  have been modified to do so.
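To make the returner change concrete: a custom returner only needs to grow a ``minions`` keyword argument on its ``save_load`` function. The snippet below is an illustrative sketch by the editor, not part of this pull request; the in-memory ``_JOBS`` store is hypothetical and stands in for whatever backend a real returner uses.

.. code-block:: python

    # Hypothetical in-memory backend standing in for a real job store.
    _JOBS = {}


    def save_load(jid, load, minions=None):
        '''
        Save the job load for the given jid. As of 2015.8.11 and 2016.3.2 the
        ``minions`` keyword argument must be accepted, even if it is unused.
        '''
        _JOBS[jid] = {'load': load, 'minions': minions or []}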
13 doc/topics/releases/2016.3.2.rst Normal file
@@ -0,0 +1,13 @@
===========================
Salt 2016.3.2 Release Notes
===========================

Version 2016.3.2 is a bugfix release for :doc:`2016.3.0
</topics/releases/2016.3.0>`.

Returner Changes
================

- Any returner which implements a ``save_load`` function is now required to
  accept a ``minions`` keyword argument. All returners which ship with Salt
  have been modified to do so.
@@ -726,7 +726,12 @@ class FSChan(object):
self.kwargs = kwargs
self.fs = Fileserver(self.opts)
self.fs.init()
self.fs.update()
if self.opts.get('file_client', 'remote') == 'local':
if '__fs_update' not in self.opts:
self.fs.update()
self.opts['__fs_update'] = True
else:
self.fs.update()
self.cmd_stub = {'ext_nodes': {}}

def send(self, load, tries=None, timeout=None, raw=False):  # pylint: disable=unused-argument
@@ -62,6 +62,7 @@ import salt.daemons.masterapi
import salt.defaults.exitcodes
import salt.transport.server
import salt.log.setup
import salt.utils.args
import salt.utils.atomicfile
import salt.utils.event
import salt.utils.job

@@ -2264,21 +2265,40 @@ class ClearFuncs(object):
self.event.fire_event(new_job_load, tagify([clear_load['jid'], 'new'], 'job'))

if self.opts['ext_job_cache']:
fstr = '{0}.save_load'.format(self.opts['ext_job_cache'])
save_load_func = True

# Get the returner's save_load arg_spec.
try:
fstr = '{0}.save_load'.format(self.opts['ext_job_cache'])
self.mminion.returners[fstr](clear_load['jid'], clear_load, minions=minions)
except KeyError:
arg_spec = salt.utils.args.get_function_argspec(fstr)

# Check if 'minions' is included in returner's save_load arg_spec.
# This may be missing in custom returners, which we should warn about.
if 'minions' not in arg_spec.args:
log.critical(
'The specified returner used for the external job cache '
'\'{0}\' does not have a \'minions\' kwarg in the returner\'s '
'save_load function.'.format(
self.opts['ext_job_cache']
)
)
except AttributeError:
save_load_func = False
log.critical(
'The specified returner used for the external job cache '
'"{0}" does not have a save_load function!'.format(
self.opts['ext_job_cache']
)
)
except Exception:
log.critical(
'The specified returner threw a stack trace:\n',
exc_info=True
)

if save_load_func:
try:
self.mminion.returners[fstr](clear_load['jid'], clear_load, minions=minions)
except Exception:
log.critical(
'The specified returner threw a stack trace:\n',
exc_info=True
)

# always write out to the master job caches
try:
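The new master-side check above boils down to introspecting the returner's ``save_load`` callable before calling it. Below is a minimal, self-contained sketch of that technique by the editor, using only Python 3's ``inspect.getfullargspec`` rather than Salt's ``salt.utils.args`` helper; the legacy ``save_load`` shown is hypothetical.

.. code-block:: python

    import inspect


    def save_load(jid, load):
        '''A hypothetical custom returner that predates the minions kwarg.'''
        pass


    arg_spec = inspect.getfullargspec(save_load)
    if 'minions' not in arg_spec.args:
        # Mirrors the log.critical() warning emitted by ClearFuncs above.
        print("save_load does not accept a 'minions' keyword argument")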
@@ -84,7 +84,7 @@ def _query(function,
)

if result.get('status', None) == salt.ext.six.moves.http_client.OK:
ret['data'] = result['dict']
ret['data'] = result.get('dict', result)
ret['res'] = True
elif result.get('status', None) == salt.ext.six.moves.http_client.NO_CONTENT:
ret['res'] = False
@@ -3496,13 +3496,14 @@ def get_managed(
# Copy the file to the minion and templatize it
sfn = ''
source_sum = {}
remote_protos = ('http', 'https', 'ftp', 'swift', 's3')

def _get_local_file_source_sum(path):
'''
DRY helper for getting the source_sum value from a locally cached
path.
'''
return {'hsum': get_hash(path), 'hash_type': 'sha256'}
return {'hsum': get_hash(path, form='sha256'), 'hash_type': 'sha256'}

# If we have a source defined, let's figure out what the hash is
if source:
@@ -3518,8 +3519,7 @@ def get_managed(
else:
if not skip_verify:
if source_hash:
protos = ('salt', 'http', 'https', 'ftp', 'swift',
's3', 'file')
protos = ('salt', 'file') + remote_protos

def _invalid_source_hash_format():
'''
@@ -3551,75 +3551,78 @@ def get_managed(

else:
# The source_hash is a hash string
comps = source_hash.split('=')
comps = source_hash.split('=', 1)
if len(comps) < 2:
return _invalid_source_hash_format()
source_sum['hsum'] = comps[1].strip()
source_sum['hash_type'] = comps[0].strip()
else:
return '', {}, ('Unable to determine upstream hash of '
'source file {0}'.format(source))
msg = (
'Unable to verify upstream hash of source file {0}, '
'please set source_hash or set skip_verify to True'
.format(source)
)
return '', {}, msg

# if the file is a template we need to actually template the file to get
# a checksum, but we can cache the template itself, but only if there is
# a template source (it could be a templated contents)
if template and source:
# Check if we have the template cached
template_dest = __salt__['cp.is_cached'](source, saltenv)
if template_dest and source_hash:
comps = source_hash.split('=')
cached_template_sum = get_hash(template_dest, form=source_sum['hash_type'])
if cached_template_sum == source_sum['hsum']:
sfn = template_dest
if source and (template or urlparsed_source.scheme in remote_protos):
# Check if we have the template or remote file cached
cached_dest = __salt__['cp.is_cached'](source, saltenv)
if cached_dest and (source_hash or skip_verify):
htype = source_sum.get('hash_type', 'sha256')
cached_sum = get_hash(cached_dest, form=htype)
if skip_verify or cached_sum == source_sum['hsum']:
sfn = cached_dest
source_sum = {'hsum': cached_sum, 'hash_type': htype}

# If we didn't have the template file, let's get it
# If we didn't have the template or remote file, let's get it
if not sfn:
try:
sfn = __salt__['cp.cache_file'](source, saltenv)
except Exception as exc:
# A 404 or other error code may raise an exception, catch it
# and return a comment that will fail the calling state.
return '', {}, ('Failed to cache template file {0}: {1}'
.format(source, exc))
return '', {}, 'Failed to cache {0}: {1}'.format(source, exc)

# exists doesn't play nice with sfn as bool
# but if cache failed, sfn == False
# If cache failed, sfn will be False, so do a truth check on sfn first
# as invoking os.path.exists() on a bool raises a TypeError.
if not sfn or not os.path.exists(sfn):
return sfn, {}, 'Source file \'{0}\' not found'.format(source)
if sfn == name:
raise SaltInvocationError(
'Source file cannot be the same as destination'
)
if template in salt.utils.templates.TEMPLATE_REGISTRY:
context_dict = defaults if defaults else {}
if context:
context_dict.update(context)
data = salt.utils.templates.TEMPLATE_REGISTRY[template](
sfn,
name=name,
source=source,
user=user,
group=group,
mode=mode,
saltenv=saltenv,
context=context_dict,
salt=__salt__,
pillar=__pillar__,
grains=__grains__,
opts=__opts__,
**kwargs)
else:
return sfn, {}, ('Specified template format {0} is not supported'
).format(template)

if data['result']:
sfn = data['data']
hsum = get_hash(sfn)
source_sum = {'hash_type': 'sha256',
'hsum': hsum}
else:
__clean_tmp(sfn)
return sfn, {}, data['data']
if template:
if template in salt.utils.templates.TEMPLATE_REGISTRY:
context_dict = defaults if defaults else {}
if context:
context_dict.update(context)
data = salt.utils.templates.TEMPLATE_REGISTRY[template](
sfn,
name=name,
source=source,
user=user,
group=group,
mode=mode,
saltenv=saltenv,
context=context_dict,
salt=__salt__,
pillar=__pillar__,
grains=__grains__,
opts=__opts__,
**kwargs)
else:
return sfn, {}, ('Specified template format {0} is not supported'
).format(template)

if data['result']:
sfn = data['data']
hsum = get_hash(sfn, form='sha256')
source_sum = {'hash_type': 'sha256',
'hsum': hsum}
else:
__clean_tmp(sfn)
return sfn, {}, data['data']

return sfn, source_sum, ''
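One small but easy-to-miss change above is ``source_hash.split('=')`` becoming ``source_hash.split('=', 1)``, so that only the first ``=`` separates the hash type from the hash value. The following standalone sketch of that parse is the editor's illustration; the helper name and example digest are hypothetical, not Salt code.

.. code-block:: python

    def parse_source_hash(source_hash):
        # Split on the first '=' only: everything after it is the digest.
        comps = source_hash.split('=', 1)
        if len(comps) < 2:
            raise ValueError('Invalid source_hash format: {0}'.format(source_hash))
        return {'hash_type': comps[0].strip(), 'hsum': comps[1].strip()}


    # Example with a made-up digest:
    # parse_source_hash('md5=e138491e9d5b97023cea823fe17bac22')
    # -> {'hash_type': 'md5', 'hsum': 'e138491e9d5b97023cea823fe17bac22'}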
@@ -4255,7 +4258,7 @@ def manage_file(name,
'Specified {0} checksum for {1} ({2}) does not match '
'actual checksum ({3})'.format(
source_sum['hash_type'],
name,
source,
source_sum['hsum'],
dl_sum
)
@@ -13,3 +13,35 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
from salt.modules.inspectlib.exceptions import InspectorSnapshotException
from salt.modules.inspectlib.dbhandle import DBHandle


class EnvLoader(object):
'''
Load environment.
'''
PID_FILE = '_minion_collector.pid'
DB_FILE = '_minion_collector.db'
DEFAULT_PID_PATH = '/var/run'
DEFAULT_CACHE_PATH = '/var/cache/salt'

def __init__(self, cachedir=None, piddir=None, pidfilename=None):
'''
Constructor.

:param options:
:param db_path:
:param pid_file:
'''
if not cachedir and '__salt__' in globals():
cachedir = globals().get('__salt__')['config.get']('inspector.db', '')

self.dbfile = os.path.join(cachedir or self.DEFAULT_CACHE_PATH, self.DB_FILE)
self.db = DBHandle(self.dbfile)

if not piddir and '__salt__' in globals():
piddir = globals().get('__salt__')['config.get']('inspector.pid', '')
self.pidfile = os.path.join(piddir or self.DEFAULT_PID_PATH, pidfilename or self.PID_FILE)
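The net effect of ``EnvLoader.__init__`` is to resolve the collector's database and PID file paths from ``cachedir``/``piddir`` (or the built-in defaults). A short usage sketch by the editor, mirroring what the new unit test further down asserts; the paths are illustrative only.

.. code-block:: python

    from salt.modules.inspectlib import EnvLoader

    env = EnvLoader(cachedir='/foo/cache', piddir='/foo/pid', pidfilename='bar.pid')
    assert env.dbfile == '/foo/cache/_minion_collector.db'
    assert env.pidfile == '/foo/pid/bar.pid'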
@@ -19,16 +19,26 @@ from __future__ import absolute_import, print_function
import os
import sys
from subprocess import Popen, PIPE, STDOUT
import logging

# Import Salt Libs
from salt.modules.inspectlib.dbhandle import DBHandle
from salt.modules.inspectlib.exceptions import (InspectorSnapshotException)
from salt.modules.inspectlib import EnvLoader
from salt.modules.inspectlib import kiwiproc
import salt.utils
from salt.utils import fsutils
from salt.utils import reinit_crypto
from salt.exceptions import CommandExecutionError

try:
import kiwi
except ImportError:
kiwi = None

log = logging.getLogger(__name__)


class Inspector(object):
class Inspector(EnvLoader):
DEFAULT_MINION_CONFIG_PATH = '/etc/salt/minion'

MODE = ['configuration', 'payload', 'all']
@@ -38,26 +48,14 @@ class Inspector(object):
"/var/lib/rpm", "/.snapshots", "/.zfs", "/etc/ssh",
"/root", "/home"]

def __init__(self, db_path=None, pid_file=None):
# Configured path
if not db_path and '__salt__' in globals():
db_path = globals().get('__salt__')['config.get']('inspector.db', '')
def __init__(self, cachedir=None, piddir=None, pidfilename=None):
EnvLoader.__init__(self, cachedir=cachedir, piddir=piddir, pidfilename=pidfilename)

if not db_path:
raise InspectorSnapshotException('Inspector database location is not configured yet in minion.\n'
'Add "inspector.db: /path/to/cache" in "/etc/salt/minion".')
self.dbfile = db_path

self.db = DBHandle(self.dbfile)
self.db.open()

if not pid_file and '__salt__' in globals():
pid_file = globals().get('__salt__')['config.get']('inspector.pid', '')

if not pid_file:
raise InspectorSnapshotException("Inspector PID file location is not configured yet in minion.\n"
'Add "inspector.pid: /path/to/pids in "/etc/salt/minion".')
self.pidfile = pid_file
# TODO: This is nasty. Need to do something with this better. ASAP!
try:
self.db.open()
except Exception as ex:
log.error('Unable to [re]open db. Already opened?')

def _syscall(self, command, input=None, env=None, *params):
'''
@@ -411,7 +409,34 @@ class Inspector(object):
self._prepare_full_scan(**kwargs)

os.system("nice -{0} python {1} {2} {3} {4} & > /dev/null".format(
priority, __file__, self.pidfile, self.dbfile, mode))
priority, __file__, os.path.dirname(self.pidfile), os.path.dirname(self.dbfile), mode))

def export(self, description, local=False, path='/tmp', format='qcow2'):
'''
Export description for Kiwi.

:param local:
:param path:
:return:
'''
kiwiproc.__salt__ = __salt__
return kiwiproc.KiwiExporter(grains=__grains__,
format=format).load(**description).export('something')

def build(self, format='qcow2', path='/tmp'):
'''
Build an image using Kiwi.

:param format:
:param path:
:return:
'''
if kiwi is None:
msg = 'Unable to build the image due to the missing dependencies: Kiwi module is not available.'
log.error(msg)
raise CommandExecutionError(msg)

raise CommandExecutionError("Build is not yet implemented")


def is_alive(pidfile):
@@ -458,7 +483,7 @@ if __name__ == '__main__':
pid = os.fork()
if pid > 0:
reinit_crypto()
fpid = open(pidfile, "w")
fpid = open(os.path.join(pidfile, EnvLoader.PID_FILE), "w")
fpid.write("{0}\n".format(pid))
fpid.close()
sys.exit(0)
@@ -31,3 +31,9 @@ class SIException(Exception):
'''
System information exception.
'''


class InspectorKiwiProcessorException(Exception):
'''
Kiwi builder/exporter exception.
'''
242 salt/modules/inspectlib/kiwiproc.py Normal file
@@ -0,0 +1,242 @@
# -*- coding: utf-8 -*-
#
# Copyright 2016 SUSE LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import grp
import pwd
from lxml import etree
from xml.dom import minidom
import platform
import socket
from salt.modules.inspectlib.exceptions import InspectorKiwiProcessorException


class KiwiExporter(object):
'''
Exports system description as Kiwi configuration.
'''
def __init__(self, grains, format):
self.__grains__ = grains
self.format = format
self._data = type('data', (), {})
self.name = None

def load(self, **descr):
'''
Load data by keys.

:param data:
:return:
'''
for obj, data in descr.items():
setattr(self._data, obj, data)

return self

def export(self, name):
'''
Export to the Kiwi config.xml as text.

:return:
'''

self.name = name
root = self._create_doc()
self._set_description(root)
self._set_preferences(root)
self._set_repositories(root)
self._set_users(root)
self._set_packages(root)

return '\n'.join([line for line in minidom.parseString(
etree.tostring(root, encoding='UTF-8', pretty_print=True)).toprettyxml(indent=" ").split("\n")
if line.strip()])

def _get_package_manager(self):
'''
Get package manager.

:return:
'''
ret = None
if self.__grains__.get('os_family') in ('Kali', 'Debian'):
ret = 'apt-get'
elif self.__grains__.get('os_family', '') == 'Suse':
ret = 'zypper'
elif self.__grains__.get('os_family', '') == 'redhat':
ret = 'yum'

if ret is None:
raise InspectorKiwiProcessorException('Unsupported platform: {0}'.format(self.__grains__.get('os_family')))

return ret

def _set_preferences(self, node):
'''
Set preferences.

:return:
'''
pref = etree.SubElement(node, 'preferences')
pacman = etree.SubElement(pref, 'packagemanager')
pacman.text = self._get_package_manager()
p_version = etree.SubElement(pref, 'version')
p_version.text = '0.0.1'
p_type = etree.SubElement(pref, 'type')
p_type.set('image', 'vmx')

for disk_id, disk_data in self._data.system.get('disks', {}).items():
if disk_id.startswith('/dev'):
p_type.set('filesystem', disk_data.get('type') or 'ext3')
break

p_type.set('installiso', 'true')
p_type.set('boot', "vmxboot/suse-leap42.1")
p_type.set('format', self.format)
p_type.set('bootloader', 'grub2')
p_type.set('timezone', __salt__['timezone.get_zone']())
p_type.set('hwclock', __salt__['timezone.get_hwclock']())

return pref

def _get_user_groups(self, user):
'''
Get user groups.

:param user:
:return:
'''
return [g.gr_name for g in grp.getgrall()
if user in g.gr_mem] + [grp.getgrgid(pwd.getpwnam(user).pw_gid).gr_name]

def _set_users(self, node):
'''
Create existing local users.

<users group="root">
<user password="$1$wYJUgpM5$RXMMeASDc035eX.NbYWFl0" home="/root" name="root"/>
</users>

:param node:
:return:
'''
# Get real local users with the local passwords
shadow = {}
for sh_line in open('/etc/shadow').read().split(os.linesep):
if sh_line.strip():
login, pwd = sh_line.split(":")[:2]
if pwd and pwd[0] not in '!*':
shadow[login] = {'p': pwd}

for ps_line in open('/etc/passwd').read().split(os.linesep):
if ps_line.strip():
ps_line = ps_line.strip().split(':')
if ps_line[0] in shadow:
shadow[ps_line[0]]['h'] = ps_line[5]
shadow[ps_line[0]]['s'] = ps_line[6]
shadow[ps_line[0]]['g'] = self._get_user_groups(ps_line[0])

users_groups = []
users_node = etree.SubElement(node, 'users')
for u_name, u_data in shadow.items():
user_node = etree.SubElement(users_node, 'user')
user_node.set('password', u_data['p'])
user_node.set('home', u_data['h'])
user_node.set('name', u_name)
users_groups.extend(u_data['g'])
users_node.set('group', ','.join(users_groups))

return users_node

def _set_repositories(self, node):
'''
Create repositories.

:param node:
:return:
'''
priority = 99

for repo_id, repo_data in self._data.software.get('repositories', {}).items():
if type(repo_data) == list:
repo_data = repo_data[0]
if repo_data.get('enabled') or not repo_data.get('disabled'):  # RPM and Debian, respectively
uri = repo_data.get('baseurl', repo_data.get('uri'))
if not uri:
continue
repo = etree.SubElement(node, 'repository')
if self.__grains__.get('os_family') in ('Kali', 'Debian'):
repo.set('alias', repo_id)
repo.set('distribution', repo_data['dist'])
else:
repo.set('alias', repo_data['alias'])
if self.__grains__.get('os_family', '') == 'Suse':
repo.set('type', 'yast2')  # TODO: Check for options!
repo.set('priority', str(priority))
source = etree.SubElement(repo, 'source')
source.set('path', uri)  # RPM and Debian, respectively
priority -= 1

def _set_packages(self, node):
'''
Set packages and collections.

:param node:
:return:
'''
pkgs = etree.SubElement(node, 'packages')
for pkg_name, pkg_version in sorted(self._data.software.get('packages', {}).items()):
pkg = etree.SubElement(pkgs, 'package')
pkg.set('name', pkg_name)

# Add collections (SUSE)
if self.__grains__.get('os_family', '') == 'Suse':
for ptn_id, ptn_data in self._data.software.get('patterns', {}).items():
if ptn_data.get('installed'):
ptn = etree.SubElement(pkgs, 'namedCollection')
ptn.set('name', ptn_id)

return pkgs

def _set_description(self, node):
'''
Create a system description.

:return:
'''
hostname = socket.getfqdn() or platform.node()

descr = etree.SubElement(node, 'description')
author = etree.SubElement(descr, 'author')
author.text = "salt.modules.node on {0}".format(hostname)
contact = etree.SubElement(descr, 'contact')
contact.text = 'root@{0}'.format(hostname)
specs = etree.SubElement(descr, 'specification')
specs.text = 'Rebuild of {0}, based on Salt inspection.'.format(hostname)

return descr

def _create_doc(self):
'''
Create document.

:return:
'''
root = etree.Element('image')
root.set('schemaversion', '6.3')
root.set('name', self.name)

return root
@@ -22,8 +22,8 @@ import logging

# Import Salt Libs
import salt.utils.network
from salt.modules.inspectlib.dbhandle import DBHandle
from salt.modules.inspectlib.exceptions import (InspectorQueryException, SIException)
from salt.modules.inspectlib import EnvLoader

log = logging.getLogger(__name__)
@@ -53,8 +53,8 @@ class SysInfo(object):
log.error(msg)
raise SIException(msg)

devpath, blocks, used, available, used_p, mountpoint = [elm for elm in out['stdout'].split(os.linesep)[-1].split(" ") if elm]

devpath, blocks, used, available, used_p, mountpoint = [elm for elm in
out['stdout'].split(os.linesep)[-1].split(" ") if elm]
return {
'device': devpath, 'blocks': blocks, 'used': used,
'available': available, 'used (%)': used_p, 'mounted': mountpoint,
@@ -135,7 +135,7 @@ class SysInfo(object):
}


class Query(object):
class Query(EnvLoader):
'''
Query the system.
This class is actually puts all Salt features together,
@@ -153,7 +153,7 @@ class Query(object):

SCOPES = ["changes", "configuration", "identity", "system", "software", "services", "payload", "all"]

def __init__(self, scope):
def __init__(self, scope, cachedir=None):
'''
Constructor.
@@ -163,8 +163,8 @@ class Query(object):
if scope not in self.SCOPES:
raise InspectorQueryException(
"Unknown scope: {0}. Must be one of: {1}".format(repr(scope), ", ".join(self.SCOPES)))
EnvLoader.__init__(self, cachedir=cachedir)
self.scope = '_' + scope
self.db = DBHandle(globals()['__salt__']['config.get']('inspector.db', ''))
self.local_identity = dict()

def __call__(self, *args, **kwargs):
@@ -19,8 +19,11 @@ Module for full system inspection.
'''
from __future__ import absolute_import
import logging
import os
import getpass
from salt.modules.inspectlib.exceptions import (InspectorQueryException,
InspectorSnapshotException)
InspectorSnapshotException,
InspectorKiwiProcessorException)

# Import Salt libs
import salt.utils
@@ -89,7 +92,9 @@ def inspect(mode='all', priority=19, **kwargs):
'''
collector = _("collector")
try:
return collector.Inspector().request_snapshot(mode, priority=priority, **kwargs)
return collector.Inspector(cachedir=__opts__['cachedir'],
piddir=os.path.dirname(__opts__['pidfile']))\
.request_snapshot(mode, priority=priority, **kwargs)
except InspectorSnapshotException as ex:
raise CommandExecutionError(ex)
except Exception as ex:
@@ -154,9 +159,68 @@ def query(scope, **kwargs):
'''
query = _("query")
try:
return query.Query(scope)(**kwargs)
return query.Query(scope, cachedir=__opts__['cachedir'])(**kwargs)
except InspectorQueryException as ex:
raise CommandExecutionError(ex)
except Exception as ex:
log.error(_get_error_message(ex))
raise Exception(ex)


def build(format='qcow2', path='/tmp/'):
'''
Build an image from a current system description.
The image is a system image that can be output in bootable ISO or QCOW2 formats.

Node uses the image building library Kiwi to perform the actual build.

Parameters:

* **format**: Specifies output format: "qcow2" or "iso". Default: `qcow2`.
* **path**: Specifies output path where to store built image. Default: `/tmp`.

CLI Example:

.. code-block:: bash

salt myminion node.build
salt myminion node.build format=iso path=/opt/builds/
'''
try:
_("collector").Inspector(cachedir=__opts__['cachedir'],
piddir=os.path.dirname(__opts__['pidfile']),
pidfilename='').build(format=format, path=path)
except InspectorKiwiProcessorException as ex:
raise CommandExecutionError(ex)
except Exception as ex:
log.error(_get_error_message(ex))
raise Exception(ex)


def export(local=False, path="/tmp", format='qcow2'):
'''
Export an image description for Kiwi.

Parameters:

* **local**: Specifies True or False if the export has to be in the local file. Default: False.
* **path**: If `local=True`, then specifies the path where file with the Kiwi description is written.
Default: `/tmp`.

CLI Example:

.. code-block:: bash

salt myminion node.export
salt myminion node.export format=iso path=/opt/builds/
'''
if getpass.getuser() != 'root':
raise CommandExecutionError('In order to export system, the minion should run as "root".')
try:
description = _("query").Query('all', cachedir=__opts__['cachedir'])()
return _("collector").Inspector().export(description, local=local, path=path, format=format)
except InspectorKiwiProcessorException as ex:
raise CommandExecutionError(ex)
except Exception as ex:
log.error(_get_error_message(ex))
raise Exception(ex)
@@ -9,12 +9,12 @@ Enable and disable apache modules.
.. code-block:: yaml

Enable cgi module:
apache_module.enabled:
- name: cgi
apache_module.enabled:
- name: cgi

Disable cgi module:
apache_module.disabled:
- name: cgi
apache_module.disabled:
- name: cgi
'''
from __future__ import absolute_import
from salt.ext.six import string_types
@@ -1917,9 +1917,9 @@ def directory(name,
- dir_mode: 755
- file_mode: 644
- recurse:
- user
- group
- mode
- user
- group
- mode

Leave files or directories unchanged:
@@ -1932,10 +1932,10 @@ def directory(name,
- dir_mode: 755
- file_mode: 644
- recurse:
- user
- group
- mode
- ignore_dirs
- user
- group
- mode
- ignore_dirs

.. versionadded:: 2015.5.0
@@ -3821,9 +3821,9 @@ def append(name,
- append
- template: jinja
- sources:
- salt://motd/devops-messages.tmpl
- salt://motd/hr-messages.tmpl
- salt://motd/general-messages.tmpl
- salt://motd/devops-messages.tmpl
- salt://motd/hr-messages.tmpl
- salt://motd/general-messages.tmpl

.. versionadded:: 0.9.5
'''
@@ -4013,9 +4013,9 @@ def prepend(name,
- prepend
- template: jinja
- sources:
- salt://motd/devops-messages.tmpl
- salt://motd/hr-messages.tmpl
- salt://motd/general-messages.tmpl
- salt://motd/devops-messages.tmpl
- salt://motd/hr-messages.tmpl
- salt://motd/general-messages.tmpl

.. versionadded:: 2014.7.0
'''
@@ -931,8 +931,13 @@ class DaemonMixIn(six.with_metaclass(MixInMeta, object)):

if self.check_pidfile():
pid = self.get_pidfile()
if self.check_pidfile() and self.is_daemonized(pid) and not ppid == pid:
return True
if not salt.utils.is_windows():
if self.check_pidfile() and self.is_daemonized(pid) and not os.getppid() == pid:
return True
else:
# We have no os.getppid() on Windows. Best effort.
if self.check_pidfile() and self.is_daemonized(pid):
return True
return False

def is_daemonized(self, pid):
@@ -42,7 +42,7 @@ class StdTest(integration.ModuleCase):
assert num_ret > 0

# ping a minion that doesn't exist, to make sure that it doesn't hang forever
# create fake mininion
# create fake minion
key_file = os.path.join(self.master_opts['pki_dir'], 'minions', 'footest')
# touch the file
salt.utils.fopen(key_file, 'a').close()
@@ -116,6 +116,37 @@ class StdTest(integration.ModuleCase):
ret['minion']
)

def test_disconnected_return(self):
'''
Test return/messaging on a disconnected minion
'''
test_ret = {'ret': 'Minion did not return. [Not connected]', 'out': 'no_return'}

# Create a minion key, but do not start the "fake" minion. This mimics
# a disconnected minion.
key_file = os.path.join(self.master_opts['pki_dir'], 'minions', 'disconnected')
salt.utils.fopen(key_file, 'a').close()

# ping disconnected minion and ensure it times out and returns with correct message
try:
cmd_iter = self.client.cmd_cli(
'disconnected',
'test.ping',
show_timeout=True
)
num_ret = 0
for ret in cmd_iter:
num_ret += 1
self.assertEqual(ret['disconnected']['ret'], test_ret['ret'])
self.assertEqual(ret['disconnected']['out'], test_ret['out'])

# Ensure that we entered the loop above
self.assertEqual(num_ret, 1)

finally:
os.unlink(key_file)


if __name__ == '__main__':
from integration import run_tests
run_tests(StdTest)
115 tests/unit/modules/inspect_collector_test.py Normal file
@@ -0,0 +1,115 @@
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Bo Maryniuk <bo@suse.de>`
'''
# Import Python Libs
from __future__ import absolute_import
import os

# Import Salt Testing Libs
from salttesting import TestCase, skipIf
from salttesting.mock import (
MagicMock,
patch,
NO_MOCK,
NO_MOCK_REASON
)

from salt.modules.inspectlib.collector import Inspector
from salttesting.helpers import ensure_in_syspath

ensure_in_syspath('../../')


@skipIf(NO_MOCK, NO_MOCK_REASON)
class InspectorCollectorTestCase(TestCase):
'''
Test inspectlib:collector:Inspector
'''
def test_env_loader(self):
'''
Get packages on the different distros.

:return:
'''
inspector = Inspector(cachedir='/foo/cache', piddir='/foo/pid', pidfilename='bar.pid')
self.assertEqual(inspector.dbfile, '/foo/cache/_minion_collector.db')
self.assertEqual(inspector.pidfile, '/foo/pid/bar.pid')

def test_file_tree(self):
'''
Test file tree.

:return:
'''

inspector = Inspector(cachedir='/test', piddir='/test', pidfilename='bar.pid')
tree_root = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'inspectlib', 'tree_test')
expected_tree = (['/a/a/dummy.a', '/a/b/dummy.b', '/b/b.1', '/b/b.2', '/b/b.3'],
['/a', '/a/a', '/a/b', '/a/c', '/b', '/c'],
['/a/a/dummy.ln.a', '/a/b/dummy.ln.b', '/a/c/b.1', '/b/b.4',
'/b/b.5', '/c/b.1', '/c/b.2', '/c/b.3'])
tree_result = []
for chunk in inspector._get_all_files(tree_root):
buff = []
for pth in chunk:
buff.append(pth.replace(tree_root, ''))
tree_result.append(buff)
tree_result = tuple(tree_result)
self.assertEqual(expected_tree, tree_result)

def test_get_unmanaged_files(self):
'''
Test get_unmanaged_files.

:return:
'''
inspector = Inspector(cachedir='/test', piddir='/test', pidfilename='bar.pid')
managed = (
['a', 'b', 'c'],
['d', 'e', 'f'],
['g', 'h', 'i'],
)
system_all = (
['a', 'b', 'c'],
['d', 'E', 'f'],
['G', 'H', 'i'],
)
self.assertEqual(inspector._get_unmanaged_files(managed=managed, system_all=system_all),
([], ['E'], ['G', 'H']))

def test_pkg_get(self):
'''
Test if grains switching the pkg get method.

:return:
'''
debian_list = """
g++
g++-4.9
g++-5
gawk
gcc
gcc-4.9
gcc-4.9-base:amd64
gcc-4.9-base:i386
gcc-5
gcc-5-base:amd64
gcc-5-base:i386
gcc-6-base:amd64
gcc-6-base:i386
"""
inspector = Inspector(cachedir='/test', piddir='/test', pidfilename='bar.pid')
inspector.grains_core = MagicMock()
inspector.grains_core.os_data = MagicMock()
inspector.grains_core.os_data.get = MagicMock(return_value='Debian')
with patch.object(inspector, '_Inspector__get_cfg_pkgs_dpkg', MagicMock(return_value='dpkg')):
with patch.object(inspector, '_Inspector__get_cfg_pkgs_rpm', MagicMock(return_value='rpm')):
inspector.grains_core = MagicMock()
inspector.grains_core.os_data = MagicMock()
inspector.grains_core.os_data().get = MagicMock(return_value='Debian')
self.assertEqual(inspector._get_cfg_pkgs(), 'dpkg')
inspector.grains_core.os_data().get = MagicMock(return_value='SUSE')
self.assertEqual(inspector._get_cfg_pkgs(), 'rpm')
inspector.grains_core.os_data().get = MagicMock(return_value='redhat')
self.assertEqual(inspector._get_cfg_pkgs(), 'rpm')
0 tests/unit/modules/inspectlib/tree_test/a/a/dummy.a Normal file

1 tests/unit/modules/inspectlib/tree_test/a/a/dummy.ln.a Symbolic link
@@ -0,0 +1 @@
dummy.a

0 tests/unit/modules/inspectlib/tree_test/a/b/dummy.b Normal file

1 tests/unit/modules/inspectlib/tree_test/a/b/dummy.ln.b Symbolic link
@@ -0,0 +1 @@
dummy.b

1 tests/unit/modules/inspectlib/tree_test/a/c/b.1 Symbolic link
@@ -0,0 +1 @@
../../b/b.1

1 tests/unit/modules/inspectlib/tree_test/b/b.1 Normal file
@@ -0,0 +1 @@
B.1

0 tests/unit/modules/inspectlib/tree_test/b/b.2 Normal file

0 tests/unit/modules/inspectlib/tree_test/b/b.3 Normal file

1 tests/unit/modules/inspectlib/tree_test/b/b.4 Symbolic link
@@ -0,0 +1 @@
../c/b.1

1 tests/unit/modules/inspectlib/tree_test/b/b.5 Symbolic link
@@ -0,0 +1 @@
b.4

1 tests/unit/modules/inspectlib/tree_test/c/b.1 Symbolic link
@@ -0,0 +1 @@
../b/b.1

1 tests/unit/modules/inspectlib/tree_test/c/b.2 Symbolic link
@@ -0,0 +1 @@
../b/b.2

1 tests/unit/modules/inspectlib/tree_test/c/b.3 Symbolic link
@@ -0,0 +1 @@
../b/b.3