Merge remote-tracking branch 'upstream/2014.7' into merge-forward-2015.2

Conflicts:
	.pylintrc
	.testing.pylintrc
	salt/states/file.py
	salt/utils/__init__.py
This commit is contained in:
Colton Myers 2015-01-26 15:53:05 -07:00
commit ce9d2cb6cb
6 changed files with 89 additions and 25 deletions

View file

@ -194,7 +194,7 @@ dummy-variables-rgx=_|dummy
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
additional-builtins=__opts__,__salt__,__pillar__,__grains__,__context__,__ret__,__env__,__low__,__states__,__lowstate__,__running__,__active_provider_name__,__master_opts__,__progress__
additional-builtins=__opts__,__salt__,__pillar__,__grains__,__context__,__ret__,__env__,__low__,__states__,__lowstate__,__running__,__active_provider_name__,__master_opts__,__progress__,__instance_id__
[SIMILARITIES]

View file

@ -154,7 +154,7 @@ indent-string=' '
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
additional-builtins=__opts__,__salt__,__pillar__,__grains__,__context__,__ret__,__env__,__low__,__states__,__lowstate__,__running__,__active_provider_name__,__master_opts__,__progress__
additional-builtins=__opts__,__salt__,__pillar__,__grains__,__context__,__ret__,__env__,__low__,__states__,__lowstate__,__running__,__active_provider_name__,__master_opts__,__progress__,__instance_id__
[IMPORTS]

View file

@ -242,17 +242,16 @@ def _verify_dulwich(quiet=False):
log.error(_RECOMMEND_PYGIT2)
return False
dulwich_ver = distutils.version.LooseVersion(dulwich.__version__)
dulwich_minver_str = '0.9.4'
dulwich_minver = distutils.version.LooseVersion(dulwich_minver_str)
dulwich_version = dulwich.__version__
dulwich_min_version = (0, 9, 4)
errors = []
if dulwich_ver < dulwich_minver:
if dulwich_version < dulwich_min_version:
errors.append(
'Git fileserver backend is enabled in the master config file, but '
'the installed version of Dulwich is earlier than {0}. Version {1} '
'detected.'.format(dulwich_minver_str, dulwich.__version__)
'detected.'.format(dulwich_min_version, dulwich_version)
)
if HAS_PYGIT2 and not quiet:

View file

@ -572,6 +572,7 @@ class State(object):
self.pre = {}
self.__run_num = 0
self.jid = jid
self.instance_id = str(id(self))
def _gather_pillar(self):
'''
@ -1504,6 +1505,7 @@ class State(object):
# state module to change these at runtime.
'__low__': immutabletypes.freeze(low),
'__running__': immutabletypes.freeze(running) if running else {},
'__instance_id__': self.instance_id,
'__lowstate__': immutabletypes.freeze(chunks) if chunks else {}
}
@ -2043,6 +2045,21 @@ class State(object):
return errors
ret = dict(list(disabled.items()) + list(self.call_chunks(chunks).items()))
ret = self.call_listen(chunks, ret)
# Delete the accumulator data file persisted for this State instance
# (keyed by self.instance_id) once the state run has finished.
def _cleanup_accumulator_data():
    accum_data_path = os.path.join(
        salt.utils.get_accumulator_dir(self.opts['cachedir']),
        self.instance_id
    )
    try:
        os.remove(accum_data_path)
        log.debug('Deleted accumulator data file %s',
                  accum_data_path)
    except OSError:
        # Best-effort cleanup: the file is only created when
        # accumulator data was actually persisted during the run.
        log.debug('File %s does not exist, no need to cleanup.',
                  accum_data_path)
_cleanup_accumulator_data()
return ret
def render_template(self, high, template):

View file

@ -245,6 +245,7 @@ import traceback
import yaml
# Import salt libs
import salt.payload
import salt.utils
import salt.utils.templates
from salt.exceptions import CommandExecutionError
@ -258,8 +259,39 @@ log = logging.getLogger(__name__)
COMMENT_REGEX = r'^([[:space:]]*){0}[[:space:]]?'
_ACCUMULATORS = {}
_ACCUMULATORS_DEPS = {}
def _get_accumulator_filepath():
    '''
    Return the path of the accumulator data file for the current state
    run, located under the master/minion cache directory and named after
    the running State instance id.
    '''
    accum_dir = salt.utils.get_accumulator_dir(__opts__['cachedir'])
    return os.path.join(accum_dir, __instance_id__)
def _load_accumulators():
    '''
    Read persisted accumulator state back from the cache file.

    Returns a two-tuple ``(accumulators, accumulators_deps)``. Both are
    empty dicts when the data file does not exist or deserializes to an
    empty value.
    '''
    empty = {'accumulators': {}, 'accumulators_deps': {}}
    serial = salt.payload.Serial(__opts__)
    path = _get_accumulator_filepath()
    try:
        with open(path, 'rb') as fp_:
            data = serial.load(fp_) or empty
    except IOError:
        # No accumulator file has been written yet for this run.
        data = empty
    return data['accumulators'], data['accumulators_deps']
def _persist_accummulators(accumulators, accumulators_deps):
    '''
    Serialize the accumulator state to the per-run cache file so that
    later state executions in the same run can read it back.
    '''
    payload = {
        'accumulators': accumulators,
        'accumulators_deps': accumulators_deps,
    }
    with open(_get_accumulator_filepath(), 'w+b') as fp_:
        salt.payload.Serial(__opts__).dump(payload, fp_)
def _check_user(user, group):
@ -1348,10 +1380,11 @@ def managed(name,
'No changes made.'.format(name))
return ret
if name in _ACCUMULATORS:
accum_data, _ = _load_accumulators()
if name in accum_data:
if not context:
context = {}
context['accumulator'] = _ACCUMULATORS[name]
context['accumulator'] = accum_data[name]
try:
if __opts__['test']:
@ -2507,11 +2540,12 @@ def blockreplace(
if not check_res:
return _error(ret, check_msg)
if name in _ACCUMULATORS:
accumulator = _ACCUMULATORS[name]
accum_data, accum_deps = _load_accumulators()
if name in accum_data:
accumulator = accum_data[name]
# if we have multiple accumulators for a file, only apply the one
# required at a time
deps = _ACCUMULATORS_DEPS.get(name, [])
deps = accum_deps.get(name, [])
filtered = [a for a in deps if
__low__['__id__'] in deps[a] and a in accumulator]
if not filtered:
@ -3728,21 +3762,23 @@ def accumulated(name, filename, text, **kwargs):
return ret
if isinstance(text, string_types):
text = (text,)
if filename not in _ACCUMULATORS:
_ACCUMULATORS[filename] = {}
if filename not in _ACCUMULATORS_DEPS:
_ACCUMULATORS_DEPS[filename] = {}
if name not in _ACCUMULATORS_DEPS[filename]:
_ACCUMULATORS_DEPS[filename][name] = []
accum_data, accum_deps = _load_accumulators()
if filename not in accum_data:
accum_data[filename] = {}
if filename not in accum_deps:
accum_deps[filename] = {}
if name not in accum_deps[filename]:
accum_deps[filename][name] = []
for accumulator in deps:
_ACCUMULATORS_DEPS[filename][name].extend(six.itervalues(accumulator))
if name not in _ACCUMULATORS[filename]:
_ACCUMULATORS[filename][name] = []
accum_deps[filename][name].extend(six.itervalues(accumulator))
if name not in accum_data[filename]:
accum_data[filename][name] = []
for chunk in text:
if chunk not in _ACCUMULATORS[filename][name]:
_ACCUMULATORS[filename][name].append(chunk)
if chunk not in accum_data[filename][name]:
accum_data[filename][name].append(chunk)
ret['comment'] = ('Accumulator {0} for file {1} '
'was charged by text'.format(name, filename))
_persist_accummulators(accum_data, accum_deps)
return ret

View file

@ -522,6 +522,18 @@ def required_modules_error(name, docstring):
return msg.format(filename, ', '.join(modules))
def get_accumulator_dir(cachedir):
    '''
    Return the directory that accumulator data is stored in, creating it if it
    doesn't exist.

    cachedir
        Base cache directory under which the ``accumulator`` subdirectory
        is created.
    '''
    fn_ = os.path.join(cachedir, 'accumulator')
    try:
        # EAFP: attempt creation directly instead of isdir()+makedirs(),
        # which races with concurrent processes creating the same dir.
        os.makedirs(fn_)
    except OSError:
        # Only swallow the error when the directory now exists; anything
        # else (permissions, path is a regular file) must propagate.
        if not os.path.isdir(fn_):
            raise
    return fn_
def check_or_die(command):
'''
Simple convenience function for modules to use for gracefully blowing up