Commit dd67f54abe

Merge branch '2014.7' into develop

Conflicts:
    setup.py

6 changed files with 283 additions and 361 deletions
doc/_templates/version.html (2 changes)
@@ -10,5 +10,5 @@
 <p>Latest Salt release: <a href="{{ pathto('topics/releases/{0}'.format(release)) }}">{{ release }}</a></p>
 <p>Try the shiny new release candidate of Salt,
-<a href="{{ pathto('topics/releases/2014.7.0') }}">v2014.7.0rc6</a>! More info
+<a href="{{ pathto('topics/releases/2014.7.0') }}">v2014.7.0rc7</a>! More info
 <a href="{{ pathto('topics/releases/releasecandidate') }}">here</a>.</p>
@@ -52,6 +52,7 @@ import re
 import shutil
 import subprocess
 from datetime import datetime
+from salt._compat import text_type as _text_type

 VALID_PROVIDERS = ('gitpython', 'pygit2', 'dulwich')
 PER_REMOTE_PARAMS = ('base', 'mountpoint', 'root')

@@ -614,14 +615,18 @@ def init():
     per_remote_defaults = {}
     for param in override_params:
-        per_remote_defaults[param] = __opts__['gitfs_{0}'.format(param)]
+        per_remote_defaults[param] = \
+            _text_type(__opts__['gitfs_{0}'.format(param)])

     for remote in __opts__['gitfs_remotes']:
         repo_conf = copy.deepcopy(per_remote_defaults)
         bad_per_remote_conf = False
         if isinstance(remote, dict):
             repo_url = next(iter(remote))
-            per_remote_conf = salt.utils.repack_dictlist(remote[repo_url])
+            per_remote_conf = dict(
+                [(key, _text_type(val)) for key, val in
+                 salt.utils.repack_dictlist(remote[repo_url]).items()]
+            )
             if not per_remote_conf:
                 log.error(
                     'Invalid per-remote configuration for remote {0}. If no '

@@ -1253,10 +1258,9 @@ def serve_file(load, fnd):
     required_load_keys = set(['path', 'loc', 'saltenv'])
     if not all(x in load for x in required_load_keys):
         log.debug(
-            'Not all of the required key in load are present. Missing: {0}'.format(
-                ', '.join(
-                    required_load_keys.difference(load.keys())
-                )
+            'Not all of the required keys present in payload. '
+            'Missing: {0}'.format(
+                ', '.join(required_load_keys.difference(load.keys()))
+            )
         )
         return ret
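The gitfs hunks above add the salt._compat.text_type import and force every per-remote default and override value to unicode before it is stored. A minimal standalone sketch of that coercion pattern follows; the option names and values are illustrative, not taken from the diff.

# Sketch only: text_type stands in for salt._compat.text_type, and the
# opts dict imitates __opts__ for three gitfs_* master options.
text_type = type(u'')

opts = {'gitfs_base': 'master', 'gitfs_mountpoint': '', 'gitfs_root': ''}
override_params = ('base', 'mountpoint', 'root')

per_remote_defaults = {}
for param in override_params:
    # every default becomes unicode, so later comparisons and string
    # formatting never mix byte strings and other types
    per_remote_defaults[param] = text_type(opts['gitfs_{0}'.format(param)])

print(per_remote_defaults)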
@@ -35,6 +35,7 @@ import logging
 import os
 import shutil
 from datetime import datetime
+from salt._compat import text_type as _text_type

 VALID_BRANCH_METHODS = ('branches', 'bookmarks', 'mixed')
 PER_REMOTE_PARAMS = ('base', 'branch_method', 'mountpoint', 'root')

@@ -170,19 +171,23 @@ def init():
     per_remote_defaults = {}
     for param in PER_REMOTE_PARAMS:
-        per_remote_defaults[param] = __opts__['hgfs_{0}'.format(param)]
+        per_remote_defaults[param] = \
+            _text_type(__opts__['hgfs_{0}'.format(param)])

     for remote in __opts__['hgfs_remotes']:
         repo_conf = copy.deepcopy(per_remote_defaults)
         if isinstance(remote, dict):
-            repo_uri = next(iter(remote))
-            per_remote_conf = salt.utils.repack_dictlist(remote[repo_uri])
+            repo_url = next(iter(remote))
+            per_remote_conf = dict(
+                [(key, _text_type(val)) for key, val in
+                 salt.utils.repack_dictlist(remote[repo_url]).items()]
+            )
             if not per_remote_conf:
                 log.error(
                     'Invalid per-remote configuration for remote {0}. If no '
                     'per-remote parameters are being specified, there may be '
                     'a trailing colon after the URI, which should be removed. '
-                    'Check the master configuration file.'.format(repo_uri)
+                    'Check the master configuration file.'.format(repo_url)
                 )

             branch_method = \

@@ -192,7 +197,7 @@ def init():
                 log.error(
                     'Invalid branch_method {0!r} for remote {1}. Valid '
                     'branch methods are: {2}. This remote will be ignored.'
-                    .format(branch_method, repo_uri,
+                    .format(branch_method, repo_url,
                             ', '.join(VALID_BRANCH_METHODS))
                 )
                 continue

@@ -203,18 +208,18 @@ def init():
                     'Invalid configuration parameter {0!r} for remote {1}. '
                     'Valid parameters are: {2}. See the documentation for '
                     'further information.'.format(
-                        param, repo_uri, ', '.join(PER_REMOTE_PARAMS)
+                        param, repo_url, ', '.join(PER_REMOTE_PARAMS)
                     )
                 )
                 per_remote_conf.pop(param)
             repo_conf.update(per_remote_conf)
         else:
-            repo_uri = remote
+            repo_url = remote

-        if not isinstance(repo_uri, string_types):
+        if not isinstance(repo_url, string_types):
             log.error(
                 'Invalid gitfs remote {0}. Remotes must be strings, you may '
-                'need to enclose the URI in quotes'.format(repo_uri)
+                'need to enclose the URI in quotes'.format(repo_url)
             )
             continue

@@ -227,7 +232,7 @@ def init():
             pass

         hash_type = getattr(hashlib, __opts__.get('hash_type', 'md5'))
-        repo_hash = hash_type(repo_uri).hexdigest()
+        repo_hash = hash_type(repo_url).hexdigest()
         rp_ = os.path.join(bp_, repo_hash)
         if not os.path.isdir(rp_):
             os.makedirs(rp_)

@@ -243,7 +248,7 @@ def init():
                 'Cache path {0} (corresponding remote: {1}) exists but is not '
                 'a valid mercurial repository. You will need to manually '
                 'delete this directory on the master to continue to use this '
-                'hgfs remote.'.format(rp_, repo_uri)
+                'hgfs remote.'.format(rp_, repo_url)
             )
             continue

@@ -253,11 +258,11 @@ def init():
             hgconfpath = os.path.join(rp_, '.hg', 'hgrc')
             with salt.utils.fopen(hgconfpath, 'w+') as hgconfig:
                 hgconfig.write('[paths]\n')
-                hgconfig.write('default = {0}\n'.format(repo_uri))
+                hgconfig.write('default = {0}\n'.format(repo_url))

         repo_conf.update({
             'repo': repo,
-            'uri': repo_uri,
+            'url': repo_url,
             'hash': repo_hash,
             'cachedir': rp_
         })

@@ -271,7 +276,7 @@ def init():
             timestamp = datetime.now().strftime('%d %b %Y %H:%M:%S.%f')
             fp_.write('# hgfs_remote map as of {0}\n'.format(timestamp))
             for repo in repos:
-                fp_.write('{0} = {1}\n'.format(repo['hash'], repo['uri']))
+                fp_.write('{0} = {1}\n'.format(repo['hash'], repo['url']))
         except OSError:
             pass
     else:

@@ -323,7 +328,7 @@ def update():
         except Exception as exc:
             log.error(
                 'Exception {0} caught while updating hgfs remote {1}'
-                .format(exc, repo['uri']),
+                .format(exc, repo['url']),
                 exc_info_on_loglevel=logging.DEBUG
             )
         else:
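Besides the repo_uri to repo_url rename, the hgfs hunks run each per-remote override list through salt.utils.repack_dictlist and coerce the values to unicode. A rough sketch of how such a list-of-single-key-dicts collapses into a flat dict; _repack below is a simplified stand-in for repack_dictlist and the remote URL is made up.

text_type = type(u'')


def _repack(dictlist):
    # simplified stand-in for salt.utils.repack_dictlist: merge a list of
    # single-key dicts into one flat dict
    out = {}
    for item in dictlist:
        if isinstance(item, dict) and len(item) == 1:
            out.update(item)
    return out


remote = {'https://example.com/repo': [{'base': 'default'}, {'root': 'srv'}]}
repo_url = next(iter(remote))
per_remote_conf = dict(
    [(key, text_type(val)) for key, val in _repack(remote[repo_url]).items()]
)
print(repo_url, per_remote_conf)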
@@ -30,6 +30,7 @@ import logging
 import os
 import shutil
 from datetime import datetime
+from salt._compat import text_type as _text_type

 PER_REMOTE_PARAMS = ('mountpoint', 'root', 'trunk', 'branches', 'tags')

@@ -90,7 +91,7 @@ def _rev(repo):
         log.error(
             'Error retrieving revision ID for svnfs remote {0} '
             '(cachedir: {1}): {2}'
-            .format(repo['uri'], repo['repo'], exc)
+            .format(repo['url'], repo['repo'], exc)
         )
     else:
         return repo_info['revision'].number

@@ -107,19 +108,23 @@ def init():
     per_remote_defaults = {}
     for param in PER_REMOTE_PARAMS:
-        per_remote_defaults[param] = __opts__['svnfs_{0}'.format(param)]
+        per_remote_defaults[param] = \
+            _text_type(__opts__['svnfs_{0}'.format(param)])

     for remote in __opts__['svnfs_remotes']:
         repo_conf = copy.deepcopy(per_remote_defaults)
         if isinstance(remote, dict):
-            repo_uri = next(iter(remote))
-            per_remote_conf = salt.utils.repack_dictlist(remote[repo_uri])
+            repo_url = next(iter(remote))
+            per_remote_conf = dict(
+                [(key, _text_type(val)) for key, val in
+                 salt.utils.repack_dictlist(remote[repo_url]).items()]
+            )
             if not per_remote_conf:
                 log.error(
                     'Invalid per-remote configuration for remote {0}. If no '
                     'per-remote parameters are being specified, there may be '
                     'a trailing colon after the URI, which should be removed. '
-                    'Check the master configuration file.'.format(repo_uri)
+                    'Check the master configuration file.'.format(repo_url)
                 )

             for param in (x for x in per_remote_conf

@@ -128,18 +133,18 @@ def init():
                     'Invalid configuration parameter {0!r} for remote {1}. '
                     'Valid parameters are: {2}. See the documentation for '
                     'further information.'.format(
-                        param, repo_uri, ', '.join(PER_REMOTE_PARAMS)
+                        param, repo_url, ', '.join(PER_REMOTE_PARAMS)
                    )
                 )
                 per_remote_conf.pop(param)
             repo_conf.update(per_remote_conf)
         else:
-            repo_uri = remote
+            repo_url = remote

-        if not isinstance(repo_uri, string_types):
+        if not isinstance(repo_url, string_types):
             log.error(
                 'Invalid gitfs remote {0}. Remotes must be strings, you may '
-                'need to enclose the URI in quotes'.format(repo_uri)
+                'need to enclose the URI in quotes'.format(repo_url)
             )
             continue

@@ -152,7 +157,7 @@ def init():
             pass

         hash_type = getattr(hashlib, __opts__.get('hash_type', 'md5'))
-        repo_hash = hash_type(repo_uri).hexdigest()
+        repo_hash = hash_type(repo_url).hexdigest()
         rp_ = os.path.join(bp_, repo_hash)
         if not os.path.isdir(rp_):
             os.makedirs(rp_)

@@ -160,13 +165,13 @@ def init():
         if not os.listdir(rp_):
             # Only attempt a new checkout if the directory is empty.
             try:
-                CLIENT.checkout(repo_uri, rp_)
+                CLIENT.checkout(repo_url, rp_)
                 repos.append(rp_)
                 new_remote = True
             except pysvn._pysvn.ClientError as exc:
                 log.error(
                     'Failed to initialize svnfs remote {0!r}: {1}'
-                    .format(repo_uri, exc)
+                    .format(repo_url, exc)
                 )
                 continue
         else:

@@ -179,13 +184,13 @@ def init():
                 'Cache path {0} (corresponding remote: {1}) exists but is '
                 'not a valid subversion checkout. You will need to '
                 'manually delete this directory on the master to continue '
-                'to use this svnfs remote.'.format(rp_, repo_uri)
+                'to use this svnfs remote.'.format(rp_, repo_url)
             )
             continue

         repo_conf.update({
             'repo': rp_,
-            'uri': repo_uri,
+            'url': repo_url,
             'hash': repo_hash,
             'cachedir': rp_
         })

@@ -200,7 +205,7 @@ def init():
             for repo_conf in repos:
                 fp_.write(
                     '{0} = {1}\n'.format(
-                        repo_conf['hash'], repo_conf['uri']
+                        repo_conf['hash'], repo_conf['url']
                     )
                 )
         except OSError:

@@ -253,7 +258,7 @@ def update():
         except pysvn._pysvn.ClientError as exc:
             log.error(
                 'Error updating svnfs remote {0} (cachedir: {1}): {2}'
-                .format(repo['uri'], repo['cachedir'], exc)
+                .format(repo['url'], repo['cachedir'], exc)
             )
             try:
                 os.remove(lk_fn)

@@ -328,7 +333,7 @@ def envs(ignore_cache=False):
         log.error(
             'svnfs trunk path {0!r} does not exist in repo {1}, no base '
             'environment will be provided by this remote'
-            .format(repo['trunk'], repo['uri'])
+            .format(repo['trunk'], repo['url'])
         )

     branches = os.path.join(repo['repo'], repo['branches'])

@@ -337,7 +342,7 @@
     else:
         log.error(
             'svnfs branches path {0!r} does not exist in repo {1}'
-            .format(repo['branches'], repo['uri'])
+            .format(repo['branches'], repo['url'])
         )

     tags = os.path.join(repo['repo'], repo['tags'])

@@ -346,7 +351,7 @@
     else:
         log.error(
             'svnfs tags path {0!r} does not exist in repo {1}'
-            .format(repo['tags'], repo['uri'])
+            .format(repo['tags'], repo['url'])
         )
     return [x for x in sorted(ret) if _env_is_exposed(x)]
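As in the other fileserver backends, the svnfs remote is now carried under a 'url' key, and its cache directory is derived by hashing the remote URL. A small sketch of that cache-path scheme; the paths and URL below are illustrative, and the .encode() call is only needed because hashlib on Python 3 expects bytes.

import hashlib
import os

opts = {'hash_type': 'md5', 'cachedir': '/tmp/salt-cache'}
repo_url = 'svn://example.com/repo'

# pick the configured hash function and name the per-remote cache dir
# after the digest of the remote URL
hash_type = getattr(hashlib, opts.get('hash_type', 'md5'))
repo_hash = hash_type(repo_url.encode('utf-8')).hexdigest()
rp_ = os.path.join(opts['cachedir'], 'svnfs', repo_hash)
print(rp_)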
@@ -1510,18 +1510,18 @@ class State(object):
             ret.update(self._run_check(low))

         if 'saltenv' in low:
-            inject_globals['__env__'] = low['saltenv']
+            inject_globals['__env__'] = str(low['saltenv'])
         elif isinstance(cdata['kwargs'].get('env', None), string_types):
             # User is using a deprecated env setting which was parsed by
             # format_call.
             # We check for a string type since module functions which
             # allow setting the OS environ also make use of the "env"
             # keyword argument, which is not a string
-            inject_globals['__env__'] = cdata['kwargs']['env']
+            inject_globals['__env__'] = str(cdata['kwargs']['env'])
         elif '__env__' in low:
             # The user is passing an alternative environment using __env__
             # which is also not the appropriate choice, still, handle it
-            inject_globals['__env__'] = low['__env__']
+            inject_globals['__env__'] = str(low['__env__'])
         else:
             # Let's use the default environment
             inject_globals['__env__'] = 'base'
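The state.py hunk wraps each source of the environment name in str() so that __env__ is always a plain string, whatever type the user supplied. A tiny sketch of the same coercion outside Salt; low here is a made-up low-data chunk.

low = {'saltenv': u'base', 'state': 'pkg', 'fun': 'installed', 'name': 'vim'}

inject_globals = {}
if 'saltenv' in low:
    inject_globals['__env__'] = str(low['saltenv'])
elif '__env__' in low:
    inject_globals['__env__'] = str(low['__env__'])
else:
    # fall back to the default environment
    inject_globals['__env__'] = 'base'

print(type(inject_globals['__env__']), inject_globals['__env__'])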
setup.py (542 changes)
@@ -4,10 +4,10 @@
 The setup script for salt
 '''

-# pylint: disable=C0111,E1101,E1103,F0401,W0611
+# pylint: disable=C0111,E1101,E1103,F0401,W0611,W0201,W0232,R0201,R0902,R0903

 # For Python 2.5. A no-op on 2.6 and above.
-from __future__ import with_statement
+from __future__ import print_function, with_statement

 import os
 import sys

@@ -15,6 +15,7 @@ import glob
 import urllib2
 from datetime import datetime
 # pylint: disable=E0611
 import distutils.dist
 from distutils import log
 from distutils.cmd import Command
 from distutils.errors import DistutilsArgError

@@ -104,11 +105,9 @@ SALT_SYSPATHS = os.path.join(os.path.abspath(SETUP_DIRNAME), 'salt', 'syspaths.p
 # Salt SSH Packaging Detection
 PACKAGED_FOR_SALT_SSH_FILE = os.path.join(os.path.abspath(SETUP_DIRNAME), '.salt-ssh-package')
-if '--ssh-packaging' in sys.argv:
-    with open(PACKAGED_FOR_SALT_SSH_FILE, 'w+') as fp_:
-        fp_.write(' ')
 PACKAGED_FOR_SALT_SSH = os.path.isfile(PACKAGED_FOR_SALT_SSH_FILE)


 # pylint: disable=W0122
 exec(compile(open(SALT_VERSION).read(), SALT_VERSION, 'exec'))
 exec(compile(open(SALT_SYSPATHS).read(), SALT_SYSPATHS, 'exec'))

@@ -145,7 +144,7 @@ class WriteSaltVersion(Command):
         if not os.path.exists(SALT_VERSION_HARDCODED):
             # Write the version file
             if getattr(self.distribution, 'salt_version_hardcoded_path', None) is None:
-                print 'This command is not meant to be called on it\'s own'
+                print('This command is not meant to be called on it\'s own')
                 exit(1)

             # pylint: disable=E0602

@@ -172,7 +171,7 @@ class WriteSaltSshPackaingFile(Command):
         if not os.path.exists(PACKAGED_FOR_SALT_SSH_FILE):
             # Write the salt-ssh packaging file
             if getattr(self.distribution, 'salt_ssh_packaging_file', None) is None:
-                print 'This command is not meant to be called on it\'s own'
+                print('This command is not meant to be called on it\'s own')
                 exit(1)

             # pylint: disable=E0602
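The two print fixes above only work because the earlier hunk adds print_function to the __future__ import: with it enabled, the Python 2 print statement becomes a syntax error and the function form behaves the same on 2 and 3. A minimal sketch; the helper name is made up.

from __future__ import print_function
import sys


def warn_and_exit(msg):
    # same shape as the corrected calls in WriteSaltVersion and
    # WriteSaltSshPackaingFile
    print(msg)
    sys.exit(1)


if __name__ == '__main__':
    warn_and_exit('This command is not meant to be called on its own')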
@@ -180,44 +179,13 @@ class WriteSaltSshPackaingFile(Command):
             # pylint: enable=E0602


-if WITH_SETUPTOOLS is True:
-    class EggInfo(egg_info):
-
-        def finalize_options(self):
-            if getattr(self.distribution, 'packaged_for_salt_ssh', PACKAGED_FOR_SALT_SSH):
-                self.distribution.metadata.name = 'salt-ssh'
-            egg_info.finalize_options(self)


 class Sdist(sdist):
-    user_options = sdist.user_options + [
-        ('ssh-packaging', None, 'Prepare the salt-ssh packaging')
-    ]
-    boolean_options = sdist.boolean_options + [
-        'ssh-packaging'
-    ]
-
-    def initialize_options(self):
-        sdist.initialize_options(self)
-        self.ssh_packaging = PACKAGED_FOR_SALT_SSH
-
-    def finalize_options(self):
-        sdist.finalize_options(self)
-        self.distribution.packaged_for_salt_ssh = self.ssh_packaging
-
     def make_release_tree(self, base_dir, files):
-        if self.ssh_packaging:
+        if self.distribution.ssh_packaging:
             self.distribution.salt_ssh_packaging_file = PACKAGED_FOR_SALT_SSH_FILE
             self.run_command('write-salt-ssh-packaging-file')
             self.distribution.package_data.pop('salt.daemons.flo', None)
             self.filelist.files.append(os.path.basename(PACKAGED_FOR_SALT_SSH_FILE))
-            self.distribution.metadata.name = 'salt-ssh'
-            self.distribution.data_files = [('share/man/man1',
-                                             ['doc/man/salt-ssh.1',
-                                              'doc/man/salt-run.1',
-                                              'doc/man/salt-call.1',
-                                              'doc/man/salt-cloud.1']),
-                                            ('share/man/man7', ['doc/man/salt.7'])]

         sdist.make_release_tree(self, base_dir, files)

@@ -230,7 +198,7 @@ class Sdist(sdist):

     def make_distribution(self):
         sdist.make_distribution(self)
-        if self.ssh_packaging:
+        if self.distribution.ssh_packaging:
             os.unlink(PACKAGED_FOR_SALT_SSH_FILE)
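The net effect of this hunk is that the sdist command stops owning the --ssh-packaging option and just consults a flag carried on the distribution object. A stripped-down sketch of that pattern using plain distutils classes; everything except the distutils names is illustrative.

from distutils.command.sdist import sdist
from distutils.dist import Distribution


class ExampleDistribution(Distribution):
    def __init__(self, attrs=None):
        # flag lives on the distribution, where every command can see it
        self.ssh_packaging = False
        Distribution.__init__(self, attrs)


class ExampleSdist(sdist):
    def make_release_tree(self, base_dir, files):
        if getattr(self.distribution, 'ssh_packaging', False):
            # branch taken only for the salt-ssh style package
            self.distribution.metadata.name = 'salt-ssh'
        sdist.make_release_tree(self, base_dir, files)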
@@ -325,33 +293,13 @@ class CloudSdist(Sdist):
         Sdist.run(self)

     def write_manifest(self):
-        if IS_WINDOWS_PLATFORM:
-            # Remove un-necessary scripts grabbed by MANIFEST.in
-            for filename in self.filelist.files[:]:
-                if filename in ('scripts/salt',
-                                'scripts/salt-api',
-                                'scripts/salt-cloud',
-                                'scripts/salt-api',
-                                'scripts/salt-key',
-                                'scripts/salt-master',
-                                'scripts/salt-run',
-                                'scripts/salt-ssh',
-                                'scripts/salt-syndic'):
-                    self.filelist.files.pop(
-                        self.filelist.files.index(filename)
-                    )
-        elif self.distribution.packaged_for_salt_ssh:
-            # Remove un-necessary script from Salt-SSH package
-            for filename in self.filelist.files[:]:
-                if not filename.startswith('scripts/'):
-                    continue
-                if filename not in ('scripts/salt-ssh',
-                                    'scripts/salt-run',
-                                    'scripts/salt-call',
-                                    'scripts/salt-cloud'):
-                    self.filelist.files.pop(
-                        self.filelist.files.index(filename)
-                    )
+        # We only need to ship the scripts which are supposed to be installed
+        dist_scripts = self.distribution.scripts
+        for script in self.filelist.files[:]:
+            if not script.startswith('scripts/'):
+                continue
+            if script not in dist_scripts:
+                self.filelist.files.remove(script)
         return Sdist.write_manifest(self)

@@ -392,7 +340,7 @@ class Clean(clean):
         # Let's clean compiled *.py[c,o]
         for subdir in ('salt', 'tests', 'doc'):
             root = os.path.join(os.path.dirname(__file__), subdir)
-            for dirname, dirnames, filenames in os.walk(root):
+            for dirname, _, _ in os.walk(root):
                 for to_remove_filename in glob.glob('{0}/*.py[oc]'.format(dirname)):
                     os.remove(to_remove_filename)
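The rewritten write_manifest drops the platform- and ssh-specific blacklists in favour of one rule: a scripts/ entry only ships if it is also in the distribution's scripts list. The filtering itself is just this (sample data, not taken from the diff):

filelist = ['setup.py', 'scripts/salt', 'scripts/salt-ssh', 'scripts/salt-call']
dist_scripts = ['scripts/salt-ssh', 'scripts/salt-call']

for entry in filelist[:]:
    if not entry.startswith('scripts/'):
        continue
    if entry not in dist_scripts:
        # drop scripts that are not going to be installed
        filelist.remove(entry)

print(filelist)  # ['setup.py', 'scripts/salt-ssh', 'scripts/salt-call']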
@@ -458,9 +406,6 @@ class Build(build):

 class Install(install):
     user_options = install.user_options + [
-        ('salt-transport=', None,
-         'The transport to prepare salt for. Choices are \'zeromq\' '
-         '\'raet\' or \'both\'. Defaults to \'zeromq\''),
         ('salt-root-dir=', None,
          'Salt\'s pre-configured root directory'),
         ('salt-config-dir=', None,

@@ -485,14 +430,7 @@ class Install(install):

     def initialize_options(self):
         install.initialize_options(self)
-        if not hasattr(self.distribution, 'install_requires'):
-            # Non setuptools installation
-            self.distribution.install_requires = _parse_requirements_file(SALT_REQS)
-        # pylint: disable=E0602
-        if PACKAGED_FOR_SALT_SSH:
-            self.salt_transport = 'ssh'
-        else:
-            self.salt_transport = 'zeromq'
+        # pylint: disable=undefined-variable
         self.salt_root_dir = ROOT_DIR
         self.salt_config_dir = CONFIG_DIR
         self.salt_cache_dir = CACHE_DIR

@@ -503,12 +441,10 @@
         self.salt_base_master_roots_dir = BASE_MASTER_ROOTS_DIR
         self.salt_logs_dir = LOGS_DIR
         self.salt_pidfile_dir = PIDFILE_DIR
-        # pylint: enable=E0602
+        # pylint: enable=undefined-variable

     def finalize_options(self):
         install.finalize_options(self)
-        if PACKAGED_FOR_SALT_SSH and self.salt_transport != 'ssh':
-            raise DistutilsArgError('The only available transport for salt-ssh is \'ssh\'')

         for optname in ('root_dir', 'config_dir', 'cache_dir', 'sock_dir',
                         'srv_root_dir', 'base_file_roots_dir',

@@ -523,28 +459,6 @@ class Install(install):
             )
             setattr(self.distribution, 'salt_{0}'.format(optname), optvalue)

-        if self.salt_transport not in ('zeromq', 'raet', 'both', 'ssh', 'none'):
-            raise DistutilsArgError(
-                'The value of --salt-transport needs be \'zeromq\', '
-                '\'raet\', \'both\' \'ssh\' or \'none\' not {0!r}'.format(
-                    self.salt_transport
-                )
-            )
-        elif self.salt_transport in ('ssh', 'none'):
-            for requirement in _parse_requirements_file(SALT_ZEROMQ_REQS):
-                if requirement not in self.distribution.install_requires:
-                    continue
-                self.distribution.install_requires.remove(requirement)
-        elif self.salt_transport in ('raet', 'both'):
-            self.distribution.install_requires.extend(
-                _parse_requirements_file(SALT_RAET_REQS)
-            )
-            if self.salt_transport == 'raet':
-                for requirement in _parse_requirements_file(SALT_ZEROMQ_REQS):
-                    if requirement not in self.distribution.install_requires:
-                        continue
-                    self.distribution.install_requires.remove(requirement)

     def run(self):
         # Let's set the running_salt_install attribute so we can add
         # _version.py in the build command
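These hunks strip the transport validation and the ZeroMQ/RAET requirement juggling out of Install; in the new layout the same decisions are made once on the distribution object (see the large hunk below). A compact sketch of that selection logic with made-up requirement names.

def requires_for_transport(salt_transport, base, zeromq, raet):
    # mirrors the choices removed from Install.finalize_options:
    # zeromq/raet/both pull in extra requirements, ssh/none do not
    if salt_transport not in ('zeromq', 'raet', 'both', 'ssh', 'none'):
        raise ValueError('invalid --salt-transport {0!r}'.format(salt_transport))
    reqs = list(base)
    if salt_transport in ('zeromq', 'both'):
        reqs += zeromq
    if salt_transport in ('raet', 'both'):
        reqs += raet
    return reqs


print(requires_for_transport('raet', ['Jinja2'], ['pyzmq'], ['libnacl']))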
@@ -671,233 +585,227 @@ SETUP_KWARGS = {'name': NAME,
-                                 ['doc/man/salt-cp.1',
-                                  'doc/man/salt-call.1',
-                                  'doc/man/salt-minion.1',
-                                  'doc/man/salt-unity.1',
-                                  ]),
-                                ('share/man/man7',
-                                 ['doc/man/salt.7',
-                                  ]),
-                                ],
-                # Required for esky builds, ZeroMQ or RAET deps will be added
-                # at install time
-                'install_requires':
-                    _parse_requirements_file(SALT_REQS) +
-                    _parse_requirements_file(SALT_ZEROMQ_REQS),
-                'extras_require': {
-                    'RAET': _parse_requirements_file(SALT_RAET_REQS),
-                    'Cloud': _parse_requirements_file(SALT_CLOUD_REQS)
-                },
-                # The dynamic module loading in salt.modules makes this
-                # package zip unsafe. Required for esky builds
-                'zip_safe': False
-                }
+                              'doc/man/salt-run.1',
+                              'doc/man/salt-ssh.1',
+                              'doc/man/salt-syndic.1',
+                              'doc/man/salt-unity.1'])
+        return data_files

-if PACKAGED_FOR_SALT_SSH:
-    SETUP_KWARGS['data_files'][0][1].extend([
-        'doc/man/salt-ssh.1',
-        'doc/man/salt-run.1',
-        'doc/man/salt-cloud.1',
-    ])
+    @property
+    def _property_install_requires(self):
+        install_requires = _parse_requirements_file(SALT_REQS)

-if IS_WINDOWS_PLATFORM is False:
-    SETUP_KWARGS['cmdclass']['sdist'] = CloudSdist
-    SETUP_KWARGS['cmdclass']['install_lib'] = InstallLib
-    # SETUP_KWARGS['packages'].extend(['salt.cloud',
-    #                                  'salt.cloud.clouds'])
-    SETUP_KWARGS['package_data']['salt.cloud'] = ['deploy/*.sh']
+        if IS_WINDOWS_PLATFORM:
+            install_requires.append('WMI')

-    SETUP_KWARGS['data_files'][0][1].extend([
-        'doc/man/salt-master.1',
-        'doc/man/salt-key.1',
-        'doc/man/salt.1',
-        'doc/man/salt-api.1',
-        'doc/man/salt-syndic.1',
-        'doc/man/salt-unity.1',
-    ])
-    if PACKAGED_FOR_SALT_SSH is False:
-        SETUP_KWARGS['data_files'][0][1].extend([
-            'doc/man/salt-ssh.1',
-            'doc/man/salt-run.1',
-            'doc/man/salt-cloud.1',
-        ])
+        if self.salt_transport == 'zeromq':
+            install_requires += _parse_requirements_file(SALT_ZEROMQ_REQS)
+        elif self.salt_transport == 'raet':
+            install_requires += _parse_requirements_file(SALT_RAET_REQS)
+        return install_requires

+    @property
+    def _property_extras_require(self):
+        if self.ssh_packaging:
+            return {}
+        return {'RAET': _parse_requirements_file(SALT_RAET_REQS),
+                'Cloud': _parse_requirements_file(SALT_CLOUD_REQS)}

-# bbfreeze explicit includes
-# Sometimes the auto module traversal doesn't find everything, so we
-# explicitly add it. The auto dependency tracking especially does not work for
-# imports occurring in salt.modules, as they are loaded at salt runtime.
-# Specifying includes that don't exist doesn't appear to cause a freezing
-# error.
-FREEZER_INCLUDES = [
-    'zmq.core.*',
-    'zmq.utils.*',
-    'ast',
-    'difflib',
-    'distutils',
-    'distutils.version',
-    'numbers',
-    'json',
-    'M2Crypto',
-    'Cookie',
-    'asyncore',
-    'fileinput',
-    'sqlite3',
-    'email',
-    'email.mime.*',
-    'requests',
-    'sqlite3',
-]
+    @property
+    def _property_scripts(self):
+        # Scripts common to all scenarios
+        scripts = ['scripts/salt-call']
+        if self.ssh_packaging or PACKAGED_FOR_SALT_SSH:
+            scripts.append('scripts/salt-ssh')
+        if IS_WINDOWS_PLATFORM:
+            return scripts
+        scripts.extend(['scripts/salt-cloud', 'scripts/salt-run'])
+        return scripts

-if HAS_ZMQ and hasattr(zmq, 'pyzmq_version_info'):
-    if HAS_ZMQ and zmq.pyzmq_version_info() >= (0, 14):
-        # We're freezing, and when freezing ZMQ needs to be installed, so this
-        # works fine
-        if 'zmq.core.*' in FREEZER_INCLUDES:
-            # For PyZMQ >= 0.14, freezing does not need 'zmq.core.*'
-            FREEZER_INCLUDES.remove('zmq.core.*')
+        if IS_WINDOWS_PLATFORM:
+            scripts.extend(['scripts/salt-cp',
+                            'scripts/salt-minion',
+                            'scripts/salt-unity'])
+            return scripts

-if IS_WINDOWS_PLATFORM:
-    FREEZER_INCLUDES.extend([
-        'win32api',
-        'win32file',
-        'win32con',
-        'win32com',
-        'win32net',
-        'win32netcon',
-        'win32gui',
-        'win32security',
-        'ntsecuritycon',
-        'pywintypes',
-        'pythoncom',
-        '_winreg',
-        'wmi',
-        'site',
-        'psutil',
-    ])
-    SETUP_KWARGS['install_requires'].append('WMI')
-elif sys.platform.startswith('linux'):
-    FREEZER_INCLUDES.append('spwd')
-    try:
-        import yum
-        FREEZER_INCLUDES.append('yum')
-    except ImportError:
-        pass
-elif sys.platform.startswith('sunos'):
-    # (The sledgehammer approach)
-    # Just try to include everything
-    # (This may be a better way to generate FREEZER_INCLUDES generally)
-    try:
-        from bbfreeze.modulegraph.modulegraph import ModuleGraph
-        mf = ModuleGraph(sys.path[:])
-        for arg in glob.glob('salt/modules/*.py'):
-            mf.run_script(arg)
-        for mod in mf.flatten():
-            if type(mod).__name__ != 'Script' and mod.filename:
-                FREEZER_INCLUDES.append(str(os.path.basename(mod.identifier)))
-    except ImportError:
-        pass
-    # Include C extension that convinces esky to package up the libsodium C library
-    # This is needed for ctypes to find it in libnacl which is in turn needed for raet
-    # see pkg/smartos/esky/sodium_grabber{.c,_installer.py}
-    FREEZER_INCLUDES.extend([
-        'sodium_grabber',
-        'ioflo',
-        'raet',
-        'libnacl',
-    ])
+        # *nix, so, we need all scripts
+        scripts.extend(['scripts/salt',
+                        'scripts/salt-api',
+                        'scripts/salt-cloud',
+                        'scripts/salt-cp',
+                        'scripts/salt-key',
+                        'scripts/salt-master',
+                        'scripts/salt-minion',
+                        'scripts/salt-run',
+                        'scripts/salt-ssh',
+                        'scripts/salt-syndic',
+                        'scripts/salt-unity'])
+        return scripts

-if HAS_ESKY:
-    # if the user has the esky / bbfreeze libraries installed, add the
-    # appropriate kwargs to setup
-    OPTIONS = SETUP_KWARGS.get('options', {})
-    OPTIONS['bdist_esky'] = {
-        'freezer_module': 'bbfreeze',
-        'freezer_options': {
-            'includes': FREEZER_INCLUDES
-        }
-    }
-    SETUP_KWARGS['options'] = OPTIONS
+    # We don't actually need to set the console_scripts entry point since the
+    # packaged scripts with do the same work
+    #@property
+    #def _property_entry_points(self):
+    #    return {}
+    #    # console scripts common to all scenarios
+    #    scripts = ['salt-call = salt.scripts:salt_call']
+    #    if self.ssh_packaging or PACKAGED_FOR_SALT_SSH:
+    #        scripts.append('salt-ssh = salt.scripts:salt_ssh')
+    #    if IS_WINDOWS_PLATFORM:
+    #        return {'console_scripts': scripts}
+    #    scripts.extend(['salt-cloud = salt.scripts:salt_cloud',
+    #                    'salt-run = salt.scripts:salt_run'])
+    #    return {'console_scripts': scripts}
+    #
+    #    if IS_WINDOWS_PLATFORM:
+    #        scripts.extend(['salt-cp = salt.scripts:salt_cp'
+    #                        'salt-minion = salt.scripts:salt_minion',
+    #                        'salt-unity = salt.scripts:salt_unity'])
+    #        return {'console_scripts': scripts}
+    #
+    #    # *nix, so, we need all scripts
+    #    scripts.extend(['salt = salt.scripts:salt_main',
+    #                    'salt-api = salt.scripts:salt_api',
+    #                    'salt-cloud = salt.scripts:salt_cloud',
+    #                    'salt-cp = salt.scripts:salt_cp',
+    #                    'salt-key = salt.scripts:salt_key',
+    #                    'salt-master = salt.scripts:salt_master',
+    #                    'salt-minion = salt.scripts:salt_minion',
+    #                    'salt-run = salt.scripts:salt_run',
+    #                    'salt-ssh = salt.scripts:salt_ssh',
+    #                    'salt-syndic = salt.scripts:salt_syndic',
+    #                    'salt-unity = salt.scripts:salt_unity'])
+    #    return {'console_scripts': scripts}
+    # <---- Dynamic Data ---------------------------------------------------------------------------------------------

-if WITH_SETUPTOOLS:
-    SETUP_KWARGS['cmdclass']['egg_info'] = EggInfo
-    if PACKAGED_FOR_SALT_SSH is False:
-        SETUP_KWARGS['entry_points'] = {
-            'console_scripts': ['salt-call = salt.scripts:salt_call',
-                                'salt-cp = salt.scripts:salt_cp',
-                                'salt-minion = salt.scripts:salt_minion',
-                                ]
-        }
-    else:
-        SETUP_KWARGS['entry_points'] = {'console_scripts': [
-            'salt-ssh = salt.scripts:salt_ssh',
-            'salt-run = salt.scripts:salt_run',
-            'salt-call = salt.scripts:salt_call',
-            'salt-cloud = salt.scripts:salt_cloud',
-        ]}
-    if IS_WINDOWS_PLATFORM is False:
-        if PACKAGED_FOR_SALT_SSH:
-            SETUP_KWARGS['entry_points']['console_scripts'].extend([
-                'salt = salt.scripts:salt_main',
-                'salt-api = salt.scripts:salt_api',
-                'salt-key = salt.scripts:salt_key',
-                'salt-master = salt.scripts:salt_master',
-                'salt-syndic = salt.scripts:salt_syndic',
-            ])
-        else:
-            SETUP_KWARGS['entry_points']['console_scripts'].extend([
-                'salt = salt.scripts:salt_main',
-                'salt-api = salt.scripts:salt_api',
-                'salt-cloud = salt.scripts:salt_cloud',
-                'salt-key = salt.scripts:salt_key',
-                'salt-master = salt.scripts:salt_master',
-                'salt-run = salt.scripts:salt_run',
-                'salt-ssh = salt.scripts:salt_ssh',
-                'salt-syndic = salt.scripts:salt_syndic',
-            ])
+    # ----- Esky Setup ---------------------------------------------------------------------------------------------->
+    def setup_esky(self):
+        opt_dict = self.get_option_dict('bdist_esky')
+        opt_dict['freezer_module'] = ('setup script', 'bbfreeze')
+        opt_dict['freezer_options'] = ('setup script', {'includes': self.get_esky_freezer_includes()})

-# Required for running the tests suite
-SETUP_KWARGS['dependency_links'] = [
-    'https://github.com/saltstack/salt-testing/tarball/develop#egg=SaltTesting'
-]
-SETUP_KWARGS['tests_require'] = ['SaltTesting']
+    @property
+    def _property_freezer_options(self):
+        return {'includes': self.get_esky_freezer_includes()}

-# When WITH_SETUPTOOLS is True, esky builds would fail to include the scripts,
-# and, if WITH_SETUPTOOLS is True, having scripts and console_scripts defined
-# does not, apparently, break the build, so, let's have both
-if PACKAGED_FOR_SALT_SSH is False:
-    SETUP_KWARGS['scripts'] = ['scripts/salt-call',
-                               'scripts/salt-cp',
-                               'scripts/salt-minion',
-                               'scripts/salt-unity',
-                               ]

-if IS_WINDOWS_PLATFORM is False:
-    if PACKAGED_FOR_SALT_SSH:
-        SETUP_KWARGS['scripts'] = [
-            'scripts/salt-ssh',
-            'scripts/salt-run',
-            'scripts/salt-call',
-            'scripts/salt-cloud'
+    def get_esky_freezer_includes(self):
+        # Sometimes the auto module traversal doesn't find everything, so we
+        # explicitly add it. The auto dependency tracking especially does not work for
+        # imports occurring in salt.modules, as they are loaded at salt runtime.
+        # Specifying includes that don't exist doesn't appear to cause a freezing
+        # error.
+        freezer_includes = [
+            'zmq.core.*',
+            'zmq.utils.*',
+            'ast',
+            'difflib',
+            'distutils',
+            'distutils.version',
+            'numbers',
+            'json',
+            'M2Crypto',
+            'Cookie',
+            'asyncore',
+            'fileinput',
+            'sqlite3',
+            'email',
+            'email.mime.*',
+            'requests',
+            'sqlite3',
+        ]
-    else:
-        SETUP_KWARGS['scripts'].extend([
-            'scripts/salt',
-            'scripts/salt-api',
-            'scripts/salt-cloud',
-            'scripts/salt-key',
-            'scripts/salt-master',
-            'scripts/salt-run',
-            'scripts/salt-ssh',
-            'scripts/salt-syndic',
-        ])
+        if HAS_ZMQ and hasattr(zmq, 'pyzmq_version_info'):
+            if HAS_ZMQ and zmq.pyzmq_version_info() >= (0, 14):
+                # We're freezing, and when freezing ZMQ needs to be installed, so this
+                # works fine
+                if 'zmq.core.*' in freezer_includes:
+                    # For PyZMQ >= 0.14, freezing does not need 'zmq.core.*'
+                    freezer_includes.remove('zmq.core.*')

+        if IS_WINDOWS_PLATFORM:
+            freezer_includes.extend([
+                'win32api',
+                'win32file',
+                'win32con',
+                'win32com',
+                'win32net',
+                'win32netcon',
+                'win32gui',
+                'win32security',
+                'ntsecuritycon',
+                'pywintypes',
+                'pythoncom',
+                '_winreg',
+                'wmi',
+                'site',
+                'psutil',
+            ])
+        elif sys.platform.startswith('linux'):
+            freezer_includes.append('spwd')
+            try:
+                import yum  # pylint: disable=unused-variable
+                freezer_includes.append('yum')
+            except ImportError:
+                pass
+        elif sys.platform.startswith('sunos'):
+            # (The sledgehammer approach)
+            # Just try to include everything
+            # (This may be a better way to generate freezer_includes generally)
+            try:
+                from bbfreeze.modulegraph.modulegraph import ModuleGraph
+                mgraph = ModuleGraph(sys.path[:])
+                for arg in glob.glob('salt/modules/*.py'):
+                    mgraph.run_script(arg)
+                for mod in mgraph.flatten():
+                    if type(mod).__name__ != 'Script' and mod.filename:
+                        freezer_includes.append(str(os.path.basename(mod.identifier)))
+            except ImportError:
+                pass
+            # Include C extension that convinces esky to package up the libsodium C library
+            # This is needed for ctypes to find it in libnacl which is in turn needed for raet
+            # see pkg/smartos/esky/sodium_grabber{.c,_installer.py}
+            freezer_includes.extend([
+                'sodium_grabber',
+                'ioflo',
+                'raet',
+                'libnacl',
+            ])
+        return freezer_includes
+    # <---- Esky Setup -----------------------------------------------------------------------------------------------

-if PACKAGED_FOR_SALT_SSH:
-    SETUP_KWARGS.pop('extras_require')
-    for requirement in _parse_requirements_file(SALT_ZEROMQ_REQS):
-        if requirement not in SETUP_KWARGS['install_requires']:
-            continue
-        SETUP_KWARGS['install_requires'].remove(requirement)
+    # ----- Overridden Methods -------------------------------------------------------------------------------------->
+    def parse_command_line(self):
+        args = distutils.dist.Distribution.parse_command_line(self)

+        # Setup our property functions after class initialization and
+        # after parsing the command line since most are set to None
+        for funcname in dir(self):
+            if not funcname.startswith('_property_'):
+                continue
+            property_name = funcname.split('_property_', 1)[-1]
+            setattr(self, property_name, getattr(self, funcname))

+        if not self.ssh_packaging and PACKAGED_FOR_SALT_SSH:
+            self.ssh_packaging = 1

+        if self.ssh_packaging:
+            self.metadata.name = 'salt-ssh'
+            self.salt_transport = 'ssh'
+        elif self.salt_transport is None:
+            self.salt_transport = 'zeromq'

+        if self.salt_transport not in ('zeromq', 'raet', 'both', 'ssh', 'none'):
+            raise DistutilsArgError(
+                'The value of --salt-transport needs be \'zeromq\', '
+                '\'raet\', \'both\', \'ssh\' or \'none\' not {0!r}'.format(
+                    self.salt_transport
+                )
+            )

+        return args
+    # <---- Overridden Methods ---------------------------------------------------------------------------------------

+# <---- Custom Distribution Class ------------------------------------------------------------------------------------


 if __name__ == '__main__':
-    setup(**SETUP_KWARGS)
+    setup(distclass=SaltDistribution)
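Most of this last hunk replaces module-level SETUP_KWARGS bookkeeping with a custom distribution class whose _property_<name> members are evaluated after the command line is parsed and copied onto the matching attributes, so values such as scripts or install_requires can depend on the parsed options. A simplified, distutils-free sketch of that convention; all names are illustrative.

class ExampleDistribution(object):
    def __init__(self):
        self.ssh_packaging = False
        self.scripts = None

    def _property_scripts(self):
        # computed late, after options are known
        scripts = ['scripts/salt-call']
        if self.ssh_packaging:
            scripts.append('scripts/salt-ssh')
        return scripts

    def parse_command_line(self, argv):
        self.ssh_packaging = '--ssh-packaging' in argv
        # bind every _property_<name> result to the plain attribute <name>
        for funcname in dir(self):
            if not funcname.startswith('_property_'):
                continue
            property_name = funcname.split('_property_', 1)[-1]
            setattr(self, property_name, getattr(self, funcname)())
        return argv


dist = ExampleDistribution()
dist.parse_command_line(['--ssh-packaging'])
print(dist.scripts)  # ['scripts/salt-call', 'scripts/salt-ssh']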