mirror of https://github.com/saltstack/salt.git
synced 2025-04-17 10:10:20 +00:00

- fixed pylint warnings

This commit is contained in:
parent a3b10e8ab1
commit 8219acffe7

1 changed file with 5 additions and 6 deletions
@@ -100,7 +100,6 @@ import salt.utils.s3 as s3
 log = logging.getLogger(__name__)
 
 
-
 class S3Credentials(object):
     def __init__(self, key, keyid, bucket, service_url, verify_ssl,
                  location):
@@ -123,8 +122,8 @@ def ext_pillar(minion_id,
                environment='base',
                prefix='',
                service_url=None,
-               s3_cache_expire=30, # cache for 30 seconds
-               s3_sync_on_update=True): # sync cache on update rather than jit
+               s3_cache_expire=30,  # cache for 30 seconds
+               s3_sync_on_update=True):  # sync cache on update rather than jit
 
     '''
     Execute a command and read the output as YAML
@@ -182,11 +181,11 @@ def _init(creds, bucket, multiple_env, environment, prefix, s3_cache_expire):
 
     # check mtime of the buckets files cache
     if os.path.isfile(cache_file) and cache_file_mtime > exp:
-        log.debug("S3 bucket cache file %s is not expired, mtime_diff=%ss, expiration=%ss" % (cache_file, cache_file_mtime - exp, s3_cache_expire))
+        log.debug("S3 bucket cache file {0} is not expired, mtime_diff={1}s, expiration={2}s".format(cache_file, cache_file_mtime - exp, s3_cache_expire))
         return _read_buckets_cache_file(cache_file)
     else:
         # bucket files cache expired
-        log.debug("S3 bucket cache file %s is expired, mtime_diff=%ss, expiration=%ss" % (cache_file, cache_file_mtime - exp, s3_cache_expire))
+        log.debug("S3 bucket cache file {0} is expired, mtime_diff={1}s, expiration={2}s".format(cache_file, cache_file_mtime - exp, s3_cache_expire))
         return _refresh_buckets_cache_file(creds, cache_file, multiple_env,
                                            environment, prefix)
 
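
Note: the check in the hunk above treats the buckets cache as fresh while the cache file's mtime is newer than a cutoff derived from s3_cache_expire, and then either reads the cached buckets file or refreshes it. A minimal standalone sketch of that pattern, with a hypothetical helper name and without Salt's cache read/refresh machinery (the real module computes exp and handles the cache elsewhere):

import logging
import os
import time

log = logging.getLogger(__name__)


def _is_cache_fresh(cache_file, s3_cache_expire=30):
    # Hypothetical helper, not part of the module: the cache counts as
    # fresh if the file exists and was modified within the last
    # `s3_cache_expire` seconds.
    exp = time.time() - s3_cache_expire
    if not os.path.isfile(cache_file):
        return False
    cache_file_mtime = os.path.getmtime(cache_file)
    log.debug("cache file {0}: mtime_diff={1}s, expiration={2}s".format(
        cache_file, cache_file_mtime - exp, s3_cache_expire))
    return cache_file_mtime > exp

In the module itself, the same boolean decides between _read_buckets_cache_file and _refresh_buckets_cache_file, as the hunk shows.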
@@ -373,7 +372,7 @@ def _get_file_from_s3(creds, metadata, saltenv, bucket, path,
 
     cached_md5 = salt.utils.get_hash(cached_file_path, 'md5')
 
-    log.debug("Cached file: path=%s, md5=%s, etag=%s" % (cached_file_path, cached_md5, file_md5))
+    log.debug("Cached file: path={0}, md5={1}, etag={2}".format(cached_file_path, cached_md5, file_md5))
 
     # hashes match we have a cache hit
     if cached_md5 == file_md5:
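
The log.debug edits in the last two hunks are the substance of the commit: old-style % interpolation is swapped for str.format() with explicit positional indexes, which appears to be the formatting style the project's pylint setup prefers. A generic before/after illustration (example values only, not taken from the module):

import logging

logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger('example')

# Example values only; in the module these come from the cached file and S3 metadata.
path, md5, etag = '/tmp/cached_file', 'abc123', 'abc123'

# Before: old-style % interpolation inside the logging call.
log.debug("Cached file: path=%s, md5=%s, etag=%s" % (path, md5, etag))

# After: str.format() with explicit positional indexes, as in this commit.
log.debug("Cached file: path={0}, md5={1}, etag={2}".format(path, md5, etag))

Both forms still build the final string before log.debug is called; the commit changes the formatting style, not the eagerness of the logging.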