Merge pull request #55694 from garethgreenaway/55662_s3_pillar_fixes

[master] S3 Pillar Pagination Fixes
Daniel Wozniak 2020-01-08 09:10:24 -07:00 committed by GitHub
commit 35230041f8
2 changed files with 125 additions and 3 deletions

salt/pillar/s3.py

@@ -273,7 +273,13 @@ def _refresh_buckets_cache_file(creds, cache_file, multiple_env, environment, pr
     '''
 
     # helper s3 query function
-    def __get_s3_meta():
+    def __get_s3_meta(continuation_token=None):
+        # We want to use ListObjectsV2 so we get the NextContinuationToken
+        params = {'prefix': prefix, 'list-type': 2}
+
+        if continuation_token:
+            params['continuation-token'] = continuation_token
+
         return __utils__['s3.query'](
             key=creds.key,
             keyid=creds.keyid,
@@ -283,7 +289,7 @@ def _refresh_buckets_cache_file(creds, cache_file, multiple_env, environment, pr
             verify_ssl=creds.verify_ssl,
             location=creds.location,
             return_bin=False,
-            params={'prefix': prefix},
+            params=params,
             path_style=creds.path_style,
             https_enable=creds.https_enable)
 
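Note: the new 'list-type': 2 parameter switches the listing to S3's ListObjectsV2 API, which is what returns NextContinuationToken on truncated responses; echoing that token back as 'continuation-token' requests the next page. A minimal standalone sketch of that handshake (the query callable and the dict response shape here are illustrative, not Salt's s3.query, which returns parsed XML in a different shape):

def list_all_keys(query, prefix):
    # Ask for ListObjectsV2 so truncated responses carry a token.
    params = {'prefix': prefix, 'list-type': 2}
    keys = []
    while True:
        response = query(params=params)  # one page, at most 1000 keys
        keys.extend(item['Key'] for item in response.get('Contents', []))
        token = response.get('NextContinuationToken')
        if not token:
            break  # last page: S3 omits the token
        params['continuation-token'] = token
    return keys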
@@ -296,6 +302,12 @@ def _refresh_buckets_cache_file(creds, cache_file, multiple_env, environment, pr
         environments = [(os.path.dirname(k['Key']).split('/', 1))[0] for k in files]
         return set(environments)
 
+    def __get_continuation_token(s3_meta):
+        return next((item.get('NextContinuationToken')
+                     for item in s3_meta
+                     if item.get('NextContinuationToken')),
+                    None)
+
     log.debug('Refreshing S3 buckets pillar cache file')
 
     metadata = {}
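Note: __get_continuation_token has to scan for the token because the parsed listing comes back as a list of single-key dicts (the shape the new unit test mocks below), not one mapping; next() over a generator with a None default returns the first match or None. In isolation:

s3_meta = [{'Name': 'pillar-bucket'},
           {'KeyCount': '10'},
           {'NextContinuationToken': 'XXXXX'}]

token = next((item.get('NextContinuationToken')
              for item in s3_meta
              if item.get('NextContinuationToken')),
             None)
assert token == 'XXXXX'

# A final page carries no token, so the helper yields None and the loop ends.
last_page = [{'Name': 'pillar-bucket'}, {'KeyCount': '10'}]
assert next((item.get('NextContinuationToken')
             for item in last_page
             if item.get('NextContinuationToken')),
            None) is None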
@@ -312,6 +324,14 @@ def _refresh_buckets_cache_file(creds, cache_file, multiple_env, environment, pr
                 if s3_meta:
                     bucket_files[bucket] = __get_pillar_files_from_s3_meta(s3_meta)
 
+                    # Check if we have a NextContinuationToken and loop until we don't
+                    while True:
+                        continuation_token = __get_continuation_token(s3_meta)
+                        if not continuation_token:
+                            break
+                        s3_meta = __get_s3_meta(continuation_token)
+                        bucket_files[bucket] += __get_pillar_files_from_s3_meta(s3_meta)
+
             metadata[environment] = bucket_files
 
     else:
@@ -322,6 +342,15 @@ def _refresh_buckets_cache_file(creds, cache_file, multiple_env, environment, pr
             # s3 query returned data
             if s3_meta:
                 files = __get_pillar_files_from_s3_meta(s3_meta)
+
+                # Check if we have a NextContinuationToken and loop until we don't
+                while True:
+                    continuation_token = __get_continuation_token(s3_meta)
+                    if not continuation_token:
+                        break
+                    s3_meta = __get_s3_meta(continuation_token)
+                    files += __get_pillar_files_from_s3_meta(s3_meta)
+
                 environments = __get_pillar_environments(files)
 
                 # pull out the files for the environment
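Note: the multiple_env branch above and this single-environment branch add the same drain-the-pages loop: fetch, extract the token, re-query until a page arrives without NextContinuationToken. A sketch of the shared pattern on its own, using the helper names from the diff as injected callables (the if s3_meta guard and other surrounding plumbing are elided):

def _fetch_all_pages(get_meta, get_token, get_files):
    # First page, then keep following continuation tokens.
    s3_meta = get_meta()
    files = get_files(s3_meta)
    while True:
        continuation_token = get_token(s3_meta)
        if not continuation_token:
            break
        s3_meta = get_meta(continuation_token)
        files += get_files(s3_meta)
    return files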
@@ -343,7 +372,7 @@ def _refresh_buckets_cache_file(creds, cache_file, multiple_env, environment, pr
 
     log.debug('Writing S3 buckets pillar cache file')
 
-    with salt.utils.files.fopen(cache_file, 'w') as fp_:
+    with salt.utils.files.fopen(cache_file, 'wb') as fp_:
         pickle.dump(metadata, fp_)
 
     return metadata
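Note: the mode change from 'w' to 'wb' is a Python 3 correctness fix: pickle.dump writes bytes, and a file object opened in text mode rejects bytes with a TypeError. A minimal demonstration with the plain built-in open (the cache.p filename is just for illustration):

import pickle

metadata = {'base': {'dummy_bucket': []}}
with open('cache.p', 'wb') as fp_:  # text mode ('w') raises TypeError on Python 3
    pickle.dump(metadata, fp_)
with open('cache.p', 'rb') as fp_:
    assert pickle.load(fp_) == metadata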

tests/unit/pillar/test_s3.py

@@ -0,0 +1,93 @@
+# -*- coding: utf-8 -*-
+
+# Import python libs
+from __future__ import absolute_import, print_function, unicode_literals
+import logging
+
+# Import Salt Testing libs
+from tests.support.mixins import LoaderModuleMockMixin
+from tests.support.unit import TestCase
+from tests.support.mock import patch, mock_open, MagicMock
+
+# Import Salt Libs
+import salt.pillar.s3 as s3_pillar
+from salt.ext.six.moves import range
+
+log = logging.getLogger(__name__)
+
+
+class S3PillarTestCase(TestCase, LoaderModuleMockMixin):
+    '''
+    TestCase for salt.pillar.s3
+    '''
+    def setup_loader_modules(self):
+        s3_pillar_globals = {
+            '__utils__': {}
+        }
+        return {s3_pillar: s3_pillar_globals}
+
+    def test_refresh_buckets_cache_file(self):
+        '''
+        Test pagination with refresh_buckets_cache_file
+        '''
+        key = 'XXXXXXXXXXXXXXXXXXXXX'
+        keyid = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
+        bucket = 'dummy_bucket'
+        service_url = 's3.amazonaws.com'
+        cache_file = 'dummy_file'
+
+        s3_creds = s3_pillar.S3Credentials(key, keyid, bucket, service_url)
+
+        mock_return_first = [{'Name': 'pillar-bucket'},
+                             {'Prefix': 'test'},
+                             {'KeyCount': '10'},
+                             {'MaxKeys': '10'},
+                             {'NextContinuationToken': 'XXXXX'},
+                             {'IsTruncated': 'true'}]
+
+        mock_return_second = [{'Name': 'pillar-bucket'},
+                              {'Prefix': 'test'},
+                              {'KeyCount': '10'},
+                              {'MaxKeys': '10'},
+                              {'IsTruncated': 'true'}]
+
+        first_range_end = 999
+        second_range_end = 1200
+        for i in range(0, first_range_end):
+            key_name = '{0}/init.sls'.format(i)
+            tmp = {'Key': key_name,
+                   'LastModified': '2019-12-18T15:54:39.000Z',
+                   'ETag': '"fba0a053704e8b357c94be90b44bb640"',
+                   'Size': '5 ',
+                   'StorageClass': 'STANDARD'}
+            mock_return_first.append(tmp)
+
+        for i in range(first_range_end, second_range_end):
+            key_name = '{0}/init.sls'.format(i)
+            tmp = {'Key': key_name,
+                   'LastModified': '2019-12-18T15:54:39.000Z',
+                   'ETag': '"fba0a053704e8b357c94be90b44bb640"',
+                   'Size': '5 ',
+                   'StorageClass': 'STANDARD'}
+            mock_return_second.append(tmp)
+
+        _expected = {'base': {'dummy_bucket': []}}
+        for i in range(0, second_range_end):
+            key_name = '{0}/init.sls'.format(i)
+            tmp = {'Key': key_name,
+                   'LastModified': '2019-12-18T15:54:39.000Z',
+                   'ETag': '"fba0a053704e8b357c94be90b44bb640"',
+                   'Size': '5 ',
+                   'StorageClass': 'STANDARD'}
+            _expected['base']['dummy_bucket'].append(tmp)
+
+        mock_s3_query = MagicMock(side_effect=[mock_return_first, mock_return_second])
+
+        with patch.dict(s3_pillar.__utils__, {'s3.query': mock_s3_query}):
+            with patch('salt.utils.files.fopen', mock_open(read_data=b'')):
+                ret = s3_pillar._refresh_buckets_cache_file(s3_creds,
+                                                            cache_file,
+                                                            False,
+                                                            'base',
+                                                            '')
+                self.assertEqual(ret, _expected)
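Note: the test drives the pagination path by giving the s3.query mock a two-element side_effect: the first call returns a page carrying NextContinuationToken, the second a final page without one, so the new while loop runs exactly one extra iteration and the expected result is the concatenation of both pages. The list-valued side_effect behavior in isolation (standard library unittest.mock shown here; Salt's tests.support.mock wraps the same API):

from unittest.mock import MagicMock

mock_query = MagicMock(side_effect=['first page', 'last page'])
assert mock_query() == 'first page'  # initial listing
assert mock_query() == 'last page'   # follow-up with continuation-token
assert mock_query.call_count == 2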