Mirror of https://github.com/saltstack/salt.git (synced 2025-04-17 10:10:20 +00:00)
Replace os.walk with a helper function
On PY2, when os.walk is invoked with a str as input, the paths in the
return data are all str types as well. This leaves undecoded unicode data
in those strings when files/dirs under the top dir that was passed contain
unicode characters in the filename.

>>> import os
>>> list(os.walk('temp'))
[('temp', [], ['\xd0\x94.txt', 'foo.txt'])]
>>> list(os.walk(u'temp'))
[(u'temp', [], [u'\u0414.txt', u'foo.txt'])]

The helper introduced here ensures that we always invoke os.walk with a
unicode top-level dir, so that we get unicode types in the return data.
Parent: b6725536f8
Commit: 6d52aeb739
49 changed files with 130 additions and 75 deletions
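The new helper lives in the salt.utils.path module (its definition appears in the salt/utils/path.py hunk near the end of the diff) and simply decodes the top directory before delegating to os.walk. As a rough standalone sketch of the same idea — not the exact Salt implementation, which delegates the decoding to salt.utils.stringutils.to_unicode — a wrapper could look like the following; the 'temp' directory and the UTF-8 assumption are illustrative only:

    import os
    import sys

    def os_walk(top, *args, **kwargs):
        '''
        Decode the top dir to unicode before walking, so every path yielded
        by os.walk is a unicode type, even on PY2.
        '''
        if sys.version_info[0] == 2 and isinstance(top, str):
            # Assumption for this sketch: filesystem paths are UTF-8 encoded.
            top = top.decode('utf-8')
        return os.walk(top, *args, **kwargs)

    # Both str and unicode input now produce unicode paths on PY2.
    for root, dirs, files in os_walk('temp'):
        print(root, files)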
@@ -16,6 +16,7 @@ from contextlib import closing
 import salt.client.ssh.shell
 import salt.client.ssh
 import salt.utils.files
+import salt.utils.path
 import salt.utils.thin
 import salt.utils.url
 import salt.utils.verify

@@ -241,7 +242,7 @@ def prep_trans_tar(opts, file_client, chunks, file_refs, pillar=None, id_=None,
     cwd = None
     os.chdir(gendir)
     with closing(tarfile.open(trans_tar, 'w:gz')) as tfp:
-        for root, dirs, files in os.walk(gendir):
+        for root, dirs, files in salt.utils.path.os_walk(gendir):
             for name in files:
                 full = os.path.join(root, name)
                 tfp.add(full[len(gendir):].lstrip(os.sep))

@@ -38,6 +38,7 @@ import salt.utils.minions
 import salt.utils.gzip_util
 import salt.utils.jid
 import salt.utils.minions
+import salt.utils.path
 import salt.utils.platform
 import salt.utils.stringutils
 import salt.utils.user

@@ -144,7 +145,7 @@ def clean_pub_auth(opts):
     if not os.path.exists(auth_cache):
         return
     else:
-        for (dirpath, dirnames, filenames) in os.walk(auth_cache):
+        for (dirpath, dirnames, filenames) in salt.utils.path.os_walk(auth_cache):
             for auth_file in filenames:
                 auth_file_path = os.path.join(dirpath, auth_file)
                 if not os.path.isfile(auth_file_path):

@@ -334,7 +335,7 @@ class AutoKey(object):
         expire_minutes = self.opts.get('autosign_timeout', 120)
         if expire_minutes > 0:
             min_time = time.time() - (60 * int(expire_minutes))
-            for root, dirs, filenames in os.walk(autosign_dir):
+            for root, dirs, filenames in salt.utils.path.os_walk(autosign_dir):
                 for f in filenames:
                     stub_file = os.path.join(autosign_dir, f)
                     mtime = os.path.getmtime(stub_file)
@@ -124,7 +124,7 @@ class Client(object):
 
         filelist = set()
 
-        for root, dirs, files in os.walk(destdir, followlinks=True):
+        for root, dirs, files in salt.utils.path.os_walk(destdir, followlinks=True):
             for name in files:
                 path = os.path.join(root, name)
                 filelist.add(path)

@@ -359,7 +359,7 @@ class Client(object):
             )
             return states
         for path in self.opts['file_roots'][saltenv]:
-            for root, dirs, files in os.walk(path, topdown=True):
+            for root, dirs, files in salt.utils.path.os_walk(path, topdown=True):
                 log.debug(
                     'Searching for states in dirs %s and files %s',
                     dirs, files

@@ -893,7 +893,7 @@ class LocalClient(Client):
             return ret
         prefix = prefix.strip('/')
         for path in self.opts['file_roots'][saltenv]:
-            for root, dirs, files in os.walk(
+            for root, dirs, files in salt.utils.path.os_walk(
                 os.path.join(path, prefix), followlinks=True
             ):
                 # Don't walk any directories that match file_ignore_regex or glob

@@ -913,7 +913,7 @@ class LocalClient(Client):
         if saltenv not in self.opts['file_roots']:
             return ret
         for path in self.opts['file_roots'][saltenv]:
-            for root, dirs, files in os.walk(
+            for root, dirs, files in salt.utils.path.os_walk(
                 os.path.join(path, prefix), followlinks=True
             ):
                 # Don't walk any directories that match file_ignore_regex or glob

@@ -932,7 +932,7 @@ class LocalClient(Client):
             return ret
         prefix = prefix.strip('/')
         for path in self.opts['file_roots'][saltenv]:
-            for root, dirs, files in os.walk(
+            for root, dirs, files in salt.utils.path.os_walk(
                 os.path.join(path, prefix), followlinks=True
             ):
                 ret.append(sdecode(os.path.relpath(root, path)))

@@ -17,6 +17,7 @@ import time
 import salt.loader
 import salt.utils.files
 import salt.utils.locales
+import salt.utils.path
 import salt.utils.url
 import salt.utils.versions
 from salt.utils.args import get_function_argspec as _argspec

@@ -182,7 +183,7 @@ def generate_mtime_map(opts, path_map):
     file_map = {}
     for saltenv, path_list in six.iteritems(path_map):
         for path in path_list:
-            for directory, dirnames, filenames in os.walk(path):
+            for directory, dirnames, filenames in salt.utils.path.os_walk(path):
                 # Don't walk any directories that match file_ignore_regex or glob
                 dirnames[:] = [d for d in dirnames if not is_file_ignored(opts, d)]
                 for item in filenames:

@@ -225,7 +226,7 @@ def reap_fileserver_cache_dir(cache_base, find_func):
     '''
     for saltenv in os.listdir(cache_base):
         env_base = os.path.join(cache_base, saltenv)
-        for root, dirs, files in os.walk(env_base):
+        for root, dirs, files in salt.utils.path.os_walk(env_base):
             # if we have an empty directory, lets cleanup
             # This will only remove the directory on the second time
             # "_reap_cache" is called (which is intentional)
@@ -207,7 +207,7 @@ def update():
         # Walk the cache directory searching for deletions
         blob_names = [blob.name for blob in blob_list]
         blob_set = set(blob_names)
-        for root, dirs, files in os.walk(path):
+        for root, dirs, files in salt.utils.path.os_walk(path):
             for f in files:
                 fname = os.path.join(root, f)
                 relpath = os.path.relpath(fname, path)

@@ -34,6 +34,7 @@ import salt.fileserver
 import salt.utils.files
 import salt.utils.gzip_util
 import salt.utils.hashutils
+import salt.utils.path
 import salt.utils.stringutils
 import salt.utils.url
 import salt.utils.versions

@@ -275,7 +276,7 @@ def file_list(load):
             continue
         walk_dir = os.path.join(minion_files_dir, prefix)
         # Do not follow links for security reasons
-        for root, _, files in os.walk(walk_dir, followlinks=False):
+        for root, _, files in salt.utils.path.os_walk(walk_dir, followlinks=False):
             for fname in files:
                 # Ignore links for security reasons
                 if os.path.islink(os.path.join(root, fname)):

@@ -354,7 +355,7 @@ def dir_list(load):
             continue
         walk_dir = os.path.join(minion_files_dir, prefix)
         # Do not follow links for security reasons
-        for root, _, _ in os.walk(walk_dir, followlinks=False):
+        for root, _, _ in salt.utils.path.os_walk(walk_dir, followlinks=False):
             relpath = os.path.relpath(root, minion_files_dir)
             # Ensure that the current directory and directories outside of
             # the minion dir do not end up in return list

@@ -386,8 +386,8 @@ def _file_lists(load, form):
                 ret['links'][rel_path] = link_dest
 
         for path in __opts__['file_roots'][load['saltenv']]:
-            for root, dirs, files in os.walk(
-                    salt.utils.stringutils.to_unicode(path),
+            for root, dirs, files in salt.utils.path.os_walk(
+                    path,
                     followlinks=__opts__['fileserver_followsymlinks']):
                 _add_to(ret['dirs'], path, root, dirs)
                 _add_to(ret['files'], path, root, files)

@@ -58,6 +58,7 @@ import salt.utils.data
 import salt.utils.files
 import salt.utils.gzip_util
 import salt.utils.hashutils
+import salt.utils.path
 import salt.utils.stringutils
 import salt.utils.url
 import salt.utils.versions

@@ -754,7 +755,7 @@ def _file_lists(load, form):
             # svnfs root (global or per-remote) does not exist in env
             continue
 
-        for root, dirs, files in os.walk(env_root):
+        for root, dirs, files in salt.utils.path.os_walk(env_root):
             relpath = os.path.relpath(root, env_root)
             dir_rel_fn = os.path.join(repo['mountpoint'], relpath)
             if relpath != '.':

@@ -797,7 +797,7 @@ def zip_(zip_file, sources, template=None, cwd=None, runas=None):
             else:
                 rel_root = cwd if cwd is not None else '/'
             if os.path.isdir(src):
-                for dir_name, sub_dirs, files in os.walk(src):
+                for dir_name, sub_dirs, files in salt.utils.path.os_walk(src):
                     if cwd and dir_name.startswith(cwd):
                         arc_dir = os.path.relpath(dir_name, cwd)
                     else:
@@ -71,7 +71,7 @@ def uuid(dev=None):
     try:
         if dev is None:
             # take the only directory in /sys/fs/bcache and return it's basename
-            return list(os.walk('/sys/fs/bcache/'))[0][1][0]
+            return list(salt.utils.path.os_walk('/sys/fs/bcache/'))[0][1][0]
         else:
             # basename of the /sys/block/{dev}/bcache/cache symlink target
             return os.path.basename(_bcsys(dev, 'cache'))

@@ -425,12 +425,12 @@ def status(stats=False, config=False, internals=False, superblock=False, alldevs
     :param superblock: include superblock
     '''
     bdevs = []
-    for _, links, _ in os.walk('/sys/block/'):
+    for _, links, _ in salt.utils.path.os_walk('/sys/block/'):
         for block in links:
             if 'bcache' in block:
                 continue
 
-            for spath, sdirs, _ in os.walk('/sys/block/{0}'.format(block), followlinks=False):
+            for spath, sdirs, _ in salt.utils.path.os_walk('/sys/block/{0}'.format(block), followlinks=False):
                 if 'bcache' in sdirs:
                     bdevs.append(os.path.basename(spath))
     statii = {}

@@ -17,6 +17,7 @@ import salt.fileclient
 import salt.utils.files
 import salt.utils.gzip_util
 import salt.utils.locales
+import salt.utils.path
 import salt.utils.templates
 import salt.utils.url
 import salt.crypt

@@ -875,7 +876,7 @@ def push_dir(path, glob=None, upload_path=None):
         return push(path, upload_path=upload_path)
     else:
         filelist = []
-        for root, _, files in os.walk(path):
+        for root, _, files in salt.utils.path.os_walk(path):
             filelist += [os.path.join(root, tmpfile) for tmpfile in files]
         if glob is not None:
             filelist = [fi for fi in filelist if fnmatch.fnmatch(os.path.basename(fi), glob)]

@@ -6379,7 +6379,7 @@ def diskusage(path):
         ret = stat_structure.st_size
         return ret
 
-    for dirpath, dirnames, filenames in os.walk(path):
+    for dirpath, dirnames, filenames in salt.utils.path.os_walk(path):
         for f in filenames:
             fp = os.path.join(dirpath, f)
 
@@ -25,6 +25,7 @@ import logging
 # Import salt libs
 import salt.utils.data
 import salt.utils.files
+import salt.utils.path
 from salt.ext.six import string_types
 from salt.exceptions import SaltInvocationError, CommandExecutionError
 from salt.ext import six

@@ -470,7 +471,7 @@ def list_all():
     '''
     if 'ports.list_all' not in __context__:
         __context__['ports.list_all'] = []
-        for path, dirs, files in os.walk('/usr/ports'):
+        for path, dirs, files in salt.utils.path.os_walk('/usr/ports'):
             stripped = path[len('/usr/ports'):]
             if stripped.count('/') != 2 or stripped.endswith('/CVS'):
                 continue

@@ -11,6 +11,7 @@ import logging
 
 # Import salt libs
 import salt.utils.files
+import salt.utils.path
 
 log = logging.getLogger(__name__)
 

@@ -133,7 +134,7 @@ def available():
             # Strip .ko from the basename
             ret.append(os.path.basename(line)[:-4])
 
-    for root, dirs, files in os.walk(mod_dir):
+    for root, dirs, files in salt.utils.path.os_walk(mod_dir):
         for fn_ in files:
             if '.ko' in fn_:
                 ret.append(fn_[:fn_.index('.ko')].replace('-', '_'))

@@ -141,7 +142,7 @@ def available():
     if 'Arch' in __grains__['os_family']:
         # Sadly this path is relative to kernel major version but ignores minor version
         mod_dir_arch = '/lib/modules/extramodules-' + os.uname()[2][0:3] + '-ARCH'
-        for root, dirs, files in os.walk(mod_dir_arch):
+        for root, dirs, files in salt.utils.path.os_walk(mod_dir_arch):
            for fn_ in files:
                if '.ko' in fn_:
                    ret.append(fn_[:fn_.index('.ko')].replace('-', '_'))

@@ -21,6 +21,7 @@ import re
 
 # Import salt libs
 import salt.utils.files
+import salt.utils.path
 import salt.utils.platform
 import salt.utils.stringutils
 import salt.utils.decorators as decorators

@@ -78,7 +79,7 @@ def _available_services():
     '''
     available_services = dict()
     for launch_dir in _launchd_paths():
-        for root, dirs, files in os.walk(launch_dir):
+        for root, dirs, files in salt.utils.path.os_walk(launch_dir):
             for filename in files:
                 file_path = os.path.join(root, filename)
                 # Follow symbolic links of files in _launchd_paths

@@ -72,7 +72,7 @@ def _available_services():
     '''
     available_services = dict()
     for launch_dir in _launchd_paths():
-        for root, dirs, files in os.walk(launch_dir):
+        for root, dirs, files in salt.utils.path.os_walk(launch_dir):
             for file_name in files:
 
                 # Must be a plist file
@@ -12,6 +12,7 @@ import os
 # import salt libs
 import salt.utils.data
 import salt.utils.files
+import salt.utils.path
 import salt.utils.mac_utils
 import salt.utils.platform
 from salt.exceptions import CommandExecutionError, SaltInvocationError

@@ -330,7 +331,7 @@ def list_downloads():
         salt '*' softwareupdate.list_downloads
     '''
     outfiles = []
-    for root, subFolder, files in os.walk('/Library/Updates'):
+    for root, subFolder, files in salt.utils.path.os_walk('/Library/Updates'):
         for f in files:
             outfiles.append(os.path.join(root, f))
 

@@ -11,6 +11,7 @@ import shutil
 
 # Import salt libs
 import salt.utils.files
+import salt.utils.path
 
 # Import third party libs
 from salt.ext import six

@@ -170,7 +171,7 @@ def _unify_keywords():
     old_path = BASE_PATH.format('keywords')
     if os.path.exists(old_path):
         if os.path.isdir(old_path):
-            for triplet in os.walk(old_path):
+            for triplet in salt.utils.path.os_walk(old_path):
                 for file_name in triplet[2]:
                     file_path = '{0}/{1}'.format(triplet[0], file_name)
                     with salt.utils.files.fopen(file_path) as fh_:

@@ -218,7 +219,7 @@ def _package_conf_ordering(conf, clean=True, keep_backup=False):
 
         backup_files = []
 
-        for triplet in os.walk(path):
+        for triplet in salt.utils.path.os_walk(path):
             for file_name in triplet[2]:
                 file_path = '{0}/{1}'.format(triplet[0], file_name)
                 cp = triplet[0][len(path) + 1:] + '/' + file_name

@@ -263,7 +264,7 @@ def _package_conf_ordering(conf, clean=True, keep_backup=False):
                     pass
 
     if clean:
-        for triplet in os.walk(path):
+        for triplet in salt.utils.path.os_walk(path):
            if len(triplet[1]) == 0 and len(triplet[2]) == 0 and \
                    triplet[0] != path:
                shutil.rmtree(triplet[0])

@@ -20,6 +20,7 @@ import sys
 
 # Import salt libs
 import salt.utils.files
+import salt.utils.path
 
 HAS_PSUTIL = False
 try:

@@ -158,7 +159,7 @@ def _deleted_files():
                 if os.path.isfile(readlink):
                     filenames.append(readlink)
                 elif os.path.isdir(readlink) and readlink != '/':
-                    for root, dummy_dirs, files in os.walk(readlink, followlinks=True):
+                    for root, dummy_dirs, files in salt.utils.path.os_walk(readlink, followlinks=True):
                         for name in files:
                             filenames.append(os.path.join(root, name))
 
@@ -54,7 +54,8 @@ import time
 from json import loads, dumps
 import yaml
 try:
-    import salt.utils
+    import salt.utils.files
+    import salt.utils.path
     import salt.client
     import salt.exceptions
 except ImportError:

@@ -581,8 +582,8 @@ class StateTestLoader(object):
         log.info("gather_files: {}".format(time.time()))
         filepath = filepath + os.sep + 'saltcheck-tests'
         rootdir = filepath
-        # for dirname, subdirlist, filelist in os.walk(rootdir):
-        for dirname, dummy, filelist in os.walk(rootdir):
+        # for dirname, subdirlist, filelist in salt.utils.path.os_walk(rootdir):
+        for dirname, dummy, filelist in salt.utils.path.os_walk(rootdir):
             for fname in filelist:
                 if fname.endswith('.tst'):
                     start_path = dirname + os.sep + fname

@@ -612,8 +613,8 @@ class StateTestLoader(object):
                 rootdir = full_path
                 if os.path.isdir(full_path):
                     log.info("searching path= {}".format(full_path))
-                    # for dirname, subdirlist, filelist in os.walk(rootdir, topdown=True):
-                    for dirname, subdirlist, dummy in os.walk(rootdir, topdown=True):
+                    # for dirname, subdirlist, filelist in salt.utils.path.os_walk(rootdir, topdown=True):
+                    for dirname, subdirlist, dummy in salt.utils.path.os_walk(rootdir, topdown=True):
                         if "saltcheck-tests" in subdirlist:
                             self.gather_files(dirname)
                             log.info("test_files list: {}".format(self.test_files))

@@ -52,6 +52,7 @@ import salt.utils.extmods
 import salt.utils.files
 import salt.utils.functools
 import salt.utils.minion
+import salt.utils.path
 import salt.utils.process
 import salt.utils.url
 import salt.utils.versions

@@ -753,7 +754,7 @@ def list_extmods():
     mod_types = os.listdir(ext_dir)
     for mod_type in mod_types:
         ret[mod_type] = set()
-        for _, _, files in os.walk(os.path.join(ext_dir, mod_type)):
+        for _, _, files in salt.utils.path.os_walk(os.path.join(ext_dir, mod_type)):
             for fh_ in files:
                 ret[mod_type].add(fh_.split('.')[0])
         ret[mod_type] = list(ret[mod_type])
@@ -13,6 +13,7 @@ import stat
 
 # Import Salt libs
 import salt.utils.files
+import salt.utils.path
 import salt.utils.platform
 
 # Import 3rd-party libs

@@ -231,7 +232,7 @@ def interfaces(root):
     reads = []
     writes = []
 
-    for path, _, files in os.walk(root, followlinks=False):
+    for path, _, files in salt.utils.path.os_walk(root, followlinks=False):
         for afile in files:
             canpath = os.path.join(path, afile)
 

@@ -120,7 +120,7 @@ def _get_zone_etc_localtime():
     hash_type = __opts__.get('hash_type', 'md5')
     tzfile_hash = salt.utils.hashutils.get_hash(tzfile, hash_type)
     # Not a link, just a copy of the tzdata file
-    for root, dirs, files in os.walk(tzdir):
+    for root, dirs, files in salt.utils.path.os_walk(tzdir):
         for filename in files:
             full_path = os.path.join(root, filename)
             olson_name = full_path[tzdir_len:]

@@ -241,7 +241,7 @@ def _iter_service_names():
     # is named rc-sysinit, while a configuration file /etc/init/net/apache.conf
     # is named net/apache'
     init_root = '/etc/init/'
-    for root, dirnames, filenames in os.walk(init_root):
+    for root, dirnames, filenames in salt.utils.path.os_walk(init_root):
         relpath = os.path.relpath(root, init_root)
         for filename in fnmatch.filter(filenames, '*.conf'):
             if relpath == '.':

@@ -52,6 +52,7 @@ import time
 from salt.exceptions import CommandExecutionError, SaltInvocationError
 import salt.utils.dictupdate as dictupdate
 import salt.utils.files
+import salt.utils.path
 import salt.utils.platform
 import salt.utils.stringutils
 

@@ -2746,7 +2747,7 @@ def _processPolicyDefinitions(policy_def_path='c:\\Windows\\PolicyDefinitions',
     policydefs_resources_localname_xpath = etree.XPath(
         '//*[local-name() = "policyDefinitionResources"]/*')
     policydef_resources_xpath = etree.XPath('/policyDefinitionResources')
-    for root, dirs, files in os.walk(policy_def_path):
+    for root, dirs, files in salt.utils.path.os_walk(policy_def_path):
         if root == policy_def_path:
             for t_admfile in files:
                 admfile = os.path.join(root, t_admfile)
@@ -600,7 +600,7 @@ def refresh_db(**kwargs):
     # Clear minion repo-ng cache see #35342 discussion
     log.info('Removing all *.sls files under \'%s\'', repo_details.local_dest)
     failed = []
-    for root, _, files in os.walk(repo_details.local_dest, followlinks=False):
+    for root, _, files in salt.utils.path.os_walk(repo_details.local_dest, followlinks=False):
         for name in files:
             if name.endswith('.sls'):
                 full_filename = os.path.join(root, name)

@@ -745,7 +745,7 @@ def genrepo(**kwargs):
     ret['errors'] = {}
     repo_details = _get_repo_details(saltenv)
 
-    for root, _, files in os.walk(repo_details.local_dest, followlinks=False):
+    for root, _, files in salt.utils.path.os_walk(repo_details.local_dest, followlinks=False):
         short_path = os.path.relpath(root, repo_details.local_dest)
         if short_path == '.':
             short_path = ''

@@ -48,6 +48,7 @@ import salt.utils.files
 import salt.utils.functools
 import salt.utils.itertools
 import salt.utils.lazy
+import salt.utils.path
 import salt.utils.pkg
 import salt.utils.pkg.rpm
 import salt.utils.systemd

@@ -996,7 +997,7 @@ def list_downloaded():
     CACHE_DIR = os.path.join('/var/cache/', _yum())
 
     ret = {}
-    for root, dirnames, filenames in os.walk(CACHE_DIR):
+    for root, dirnames, filenames in salt.utils.path.os_walk(CACHE_DIR):
         for filename in fnmatch.filter(filenames, '*.rpm'):
             package_path = os.path.join(root, filename)
             pkg_info = __salt__['lowpkg.bin_pkg_info'](package_path)

@@ -42,6 +42,7 @@ from salt.ext.six.moves.urllib.request import urlopen as _urlopen
 
 # Import salt libs
 import salt.utils.files
+import salt.utils.path
 from salt.exceptions import CommandExecutionError
 
 

@@ -379,7 +380,7 @@ def _find_cfgs(path, cfgs=None):
         if os.path.isdir(fi) and (i not in ignored):
             dirs.append(fi)
     for fpath in dirs:
-        for p, ids, ifs in os.walk(fpath):
+        for p, ids, ifs in salt.utils.path.os_walk(fpath):
             for i in ifs:
                 if i.endswith('.cfg'):
                     cfgs.append(os.path.join(p, i))

@@ -2011,7 +2011,7 @@ def list_downloaded():
     CACHE_DIR = '/var/cache/zypp/packages/'
 
     ret = {}
-    for root, dirnames, filenames in os.walk(CACHE_DIR):
+    for root, dirnames, filenames in salt.utils.path.os_walk(CACHE_DIR):
         for filename in fnmatch.filter(filenames, '*.rpm'):
             package_path = os.path.join(root, filename)
             pkg_info = __salt__['lowpkg.bin_pkg_info'](package_path)
@@ -190,6 +190,7 @@ import salt.loader
 import salt.utils.dictupdate
 import salt.utils.files
 import salt.utils.minions
+import salt.utils.path
 import salt.utils.stringio
 import salt.template
 

@@ -199,7 +200,7 @@ log = logging.getLogger(__name__)
 
 def _on_walk_error(err):
     '''
-    Log os.walk() error.
+    Log salt.utils.path.os_walk() error.
     '''
     log.error('%s: %s', err.filename, err.strerror)
 

@@ -239,7 +240,7 @@ def _construct_pillar(top_dir,
     renderers = salt.loader.render(__opts__, __salt__)
 
     norm_top_dir = os.path.normpath(top_dir)
-    for dir_path, dir_names, file_names in os.walk(
+    for dir_path, dir_names, file_names in salt.utils.path.os_walk(
             top_dir, topdown=True, onerror=_on_walk_error,
             followlinks=follow_dir_links):
         # Find current path in pillar tree

@@ -24,6 +24,7 @@ import salt.key
 import salt.utils.compat
 import salt.utils.files
 import salt.utils.minions
+import salt.utils.path
 import salt.utils.raetevent
 import salt.utils.versions
 import salt.client

@@ -139,7 +140,7 @@ def key_regen():
         print(client_error)
         return False
 
-    for root, _, files in os.walk(__opts__['pki_dir']):
+    for root, _, files in salt.utils.path.os_walk(__opts__['pki_dir']):
         for fn_ in files:
             path = os.path.join(root, fn_)
             try:

@@ -21,6 +21,7 @@ except ImportError:
 from salt.exceptions import CommandExecutionError, SaltRenderError
 import salt.utils.files
 import salt.utils.gitfs
+import salt.utils.path
 import logging
 import salt.minion
 import salt.loader

@@ -65,7 +66,7 @@ def genrepo(opts=None, fire_event=True):
     if not os.path.exists(winrepo_dir):
         os.makedirs(winrepo_dir)
     renderers = salt.loader.render(opts, __salt__)
-    for root, _, files in os.walk(winrepo_dir):
+    for root, _, files in salt.utils.path.os_walk(winrepo_dir):
         for name in files:
             if name.endswith('.sls'):
                 try:
@@ -33,6 +33,7 @@ from salt.ext.six.moves import filter
 from salt.template import compile_template
 import salt.utils.files
 import salt.utils.http as http
+import salt.utils.path
 import salt.utils.platform
 import salt.utils.win_functions
 from salt.utils.yamldumper import SafeOrderedDumper

@@ -615,7 +616,7 @@ class SPMClient(object):
         if os.path.exists(self.opts['spm_repos_config']):
             repo_files.append(self.opts['spm_repos_config'])
 
-        for (dirpath, dirnames, filenames) in os.walk('{0}.d'.format(self.opts['spm_repos_config'])):
+        for (dirpath, dirnames, filenames) in salt.utils.path.os_walk('{0}.d'.format(self.opts['spm_repos_config'])):
             for repo_file in filenames:
                 if not repo_file.endswith('.repo'):
                     continue

@@ -726,7 +727,7 @@ class SPMClient(object):
 
         old_files = []
         repo_metadata = {}
-        for (dirpath, dirnames, filenames) in os.walk(repo_path):
+        for (dirpath, dirnames, filenames) in salt.utils.path.os_walk(repo_path):
             for spm_file in filenames:
                 if not spm_file.endswith('.spm'):
                     continue

@@ -286,6 +286,7 @@ import salt.utils.dateutils
 import salt.utils.dictupdate
 import salt.utils.files
 import salt.utils.hashutils
+import salt.utils.path
 import salt.utils.platform
 import salt.utils.stringutils
 import salt.utils.templates

@@ -567,7 +568,7 @@ def _gen_keep_files(name, require, walk_d=None):
     def _process(name):
         ret = set()
         if os.path.isdir(name):
-            for root, dirs, files in os.walk(name):
+            for root, dirs, files in salt.utils.path.os_walk(name):
                 ret.add(name)
                 for name in files:
                     ret.add(os.path.join(root, name))

@@ -654,7 +655,7 @@ def _clean_dir(root, keep, exclude_pat):
             except OSError:
                 __salt__['file.remove'](nfn)
 
-    for roots, dirs, files in os.walk(root):
+    for roots, dirs, files in salt.utils.path.os_walk(root):
         for name in itertools.chain(dirs, files):
             _delete_not_kept(os.path.join(roots, name))
     return list(removed)

@@ -2605,7 +2606,7 @@ def _depth_limited_walk(top, max_depth=None):
     Walk the directory tree under root up till reaching max_depth.
     With max_depth=None (default), do not limit depth.
     '''
-    for root, dirs, files in os.walk(top):
+    for root, dirs, files in salt.utils.path.os_walk(top):
         if max_depth is not None:
             rel_depth = root.count(os.path.sep) - top.count(os.path.sep)
             if rel_depth >= max_depth:

@@ -5465,7 +5466,7 @@ def copy(
     try:
         if os.path.isdir(source):
             shutil.copytree(source, name, symlinks=True)
-            for root, dirs, files in os.walk(name):
+            for root, dirs, files in salt.utils.path.os_walk(name):
                 for dir_ in dirs:
                     __salt__['file.lchown'](os.path.join(root, dir_), user, group)
                 for file_ in files:
@@ -13,6 +13,7 @@ import itertools
 import salt.runner
 import salt.config
 import salt.syspaths
+import salt.utils.path
 
 
 def __virtual__():

@@ -69,7 +70,7 @@ def genrepo(name, force=False, allow_empty=False):
             ret['comment'] = '{0} is missing'.format(winrepo_cachefile)
         else:
             winrepo_cachefile_mtime = os.stat(winrepo_cachefile)[stat.ST_MTIME]
-            for root, dirs, files in os.walk(winrepo_dir):
+            for root, dirs, files in salt.utils.path.os_walk(winrepo_dir):
                 for name in itertools.chain(files, dirs):
                     full_path = os.path.join(root, name)
                     if os.stat(full_path)[stat.ST_MTIME] > winrepo_cachefile_mtime:

@@ -11,6 +11,7 @@ import os
 import logging
 
 import salt.utils.files
+import salt.utils.path
 import salt.payload
 
 log = logging.getLogger(__name__)

@@ -91,7 +92,7 @@ def list_tokens(opts):
     :returns: List of dicts (tokens)
     '''
     ret = []
-    for (dirpath, dirnames, filenames) in os.walk(opts['token_dir']):
+    for (dirpath, dirnames, filenames) in salt.utils.path.os_walk(opts['token_dir']):
         for token in filenames:
             ret.append(token)
     return ret

@@ -12,6 +12,7 @@ import shutil
 # Import salt libs
 import salt.fileclient
 import salt.utils.hashutils
+import salt.utils.path
 import salt.utils.url
 
 # Import 3rd-party libs

@@ -22,7 +23,7 @@ log = logging.getLogger(__name__)
 
 def _list_emptydirs(rootdir):
     emptydirs = []
-    for root, dirs, files in os.walk(rootdir):
+    for root, dirs, files in salt.utils.path.os_walk(rootdir):
         if not files and not dirs:
             emptydirs.append(root)
     return emptydirs

@@ -30,7 +31,7 @@ def _list_emptydirs(rootdir):
 
 def _listdir_recursively(rootdir):
     file_list = []
-    for root, dirs, files in os.walk(rootdir):
+    for root, dirs, files in salt.utils.path.os_walk(rootdir):
         for filename in files:
             relpath = os.path.relpath(root, rootdir).strip('.')
             file_list.append(os.path.join(relpath, filename))
@@ -106,7 +106,7 @@ def recursive_copy(source, dest):
 
     (identical to cp -r on a unix machine)
     '''
-    for root, _, files in os.walk(source):
+    for root, _, files in salt.utils.path.os_walk(source):
         path_from_source = root.replace(source, '').lstrip(os.sep)
         target_directory = os.path.join(dest, path_from_source)
         if not os.path.exists(target_directory):

@@ -107,6 +107,7 @@ from salt.ext import six
 # Import salt libs
 import salt.utils.args
 import salt.utils.hashutils
+import salt.utils.path
 import salt.utils.stringutils
 import salt.defaults.exitcodes
 from salt.utils.filebuffer import BufferedReader

@@ -642,7 +643,7 @@ class Finder(object):
             for result in self._perform_actions(path, fstat=fstat):
                 yield result
 
-        for dirpath, dirs, files in os.walk(path):
+        for dirpath, dirs, files in salt.utils.path.os_walk(path):
             relpath = os.path.relpath(dirpath, path)
             depth = path_depth(relpath) + 1
             if depth >= self.mindepth and (self.maxdepth is None or self.maxdepth >= depth):

@@ -401,3 +401,15 @@ def safe_path(path, allow_path=None):
     good_path = True
 
     return good_path
+
+
+def os_walk(top, *args, **kwargs):
+    '''
+    This is a helper to ensure that we get unicode paths when walking a
+    filesystem. The reason for this is that when using os.walk, the paths in
+    the generator which is returned are all the same type as the top directory
+    passed in. This can cause problems when a str path is passed and the
+    filesystem underneath that path contains files with unicode characters in
+    the filename.
+    '''
+    return os.walk(salt.utils.stringutils.to_unicode(top), *args, **kwargs)
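With the helper in place, callers change only the function they iterate over; os_walk passes any extra positional and keyword arguments straight through to os.walk, so followlinks, topdown and onerror keep working. A hypothetical caller (the function name and the .sls filter are illustrative, not part of this commit) showing the mechanical conversion applied throughout the diff:

    import os

    import salt.utils.path

    def list_sls(top):
        '''Return every .sls file under ``top``, with unicode paths on PY2.'''
        found = []
        for root, dirs, files in salt.utils.path.os_walk(top, followlinks=False):
            for fname in files:
                if fname.endswith('.sls'):
                    found.append(os.path.join(root, fname))
        return found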
@@ -13,6 +13,8 @@ from __future__ import absolute_import
 import logging
 import os
 
+import salt.utils.path
+
 log = logging.getLogger(__name__)
 
 

@@ -102,7 +104,7 @@ def get_modules():
             continue
 
         # get a list of all files in the root_path
-        for root_dir, sub_dirs, file_names in os.walk(root_path):
+        for root_dir, sub_dirs, file_names in salt.utils.path.os_walk(root_path):
             for file_name in file_names:
                 base_name, file_extension = os.path.splitext(file_name)
 

@@ -1,12 +1,18 @@
 # -*- coding: utf-8 -*-
+# Import Python libs
 from __future__ import absolute_import
 import os
 import re
 import logging
-from salt.ext.six import iteritems
 import yaml
 from jinja2 import FileSystemLoader, Environment
 
+# Import Salt libs
+import salt.utils.path
+
+# Import 3rd-party libs
+from salt.ext.six import iteritems
+
 log = logging.getLogger(__name__)
 
 

@@ -40,7 +46,7 @@ def get_class(_class, salt_data):
     sub_init = '{0}/classes/{1}/init.yml'.format(saltclass_path,
                                                  _class.replace('.', '/'))
 
-    for root, dirs, files in os.walk('{0}/classes'.format(saltclass_path)):
+    for root, dirs, files in salt.utils.path.os_walk('{0}/classes'.format(saltclass_path)):
         for l_file in files:
             l_files.append('{0}/{1}'.format(root, l_file))
 

@@ -214,7 +220,7 @@ def expanded_dict_from_minion(minion_id, salt_data):
     _file = ''
     saltclass_path = salt_data['path']
     # Start
-    for root, dirs, files in os.walk('{0}/nodes'.format(saltclass_path)):
+    for root, dirs, files in salt.utils.path.os_walk('{0}/nodes'.format(saltclass_path)):
         for minion_file in files:
             if minion_file == '{0}.yml'.format(minion_id):
                 _file = os.path.join(root, minion_file)
@@ -72,6 +72,7 @@ except ImportError:
 import salt
 import salt.utils.files
 import salt.utils.hashutils
+import salt.utils.path
 import salt.exceptions
 import salt.version
 

@@ -300,7 +301,7 @@ def gen_thin(cachedir, extra_mods='', overwrite=False, so_mods='',
             elif compress == 'zip':
                 tfp.write(base, arcname=os.path.join('py{0}'.format(py_ver), base))
             continue
-        for root, dirs, files in os.walk(base, followlinks=True):
+        for root, dirs, files in salt.utils.path.os_walk(base, followlinks=True):
             for name in files:
                 if not name.endswith(('.pyc', '.pyo')):
                     if compress == 'gzip':

@@ -594,7 +595,7 @@ def gen_min(cachedir, extra_mods='', overwrite=False, so_mods='',
             # top is a single file module
             tfp.add(base, arcname=os.path.join('py{0}'.format(py_ver), base))
             continue
-        for root, dirs, files in os.walk(base, followlinks=True):
+        for root, dirs, files in salt.utils.path.os_walk(base, followlinks=True):
             for name in files:
                 if name.endswith(('.pyc', '.pyo')):
                     continue

@@ -28,6 +28,7 @@ from salt.exceptions import SaltClientError, SaltSystemExit, \
     CommandExecutionError
 import salt.defaults.exitcodes
 import salt.utils.files
+import salt.utils.path
 import salt.utils.platform
 import salt.utils.user
 import salt.utils.versions

@@ -269,7 +270,7 @@ def verify_env(
                     fsubdir = os.path.join(dir_, subdir)
                     if '{0}jobs'.format(os.path.sep) in fsubdir:
                         continue
-                    for root, dirs, files in os.walk(fsubdir):
+                    for root, dirs, files in salt.utils.path.os_walk(fsubdir):
                         for name in files:
                             if name.startswith('.'):
                                 continue
@@ -9,6 +9,7 @@ import os
 
 # Import salt libs
 import salt.utils.files
+import salt.utils.path
 
 # Import 3rd-party libs
 from salt.ext import six

@@ -43,7 +44,7 @@ def list_env(saltenv='base'):
         return ret
     for f_root in __opts__['file_roots'][saltenv]:
         ret[f_root] = {}
-        for root, dirs, files in os.walk(f_root):
+        for root, dirs, files in salt.utils.path.os_walk(f_root):
             sub = ret[f_root]
             if root != f_root:
                 # grab subroot ref

@@ -10,6 +10,7 @@ import os
 
 # Import salt libs
 import salt.utils.files
+import salt.utils.path
 
 # Import 3rd-party libs
 from salt.ext import six

@@ -44,7 +45,7 @@ def list_env(saltenv='base'):
         return ret
     for f_root in __opts__['pillar_roots'][saltenv]:
         ret[f_root] = {}
-        for root, dirs, files in os.walk(f_root):
+        for root, dirs, files in salt.utils.path.os_walk(f_root):
             sub = ret[f_root]
             if root != f_root:
                 # grab subroot ref
@@ -598,7 +598,7 @@ Repository 'DUMMY' not found by its alias, number, or URI.
         self.assertEqual(len(list_patches), 3)
         self.assertDictEqual(list_patches, PATCHES_RET)
 
-    @patch('os.walk', MagicMock(return_value=[('test', 'test', 'test')]))
+    @patch('salt.utils.path.os_walk', MagicMock(return_value=[('test', 'test', 'test')]))
     @patch('os.path.getsize', MagicMock(return_value=123456))
     @patch('os.path.getctime', MagicMock(return_value=1234567890.123456))
     @patch('fnmatch.filter', MagicMock(return_value=['/var/cache/zypper/packages/foo/bar/test_package.rpm']))

@@ -19,6 +19,7 @@ from tests.support.mock import (
 
 # Import Salt Libs
 import salt.config
+import salt.utils.path
 from salt.syspaths import BASE_FILE_ROOTS_DIR
 import salt.states.winrepo as winrepo
 

@@ -77,7 +78,7 @@ class WinrepoTestCase(TestCase, LoaderModuleMockMixin):
         mock = MagicMock(return_value=[0, 1, 2, 3, 4, 5, 6, 7, 8])
         with patch.object(os, 'stat', mock):
             mock = MagicMock(return_value=[])
-            with patch.object(os, 'walk', mock):
+            with patch.object(salt.utils.path, 'os_walk', mock):
                 with patch.dict(winrepo.__opts__, {'test': True}):
                     ret.update({'comment': '', 'result': None})
                     self.assertDictEqual(winrepo.genrepo('salt'), ret)

@@ -9,6 +9,9 @@ from __future__ import absolute_import
 import fnmatch
 import os
 
+# Import Salt libs
+import salt.utils.path
+
 # Import Salt Testing libs
 from tests.support.unit import TestCase
 from tests.support.paths import CODE_DIR

@@ -65,7 +68,7 @@ class BadTestModuleNamesTestCase(TestCase):
         excluded_dirs, included_dirs = tuple(EXCLUDED_DIRS), tuple(INCLUDED_DIRS)
         tests_dir = os.path.join(CODE_DIR, 'tests')
         bad_names = []
-        for root, dirs, files in os.walk(tests_dir):
+        for root, dirs, files in salt.utils.path.os_walk(tests_dir):
             reldir = os.path.relpath(root, CODE_DIR)
             if (reldir.startswith(excluded_dirs) and not self._match_dirs(reldir, included_dirs)) \
                     or reldir.endswith('__pycache__'):
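Because callers now go through salt.utils.path.os_walk, unit tests have to mock that name instead of os.walk, as the two test hunks above do. A hedged sketch of the pattern, using the same tests.support.mock helpers the suite already imports (the fake directory listing here is made up for illustration):

    import salt.utils.path
    from tests.support.mock import MagicMock, patch

    def test_with_fake_walk():
        fake_walk = MagicMock(return_value=[('/srv/repo', [], ['pkg.sls'])])
        with patch.object(salt.utils.path, 'os_walk', fake_walk):
            # Code under test that iterates salt.utils.path.os_walk(...) now
            # receives the canned listing instead of touching the filesystem.
            result = list(salt.utils.path.os_walk('/srv/repo'))
        assert result == [('/srv/repo', [], ['pkg.sls'])]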