Mirror of https://github.com/saltstack/salt.git (synced 2025-04-07 04:51:38 +00:00)
Migrated some invoke tasks to python-tools-scripts

* `tasks/docs.py` -> `tools/precommit/docs.py`
* `tasks/docstrings.py` -> `tools/precommit/docstrings.py`

Refs #64374
Signed-off-by: Pedro Algarvio <palgarvio@vmware.com>

parent 54ae2e5e84, commit fce51983b3
6 changed files with 259 additions and 284 deletions
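For orientation, here is a minimal sketch of the migration pattern this commit applies, condensed from the decorators and signatures visible in the diff below (it is not a full copy of either file): the invoke `@task` entry points become `python-tools-scripts` commands grouped under the existing `pre-commit` parent group, and reporting and exit codes go through the `Context` object instead of `tasks.utils`.

# Minimal sketch of the migration pattern, condensed from the diff below.
import pathlib

from ptscripts import Context, command_group

# Old style (tasks/docs.py, removed): invoke tasks.
#   from invoke import task
#
#   @task(iterable=["files"], positional=["files"])
#   def check(ctx, files):
#       ...
#       utils.exit_invoke(exitcode)

# New style (tools/precommit/docs.py): a command group attached under the
# existing "pre-commit" parent group, with Context-based reporting.
cgroup = command_group(name="docs", help=__doc__, parent="pre-commit")


@cgroup.command(
    name="check",
    arguments={"files": {"help": "List of files to check", "nargs": "*"}},
)
def check(ctx: Context, files: list[pathlib.Path]) -> None:
    exitcode = 0
    ctx.info("Checking inline :doc: markup")
    # ... each sub-check returns an int that is added to exitcode ...
    ctx.exit(exitcode)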
.pre-commit-config.yaml

@@ -55,12 +55,7 @@ repos:
- pre-commit
- changelog
- pre-commit-checks
additional_dependencies:
- boto3==1.21.46
- pyyaml==6.0.1
- jinja2==3.1.2
- packaging==23.0
- virustotal3==1.0.8

- id: tools
alias: generate-workflows
name: Generate GitHub Workflow Templates

@@ -70,12 +65,7 @@ repos:
- pre-commit
- workflows
- generate-workflows
additional_dependencies:
- boto3==1.21.46
- pyyaml==6.0.1
- jinja2==3.1.2
- packaging==23.0
- virustotal3==1.0.8

- id: tools
alias: actionlint
name: Lint GitHub Actions Workflows

@@ -86,18 +76,51 @@ repos:
- pre-commit
- workflows
- actionlint
additional_dependencies:
- boto3==1.21.46
- pyyaml==6.0.1
- jinja2==3.1.2
- packaging==23.0
- virustotal3==1.0.8

- id: tools
alias: check-docs
name: Check Docs
files: ^(salt/.*\.py|doc/ref/.*\.rst)$
args:
- pre-commit
- docs
- check

- id: tools
alias: check-docstrings
name: Check docstrings
files: salt/.*\.py$
exclude: >
(?x)^(
templates/.*|
salt/ext/.*|
)$
args:
- pre-commit
- docstrings
- check

- id: tools
alias: check-known-missing-docstrings
name: Check Known Missing Docstrings
stages: [manual]
files: salt/.*\.py$
exclude: >
(?x)^(
templates/.*|
salt/ext/.*|
)$
args:
- pre-commit
- docstrings
- check

# ----- Packaging Requirements ------------------------------------------------------------------------------------>

- repo: https://github.com/saltstack/pip-tools-compile-impersonate
rev: "4.8"
hooks:

# ----- Packaging Requirements ------------------------------------------------------------------------------------>
- id: pip-tools-compile
alias: compile-pkg-linux-3.7-zmq-requirements
name: Linux Packaging Py3.7 ZeroMQ Requirements

@@ -1205,24 +1228,6 @@ repos:
# <---- Security ---------------------------------------------------------------------------------------------------

# ----- Pre-Commit ------------------------------------------------------------------------------------------------>
- repo: https://github.com/saltstack/invoke-pre-commit
rev: v1.9.0
hooks:
- id: invoke
alias: check-docs
name: Check Docs
files: ^(salt/.*\.py|doc/ref/.*\.rst)$
args:
- docs.check
additional_dependencies:
- blessings==1.7
- pyyaml==6.0.1
- distro==1.7.0
- jinja2==3.0.3
- msgpack==1.0.3
- packaging
- looseversion

- repo: https://github.com/saltstack/invoke-pre-commit
rev: v1.9.0
hooks:

@@ -1242,9 +1247,6 @@ repos:
- packaging
- looseversion

- repo: https://github.com/saltstack/invoke-pre-commit
rev: v1.9.0
hooks:
- id: invoke
alias: loader-check-virtual
name: Check loader modules __virtual__

@@ -1265,29 +1267,6 @@ repos:
- packaging
- looseversion

- repo: https://github.com/saltstack/invoke-pre-commit
rev: v1.9.0
hooks:
- id: invoke
alias: check-docstrings
name: Check docstrings
files: salt/.*\.py$
exclude: >
(?x)^(
templates/.*|
salt/ext/.*|
)$
args:
- docstrings.check
additional_dependencies:
- blessings==1.7
- pyyaml==6.0.1
- distro==1.7.0
- jinja2==3.0.3
- msgpack==1.0.3
- packaging
- looseversion

- repo: https://github.com/saltstack/invoke-pre-commit
rev: v1.9.0
hooks:

@@ -1314,13 +1293,17 @@ repos:
- looseversion

- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.0.0
rev: v1.3.0
hooks:
- id: mypy
alias: mypy-tools
name: Run mypy against tools
files: ^tools/.*\.py$
#args: [--strict]
exclude: >
(?x)^(
templates/.*|
salt/.*|
)$
additional_dependencies:
- attrs
- rich
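The `exclude` pattern used by the check-docstrings hooks above is a verbose regular expression. A quick illustration of which paths it filters out follows; this is only a sketch, pre-commit itself performs the real matching against its file list.

import re

# The verbose-mode pattern from the hook definitions above.
EXCLUDE = re.compile(
    r"""(?x)^(
    templates/.*|
    salt/ext/.*|
    )$"""
)

for candidate in ("salt/ext/tornado/gen.py", "templates/module.py", "salt/modules/cmdmod.py"):
    # Paths matching the pattern are skipped by the hook; the rest are checked.
    print(candidate, "excluded" if EXCLUDE.match(candidate) else "checked")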
changelog/64374.fixed.md (new file, 4 lines)

@@ -0,0 +1,4 @@
Migrated some [`invoke`](https://www.pyinvoke.org/) tasks to [`python-tools-scripts`](https://github.com/s0undt3ch/python-tools-scripts).

* `tasks/docs.py` -> `tools/precommit/docs.py`
* `tasks/docstrings.py` -> `tools/precommit/docstrings.py`
setup.cfg (14 lines changed)

@@ -3,10 +3,22 @@ owner = root
group = root

[mypy]
packages = tools
exclude = (?x)(
salt
| tests
).*\.py
implicit_optional = True
show_error_codes = True
warn_return_any = True
warn_unused_configs = True

[mypy.tools]
[mypy-tools.*]
ignore_missing_imports = True

[mypy-tools.precommit.docstrings]
follow_imports = silent

[mypy-salt.*]
follow_imports = silent
ignore_missing_imports = True
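The new `[mypy]` section above only covers `tools/` and enables a couple of strictness flags. As a rough illustration (not taken from this commit), `warn_return_any = True` flags functions that return an untyped value from an annotated function:

from typing import Any


def load_config() -> dict:
    data: Any = {}  # pretend this came from an untyped third-party parser
    # With warn_return_any enabled, mypy reports:
    #   Returning Any from function declared to return "dict"  [no-any-return]
    return data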
tools/__init__.py

@@ -42,9 +42,11 @@ ptscripts.register_tools_module("tools.pkg.repo.publish")
ptscripts.register_tools_module("tools.precommit")
ptscripts.register_tools_module("tools.precommit.changelog")
ptscripts.register_tools_module("tools.precommit.workflows")
ptscripts.register_tools_module("tools.precommit.docs")
ptscripts.register_tools_module("tools.precommit.docstrings")
ptscripts.register_tools_module("tools.release", venv_config=RELEASE_VENV_CONFIG)
ptscripts.register_tools_module("tools.testsuite")
ptscripts.register_tools_module("tools.testsuite.download")
ptscripts.register_tools_module("tools.release", venv_config=RELEASE_VENV_CONFIG)
ptscripts.register_tools_module("tools.vm")

for name in ("boto3", "botocore", "urllib3"):
tools/precommit/docs.py

@@ -1,9 +1,8 @@
"""
tasks.docstrings
~~~~~~~~~~~~~~~~

Check salt code base for for missing or wrong docstrings
Check salt code base for for missing or wrong docs
"""
# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated
from __future__ import annotations

import ast
import collections

@@ -11,21 +10,18 @@ import os
import pathlib
import re

from invoke import task  # pylint: disable=3rd-party-module-not-gated
from ptscripts import Context, command_group

from tasks import utils
import tools.utils

CODE_DIR = pathlib.Path(__file__).resolve().parent.parent
DOCS_DIR = CODE_DIR / "doc"
SALT_CODE_DIR = CODE_DIR / "salt"
DOCS_DIR = tools.utils.REPO_ROOT / "doc"
SALT_CODE_DIR = tools.utils.REPO_ROOT / "salt"

os.chdir(str(CODE_DIR))

python_module_to_doc_path = {}
doc_path_to_python_module = {}
PYTHON_MODULE_TO_DOC_PATH = {}
DOC_PATH_TO_PYTHON_MODULE = {}


check_paths = (
CHECK_PATHS = (
"salt/auth",
"salt/beacons",
"salt/cache",

@@ -52,12 +48,14 @@ check_paths = (
"salt/tops",
"salt/wheel",
)
exclude_paths = (
EXCLUDE_PATHS = (
"salt/cloud/cli.py",
"salt/cloud/exceptions.py",
"salt/cloud/libcloudfuncs.py",
)

cgroup = command_group(name="docs", help=__doc__, parent="pre-commit")


def build_path_cache():
"""

@@ -65,13 +63,13 @@ def build_path_cache():
"""

for path in SALT_CODE_DIR.rglob("*.py"):
path = path.resolve().relative_to(CODE_DIR)
path = path.resolve().relative_to(tools.utils.REPO_ROOT)
strpath = str(path)
if strpath.endswith("__init__.py"):
continue
if not strpath.startswith(check_paths):
if not strpath.startswith(CHECK_PATHS):
continue
if strpath.startswith(exclude_paths):
if strpath.startswith(EXCLUDE_PATHS):
continue

parts = list(path.parts)

@@ -113,32 +111,21 @@ def build_path_cache():
/ "all"
/ str(path).replace(".py", ".rst").replace(os.sep, ".")
)
stub_path = stub_path.relative_to(CODE_DIR)
python_module_to_doc_path[path] = stub_path
stub_path = stub_path.relative_to(tools.utils.REPO_ROOT)
PYTHON_MODULE_TO_DOC_PATH[path] = stub_path
if path.exists():
doc_path_to_python_module[stub_path] = path
DOC_PATH_TO_PYTHON_MODULE[stub_path] = path


build_path_cache()


def build_file_list(files, extension):
# Unfortunately invoke does not support nargs.
# We migth have been passed --files="foo.py bar.py"
# Turn that into a list of paths
_files = []
for path in files:
if not path:
continue
for spath in path.split():
if not spath.endswith(extension):
continue
_files.append(spath)
if not _files:
_files = CODE_DIR.rglob("*{}".format(extension))
if not files:
_files = tools.utils.REPO_ROOT.rglob("*{}".format(extension))
else:
_files = [pathlib.Path(fname).resolve() for fname in _files]
_files = [path.relative_to(CODE_DIR) for path in _files]
_files = [fpath.resolve() for fpath in files if fpath.suffix == extension]
_files = [path.relative_to(tools.utils.REPO_ROOT) for path in _files]
return _files


@@ -148,9 +135,9 @@ def build_python_module_paths(files):
strpath = str(path)
if strpath.endswith("__init__.py"):
continue
if not strpath.startswith(check_paths):
if not strpath.startswith(CHECK_PATHS):
continue
if strpath.startswith(exclude_paths):
if strpath.startswith(EXCLUDE_PATHS):
continue
_files.append(path)
return _files

@@ -160,8 +147,7 @@ def build_docs_paths(files):
return build_file_list(files, ".rst")


@task(iterable=["files"], positional=["files"])
def check_inline_markup(ctx, files):
def check_inline_markup(ctx: Context, files: list[pathlib.Path]) -> int:
"""
Check docstring for :doc: usage

@@ -174,9 +160,6 @@ def check_inline_markup(ctx, files):

https://github.com/saltstack/salt/issues/12788
"""
# CD into Salt's repo root directory
ctx.cd(CODE_DIR)

files = build_python_module_paths(files)

exitcode = 0

@@ -188,18 +171,14 @@ def check_inline_markup(ctx, files):
if not docstring:
continue
if ":doc:" in docstring:
utils.error(
"The {} function in {} contains ':doc:' usage", funcdef.name, path
ctx.error(
f"The {funcdef.name} function in {path} contains ':doc:' usage"
)
exitcode += 1
return exitcode


@task(iterable=["files"])
def check_stubs(ctx, files):
# CD into Salt's repo root directory
ctx.cd(CODE_DIR)

def check_stubs(ctx: Context, files: list[pathlib.Path]) -> int:
files = build_python_module_paths(files)

exitcode = 0

@@ -207,21 +186,20 @@ def check_stubs(ctx, files):
strpath = str(path)
if strpath.endswith("__init__.py"):
continue
if not strpath.startswith(check_paths):
if not strpath.startswith(CHECK_PATHS):
continue
if strpath.startswith(exclude_paths):
if strpath.startswith(EXCLUDE_PATHS):
continue
stub_path = python_module_to_doc_path[path]
stub_path = PYTHON_MODULE_TO_DOC_PATH[path]
if not stub_path.exists():
exitcode += 1
utils.error(
"The module at {} does not have a sphinx stub at {}", path, stub_path
ctx.error(
f"The module at {path} does not have a sphinx stub at {stub_path}"
)
return exitcode


@task(iterable=["files"])
def check_virtual(ctx, files):
def check_virtual(ctx: Context, files: list[pathlib.Path]) -> int:
"""
Check if .rst files for each module contains the text ".. _virtual"
indicating it is a virtual doc page, and, in case a module exists by

@@ -235,22 +213,16 @@ def check_virtual(ctx, files):
try:
contents = path.read_text()
except Exception as exc:  # pylint: disable=broad-except
utils.error(
"Error while processing '{}': {}".format(
path,
exc,
)
)
ctx.error(f"Error while processing '{path}': {exc}")
exitcode += 1
continue
if ".. _virtual-" in contents:
try:
python_module = doc_path_to_python_module[path]
utils.error(
"The doc file at {} indicates that it's virtual, yet, there's a"
" python module at {} that will shaddow it.",
path,
python_module,
python_module = DOC_PATH_TO_PYTHON_MODULE[path]
ctx.error(
f"The doc file at {path} indicates that it's virtual, yet, "
f"there's a python module at {python_module} that will "
"shaddow it.",
)
exitcode += 1
except KeyError:

@@ -259,8 +231,7 @@ def check_virtual(ctx, files):
return exitcode


@task(iterable=["files"])
def check_module_indexes(ctx, files):
def check_module_indexes(ctx: Context, files: list[pathlib.Path]) -> int:
exitcode = 0
files = build_docs_paths(files)
for path in files:

@@ -288,9 +259,8 @@ def check_module_indexes(ctx, files):
)
if module_index != sorted(module_index):
exitcode += 1
utils.error(
"The autosummary mods in {} are not properly sorted. Please sort them.",
path,
ctx.error(
f"The autosummary mods in {path} are not properly sorted. Please sort them.",
)

module_index_duplicates = [

@@ -298,8 +268,8 @@ def check_module_indexes(ctx, files):
]
if module_index_duplicates:
exitcode += 1
utils.error(
"Module index {} contains duplicates: {}", path, module_index_duplicates
ctx.error(
f"Module index {path} contains duplicates: {module_index_duplicates}"
)
# Let's check if all python modules are included in the index
path_parts = list(path.parts)

@@ -320,7 +290,7 @@ def check_module_indexes(ctx, files):
package = "log_handlers"
path_parts = []
python_package = SALT_CODE_DIR.joinpath(package, *path_parts).relative_to(
CODE_DIR
tools.utils.REPO_ROOT
)
modules = set()
for module in python_package.rglob("*.py"):

@@ -358,26 +328,26 @@ def check_module_indexes(ctx, files):
missing_modules_in_index = set(modules) - set(module_index)
if missing_modules_in_index:
exitcode += 1
utils.error(
"The module index at {} is missing the following modules: {}",
path,
", ".join(missing_modules_in_index),
ctx.error(
f"The module index at {path} is missing the following modules: "
f"{', '.join(missing_modules_in_index)}"
)
extra_modules_in_index = set(module_index) - set(modules)
if extra_modules_in_index:
exitcode += 1
utils.error(
"The module index at {} has extra modules(non existing): {}",
path,
", ".join(extra_modules_in_index),
ctx.error(
f"The module index at {path} has extra modules(non existing): "
f"{', '.join(extra_modules_in_index)}"
)
return exitcode


@task(iterable=["files"])
def check_stray(ctx, files):
def check_stray(ctx: Context, files: list[pathlib.Path]) -> int:
exitcode = 0
exclude_paths = (
exclude_pathlib_paths: tuple[pathlib.Path, ...]
exclude_paths: tuple[str, ...]

exclude_pathlib_paths = (
DOCS_DIR / "_inc",
DOCS_DIR / "ref" / "cli" / "_includes",
DOCS_DIR / "ref" / "cli",

@@ -412,41 +382,50 @@ def check_stray(ctx, files):
DOCS_DIR / "ref" / "states" / "writing.rst",
DOCS_DIR / "topics",
)
exclude_paths = tuple(str(p.relative_to(CODE_DIR)) for p in exclude_paths)
exclude_paths = tuple(
str(p.relative_to(tools.utils.REPO_ROOT)) for p in exclude_pathlib_paths
)
files = build_docs_paths(files)
for path in files:
if not str(path).startswith(str((DOCS_DIR / "ref").relative_to(CODE_DIR))):
if not str(path).startswith(
str((DOCS_DIR / "ref").relative_to(tools.utils.REPO_ROOT))
):
continue
if str(path).startswith(exclude_paths):
continue
if path.name in ("index.rst", "glossary.rst", "faq.rst", "README.rst"):
continue
try:
python_module = doc_path_to_python_module[path]
except KeyError:
if path not in DOC_PATH_TO_PYTHON_MODULE:
contents = path.read_text()
if ".. _virtual-" in contents:
continue
exitcode += 1
utils.error(
"The doc at {} doesn't have a corresponding python module and is"
" considered a stray doc. Please remove it.",
path,
ctx.error(
f"The doc at {path} doesn't have a corresponding python module "
"and is considered a stray doc. Please remove it."
)
return exitcode


@task(iterable=["files"])
def check(ctx, files):
@cgroup.command(
name="check",
arguments={
"files": {
"help": "List of files to check",
"nargs": "*",
}
},
)
def check(ctx: Context, files: list[pathlib.Path]) -> None:
exitcode = 0
utils.info("Checking inline :doc: markup")
ctx.info("Checking inline :doc: markup")
exitcode += check_inline_markup(ctx, files)
utils.info("Checking python module stubs")
ctx.info("Checking python module stubs")
exitcode += check_stubs(ctx, files)
utils.info("Checking virtual modules")
ctx.info("Checking virtual modules")
exitcode += check_virtual(ctx, files)
utils.info("Checking stray docs")
ctx.info("Checking stray docs")
exitcode += check_stray(ctx, files)
utils.info("Checking doc module indexes")
ctx.info("Checking doc module indexes")
exitcode += check_module_indexes(ctx, files)
utils.exit_invoke(exitcode)
ctx.exit(exitcode)
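A recurring simplification in the hunks above: because python-tools-scripts supports `nargs`, the old invoke workaround of splitting a single `--files` string is gone and the helper reduces to plain filtering. The sketch below reassembles the new-side lines of the `build_file_list` hunk; the indentation (lost in the rendering above) is an assumption.

import pathlib

import tools.utils  # salt's in-repo helper providing REPO_ROOT


def build_file_list(files, extension):
    # ptscripts passes `files` as a list of pathlib.Path objects already.
    if not files:
        _files = tools.utils.REPO_ROOT.rglob("*{}".format(extension))
    else:
        _files = [fpath.resolve() for fpath in files if fpath.suffix == extension]
    _files = [path.relative_to(tools.utils.REPO_ROOT) for path in _files]
    return _files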
tools/precommit/docstrings.py

@@ -1,10 +1,10 @@
"""
tasks.docstrings
~~~~~~~~~~~~~~~~

Docstrings related tasks
Check salt code base for for missing or wrong docstrings.
"""
# pylint: disable=resource-leakage
# Skip mypy checks since it will follow into Salt which doesn't yet have proper types defined
# mypy: ignore-errors
# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated
from __future__ import annotations

import ast
import os

@@ -13,16 +13,15 @@ import re
import sys
from typing import TYPE_CHECKING

from invoke import task  # pylint: disable=3rd-party-module-not-gated
from ptscripts import Context, command_group

import tools.utils
from salt.loader import SALT_INTERNAL_LOADERS_PATHS
from salt.version import SaltStackVersion
from tasks import utils

CODE_DIR = pathlib.Path(__file__).resolve().parent.parent
SALT_CODE_DIR = CODE_DIR / "salt"
SALT_CODE_DIR = tools.utils.REPO_ROOT / "salt"
SALT_MODULES_PATH = SALT_CODE_DIR / "modules"
THIS_FILE = pathlib.Path(__file__).relative_to(CODE_DIR)
THIS_FILE = pathlib.Path(__file__).relative_to(tools.utils.REPO_ROOT)

MISSING_DOCSTRINGS = {
"salt/auth/django.py": ["is_connection_usable"],

@@ -141,7 +140,6 @@ MISSING_DOCSTRINGS = {
"salt/pillar/gpg.py": ["ext_pillar"],
"salt/pillar/makostack.py": ["ext_pillar"],
"salt/pillar/nacl.py": ["ext_pillar"],
"salt/pillar/stack.py": ["ext_pillar"],
"salt/proxy/cisconso.py": ["init"],
"salt/proxy/esxi.py": ["is_connected_via_vcenter"],
"salt/proxy/fx2.py": ["host"],

@@ -297,7 +295,6 @@ MISSING_DOCSTRINGS = {
"iter_entry_points",
],
"salt/utils/error.py": ["pack_exception"],
"salt/utils/etcd_util.py": ["get_conn", "tree"],
"salt/utils/find.py": ["path_depth"],
"salt/utils/gzip_util.py": ["open_fileobj", "uncompress", "open"],
"salt/utils/icinga2.py": ["get_certs_path"],

@@ -308,7 +305,6 @@ MISSING_DOCSTRINGS = {
"regex_escape",
],
"salt/utils/listdiffer.py": ["list_diff"],
"salt/utils/master.py": ["get_master_key", "ping_all_connected_minions"],
"salt/utils/namecheap.py": [
"atts_to_dict",
"get_opts",

@@ -332,7 +328,6 @@ MISSING_DOCSTRINGS = {
],
"salt/utils/openstack/swift.py": ["mkdirs", "check_swift"],
"salt/utils/pkg/__init__.py": ["split_comparison"],
"salt/utils/process.py": ["systemd_notify_call", "default_signals"],
"salt/utils/profile.py": ["activate_profile", "output_profile"],
"salt/utils/pyobjects.py": ["need_salt"],
"salt/utils/reclass.py": [

@@ -360,13 +355,6 @@ MISSING_DOCSTRINGS = {
"salt/utils/ssh.py": ["key_is_encrypted"],
"salt/utils/stringio.py": ["is_writable", "is_stringio", "is_readable"],
"salt/utils/stringutils.py": ["random"],
"salt/utils/templates.py": [
"wrap_tmpl_func",
"render_mako_tmpl",
"render_jinja_tmpl",
"render_wempy_tmpl",
],
"salt/utils/verify.py": ["verify_logs_filter"],
"salt/utils/virtualbox.py": [
"machine_get_machinestate_str",
"machine_get_machinestate_tuple",

@@ -380,13 +368,10 @@ MISSING_DOCSTRINGS = {
],
"salt/utils/yamlloader.py": ["load"],
"salt/utils/yamlloader_old.py": ["load"],
"salt/utils/zeromq.py": ["check_ipc_path_max_len"],
}
MISSING_EXAMPLES = {
"salt/modules/acme.py": ["has", "renew_by", "needs_renewal"],
"salt/modules/ansiblegate.py": ["help", "list_"],
"salt/modules/apkpkg.py": ["purge"],
"salt/modules/aptpkg.py": ["expand_repo_def"],
"salt/modules/arista_pyeapi.py": ["get_connection"],
"salt/modules/artifactory.py": [
"get_latest_release",

@@ -475,7 +460,6 @@ MISSING_EXAMPLES = {
"salt/modules/boto_ssm.py": ["get_parameter", "delete_parameter", "put_parameter"],
"salt/modules/capirca_acl.py": ["get_filter_pillar", "get_term_pillar"],
"salt/modules/ceph.py": ["zap"],
"salt/modules/chroot.py": ["exist"],
"salt/modules/ciscoconfparse_mod.py": [
"find_objects",
"find_objects_wo_child",

@@ -489,7 +473,6 @@ MISSING_EXAMPLES = {
"set_data_value",
"apply_rollback",
],
"salt/modules/cp.py": ["envs", "recv", "recv_chunked"],
"salt/modules/cryptdev.py": ["active"],
"salt/modules/datadog_api.py": ["post_event"],
"salt/modules/defaults.py": ["deepcopy", "update"],

@@ -608,7 +591,6 @@ MISSING_EXAMPLES = {
"salt/modules/napalm_probes.py": ["delete_probes", "schedule_probes", "set_probes"],
"salt/modules/netbox.py": ["get_", "filter_", "slugify"],
"salt/modules/netmiko_mod.py": ["call", "multi_call", "get_connection"],
"salt/modules/network.py": ["fqdns"],
"salt/modules/neutronng.py": [
"get_openstack_cloud",
"compare_changes",

@@ -763,21 +745,13 @@ MISSING_EXAMPLES = {
"register_vm",
"get_vm_config",
"get_vm_config_file",
"list_licenses",
"compare_vm_configs",
"get_advanced_configs",
"delete_advanced_configs",
"create_vmfs_datastore",
"get_vm",
],
"salt/modules/win_pkg.py": ["get_package_info"],
"salt/modules/win_timezone.py": ["zone_compare"],
"salt/modules/zabbix.py": [
"substitute_params",
"get_zabbix_id_mapper",
"get_object_id_by_params",
"compare_params",
],
"salt/modules/zk_concurrency.py": [
"lock",
"party_members",

@@ -827,8 +801,17 @@ you've made already.
Whatever approach you decide to take, just drop a comment in the PR letting us know!
"""

cgroup = command_group(name="docstrings", help=__doc__, parent="pre-commit")

def annotate(kind: str, fpath: str, start_lineno: int, end_lineno: int, message: str):

def annotate(
ctx: Context,
kind: str,
fpath: pathlib.Path,
start_lineno: int,
end_lineno: int,
message: str,
) -> None:
if kind not in ("warning", "error"):
raise RuntimeError("The annotation kind can only be one of 'warning', 'error'.")
if os.environ.get("GH_ACTIONS_ANNOTATE") is None:

@@ -836,7 +819,7 @@ def annotate(kind: str, fpath: str, start_lineno: int, end_lineno: int, message:

github_output = os.environ.get("GITHUB_OUTPUT")
if github_output is None:
utils.warn("The 'GITHUB_OUTPUT' variable is not set. Not adding annotations.")
ctx.warn("The 'GITHUB_OUTPUT' variable is not set. Not adding annotations.")
return

if TYPE_CHECKING:

@@ -846,40 +829,52 @@ def annotate(kind: str, fpath: str, start_lineno: int, end_lineno: int, message:
message.rstrip().replace("%", "%25").replace("\r", "%0D").replace("\n", "%0A")
)
# Print it to stdout so that the GitHub runner pick's it up and adds the annotation
print(
ctx.print(
f"::{kind} file={fpath},line={start_lineno},endLine={end_lineno}::{message}",
file=sys.stdout,
flush=True,
)


@task(iterable=["files"], positional=["files"])
def check(ctx, files, check_proper_formatting=False, error_on_known_failures=False):
@cgroup.command(
name="check",
arguments={
"files": {
"help": "List of files to check",
"nargs": "*",
},
"suppress_warnings": {
"help": "Supress warning messages on known issues",
},
"check_proper_formatting": {
"help": "Run formatting checks on docstrings",
},
"error_on_known_failures": {
"help": "Raise an error on known failures",
},
},
)
def check_docstrings(
ctx: Context,
files: list[pathlib.Path],
suppress_warnings: bool = False,
check_proper_formatting: bool = False,
error_on_known_failures: bool = False,
) -> None:
"""
Check salt's docstrings
"""
# CD into Salt's repo root directory
ctx.cd(CODE_DIR)

# Unfortunately invoke does not support nargs.
# We migth have been passed --files="foo.py bar.py"
# Turn that into a list of paths
_files = []
for path in files:
if not path:
continue
_files.extend(path.split())
if not _files:
if not files:
_files = SALT_CODE_DIR.rglob("*.py")
else:
_files = [pathlib.Path(fname) for fname in _files]

_files = [path.resolve() for path in _files]
_files = [fpath.resolve() for fpath in files if fpath.suffix == ".py"]

errors = 0
exitcode = 0
warnings = 0
for path in _files:
if str(path).startswith(str(tools.utils.REPO_ROOT / "salt" / "ext")):
continue
contents = path.read_text()
try:
module = ast.parse(path.read_text(), filename=str(path))
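The `annotate()` helper above emits GitHub Actions workflow commands on stdout. A standalone sketch of the format it prints follows; the file, line numbers, and message are made-up values for illustration.

def gh_annotation(kind: str, fpath: str, start_lineno: int, end_lineno: int, message: str) -> str:
    # Same escaping as annotate() above: GitHub requires %, CR and LF to be encoded.
    message = message.rstrip().replace("%", "%25").replace("\r", "%0D").replace("\n", "%0A")
    return f"::{kind} file={fpath},line={start_lineno},endLine={end_lineno}::{message}"


print(gh_annotation("warning", "salt/modules/example.py", 10, 12, "Missing docstring"))
# -> ::warning file=salt/modules/example.py,line=10,endLine=12::Missing docstring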
@@ -889,10 +884,11 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal
if error:
errors += 1
exitcode = 1
utils.error(
"The module '{}' does not provide a proper `{}` version: {!r} is not valid.",
path.relative_to(CODE_DIR),
*error,
ctx.error(
"The module '{}' does not provide a proper `{}` version: {!r} is not valid.".format(
path.relative_to(tools.utils.REPO_ROOT),
*error,
)
)

for funcdef in [

@@ -904,17 +900,19 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal
if error:
errors += 1
exitcode = 1
utils.error(
"The module '{}' does not provide a proper `{}` version: {!r} is not valid.",
path.relative_to(CODE_DIR),
*error,
ctx.error(
"The module '{}' does not provide a proper `{}` version: {!r} is not valid.".format(
path,
*error,
)
)
annotate(
ctx,
"error",
path.relative_to(CODE_DIR),
path,
funcdef.lineno,
funcdef.body[0].lineno,
"Version {1:r!} is not valid for {0!r}".format(*error),
"Version {1!r} is not valid for {0!r}".format(*error),
)

if not str(path).startswith(SALT_INTERNAL_LOADERS_PATHS):

@@ -922,7 +920,7 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal
continue

funcname = funcdef.name
relpath = str(path.relative_to(CODE_DIR))
relpath = str(path.relative_to(tools.utils.REPO_ROOT))

# We're dealing with a salt loader module
if funcname.startswith("_"):

@@ -935,14 +933,14 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal
and error_on_known_failures is False
):
warnings += 1
utils.warn(
"The function '{}' on '{}' does not have a docstring",
funcname,
relpath,
)
if suppress_warnings is False:
ctx.warn(
f"The function '{funcname}' on '{relpath}' does not have a docstring"
)
annotate(
ctx,
"warning",
path.relative_to(CODE_DIR),
path.relative_to(tools.utils.REPO_ROOT),
funcdef.lineno,
funcdef.body[0].lineno,
"Missing docstring",

@@ -950,14 +948,13 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal
continue
errors += 1
exitcode = 1
utils.error(
"The function '{}' on '{}' does not have a docstring",
funcname,
relpath,
ctx.error(
f"The function '{funcname}' on '{relpath}' does not have a docstring"
)
annotate(
ctx,
"error",
path.relative_to(CODE_DIR),
path.relative_to(tools.utils.REPO_ROOT),
funcdef.lineno,
funcdef.body[0].lineno,
"Missing docstring",

@@ -966,14 +963,12 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal
elif funcname in MISSING_DOCSTRINGS.get(relpath, ()):
# This was previously a know function with a missing docstring.
# Warn about it so that it get's removed from this list
warnings += 1
utils.warn(
"The function '{}' on '{}' was previously known to not have a docstring, "
"which is no longer the case. Please remove it from 'MISSING_DOCSTRINGS' ."
"in '{}'",
funcname,
relpath,
THIS_FILE,
errors += 1
exitcode = 1
ctx.error(
f"The function '{funcname}' on '{relpath}' was previously known to not "
"have a docstring, which is no longer the case. Please remove it from "
f"'MISSING_DOCSTRINGS' in '{THIS_FILE}'"
)

try:

@@ -993,14 +988,15 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal
and error_on_known_failures is False
):
warnings += 1
utils.warn(
"The function '{}' on '{}' does not have a 'CLI Example:' in its docstring",
funcname,
relpath,
)
if suppress_warnings is False:
ctx.warn(
f"The function '{funcname}' on '{relpath}' does not have a "
"'CLI Example:' in its docstring"
)
annotate(
ctx,
"warning",
path.relative_to(CODE_DIR),
path.relative_to(tools.utils.REPO_ROOT),
funcdef.lineno,
funcdef.body[0].lineno,
"Missing 'CLI Example:' in docstring",

@@ -1008,14 +1004,13 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal
continue
errors += 1
exitcode = 1
utils.error(
"The function '{}' on '{}' does not have a 'CLI Example:' in its docstring",
funcname,
relpath,
ctx.error(
f"The function '{funcname}' on '{relpath}' does not have a 'CLI Example:' in its docstring"
)
annotate(
ctx,
"error",
path.relative_to(CODE_DIR),
path.relative_to(tools.utils.REPO_ROOT),
funcdef.lineno,
funcdef.body[0].lineno,
"Missing 'CLI Example:' in docstring",

@@ -1024,14 +1019,12 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal
elif funcname in MISSING_EXAMPLES.get(relpath, ()):
# This was previously a know function with a missing CLI example
# Warn about it so that it get's removed from this list
warnings += 1
utils.warn(
"The function '{}' on '{}' was previously known to not have a CLI Example, "
"which is no longer the case. Please remove it from 'MISSING_EXAMPLES'. "
"in '{}'",
funcname,
relpath,
THIS_FILE,
errors += 1
exitcode = 1
ctx.error(
f"The function '{funcname}' on '{relpath}' was previously known to not "
"have a CLI Example, which is no longer the case. Please remove it from "
f"'MISSING_EXAMPLES' in '{THIS_FILE}'"
)

if check_proper_formatting is False:

@@ -1042,20 +1035,22 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal
if _check_cli_example_proper_formatting(docstring) is False:
errors += 1
exitcode = 1
utils.error(
ctx.error(
"The function {!r} on '{}' does not have a proper 'CLI Example:' section in "
"its docstring. The proper format is:\n"
"CLI Example:\n"
"\n"
".. code-block:: bash\n"
"\n"
" salt '*' <insert example here>\n",
funcdef.name,
path.relative_to(CODE_DIR),
" salt '*' <insert example here>\n".format(
funcdef.name,
path.relative_to(tools.utils.REPO_ROOT),
)
)
annotate(
ctx,
"warning",
path.relative_to(CODE_DIR),
path.relative_to(tools.utils.REPO_ROOT),
funcdef.lineno,
funcdef.body[0].lineno,
"Wrong format in 'CLI Example:' in docstring.\n"

@@ -1072,15 +1067,15 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal
path.write_text(contents)

if warnings:
utils.warn("Found {} warnings", warnings)
ctx.warn(f"Found {warnings} warnings")
if exitcode:
utils.error("Found {} errors", errors)
ctx.error(f"Found {errors} errors")
if os.environ.get("GH_ACTIONS_ANNOTATE") and (warnings or errors):
github_step_summary = os.environ.get("GITHUB_STEP_SUMMARY")
if github_step_summary:
with open(github_step_summary, "w", encoding="utf-8") as wfh:
wfh.write(SUMMARY)
utils.exit_invoke(exitcode)
ctx.exit(exitcode)


CHECK_VALID_VERSION_RE = re.compile(