Enable Ruff D (pydocstyle) with pep257 convention #13326

Merged
8 commits merged on Jan 2, 2025
7 changes: 4 additions & 3 deletions lib/ts_utils/metadata.py
@@ -184,7 +184,7 @@ def is_obsolete(self) -> bool:


class NoSuchStubError(ValueError):
"""Raise NoSuchStubError to indicate that a stubs/{distribution} directory doesn't exist"""
"""Raise NoSuchStubError to indicate that a stubs/{distribution} directory doesn't exist."""


@cache
@@ -302,9 +302,10 @@ def read_metadata(distribution: str) -> StubMetadata:


def update_metadata(distribution: str, **new_values: object) -> tomlkit.TOMLDocument:
"""Updates a distribution's METADATA.toml.
"""Update a distribution's METADATA.toml.

Return the updated TOML dictionary for use without having to open the file separately."""
Return the updated TOML dictionary for use without having to open the file separately.
"""
path = metadata_path(distribution)
try:
with path.open("rb") as file:
1 change: 0 additions & 1 deletion lib/ts_utils/utils.py
@@ -92,7 +92,6 @@ def venv_python(venv_dir: Path) -> Path:
@cache
def parse_requirements() -> Mapping[str, Requirement]:
"""Return a dictionary of requirements from the requirements file."""

with REQUIREMENTS_PATH.open(encoding="UTF-8") as requirements_file:
stripped_lines = map(strip_comments, requirements_file)
stripped_more = [li for li in stripped_lines if not li.startswith("-")]
17 changes: 14 additions & 3 deletions pyproject.toml
@@ -39,6 +39,7 @@ external = ["F821", "NQA", "Y"]
select = [
"ARG", # flake8-unused-arguments
"B", # flake8-bugbear
"D", # pydocstyle
"EXE", # flake8-executable
"FA", # flake8-future-annotations
"I", # isort
@@ -89,6 +90,7 @@ select = [
"PYI064", # `Final[Literal[{literal}]]` can be replaced with a bare Final
]
extend-safe-fixes = [
"D200", # One-line docstring should fit on one line
"UP036", # Remove unnecessary `sys.version_info` blocks
]
ignore = [
@@ -103,11 +105,15 @@
###
# Rules we don't want or don't agree with
###
# Slower and more verbose https://github.com/astral-sh/ruff/issues/7871
"UP038", # Use `X | Y` in `isinstance` call instead of `(X, Y)`
# We're not a library, no need to document everything
"D1", # Missing docstring in ...
# Doesn't support split "summary line"
"D205", # 1 blank line required between summary line and description
# Used for direct, non-subclass type comparison, for example: `type(val) is str`
# see https://github.com/astral-sh/ruff/issues/6465
"E721", # Do not compare types, use `isinstance()`
# Slower and more verbose https://github.com/astral-sh/ruff/issues/7871
"UP038", # Use `X | Y` in `isinstance` call instead of `(X, Y)`
###
# False-positives, but already checked by type-checkers
###
@@ -134,11 +140,16 @@
"RUF022",
"RUF023",
]
# See comment on black's force-exclude config above
"*_pb2.pyi" = [
# Leave the docstrings as-is, matching source
"D", # pydocstyle
# See comment on black's force-exclude config above
"E501", # Line too long
]

[tool.ruff.lint.pydocstyle]
convention = "pep257" # https://docs.astral.sh/ruff/settings/#lint_pydocstyle_convention

[tool.ruff.lint.isort]
split-on-trailing-comma = false
combine-as-imports = true
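Taken together, the select/ignore changes above mean docstrings must follow the pep257 layout. Below is a minimal Python sketch, using hypothetical functions that are not part of this PR, of what the enabled D rules expect:

# Hypothetical illustration (not from this diff) of the docstring style the
# enabled pydocstyle (D) rules expect under convention = "pep257".
def read_config(path: str) -> dict[str, str]:
    """Read the configuration file and return its contents."""
    # D400 wants the summary line to end with a period; D401 wants the
    # imperative mood ("Read", not "Reads"); D202 forbids a blank line
    # between the docstring and this first statement.
    return {"path": path}


def update_config(path: str, **values: object) -> None:
    """Update the configuration file in place.

    A longer description follows the one-line summary; for multi-line
    docstrings, D209 puts the closing quotes on their own line.
    """
    print(path, values)  # placeholder body for the sketch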
2 changes: 1 addition & 1 deletion scripts/sync_protobuf/_utils.py
@@ -33,7 +33,7 @@ def extract_archive(archive_path: StrPath, destination: StrPath) -> None:
def run_protoc(
proto_paths: Iterable[StrPath], mypy_out: StrPath, proto_globs: Iterable[str], cwd: StrOrBytesPath | None = None
) -> str:
"""TODO: Describe parameters and return"""
"""TODO: Describe parameters and return."""
protoc_version = (
subprocess.run([sys.executable, "-m", "grpc_tools.protoc", "--version"], capture_output=True).stdout.decode().strip()
)
4 changes: 2 additions & 2 deletions scripts/sync_protobuf/google_protobuf.py
@@ -32,7 +32,7 @@


def extract_python_version(file_path: Path) -> str:
"""Extract the Python version from https://github.com/protocolbuffers/protobuf/blob/main/version.json"""
"""Extract the Python version from https://github.com/protocolbuffers/protobuf/blob/main/version.json ."""
with open(file_path) as file:
data: dict[str, Any] = json.load(file)
# The root key will be the protobuf source code version
@@ -45,7 +45,7 @@ def extract_proto_file_paths(temp_dir: Path) -> list[str]:
"""
Roughly reproduce the subset of .proto files on the public interface
as described in py_proto_library calls in
https://github.com/protocolbuffers/protobuf/blob/main/python/dist/BUILD.bazel
https://github.com/protocolbuffers/protobuf/blob/main/python/dist/BUILD.bazel .
"""
with open(temp_dir / EXTRACTED_PACKAGE_DIR / "python" / "dist" / "BUILD.bazel") as file:
matched_lines = filter(None, (re.search(PROTO_FILE_PATTERN, line) for line in file))
2 changes: 1 addition & 1 deletion scripts/sync_protobuf/s2clientprotocol.py
@@ -30,7 +30,7 @@


def extract_python_version(file_path: Path) -> str:
"""Extract Python version from s2clientprotocol's build file"""
"""Extract Python version from s2clientprotocol's build file."""
match = re.search(VERSION_PATTERN, file_path.read_text())
assert match
return match.group(1)
5 changes: 3 additions & 2 deletions scripts/sync_protobuf/tensorflow.py
@@ -57,14 +57,15 @@
def move_tree(source: Path, destination: Path) -> None:
"""Move directory and merge if destination already exists.

Can't use shutil.move because it can't merge existing directories."""
Can't use shutil.move because it can't merge existing directories.
"""
print(f"Moving '{source}' to '{destination}'")
shutil.copytree(source, destination, dirs_exist_ok=True)
shutil.rmtree(source)


def post_creation() -> None:
"""Move third-party and fix imports"""
"""Move third-party and fix imports."""
print()
move_tree(STUBS_FOLDER / "tsl", STUBS_FOLDER / "tensorflow" / "tsl")
move_tree(STUBS_FOLDER / "xla", STUBS_FOLDER / "tensorflow" / "compiler" / "xla")
5 changes: 2 additions & 3 deletions tests/mypy_test.py
@@ -62,7 +62,7 @@ class CommandLineArgs:


def valid_path(cmd_arg: str) -> Path:
"""Helper function for argument-parsing"""
"""Helper function for argument-parsing.""" # noqa: D401
path = Path(cmd_arg)
if not path.exists():
raise argparse.ArgumentTypeError(f'"{path}" does not exist in typeshed!')
@@ -72,7 +72,7 @@ def valid_path(cmd_arg: str) -> Path:


def remove_dev_suffix(version: str) -> str:
"""Helper function for argument-parsing"""
"""Helper function for argument-parsing.""" # noqa: D401
if version.endswith("-dev"):
return version[: -len("-dev")]
return version
@@ -303,7 +303,6 @@ def test_third_party_distribution(
Return a tuple, where the first element indicates mypy's return code
and the second element is the number of checked files.
"""

files: list[Path] = []
configurations: list[MypyDistConf] = []
seen_dists: set[str] = set()
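The `# noqa: D401` comments in the hunks above suppress the imperative-mood rule for summaries such as "Helper function for argument-parsing" instead of rewording them. A small sketch with hypothetical functions (not from this PR) showing both ways to satisfy D401:

# Hypothetical illustration of the two ways to deal with D401
# (first line of a docstring should be in the imperative mood).
def parse_version(raw: str) -> str:
    """Parse a version string passed on the command line."""
    # Imperative summary ("Parse ..."), so D401 is satisfied as written.
    return raw.removesuffix("-dev")


def known_distribution(name: str) -> str:
    """Helper function for argument-parsing."""  # noqa: D401
    # Descriptive wording is kept and the rule is silenced for this line only.
    return name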
4 changes: 2 additions & 2 deletions tests/pytype_test.py
@@ -74,7 +74,7 @@ def create_parser() -> argparse.ArgumentParser:


def run_pytype(*, filename: str, python_version: str, missing_modules: Iterable[str]) -> str | None:
"""Runs pytype, returning the stderr if any."""
"""Run pytype, returning the stderr if any."""
if python_version not in _LOADERS:
options = pytype_config.Options.create("", parse_pyi=True, python_version=python_version)
# For simplicity, pretends missing modules are part of the stdlib.
@@ -107,7 +107,7 @@ def _get_relative(filename: str) -> str:


def _get_module_name(filename: str) -> str:
"""Converts a filename {subdir}/m.n/module/foo to module.foo."""
"""Convert a filename {subdir}/m.n/module/foo to module.foo."""
parts = _get_relative(filename).split(os.path.sep)
if parts[0] == "stdlib":
module_parts = parts[1:]
3 changes: 1 addition & 2 deletions tests/regr_test.py
@@ -44,8 +44,7 @@


def distribution_with_test_cases(distribution_name: str) -> DistributionTests:
"""Helper function for argument-parsing."""

"""Helper function for argument-parsing.""" # noqa: D401
try:
return distribution_info(distribution_name)
except RuntimeError as exc:
2 changes: 1 addition & 1 deletion tests/stubtest_stdlib.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
"""Test typeshed's stdlib using stubtest
"""Test typeshed's stdlib using stubtest.

stubtest is a script in the mypy project that compares stubs to the actual objects at runtime.
Note that therefore the output of stubtest depends on which Python version it is run with.
2 changes: 1 addition & 1 deletion tests/stubtest_third_party.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python3
"""Test typeshed's third party stubs using stubtest"""
"""Test typeshed's third party stubs using stubtest."""

from __future__ import annotations
