Parsimonious stubs #7477

Merged · 18 commits · Mar 13, 2022
2 changes: 2 additions & 0 deletions stubs/parsimonious/@tests/stubtest_allowlist.txt
@@ -0,0 +1,2 @@
parsimonious.nodes.Node.__repr__
parsimonious.nodes.RuleDecoratorMeta.__new__
1 change: 1 addition & 0 deletions stubs/parsimonious/METADATA.toml
@@ -0,0 +1 @@
version = "0.8.*"
8 changes: 8 additions & 0 deletions stubs/parsimonious/parsimonious/__init__.pyi
@@ -0,0 +1,8 @@
from parsimonious.exceptions import (
BadGrammar as BadGrammar,
IncompleteParseError as IncompleteParseError,
ParseError as ParseError,
UndefinedLabel as UndefinedLabel,
)
from parsimonious.grammar import Grammar as Grammar, TokenGrammar as TokenGrammar
from parsimonious.nodes import NodeVisitor as NodeVisitor, VisitationError as VisitationError, rule as rule
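
A quick sketch of how these re-exported names are typically used together; the grammar text below is only illustrative, not taken from this PR:

# Smoke test for the top-level re-exports; names and grammar are illustrative.
from parsimonious import Grammar, ParseError

greeting = Grammar(r'''
    greeting = "Hello, " name "!"
    name     = ~"[A-Za-z]+"
''')

try:
    tree = greeting.parse("Hello, world!")
except ParseError as err:
    print(err.line(), err.column())
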
24 changes: 24 additions & 0 deletions stubs/parsimonious/parsimonious/exceptions.pyi
@@ -0,0 +1,24 @@
from parsimonious.expressions import Expression
from parsimonious.grammar import LazyReference
from parsimonious.nodes import Node
from parsimonious.utils import StrAndRepr

class ParseError(StrAndRepr, Exception):
text: str
pos: int
expr: Expression | None
def __init__(self, text: str, pos: int = ..., expr: Expression | None = ...) -> None: ...
def line(self) -> int: ...
def column(self) -> int: ...

class IncompleteParseError(ParseError): ...

class VisitationError(Exception):
original_class: type[BaseException]
def __init__(self, exc: BaseException, exc_class: type[BaseException], node: Node) -> None: ...

class BadGrammar(StrAndRepr, Exception): ...

class UndefinedLabel(BadGrammar):
label: LazyReference
def __init__(self, label: LazyReference) -> None: ...
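
A minimal sketch of handling this exception hierarchy, assuming a Grammar instance `g` is already built elsewhere; IncompleteParseError subclasses ParseError, so it has to be caught first:

# Hedged example: `g` and `text` are assumptions, not part of this stub.
from parsimonious.exceptions import IncompleteParseError, ParseError

def try_parse(g, text: str):
    try:
        return g.parse(text)
    except IncompleteParseError as err:
        # The grammar matched a prefix but not the whole input.
        print(f"stopped at line {err.line()}, column {err.column()}")
    except ParseError as err:
        print(f"no match at position {err.pos}: {err.text[err.pos:err.pos + 20]!r}")
    return None
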
63 changes: 63 additions & 0 deletions stubs/parsimonious/parsimonious/expressions.pyi
@@ -0,0 +1,63 @@
import typing
from typing import Any, Callable, Mapping, Pattern, Union

from parsimonious.exceptions import ParseError
from parsimonious.grammar import Grammar
from parsimonious.nodes import Node
from parsimonious.utils import StrAndRepr

MARKER: Any

_CALLABLE_RETURN_TYPE = Union[int, tuple[int, list[Node]], Node, None]
_CALLABLE_TYPE = (
Callable[[str, int], _CALLABLE_RETURN_TYPE]
| Callable[[str, int, Mapping[tuple[int, int], Node], ParseError, Grammar], _CALLABLE_RETURN_TYPE]
)

def expression(callable: _CALLABLE_TYPE, rule_name: str, grammar: Grammar) -> Expression: ...

class Expression(StrAndRepr):
name: str
identity_tuple: tuple[str]
def __init__(self, name: str = ...) -> None: ...
def parse(self, text: str, pos: int = ...) -> Node: ...
def match(self, text: str, pos: int = ...) -> Node: ...
def match_core(self, text: str, pos: int, cache: Mapping[tuple[int, int], Node], error: ParseError) -> Node: ...
def as_rule(self) -> str: ...

class Literal(Expression):
literal: str
identity_tuple: tuple[str, str] # type: ignore
def __init__(self, literal: str, name: str = ...) -> None: ...

class TokenMatcher(Literal): ...

class Regex(Expression):
re: Pattern[str]
identity_tuple: tuple[str, Pattern[str]] # type: ignore
def __init__(
self,
pattern: str,
name: str = ...,
ignore_case: bool = ...,
locale: bool = ...,
multiline: bool = ...,
dot_all: bool = ...,
unicode: bool = ...,
verbose: bool = ...,
) -> None: ...

class Compound(Expression):
members: typing.Sequence[Expression]
def __init__(self, *members: Expression, **kwargs: Any) -> None: ...

class Sequence(Compound): ...
class OneOf(Compound): ...
class Lookahead(Compound): ...
class Not(Compound): ...
class Optional(Compound): ...
class ZeroOrMore(Compound): ...

class OneOrMore(Compound):
min: int
def __init__(self, member: Expression, name: str = ..., min: int = ...) -> None: ...
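
For illustration, these expression classes can also be composed by hand, although most users build them indirectly through Grammar syntax; the names below are hypothetical:

# Hand-built expression tree; the equivalent grammar text would be
#   number = sign digits
from parsimonious.expressions import Literal, OneOf, Regex, Sequence

digits = Regex(r"[0-9]+", name="digits")
sign   = OneOf(Literal("+"), Literal("-"), name="sign")
number = Sequence(sign, digits, name="number")

node = number.parse("+42")  # returns a Node on success, raises ParseError otherwise
print(node.text)            # "+42"
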
53 changes: 53 additions & 0 deletions stubs/parsimonious/parsimonious/grammar.pyi
@@ -0,0 +1,53 @@
import typing
from collections import OrderedDict
from typing import Any, Callable, Mapping, NoReturn

from parsimonious.expressions import _CALLABLE_TYPE, Expression, Literal, Lookahead, Not, OneOf, Regex, Sequence, TokenMatcher
from parsimonious.nodes import Node, NodeVisitor

class Grammar(OrderedDict[str, Expression]):
default_rule: Expression | Any
def __init__(self, rules: str = ..., **more_rules: Expression | _CALLABLE_TYPE) -> None: ...
def default(self, rule_name: str) -> Grammar: ...
def parse(self, text: str, pos: int = ...) -> Node: ...
def match(self, text: str, pos: int = ...) -> Node: ...

class TokenGrammar(Grammar): ...
class BootstrappingGrammar(Grammar): ...

rule_syntax: str

class LazyReference(str):
name: str

class RuleVisitor(NodeVisitor):
quantifier_classes: dict[str, type[Expression]]
visit_expression: Callable[[RuleVisitor, Node, typing.Sequence[Any]], Any]
visit_term: Callable[[RuleVisitor, Node, typing.Sequence[Any]], Any]
visit_atom: Callable[[RuleVisitor, Node, typing.Sequence[Any]], Any]
custom_rules: dict[str, Expression]
def __init__(self, custom_rules: Mapping[str, Expression] | None = ...) -> None: ...
def visit_rules(
self, node: Node, rules_list: typing.Sequence[Any]
) -> tuple[OrderedDict[str, Expression], Expression | None]: ...
def visit_rule(self, node: Node, rule: typing.Sequence[Any]) -> Expression: ...
def visit_label(self, node: Node, label: typing.Sequence[Any]) -> str: ...
def visit_ored(self, node: Node, ored: typing.Sequence[Any]) -> OneOf: ...
def visit_or_term(self, node: Node, or_term: typing.Sequence[Any]) -> Expression: ...
def visit_sequence(self, node: Node, sequence: typing.Sequence[Any]) -> Sequence: ...
def visit_not_term(self, node: Node, not_term: typing.Sequence[Any]) -> Not: ...
def visit_lookahead_term(self, node: Node, lookahead_term: typing.Sequence[Any]) -> Lookahead: ...
def visit_quantified(self, node: Node, quantified: typing.Sequence[Any]) -> Expression: ...
def visit_quantifier(self, node: Node, quantifier: typing.Sequence[Any]) -> Node: ...
def visit_reference(self, node: Node, reference: typing.Sequence[Any]) -> LazyReference: ...
def visit_literal(self, node: Node, literal: typing.Sequence[Any]) -> Literal: ...
def visit_spaceless_literal(self, spaceless_literal: Node, visited_children: typing.Sequence[Any]) -> Literal: ...
def visit_regex(self, node: Node, regex: typing.Sequence[Any]) -> Regex: ...
def visit_parenthesized(self, node: Node, parenthesized: typing.Sequence[Any]) -> Expression: ...
def generic_visit(self, node: Node, visited_children: typing.Sequence[Any]) -> typing.Sequence[Any] | Node: ... # type: ignore

class TokenRuleVisitor(RuleVisitor):
def visit_spaceless_literal(self, spaceless_literal: Node, visited_children: typing.Sequence[Any]) -> TokenMatcher: ...
def visit_regex(self, node: Node, regex: typing.Sequence[Any]) -> NoReturn: ...

rule_grammar: Grammar
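
A short sketch of the Grammar entry points covered by this stub; the CSV-like grammar is only an example:

from parsimonious.grammar import Grammar

csv_row = Grammar(r'''
    row   = field ("," field)*
    field = ~"[^,\n]*"
''')

tree    = csv_row.parse("a,b,c")      # parse from the default (first) rule
partial = csv_row.match("a,b\nrest")  # match() tolerates trailing unparsed text
by_name = csv_row.default("field")    # new Grammar whose default rule is "field"
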
38 changes: 38 additions & 0 deletions stubs/parsimonious/parsimonious/nodes.pyi
@@ -0,0 +1,38 @@
from re import Match
from typing import Any, Callable, Iterator, NoReturn, Sequence, TypeVar

from parsimonious.exceptions import VisitationError as VisitationError
from parsimonious.expressions import Expression
from parsimonious.grammar import Grammar

class Node:
expr: Expression
full_text: str
start: int
end: int
children: Sequence[Node]
def __init__(self, expr: Expression, full_text: str, start: int, end: int, children: Sequence[Node] | None = ...) -> None: ...
@property
def expr_name(self) -> str: ...
def __iter__(self) -> Iterator[Node]: ...
@property
def text(self) -> str: ...
def prettily(self, error: Node | None = ...) -> str: ...

class RegexNode(Node):
match: Match[str]

class RuleDecoratorMeta(type): ...

class NodeVisitor(metaclass=RuleDecoratorMeta):
grammar: Grammar | Any
unwrapped_exceptions: tuple[type[Exception], ...]
def visit(self, node: Node) -> Any: ...
def generic_visit(self, node: Node, visited_children: Sequence[Any]) -> NoReturn: ...
def parse(self, text: str, pos: int = ...) -> Node: ...
def match(self, text: str, pos: int = ...) -> Node: ...
def lift_child(self, node: Node, children: Sequence[Any]) -> Any: ...

_CallableT = TypeVar("_CallableT", bound=Callable[..., Any])

def rule(rule_string: str) -> Callable[[_CallableT], _CallableT]: ...
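
A hedged sketch of a NodeVisitor subclass written against this interface; the grammar and the visit_* method names below are illustrative:

from parsimonious.grammar import Grammar
from parsimonious.nodes import Node, NodeVisitor

class PairVisitor(NodeVisitor):
    grammar = Grammar(r'''
        pair  = key "=" value
        key   = ~"[A-Za-z_]+"
        value = ~"[^\n]*"
    ''')

    def visit_pair(self, node: Node, visited_children):
        key, _, value = visited_children
        return (key, value)

    def visit_key(self, node: Node, visited_children):
        return node.text

    def visit_value(self, node: Node, visited_children):
        return node.text

    def generic_visit(self, node: Node, visited_children):
        return visited_children or node

print(PairVisitor().parse("answer=42"))  # expected: ('answer', '42')
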
10 changes: 10 additions & 0 deletions stubs/parsimonious/parsimonious/utils.pyi
@@ -0,0 +1,10 @@
import ast
from typing import Any

class StrAndRepr: ...

def evaluate_string(string: str | ast.AST) -> Any: ...

class Token(StrAndRepr):
type: str
def __init__(self, type: str) -> None: ...
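
For illustration, Token pairs with TokenGrammar: in a token grammar, quoted literals match each Token's `type` attribute rather than raw characters. Note that the Grammar.parse stub in this PR types its argument as str, while TokenGrammar at runtime accepts a sequence of Tokens:

from parsimonious.grammar import TokenGrammar
from parsimonious.utils import Token

# The grammar text and token types here are hypothetical.
token_grammar = TokenGrammar('statement = "kw_select" "name"')
tree = token_grammar.parse([Token("kw_select"), Token("name")])
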