refactor: update parsimonious to 0.10.0 #8730

Merged: 13 commits, Sep 13, 2022
Changes from 10 commits
1 change: 0 additions & 1 deletion stubs/parsimonious/@tests/stubtest_allowlist.txt
@@ -1,2 +1 @@
parsimonious.nodes.Node.__repr__
parsimonious.nodes.RuleDecoratorMeta.__new__
2 changes: 1 addition & 1 deletion stubs/parsimonious/METADATA.toml
@@ -1 +1 @@
version = "0.9.*"
version = "0.10.*"
4 changes: 2 additions & 2 deletions stubs/parsimonious/parsimonious/__init__.pyi
@@ -1,8 +1,8 @@
from parsimonious.exceptions import (
BadGrammar as BadGrammar,
BadGrammar as BadGrammer,
IncompleteParseError as IncompleteParseError,
ParseError as ParseError,
UndefinedLabel as UndefinedLabel,
)
from parsimonious.grammar import Grammar as Grammar, TokenGrammar as TokenGrammar
from parsimonious.grammar import Grammar as Grammar, TokenGrammar as TokenGrammer
from parsimonious.nodes import NodeVisitor as NodeVisitor, VisitationError as VisitationError, rule as rule
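A minimal usage sketch (not part of this PR) showing the re-exported names being imported from the package root; the one-rule grammar is invented for illustration:

```python
from parsimonious import Grammar, NodeVisitor, ParseError  # names re-exported by __init__.pyi

# Hypothetical one-rule grammar, only to show that Grammar resolves to the
# annotated class from parsimonious.grammar.
grammar = Grammar(r'greeting = "hi"')
tree = grammar.parse("hi")  # inferred as parsimonious.nodes.Node
```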
3 changes: 3 additions & 0 deletions stubs/parsimonious/parsimonious/exceptions.pyi
@@ -7,10 +7,12 @@ class ParseError(StrAndRepr, Exception):
text: str
pos: int
expr: Expression | None

def __init__(self, text: str, pos: int = ..., expr: Expression | None = ...) -> None: ...
def line(self) -> int: ...
def column(self) -> int: ...

class LeftRecursionError(ParseError): ...
class IncompleteParseError(ParseError): ...

class VisitationError(Exception):
@@ -21,4 +23,5 @@ class BadGrammar(StrAndRepr, Exception): ...

class UndefinedLabel(BadGrammar):
label: LazyReference

def __init__(self, label: LazyReference) -> None: ...
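A short sketch (not from the PR) of how the attributes and methods annotated above are typically reached; the grammar is invented, and the IncompleteParseError branch relies on parse() requiring the full input to be consumed:

```python
from parsimonious import Grammar
from parsimonious.exceptions import IncompleteParseError, ParseError

grammar = Grammar(r'number = ~"[0-9]+"')  # hypothetical grammar

try:
    grammar.parse("123abc")  # "abc" is left over, so parse() should fail
except IncompleteParseError as err:  # subclass of ParseError
    print(err.pos, err.line(), err.column())
except ParseError as err:
    print(err.text, err.expr)
```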
32 changes: 24 additions & 8 deletions stubs/parsimonious/parsimonious/expressions.pyi
@@ -1,4 +1,5 @@
import collections.abc
from _typeshed import Self
from collections.abc import Callable, Mapping
from re import Pattern
from typing import Any, Union
@@ -9,20 +10,23 @@ from parsimonious.grammar import Grammar
from parsimonious.nodes import Node
from parsimonious.utils import StrAndRepr

MARKER: Any

_CALLABLE_RETURN_TYPE: TypeAlias = Union[int, tuple[int, list[Node]], Node, None]
_CALLABLE_TYPE: TypeAlias = (
Callable[[str, int], _CALLABLE_RETURN_TYPE]
| Callable[[str, int, Mapping[tuple[int, int], Node], ParseError, Grammar], _CALLABLE_RETURN_TYPE]
)

def is_callable(value: object) -> bool: ...
def expression(callable: _CALLABLE_TYPE, rule_name: str, grammar: Grammar) -> Expression: ...

IN_PROGRESS: object

class Expression(StrAndRepr):
name: str
identity_tuple: tuple[str]

def __init__(self, name: str = ...) -> None: ...
def resolve_refs(self: Self, rule_map: Mapping[str, Expression]) -> Self: ...
def parse(self, text: str, pos: int = ...) -> Node: ...
def match(self, text: str, pos: int = ...) -> Node: ...
def match_core(self, text: str, pos: int, cache: Mapping[tuple[int, int], Node], error: ParseError) -> Node: ...
@@ -31,6 +35,7 @@ class Expression(StrAndRepr):
class Literal(Expression):
literal: str
identity_tuple: tuple[str, str] # type: ignore[assignment]

def __init__(self, literal: str, name: str = ...) -> None: ...

class TokenMatcher(Literal): ...
@@ -53,15 +58,26 @@ class Regex(Expression):

class Compound(Expression):
members: collections.abc.Sequence[Expression]

def __init__(self, *members: Expression, **kwargs: Any) -> None: ...
def resolve_refs(self: Self, rule_map: Mapping[str, Expression]) -> Self: ...

class Sequence(Compound): ...
class OneOf(Compound): ...
class Lookahead(Compound): ...
class Not(Compound): ...
class Optional(Compound): ...
class ZeroOrMore(Compound): ...

class OneOrMore(Compound):
class Lookahead(Compound):
negativity: bool

def __init__(self, member: Expression, *, negative: bool = ..., **kwargs: Any) -> None: ...

def Not(term: Expression) -> Lookahead: ...

class Quantifier(Compound):
min: int
def __init__(self, member: Expression, name: str = ..., min: int = ...) -> None: ...
max = float

def __init__(self, member: Expression, *, min: int = ..., max: float = ..., name: str = ..., **kwargs: Any) -> None: ...

def ZeroOrMore(member: Expression, name: str = ...) -> Quantifier: ...
def OneOrMore(member: Expression, name: str = ..., min: int = ...) -> Quantifier: ...
def Optional(member: Expression, name: str = ...) -> Quantifier: ...
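The old stubs modelled Not, Optional, ZeroOrMore and OneOrMore as Compound subclasses; per the annotations above, in 0.10.0 they are factory functions returning Lookahead or Quantifier. A small sketch (not part of the PR; the literals and names are made up) of what that means for callers:

```python
from parsimonious.expressions import Literal, Lookahead, Not, Optional, Quantifier, ZeroOrMore

word = Literal("spam")
maybe_word = Optional(word, name="maybe_word")  # Quantifier (min=0, max=1)
words = ZeroOrMore(word, name="words")          # Quantifier (min=0, unbounded max)
not_word = Not(word)                            # Lookahead with negativity=True

# isinstance checks now target the factory results, not dedicated subclasses.
assert isinstance(maybe_word, Quantifier)
assert isinstance(not_word, Lookahead)

maybe_word.parse("spam")  # expressions can parse standalone via Expression.parse()
```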
27 changes: 14 additions & 13 deletions stubs/parsimonious/parsimonious/grammar.pyi
@@ -3,7 +3,7 @@ from collections import OrderedDict
from collections.abc import Callable, Mapping
from typing import Any, NoReturn

from parsimonious.expressions import _CALLABLE_TYPE, Expression, Literal, Lookahead, Not, OneOf, Regex, Sequence, TokenMatcher
from parsimonious.expressions import _CALLABLE_TYPE, Expression, Literal, Lookahead, OneOf, Regex, Sequence, TokenMatcher
from parsimonious.nodes import Node, NodeVisitor

class Grammar(OrderedDict[str, Expression]):
@@ -20,6 +20,7 @@ rule_syntax: str

class LazyReference(str):
name: str
def resolve_refs(self, rule_map: Mapping[str, Expression]) -> Expression: ...

class RuleVisitor(NodeVisitor):
quantifier_classes: dict[str, type[Expression]]
@@ -28,24 +29,24 @@ class RuleVisitor(NodeVisitor):
visit_atom: Callable[[RuleVisitor, Node, collections.abc.Sequence[Any]], Any]
custom_rules: dict[str, Expression]
def __init__(self, custom_rules: Mapping[str, Expression] | None = ...) -> None: ...
def visit_rules(
self, node: Node, rules_list: collections.abc.Sequence[Any]
) -> tuple[OrderedDict[str, Expression], Expression | None]: ...
def visit_parenthesized(self, node: Node, parenthesized: collections.abc.Sequence[Any]) -> Expression: ...
def visit_quantifier(self, node: Node, quantifier: collections.abc.Sequence[Any]) -> Node: ...
def visit_quantified(self, node: Node, quantified: collections.abc.Sequence[Any]) -> Expression: ...
def visit_lookahead_term(self, node: Node, lookahead_term: collections.abc.Sequence[Any]) -> Lookahead: ...
def visit_not_term(self, node: Node, not_term: collections.abc.Sequence[Any]) -> Lookahead: ...
def visit_rule(self, node: Node, rule: collections.abc.Sequence[Any]) -> Expression: ...
def visit_label(self, node: Node, label: collections.abc.Sequence[Any]) -> str: ...
def visit_sequence(self, node: Node, sequence: collections.abc.Sequence[Any]) -> Sequence: ...
def visit_ored(self, node: Node, ored: collections.abc.Sequence[Any]) -> OneOf: ...
def visit_or_term(self, node: Node, or_term: collections.abc.Sequence[Any]) -> Expression: ...
def visit_sequence(self, node: Node, sequence: collections.abc.Sequence[Any]) -> Sequence: ...
def visit_not_term(self, node: Node, not_term: collections.abc.Sequence[Any]) -> Not: ...
def visit_lookahead_term(self, node: Node, lookahead_term: collections.abc.Sequence[Any]) -> Lookahead: ...
def visit_quantified(self, node: Node, quantified: collections.abc.Sequence[Any]) -> Expression: ...
def visit_quantifier(self, node: Node, quantifier: collections.abc.Sequence[Any]) -> Node: ...
def visit_label(self, node: Node, label: collections.abc.Sequence[Any]) -> str: ...
def visit_reference(self, node: Node, reference: collections.abc.Sequence[Any]) -> LazyReference: ...
def visit_literal(self, node: Node, literal: collections.abc.Sequence[Any]) -> Literal: ...
def visit_spaceless_literal(self, spaceless_literal: Node, visited_children: collections.abc.Sequence[Any]) -> Literal: ...
def visit_regex(self, node: Node, regex: collections.abc.Sequence[Any]) -> Regex: ...
def visit_parenthesized(self, node: Node, parenthesized: collections.abc.Sequence[Any]) -> Expression: ...
def visit_spaceless_literal(self, spaceless_literal: Node, visited_children: collections.abc.Sequence[Any]) -> Literal: ...
def visit_literal(self, node: Node, literal: collections.abc.Sequence[Any]) -> Literal: ...
def generic_visit(self, node: Node, visited_children: collections.abc.Sequence[Any]) -> collections.abc.Sequence[Any] | Node: ... # type: ignore[override]
def visit_rules(
self, node: Node, rules_list: collections.abc.Sequence[Any]
) -> tuple[OrderedDict[str, Expression], Expression | None]: ...

class TokenRuleVisitor(RuleVisitor):
def visit_spaceless_literal(
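Since Grammar subclasses OrderedDict[str, Expression] (first line of this stub), each rule is itself an Expression. A brief sketch (not from the PR; the grammar text is invented):

```python
from parsimonious import Grammar

grammar = Grammar(
    r"""
    greeting = "hi " name
    name     = ~"[a-z]+"
    """
)

tree = grammar.parse("hi bob")            # default rule is the first one defined
name_node = grammar["name"].parse("bob")  # any rule can be parsed on its own
```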
3 changes: 2 additions & 1 deletion stubs/parsimonious/parsimonious/nodes.pyi
@@ -19,6 +19,7 @@ class Node:
@property
def text(self) -> str: ...
def prettily(self, error: Node | None = ...) -> str: ...
def __repr__(self, top_level: bool = ...) -> str: ...

class RegexNode(Node):
match: Match[str]
@@ -27,7 +28,7 @@ class RuleDecoratorMeta(type): ...

class NodeVisitor(metaclass=RuleDecoratorMeta):
grammar: Grammar | Any
unwrapped_exceptions: tuple[type[Exception], ...]
unwrapped_exceptions: tuple[type[BaseException], ...]
def visit(self, node: Node) -> Any: ...
def generic_visit(self, node: Node, visited_children: Sequence[Any]) -> NoReturn: ...
def parse(self, text: str, pos: int = ...) -> Node: ...
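The unwrapped_exceptions annotation is widened from Exception to BaseException, so a visitor can list BaseException subclasses such as KeyboardInterrupt to be re-raised instead of wrapped in VisitationError. A minimal sketch (not part of the PR; the grammar and visitor are invented):

```python
from parsimonious import Grammar, NodeVisitor

grammar = Grammar(r'digits = ~"[0-9]+"')  # hypothetical grammar

class DigitsVisitor(NodeVisitor):
    # Now valid under tuple[type[BaseException], ...]; the old annotation
    # (tuple[type[Exception], ...]) rejected KeyboardInterrupt.
    unwrapped_exceptions = (KeyboardInterrupt,)

    def visit_digits(self, node, visited_children):
        return int(node.text)

    def generic_visit(self, node, visited_children):
        return visited_children or node

print(DigitsVisitor().visit(grammar.parse("123")))  # 123
```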