1 from _typeshed import StrPath
2 from lib2to3.pgen2 import grammar
3 from lib2to3.pgen2.tokenize import _TokenInfo
4 from typing import IO, Any, Iterable, Iterator, NoReturn
# Concrete Grammar subclass produced by ParserGenerator.make_grammar();
# adds nothing beyond the grammar.Grammar base.
class PgenGrammar(grammar.Grammar): ...
# Token stream for the grammar file being parsed (consumed via gettoken()).
generator: Iterator[_TokenInfo]
# Per-rule FIRST sets, filled in by addfirstsets()/calcfirst();
# maps rule name -> {label: 1}-style dict in CPython's pgen.
first: dict[str, dict[str, int]]
# Open and tokenize the grammar file; if `stream` is given it is used
# instead of opening `filename`. CPython's actual default is stream=None,
# so spell it out rather than hiding it behind `...`.
def __init__(self, filename: StrPath, stream: IO[str] | None = None) -> None: ...
# Build the final PgenGrammar object from the parsed grammar description.
def make_grammar(self) -> PgenGrammar: ...
# FIRST set for rule `name`, keyed by numeric label index in grammar `c`
# (values are all 1 — the dict is used as a set in CPython's pgen).
def make_first(self, c: PgenGrammar, name: str) -> dict[int, int]: ...
# Return the numeric label index in grammar `c` for a token or nonterminal name.
def make_label(self, c: PgenGrammar, label: str) -> int: ...
# Compute FIRST sets for every rule, populating self.first.
def addfirstsets(self) -> None: ...
# Compute the FIRST set for the single rule `name` (fills self.first[name]).
def calcfirst(self, name: str) -> None: ...
# Parse the whole grammar file. Returns (dfas keyed by rule name, str);
# in CPython's implementation the str is the start-symbol name.
def parse(self) -> tuple[dict[str, list[DFAState]], str]: ...
# Convert the NFA delimited by `start`/`finish` into a DFA (list of DFAState).
def make_dfa(self, start: NFAState, finish: NFAState) -> list[DFAState]: ...
# Debugging helper that prints the NFA for rule `name`. CPython's
# implementation returns None; the previous `-> list[DFAState]` annotation
# appears copy-pasted from make_dfa and is wrong.
def dump_nfa(self, name: str, start: NFAState, finish: NFAState) -> None: ...
# Debugging helper for the DFA of rule `name` (returns nothing).
def dump_dfa(self, name: str, dfa: Iterable[DFAState]) -> None: ...
# Minimize the DFA in place by collapsing equivalent states (see unifystate).
def simplify_dfa(self, dfa: list[DFAState]) -> None: ...
# Recursive-descent parser for a rule's right-hand side (alternatives
# separated by '|' in CPython's metagrammar); returns (start, finish) NFA pair.
def parse_rhs(self) -> tuple[NFAState, NFAState]: ...
# Parse one alternative (a sequence of items); returns (start, finish) NFA pair.
def parse_alt(self) -> tuple[NFAState, NFAState]: ...
# Parse one item (an atom, possibly repeated with '*'/'+' in CPython's
# metagrammar); returns (start, finish) NFA pair.
def parse_item(self) -> tuple[NFAState, NFAState]: ...
# Parse one atom (a name, string, or parenthesized/bracketed rhs in CPython's
# metagrammar); returns (start, finish) NFA pair.
def parse_atom(self) -> tuple[NFAState, NFAState]: ...
# Consume the current token if it matches `type` (and `value`, when given)
# and return its string value; mismatches go through raise_error.
def expect(self, type: int, value: Any | None = ...) -> str: ...
# Advance self.generator to the next significant token, updating the
# current-token state on self.
def gettoken(self) -> None: ...
# Format `msg % args` and raise at the current input position; never returns.
def raise_error(self, msg: str, *args: Any) -> NoReturn: ...
# Outgoing arcs as (label, target) pairs; a None label is an epsilon
# transition in CPython's pgen.
arcs: list[tuple[str | None, NFAState]]
# Create a state with no outgoing arcs.
def __init__(self) -> None: ...
# Append an arc from this state to `next`; label=None is an epsilon arc.
def addarc(self, next: NFAState, label: str | None = ...) -> None: ...
# The set of NFA states this DFA state represents (a dict used as a set
# in CPython's pgen; the values are insignificant).
nfaset: dict[NFAState, Any]
# Outgoing transitions: label string -> successor DFA state.
arcs: dict[str, DFAState]
# Wrap an NFA state set; `final` is the finishing NFA state used to decide
# whether this DFA state is accepting.
def __init__(self, nfaset: dict[NFAState, Any], final: NFAState) -> None: ...
# Record a transition on `label` to the DFA state `next`.
def addarc(self, next: DFAState, label: str) -> None: ...
# Redirect any arcs targeting `old` to `new` instead (DFA minimization step).
def unifystate(self, old: DFAState, new: DFAState) -> None: ...
# Structural equality on transitions; `Any` rather than `object` mirrors the
# runtime, which asserts the other operand is a DFAState.
def __eq__(self, other: Any) -> bool: ...
# Module entry point: parse a grammar file and return the resulting
# PgenGrammar. CPython's actual default is filename="Grammar.txt", so spell
# it out instead of hiding it behind `...`.
def generate_grammar(filename: StrPath = "Grammar.txt") -> PgenGrammar: ...