from lib2to3.pgen2.token import *  # noqa
from typing import Callable, Iterable, Iterator, Tuple

_Coord = Tuple[int, int]
_TokenEater = Callable[[int, str, _Coord, _Coord, str], None]
_TokenInfo = Tuple[int, str, _Coord, _Coord, str]
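
# A token is a 5-tuple of (type, string, start, end, logical_line), where
# start and end are (row, column) coordinates; _TokenEater is the callback
# signature that tokenize() feeds one such tuple at a time.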

class TokenError(Exception): ...
class StopTokenizing(Exception): ...

def tokenize(readline: Callable[[], str], tokeneater: _TokenEater = ...) -> None: ...
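
# A minimal sketch of the tokeneater callback style, assuming the runtime
# lib2to3.pgen2.tokenize module (lib2to3 as a whole is deprecated);
# `print_token` here is a hypothetical callback, not part of this API:
#
#     import io
#     from lib2to3.pgen2 import tokenize as pgen_tokenize
#
#     def print_token(type, string, start, end, line):
#         print(type, repr(string), start, end)
#
#     pgen_tokenize.tokenize(io.StringIO("x = 1\n").readline, print_token)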

class Untokenizer:
    tokens: list[str]
    prev_row: int
    prev_col: int
    def __init__(self) -> None: ...
    def add_whitespace(self, start: _Coord) -> None: ...
    def untokenize(self, iterable: Iterable[_TokenInfo]) -> str: ...
    def compat(self, token: tuple[int, str], iterable: Iterable[_TokenInfo]) -> None: ...
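
# Note: Untokenizer.untokenize() (above) rebuilds source text from 5-tuples,
# using add_whitespace() to restore column positions; compat() handles bare
# (type, string) 2-tuples that carry no position information.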

def untokenize(iterable: Iterable[_TokenInfo]) -> str: ...
def generate_tokens(readline: Callable[[], str]) -> Iterator[_TokenInfo]: ...
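
# A minimal round-trip sketch, assuming the runtime module and an
# io.StringIO source; since full 5-tuples carry positions, untokenize()
# should reproduce the original spacing:
#
#     import io
#     from lib2to3.pgen2.tokenize import generate_tokens, untokenize
#
#     tokens = list(generate_tokens(io.StringIO("x = 1\n").readline))
#     assert untokenize(tokens) == "x = 1\n"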