# Stubs for lib2to3.pgen2.tokenize (Python 3.6)
\r
# NOTE: Only elements from __all__ are present.
\r
4 from typing import Callable, Iterable, Iterator, List, Text, Tuple
\r
5 from lib2to3.pgen2.token import * # noqa
\r
8 _Coord = Tuple[int, int]
\r
9 _TokenEater = Callable[[int, Text, _Coord, _Coord, Text], None]
\r
10 _TokenInfo = Tuple[int, Text, _Coord, _Coord, Text]
\r
# Raised by the tokenizer on malformed or incomplete input (stub).
class TokenError(Exception): ...
\r
# Control-flow exception; presumably raised by a tokeneater to stop tokenizing early (stub).
class StopTokenizing(Exception): ...
\r
# Tokenize source obtained by repeatedly calling *readline*, passing each
# token record to *tokeneater*. The `...` default mirrors the runtime default
# per stub-file convention.
def tokenize(readline: Callable[[], Text], tokeneater: _TokenEater = ...) -> None: ...
\r
22 def __init__(self) -> None: ...
\r
# Emit whitespace needed to reach the (row, col) position *start* (stub).
def add_whitespace(self, start: _Coord) -> None: ...
\r
# Reconstruct source text from an iterable of token records (stub).
def untokenize(self, iterable: Iterable[_TokenInfo]) -> Text: ...
\r
# Compatibility path: handle a leading 2-tuple *token* plus the remaining
# full token records from *iterable* (stub).
def compat(self, token: Tuple[int, Text], iterable: Iterable[_TokenInfo]) -> None: ...
\r
# Module-level convenience: rebuild source text from token records (stub).
def untokenize(iterable: Iterable[_TokenInfo]) -> Text: ...
\r
# Yield one token record per token of the source obtained by repeatedly
# calling *readline* (stub).
def generate_tokens(
    readline: Callable[[], Text]
) -> Iterator[_TokenInfo]: ...
\r