from typing import Any, Callable, Generator, Iterable, List, NamedTuple, Optional, Union, Sequence, TextIO, Tuple
from builtins import open as _builtin_open
import sys
from token import *  # noqa: F403

COMMENT = ...  # type: int
ENCODING = ...  # type: int

_Position = Tuple[int, int]

_TokenInfo = NamedTuple('TokenInfo', [
    ('type', int),
    ('string', str),
    ('start', _Position),
    ('end', _Position),
    ('line', str),
])

class TokenInfo(_TokenInfo):
    @property
    def exact_type(self) -> int: ...

# Backwards compatible tokens can be sequences of a shorter length too
_Token = Union[TokenInfo, Sequence[Union[int, str, _Position]]]
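
# Illustrative only, not part of the stub: _Token covers both a full 5-field TokenInfo
# and a shorter sequence such as a bare (type, string) pair, which untokenize() also
# accepts, e.g.:
#
#     full = TokenInfo(NAME, 'x', (1, 0), (1, 1), 'x = 1\n')
#     short = (NAME, 'x')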

class TokenError(Exception): ...
class StopTokenizing(Exception): ...

class Untokenizer:
    tokens = ...  # type: List[str]
    prev_row = ...  # type: int
    prev_col = ...  # type: int
    encoding = ...  # type: Optional[str]
    def __init__(self) -> None: ...
    def add_whitespace(self, start: _Position) -> None: ...
    def untokenize(self, iterable: Iterable[_Token]) -> str: ...
    def compat(self, token: Sequence[Union[int, str]], iterable: Iterable[_Token]) -> None: ...

def untokenize(iterable: Iterable[_Token]) -> Any: ...
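
# Illustrative only, not part of the stub: untokenize() is annotated as returning Any
# because the runtime returns str for plain token tuples but bytes when the iterable
# begins with an ENCODING token, as the output of tokenize() does. A rough round-trip
# sketch:
#
#     import io, tokenize
#     toks = list(tokenize.tokenize(io.BytesIO(b"x = 1\n").readline))
#     assert tokenize.untokenize(toks) == b"x = 1\n"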

def detect_encoding(readline: Callable[[], bytes]) -> Tuple[str, Sequence[bytes]]: ...
def tokenize(readline: Callable[[], bytes]) -> Generator[TokenInfo, None, None]: ...
def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ...
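
# Illustrative only, not part of the stub: tokenize() takes a bytes readline and yields
# an ENCODING token first; generate_tokens() is the str-based equivalent. A rough sketch
# using io.StringIO:
#
#     import io, tokenize
#     for tok in tokenize.generate_tokens(io.StringIO("x = 1\n").readline):
#         print(tok.type, tok.string, tok.start, tok.end)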

if sys.version_info >= (3, 6):
    from os import PathLike
    def open(filename: Union[str, bytes, int, PathLike]) -> TextIO: ...
else:
    def open(filename: Union[str, bytes, int]) -> TextIO: ...
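
# Illustrative only, not part of the stub: unlike the builtin open(), tokenize.open()
# reads a source file in text mode using the encoding reported by detect_encoding().
# A rough sketch, assuming a file named 'script.py' exists:
#
#     import tokenize
#     with tokenize.open('script.py') as f:
#         source = f.read()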

# Names in __all__ with no definition: