2 from _typeshed import StrOrBytesPath
3 from builtins import open as _builtin_open
4 from token import * # noqa: F403
5 from typing import Any, Callable, Generator, Iterable, NamedTuple, Pattern, Sequence, TextIO, Tuple, Union
7 if sys.version_info < (3, 7):
12 cookie_re: Pattern[str]
13 blank_re: Pattern[bytes]
15 _Position = Tuple[int, int]
17 class _TokenInfo(NamedTuple):
24 class TokenInfo(_TokenInfo):
26 def exact_type(self) -> int: ...
# Backwards compatible tokens can be sequences of a shorter length too
# (PEP 604 `|` syntax for consistency with the rest of this stub, which
# already uses `int | str` elsewhere).
_Token = TokenInfo | Sequence[int | str | _Position]
# Raised by the tokenizer on malformed input, e.g. a multi-line token that is
# never completed (per the tokenize module documentation).
class TokenError(Exception): ...
class StopTokenizing(Exception): ... # undocumented
# Initialize the untokenizer's internal accumulation state; takes no arguments.
def __init__(self) -> None: ...
# Presumably pads the reconstructed output up to the (row, col) *start*
# position of the next token — see Lib/tokenize.py for the implementation.
def add_whitespace(self, start: _Position) -> None: ...
# Reconstruct source text from an iterable of tokens; always returns str here
# (contrast the module-level untokenize(), which is annotated as Any).
def untokenize(self, iterable: Iterable[_Token]) -> str: ...
# Fallback path for backwards-compatible short tokens — (type, string) pairs
# without position info (see the _Token comment above the alias definition).
def compat(self, token: Sequence[int | str], iterable: Iterable[_Token]) -> None: ...
# the docstring says "returns bytes" but is incorrect --
# if the ENCODING token is missing, it skips the encode,
# so the result may be either str or bytes; hence the Any return.
def untokenize(iterable: Iterable[_Token]) -> Any: ...
# Returns the detected encoding name plus the raw lines consumed while
# detecting it. CPython's implementation builds and returns a *list* of up to
# two bytes lines, so the second element is narrowed from Sequence[bytes] to
# list[bytes] (backward compatible: list is a Sequence).
def detect_encoding(readline: Callable[[], bytes]) -> tuple[str, list[bytes]]: ...
# Tokenize a binary stream: *readline* must yield bytes (e.g. a binary file
# object's readline). Per the tokenize docs, the first token is ENCODING.
def tokenize(readline: Callable[[], bytes]) -> Generator[TokenInfo, None, None]: ...
# Like tokenize(), but *readline* yields str — no encoding detection is done.
def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ... # undocumented
# Open a file in read-only text mode using the encoding detected by
# detect_encoding() (per the tokenize module docs). Shadows builtins.open,
# which this stub re-imports above as _builtin_open.
def open(filename: StrOrBytesPath | int) -> TextIO: ...
# Helpers that assemble regex source fragments (alternation, Kleene star,
# optional group). NOTE: this `any` deliberately shadows the builtin — it is
# part of the real tokenize module's namespace and cannot be renamed here.
def group(*choices: str) -> str: ... # undocumented
def any(*choices: str) -> str: ... # undocumented
def maybe(*choices: str) -> str: ... # undocumented
# Regex *source strings* (not compiled patterns) from which the tokenizer's
# master pattern is assembled in Lib/tokenize.py. Number literals first:
Whitespace: str # undocumented
Comment: str # undocumented
Ignore: str # undocumented
Name: str # undocumented
Hexnumber: str # undocumented
Binnumber: str # undocumented
Octnumber: str # undocumented
Decnumber: str # undocumented
Intnumber: str # undocumented
Exponent: str # undocumented
Pointfloat: str # undocumented
Expfloat: str # undocumented
Floatnumber: str # undocumented
Imagnumber: str # undocumented
Number: str # undocumented
# Returns the set of valid string-literal prefix spellings (presumably all
# case/order combinations, e.g. "", "r", "Rb", "f" — verify in Lib/tokenize.py).
def _all_string_prefixes() -> set[str]: ... # undocumented
# More regex source fragments: string-literal prefixes and the single-,
# double-, and triple-quoted string bodies built from them.
StringPrefix: str # undocumented

Single: str # undocumented
Double: str # undocumented
Single3: str # undocumented
Double3: str # undocumented
Triple: str # undocumented
String: str # undocumented
# Only present before Python 3.7 (hence the version guard).
if sys.version_info < (3, 7):
    Operator: str # undocumented
    Bracket: str # undocumented
# Remaining regex source fragments and tokenizer tables. endpats maps a quote
# opener to the pattern matching the rest of that string; single_quoted and
# triple_quoted hold the recognized quote spellings.
Special: str # undocumented
Funny: str # undocumented

PlainToken: str # undocumented
Token: str # undocumented

ContStr: str # undocumented
PseudoExtras: str # undocumented
PseudoToken: str # undocumented

endpats: dict[str, str] # undocumented
single_quoted: set[str] # undocumented
triple_quoted: set[str] # undocumented

tabsize: int # undocumented