import sys
from _typeshed import StrOrBytesPath
from builtins import open as _builtin_open
from token import *  # noqa: F403
from typing import Any, Callable, Generator, Iterable, NamedTuple, Pattern, Sequence, TextIO, Tuple, Union

if sys.version_info < (3, 7):
    COMMENT: int
    NL: int
    ENCODING: int

cookie_re: Pattern[str]
blank_re: Pattern[bytes]

_Position = Tuple[int, int]

class _TokenInfo(NamedTuple):
    type: int
    string: str
    start: _Position
    end: _Position
    line: str

class TokenInfo(_TokenInfo):
    @property
    def exact_type(self) -> int: ...
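
# Illustrative example (not part of the stub): .type is the generic
# token.OP for every operator, while .exact_type distinguishes them:
#     >>> import io, token, tokenize
#     >>> tok = next(tokenize.generate_tokens(io.StringIO("(").readline))
#     >>> (tok.type == token.OP, tok.exact_type == token.LPAR)
#     (True, True)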

# Backwards compatible tokens can be sequences of a shorter length too
_Token = Union[TokenInfo, Sequence[Union[int, str, _Position]]]
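
# Illustrative example (not part of the stub): untokenize() also accepts
# bare (type, string) pairs; the round-tripped source is equivalent, but
# its exact spacing may vary by Python version:
#     >>> import token, tokenize
#     >>> tokenize.untokenize([(token.NAME, "x"), (token.OP, "="), (token.NUMBER, "1")])
#     'x =1 '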

class TokenError(Exception): ...
class StopTokenizing(Exception): ...  # undocumented

class Untokenizer:
    tokens: list[str]
    prev_row: int
    prev_col: int
    encoding: str | None
    def __init__(self) -> None: ...
    def add_whitespace(self, start: _Position) -> None: ...
    def untokenize(self, iterable: Iterable[_Token]) -> str: ...
    def compat(self, token: Sequence[int | str], iterable: Iterable[_Token]) -> None: ...

# the docstring says "returns bytes" but is incorrect --
# if the ENCODING token is missing, it skips the encode
def untokenize(iterable: Iterable[_Token]) -> Any: ...
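
# Illustrative example (not part of the stub): tokenize() emits an ENCODING
# token, so round-tripping its output yields bytes; generate_tokens() does
# not, so untokenize() returns str instead:
#     >>> import io, tokenize
#     >>> type(tokenize.untokenize(tokenize.tokenize(io.BytesIO(b"x = 1\n").readline)))
#     <class 'bytes'>
#     >>> type(tokenize.untokenize(tokenize.generate_tokens(io.StringIO("x = 1\n").readline)))
#     <class 'str'>
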
def detect_encoding(readline: Callable[[], bytes]) -> tuple[str, Sequence[bytes]]: ...
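
# Illustrative example (not part of the stub): detect_encoding() looks for a
# UTF-8 BOM or a PEP 263 coding cookie in the first two lines and returns the
# normalized encoding name plus the raw lines it consumed:
#     >>> import io, tokenize
#     >>> tokenize.detect_encoding(io.BytesIO(b"# -*- coding: latin-1 -*-\n").readline)
#     ('iso-8859-1', [b'# -*- coding: latin-1 -*-\n'])
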
def tokenize(readline: Callable[[], bytes]) -> Generator[TokenInfo, None, None]: ...
def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ...  # undocumented
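
# Illustrative example (not part of the stub): tokenize() takes a bytes
# readline and detects the encoding itself; generate_tokens() takes a str
# readline:
#     >>> import io, tokenize
#     >>> [t.string for t in tokenize.generate_tokens(io.StringIO("x = 1\n").readline)]
#     ['x', '=', '1', '\n', '']
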
def open(filename: StrOrBytesPath | int) -> TextIO: ...
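
# Illustrative example (not part of the stub): tokenize.open() detects the
# file's encoding via detect_encoding() and returns a text-mode file object
# ("some_module.py" is a hypothetical path):
#     >>> import tokenize
#     >>> f = tokenize.open("some_module.py")
#     >>> f.encoding  # 'utf-8' unless a BOM or coding cookie says otherwise
#     'utf-8'
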
def group(*choices: str) -> str: ...  # undocumented
def any(*choices: str) -> str: ...  # undocumented
def maybe(*choices: str) -> str: ...  # undocumented

Whitespace: str  # undocumented
Comment: str  # undocumented
Ignore: str  # undocumented
Name: str  # undocumented

Hexnumber: str  # undocumented
Binnumber: str  # undocumented
Octnumber: str  # undocumented
Decnumber: str  # undocumented
Intnumber: str  # undocumented
Exponent: str  # undocumented
Pointfloat: str  # undocumented
Expfloat: str  # undocumented
Floatnumber: str  # undocumented
Imagnumber: str  # undocumented
Number: str  # undocumented
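
# Illustrative example (not part of the stub): these are regular-expression
# source strings from which the tokenizer is built, e.g.:
#     >>> import re, tokenize
#     >>> bool(re.fullmatch(tokenize.Number, "0xFF"))
#     True
#     >>> bool(re.fullmatch(tokenize.Imagnumber, "3.14j"))
#     True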

def _all_string_prefixes() -> set[str]: ...  # undocumented

StringPrefix: str  # undocumented

Single: str  # undocumented
Double: str  # undocumented
Single3: str  # undocumented
Double3: str  # undocumented
Triple: str  # undocumented
String: str  # undocumented

if sys.version_info < (3, 7):
    Operator: str  # undocumented
    Bracket: str  # undocumented

Special: str  # undocumented
Funny: str  # undocumented

PlainToken: str  # undocumented
Token: str  # undocumented

ContStr: str  # undocumented
PseudoExtras: str  # undocumented
PseudoToken: str  # undocumented

endpats: dict[str, str]  # undocumented
single_quoted: set[str]  # undocumented
triple_quoted: set[str]  # undocumented

tabsize: int  # undocumented
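
# Illustrative examples (not part of the stub) for the tables above:
#     >>> import tokenize
#     >>> '"""' in tokenize.triple_quoted
#     True
#     >>> tokenize.tabsize
#     8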