massive update, probably broken — adds the typeshed stub for the stdlib `tokenize` module (Python 3)
[dotfiles/.git] / .config / coc / extensions / coc-python-data / languageServer.0.5.59 / Typeshed / stdlib / 3 / tokenize.pyi
diff --git a/.config/coc/extensions/coc-python-data/languageServer.0.5.59/Typeshed/stdlib/3/tokenize.pyi b/.config/coc/extensions/coc-python-data/languageServer.0.5.59/Typeshed/stdlib/3/tokenize.pyi
new file mode 100644 (file)
index 0000000..9508c64
--- /dev/null
@@ -0,0 +1,114 @@
+from typing import Any, Callable, Generator, Iterable, List, NamedTuple, Optional, Union, Sequence, TextIO, Tuple\r
+from builtins import open as _builtin_open\r
+import sys\r
+from token import *  # noqa: F403\r
+\r
+COMMENT = ...  # type: int\r
+NL = ...  # type: int\r
+ENCODING = ...  # type: int\r
+\r
+_Position = Tuple[int, int]\r
+\r
+_TokenInfo = NamedTuple('TokenInfo', [\r
+    ('type', int),\r
+    ('string', str),\r
+    ('start', _Position),\r
+    ('end', _Position),\r
+    ('line', str)\r
+])\r
+\r
+class TokenInfo(_TokenInfo):\r
+    @property\r
+    def exact_type(self) -> int: ...\r
+\r
+# Backwards compatible tokens can be sequences of a shorter length too\r
+_Token = Union[TokenInfo, Sequence[Union[int, str, _Position]]]\r
+\r
+class TokenError(Exception): ...\r
+class StopTokenizing(Exception): ...\r
+\r
+class Untokenizer:\r
+    tokens = ...  # type: List[str]\r
+    prev_row = ...  # type: int\r
+    prev_col = ...  # type: int\r
+    encoding = ...  # type: Optional[str]\r
+    def __init__(self) -> None: ...\r
+    def add_whitespace(self, start: _Position) -> None: ...\r
+    def untokenize(self, iterable: Iterable[_Token]) -> str: ...\r
+    def compat(self, token: Sequence[Union[int, str]], iterable: Iterable[_Token]) -> None: ...\r
+\r
+def untokenize(iterable: Iterable[_Token]) -> Any: ...\r
+def detect_encoding(readline: Callable[[], bytes]) -> Tuple[str, Sequence[bytes]]: ...\r
+def tokenize(readline: Callable[[], bytes]) -> Generator[TokenInfo, None, None]: ...\r
+def generate_tokens(readline: Callable[[], str]) -> Generator[TokenInfo, None, None]: ...\r
+\r
+if sys.version_info >= (3, 6):\r
+    from os import PathLike\r
+    def open(filename: Union[str, bytes, int, PathLike]) -> TextIO: ...\r
+else:\r
+    def open(filename: Union[str, bytes, int]) -> TextIO: ...\r
+\r
+# Names in __all__ with no definition:\r
+#   AMPER\r
+#   AMPEREQUAL\r
+#   ASYNC\r
+#   AT\r
+#   ATEQUAL\r
+#   AWAIT\r
+#   CIRCUMFLEX\r
+#   CIRCUMFLEXEQUAL\r
+#   COLON\r
+#   COMMA\r
+#   DEDENT\r
+#   DOT\r
+#   DOUBLESLASH\r
+#   DOUBLESLASHEQUAL\r
+#   DOUBLESTAR\r
+#   DOUBLESTAREQUAL\r
+#   ELLIPSIS\r
+#   ENDMARKER\r
+#   EQEQUAL\r
+#   EQUAL\r
+#   ERRORTOKEN\r
+#   GREATER\r
+#   GREATEREQUAL\r
+#   INDENT\r
+#   ISEOF\r
+#   ISNONTERMINAL\r
+#   ISTERMINAL\r
+#   LBRACE\r
+#   LEFTSHIFT\r
+#   LEFTSHIFTEQUAL\r
+#   LESS\r
+#   LESSEQUAL\r
+#   LPAR\r
+#   LSQB\r
+#   MINEQUAL\r
+#   MINUS\r
+#   NAME\r
+#   NEWLINE\r
+#   NOTEQUAL\r
+#   NT_OFFSET\r
+#   NUMBER\r
+#   N_TOKENS\r
+#   OP\r
+#   PERCENT\r
+#   PERCENTEQUAL\r
+#   PLUS\r
+#   PLUSEQUAL\r
+#   RARROW\r
+#   RBRACE\r
+#   RIGHTSHIFT\r
+#   RIGHTSHIFTEQUAL\r
+#   RPAR\r
+#   RSQB\r
+#   SEMI\r
+#   SLASH\r
+#   SLASHEQUAL\r
+#   STAR\r
+#   STAREQUAL\r
+#   STRING\r
+#   TILDE\r
+#   VBAR\r
+#   VBAREQUAL\r
+#   tok_name\r