--- /dev/null
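+# Stub declarations for jinja2's lexer module. All values are stubbed as Any;
+# the commented examples below sketch assumed runtime behavior and are
+# illustrative only, not part of the stub's API surface.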
+from typing import Any, Optional
+
+whitespace_re = ... # type: Any
+string_re = ... # type: Any
+integer_re = ... # type: Any
+name_re = ... # type: Any
+float_re = ... # type: Any
+newline_re = ... # type: Any
+TOKEN_ADD = ... # type: Any
+TOKEN_ASSIGN = ... # type: Any
+TOKEN_COLON = ... # type: Any
+TOKEN_COMMA = ... # type: Any
+TOKEN_DIV = ... # type: Any
+TOKEN_DOT = ... # type: Any
+TOKEN_EQ = ... # type: Any
+TOKEN_FLOORDIV = ... # type: Any
+TOKEN_GT = ... # type: Any
+TOKEN_GTEQ = ... # type: Any
+TOKEN_LBRACE = ... # type: Any
+TOKEN_LBRACKET = ... # type: Any
+TOKEN_LPAREN = ... # type: Any
+TOKEN_LT = ... # type: Any
+TOKEN_LTEQ = ... # type: Any
+TOKEN_MOD = ... # type: Any
+TOKEN_MUL = ... # type: Any
+TOKEN_NE = ... # type: Any
+TOKEN_PIPE = ... # type: Any
+TOKEN_POW = ... # type: Any
+TOKEN_RBRACE = ... # type: Any
+TOKEN_RBRACKET = ... # type: Any
+TOKEN_RPAREN = ... # type: Any
+TOKEN_SEMICOLON = ... # type: Any
+TOKEN_SUB = ... # type: Any
+TOKEN_TILDE = ... # type: Any
+TOKEN_WHITESPACE = ... # type: Any
+TOKEN_FLOAT = ... # type: Any
+TOKEN_INTEGER = ... # type: Any
+TOKEN_NAME = ... # type: Any
+TOKEN_STRING = ... # type: Any
+TOKEN_OPERATOR = ... # type: Any
+TOKEN_BLOCK_BEGIN = ... # type: Any
+TOKEN_BLOCK_END = ... # type: Any
+TOKEN_VARIABLE_BEGIN = ... # type: Any
+TOKEN_VARIABLE_END = ... # type: Any
+TOKEN_RAW_BEGIN = ... # type: Any
+TOKEN_RAW_END = ... # type: Any
+TOKEN_COMMENT_BEGIN = ... # type: Any
+TOKEN_COMMENT_END = ... # type: Any
+TOKEN_COMMENT = ... # type: Any
+TOKEN_LINESTATEMENT_BEGIN = ... # type: Any
+TOKEN_LINESTATEMENT_END = ... # type: Any
+TOKEN_LINECOMMENT_BEGIN = ... # type: Any
+TOKEN_LINECOMMENT_END = ... # type: Any
+TOKEN_LINECOMMENT = ... # type: Any
+TOKEN_DATA = ... # type: Any
+TOKEN_INITIAL = ... # type: Any
+TOKEN_EOF = ... # type: Any
+operators = ... # type: Any
+reverse_operators = ... # type: Any
+operator_re = ... # type: Any
+ignored_tokens = ... # type: Any
+ignore_if_empty = ... # type: Any
+
+def describe_token(token): ...
+def describe_token_expr(expr): ...
+def count_newlines(value): ...
+def compile_rules(environment): ...
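+
+# Illustrative usage of the helpers above, assuming jinja2's runtime
+# implementation (the stubs only declare the names):
+#     describe_token(Token(1, TOKEN_NAME, "foo"))  # -> "foo"
+#     describe_token_expr("name:endfor")           # -> "endfor"
+#     count_newlines("a\nb\nc")                    # -> 2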
+
+class Failure:
+    message = ... # type: Any
+    error_class = ... # type: Any
+    def __init__(self, message, cls: Any = ...) -> None: ...
+    def __call__(self, lineno, filename): ...
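+
+# A Failure records a message and an error class (TemplateSyntaxError by
+# default in jinja2's implementation); calling it with (lineno, filename)
+# raises that error rather than returning a value.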
+
+class Token(tuple):
+    lineno = ... # type: Any
+    type = ... # type: Any
+    value = ... # type: Any
+    def __new__(cls, lineno, type, value): ...
+    def test(self, expr): ...
+    def test_any(self, *iterable): ...
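+
+# A Token behaves as a (lineno, type, value) triple. Illustrative usage,
+# assuming the runtime implementation:
+#     tok = Token(1, TOKEN_NAME, "foo")
+#     tok.test("name")                   # True: matches on token type
+#     tok.test("name:foo")               # True: matches on type and value
+#     tok.test_any("string", "integer")  # False: no expression matches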
+
+class TokenStreamIterator:
+    stream = ... # type: Any
+    def __init__(self, stream) -> None: ...
+    def __iter__(self): ...
+    def __next__(self): ...
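+
+# In jinja2's implementation this iterator closes the stream and stops once
+# the EOF token is reached, so `for token in stream` never yields TOKEN_EOF.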
+
+class TokenStream:
+    name = ... # type: Any
+    filename = ... # type: Any
+    closed = ... # type: bool
+    current = ... # type: Any
+    def __init__(self, generator, name, filename) -> None: ...
+    def __iter__(self): ...
+    def __bool__(self): ...
+    __nonzero__ = ... # type: Any
+    eos = ... # type: Any
+    def push(self, token): ...
+    def look(self): ...
+    def skip(self, n: int = ...): ...
+    def next_if(self, expr): ...
+    def skip_if(self, expr): ...
+    def __next__(self): ...
+    def close(self): ...
+    def expect(self, expr): ...
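+
+# Illustrative TokenStream usage (per jinja2's runtime API; streams are
+# normally produced by Lexer.tokenize rather than constructed directly):
+#     stream.current           # token under the cursor
+#     stream.look()            # peek at the next token without consuming it
+#     stream.skip_if("comma")  # consume the current token if it matches
+#     stream.expect("name")    # consume and return it, or raise
+#                              # TemplateSyntaxError on a mismatch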
+
+def get_lexer(environment): ...
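+
+# get_lexer returns the Lexer for an environment; jinja2 caches lexers keyed
+# by the environment's syntax settings, so environments configured alike
+# share a single Lexer instance.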
+
+class Lexer:
+    newline_sequence = ... # type: Any
+    keep_trailing_newline = ... # type: Any
+    rules = ... # type: Any
+    def __init__(self, environment) -> None: ...
+    def tokenize(self, source, name: Optional[Any] = ..., filename: Optional[Any] = ..., state: Optional[Any] = ...): ...
+    def wrap(self, stream, name: Optional[Any] = ..., filename: Optional[Any] = ...): ...
+    def tokeniter(self, source, name, filename: Optional[Any] = ..., state: Optional[Any] = ...): ...
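+
+# Illustrative end-to-end use, assuming jinja2's runtime implementation:
+#     from jinja2 import Environment
+#     env = Environment()
+#     stream = get_lexer(env).tokenize("{{ user.name }}", name="demo")
+#     for token in stream:
+#         print(token.lineno, token.type, token.value)
+#     # yields: variable_begin, name, dot, name, variable_end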