1 from typing import Any, Tuple
# Token-type constants. In the implementation module these are interned
# string markers (e.g. "block_begin") used as the `type` field of tokens.
TOKEN_BLOCK_BEGIN: str
TOKEN_VARIABLE_BEGIN: str
TOKEN_VARIABLE_END: str
TOKEN_COMMENT_BEGIN: str
TOKEN_COMMENT_END: str
TOKEN_LINESTATEMENT_BEGIN: str
TOKEN_LINESTATEMENT_END: str
TOKEN_LINECOMMENT_BEGIN: str
TOKEN_LINECOMMENT_END: str
TOKEN_LINECOMMENT: str
# Inverse of the operator table: maps an operator token type back to the
# operator's source text.
reverse_operators: dict[str, str]
# Return a human-readable description of *token*, suitable for error messages.
def describe_token(token: Token) -> str: ...
# Describe a token expression (e.g. "name:for") in human-readable form.
def describe_token_expr(expr: str) -> str: ...
# Count the newlines contained in *value*.
def count_newlines(value: str) -> int: ...
# Compile the environment-dependent lexing rules consumed by the Lexer.
# NOTE(review): the rule-list shape is not visible from this stub -- left
# unannotated; confirm against the implementation module before tightening.
def compile_rules(environment): ...
    # Store the error *message* and the exception class used when the
    # failure is triggered.
    def __init__(self, message: str, cls: Any = ...) -> None: ...
    # Trigger the stored failure for the given position.
    # NOTE(review): presumably raises the stored exception class with
    # *lineno*/*filename* attached -- confirm in the implementation.
    def __call__(self, lineno: int, filename): ...
class Token(Tuple[int, Any, Any]):
    """A single lexer token, a (lineno, type, value) tuple subclass."""
    def __new__(cls, lineno: int, type, value): ...
    # True if the token matches the token expression *expr*
    # (a bare type like "name" or a "type:value" pair).
    def test(self, expr: str) -> bool: ...
    # True if any of the given token expressions matches.
    def test_any(self, *iterable) -> bool: ...
class TokenStreamIterator:
    """Iterator adapter that yields the tokens of a wrapped token stream."""
    def __init__(self, stream) -> None: ...
    def __iter__(self) -> TokenStreamIterator: ...
    def __next__(self) -> Token: ...
    # Wrap the token *generator*; *name*/*filename* identify the template
    # the tokens came from (used in error reporting).
    def __init__(self, generator, name, filename) -> None: ...
    # Iteration is delegated to a TokenStreamIterator over this stream.
    def __iter__(self) -> TokenStreamIterator: ...
    # NOTE(review): presumably true while the stream is not exhausted
    # (not at the EOF token) -- confirm in the implementation.
    def __bool__(self) -> bool: ...
    # Push *token* back onto the stream so it is returned again next.
    def push(self, token: Token) -> None: ...
    # Discard the next *n* tokens.
    def skip(self, n: int = ...) -> None: ...
    # Consume and return the next token if it matches the token
    # expression *expr*; otherwise leave the stream untouched.
    def next_if(self, expr: str): ...
    # Like next_if(), but only report whether a token was consumed.
    def skip_if(self, expr: str) -> bool: ...
    # Advance the stream and return the next token.
    def __next__(self) -> Token: ...
    # Return the next token, which is expected to match *expr*; a
    # mismatch is reported as a template syntax error by the implementation.
    def expect(self, expr: str) -> Token: ...
# Return the lexer for *environment*'s lexing configuration.
# NOTE(review): presumably cached per configuration -- confirm in the
# implementation module.
def get_lexer(environment): ...
    # Newline sequence used when normalizing newlines (e.g. "\n", "\r\n").
    newline_sequence: str
    # Whether a single trailing newline of the template source is kept.
    keep_trailing_newline: bool
    # Build the lexer state (delimiters, rules) from *environment*'s settings.
    def __init__(self, environment) -> None: ...
    # Tokenize *source* and return a stream of Token objects; *name*,
    # *filename* and *state* are optional context for error reporting and
    # initial lexer state.
    def tokenize(self, source, name: Any | None = ..., filename: Any | None = ..., state: Any | None = ...): ...
    # NOTE(review): presumably wraps the raw tokeniter() output, converting
    # raw tuples into Token instances -- confirm in the implementation.
    def wrap(self, stream, name: Any | None = ..., filename: Any | None = ...): ...
    # Low-level tokenizer; NOTE(review): presumably a generator yielding raw
    # (lineno, type, value) tuples for *source* -- confirm in the implementation.
    def tokeniter(self, source, name, filename: Any | None = ..., state: Any | None = ...): ...