from collections.abc import Iterable, Iterator, Sequence
from typing import Any, Tuple

from pygments.token import _TokenType
from pygments.util import Future

class LexerMeta(type):
    def __new__(cls, name, bases, d): ...
    def analyse_text(self, text) -> None: ...  # actually defined in class Lexer
    # ClassVars of Lexer, but same situation as with StyleMeta and Style
    name: str
    aliases: Sequence[str]  # not intended to be mutable
    filenames: Sequence[str]
    alias_filenames: Sequence[str]
    mimetypes: Sequence[str]

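# Runtime note (not part of the declarations above): LexerMeta mainly wraps a
# subclass's analyse_text with pygments.util.make_analysator, which coerces
# the return value to a float clamped to the range 0.0-1.0.
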
class Lexer(metaclass=LexerMeta):
    options: Any
    stripnl: Any
    stripall: Any
    ensurenl: Any
    tabsize: Any
    encoding: Any
    filters: Any
    def __init__(self, **options) -> None: ...
    def add_filter(self, filter_, **options) -> None: ...
    def get_tokens(self, text: str, unfiltered: bool = ...) -> Iterator[tuple[_TokenType, str]]: ...
    def get_tokens_unprocessed(self, text: str) -> Iterator[tuple[int, _TokenType, str]]: ...

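# Usage sketch (illustrative only; PythonLexer is a concrete lexer from
# pygments.lexers, not declared in this stub):
#
#     from pygments.lexers import PythonLexer
#
#     for ttype, value in PythonLexer().get_tokens("print('hi')\n"):
#         print(ttype, repr(value))
#
# get_tokens_unprocessed() yields the same pairs prefixed with each token's
# character offset in the input, and bypasses the filter machinery.
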
class DelegatingLexer(Lexer):
    root_lexer: Any
    language_lexer: Any
    needle: Any
    def __init__(self, _root_lexer, _language_lexer, _needle=..., **options) -> None: ...
    def get_tokens_unprocessed(self, text: str) -> Iterator[tuple[int, _TokenType, str]]: ...

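# DelegatingLexer lexes the input with _language_lexer first, then re-lexes
# everything that lexer emitted as the _needle token type (Token.Other by
# default) with _root_lexer. Hedged sketch, roughly how pygments composes
# template lexers such as RHTML (ERB embedded in HTML):
#
#     from pygments.lexers import ErbLexer, HtmlLexer
#
#     class MyRhtmlLexer(DelegatingLexer):
#         def __init__(self, **options):
#             super().__init__(HtmlLexer, ErbLexer, **options)
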
class include(str): ...

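# include('some-state') inside a state's rule list splices in all rules of
# the named state. Hedged sketch of a tokens table entry:
#
#     'root': [
#         include('comments'),
#         (r'\d+', Number),
#     ]
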
class _inherit: ...

inherit: Any

class combined(Tuple[Any, ...]):
    def __new__(cls, *args): ...
    def __init__(self, *args) -> None: ...

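# combined('state-a', 'state-b') is used in the new-state slot of a token
# rule to push an anonymous state made of the named states' rules. Hedged
# sketch:
#
#     (r'"', String, combined('escapes', 'double-quoted'))
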
class _PseudoMatch:
    def __init__(self, start, text) -> None: ...
    def start(self, arg: Any | None = ...): ...
    def end(self, arg: Any | None = ...): ...
    def group(self, arg: Any | None = ...): ...
    def groups(self): ...
    def groupdict(self): ...

def bygroups(*args): ...

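# bygroups() assigns one token type per regex capture group in a token rule.
# Hedged sketch:
#
#     (r'(\w+)(\s*)(=)', bygroups(Name.Variable, Whitespace, Operator))
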
class _This: ...

this: Any

def using(_other, **kwargs): ...

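# using(SomeLexer) builds a callback that re-lexes the matched text with
# another lexer; using(this) re-enters the current lexer. Hedged sketch:
#
#     (r'<script>(.*?)</script>', using(JavascriptLexer))
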
class default:
    state: Any
    def __init__(self, state) -> None: ...

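# default(new_state) is a rule that consumes no text and only changes state,
# e.g. default('#pop') falls out of a state when no other rule applies.
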
class words(Future):
    words: Any
    prefix: Any
    suffix: Any
    def __init__(self, words, prefix: str = ..., suffix: str = ...) -> None: ...

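# words(...) is resolved lazily (hence the Future base class) into a single
# optimized regex matching any of the given literal words. Hedged sketch:
#
#     (words(('if', 'elif', 'else'), suffix=r'\b'), Keyword)
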
class RegexLexerMeta(LexerMeta):
    def process_tokendef(cls, name, tokendefs: Any | None = ...): ...
    def get_tokendefs(cls): ...
    def __call__(cls, *args, **kwds): ...

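# Runtime note: RegexLexerMeta compiles a subclass's `tokens` table lazily;
# __call__ triggers process_tokendef on first instantiation, and get_tokendefs
# merges token definitions along the MRO (which is what makes `inherit` work).
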
class RegexLexer(Lexer, metaclass=RegexLexerMeta):
    flags: Any
    tokens: Any
    def get_tokens_unprocessed(self, text: str, stack: Iterable[str] = ...) -> Iterator[tuple[int, _TokenType, str]]: ...

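# Minimal RegexLexer sketch (hypothetical lexer, kept as a comment so the
# stub stays syntactically valid):
#
#     from pygments.lexer import RegexLexer
#     from pygments.token import Keyword, Name, Operator, Whitespace
#
#     class IniishLexer(RegexLexer):
#         name = "Ini-ish"
#         tokens = {
#             "root": [
#                 (r"\[[^\]]*\]", Keyword),
#                 (r"=", Operator),
#                 (r"[^\s=\[\]]+", Name),
#                 (r"\s+", Whitespace),
#             ],
#         }
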
class LexerContext:
    text: Any
    pos: Any
    end: Any
    stack: Any
    def __init__(self, text, pos, stack: Any | None = ..., end: Any | None = ...) -> None: ...

class ExtendedRegexLexer(RegexLexer):
    def get_tokens_unprocessed(self, text: str | None = ..., context: LexerContext | None = ...) -> Iterator[tuple[int, _TokenType, str]]: ...  # type: ignore[override]

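# ExtendedRegexLexer hands a LexerContext to rule callbacks so they can read
# and move the scan position themselves. Hedged callback sketch:
#
#     def heredoc_callback(lexer, match, ctx):
#         yield match.start(), String.Heredoc, match.group()
#         ctx.pos = match.end()
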
class ProfilingRegexLexerMeta(RegexLexerMeta): ...

class ProfilingRegexLexer(RegexLexer, metaclass=ProfilingRegexLexerMeta):
    def get_tokens_unprocessed(self, text: str, stack: Iterable[str] = ...) -> Iterator[tuple[int, _TokenType, str]]: ...
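
# ProfilingRegexLexer is a development aid: using it as the base class in
# place of RegexLexer times every regex and prints per-pattern statistics
# after lexing, which helps track down pathologically slow token rules.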