1 from collections.abc import Iterable, Iterator
4 from pygments.filter import Filter
5 from pygments.lexer import Lexer
6 from pygments.token import _TokenType
# Look up a built-in filter class by its registered name.
# Returns None when no filter is registered under *filtername*.
def find_filter_class(filtername: str) -> type[Filter] | None: ...
# Instantiate the filter registered as *filtername*, forwarding *options*
# to its constructor.  Raises pygments.util.ClassNotFound for unknown names.
def get_filter_by_name(filtername: str, **options) -> Filter: ...
# Yield the registered names of all built-in filters.
def get_all_filters() -> Iterator[str]: ...
class CodeTagFilter(Filter):
    """Highlight special code tags (e.g. XXX, TODO, FIXME) in comments and
    docstrings by re-emitting them as ``Comment.Special`` tokens.

    Option: ``codetags`` -- list of tag strings to recognize.
    """
    def __init__(self, **options) -> None: ...
    def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
class SymbolFilter(Filter):
    """Convert mathematical symbol names, as written in Isabelle or LaTeX
    source, into the corresponding Unicode characters.

    Option: ``lang`` -- which symbol notation to translate
    (``"isabelle"`` or ``"latex"``).
    """
    def __init__(self, **options) -> None: ...
    def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
class KeywordCaseFilter(Filter):
    """Rewrite the text of keyword tokens to a uniform case.

    Option: ``case`` -- one of ``"lower"``, ``"upper"`` or ``"capitalize"``.
    """
    def __init__(self, **options) -> None: ...
    def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
class NameHighlightFilter(Filter):
    """Re-emit selected ``Name`` tokens with a different token type so they
    are highlighted distinctly.

    Options: ``names`` -- the names to match; ``tokentype`` -- token type
    to assign to matched names.
    """
    def __init__(self, **options) -> None: ...
    def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
# Exception raised by RaiseOnErrorTokenFilter when an Error token is seen.
class ErrorToken(Exception): ...
class RaiseOnErrorTokenFilter(Filter):
    """Raise an exception as soon as the token stream contains an ``Error``
    token (useful to detect lexing failures eagerly).

    Option: ``excclass`` -- exception class to raise (defaults to
    ``ErrorToken``).
    """
    def __init__(self, **options) -> None: ...
    def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
class VisibleWhitespaceFilter(Filter):
    """Replace whitespace in the token stream with visible substitute
    characters.

    Options: ``spaces``, ``tabs``, ``newlines`` -- enable replacement (or
    give a replacement string); ``tabsize`` -- width used when expanding
    tabs; ``wstokentype`` -- whether to emit replacements as
    ``Whitespace`` tokens.
    """
    def __init__(self, **options) -> None: ...
    def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
class GobbleFilter(Filter):
    """Strip ("gobble") a fixed number of characters from the start of each
    line of the token stream, e.g. to remove leading indentation.

    Option: ``n`` -- number of characters to remove per line.
    """
    def __init__(self, **options) -> None: ...
    # Remove up to *left* leading characters from *value*; returns the
    # remaining text and how many characters are still left to gobble.
    def gobble(self, value: str, left: int) -> tuple[str, int]: ...
    def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
class TokenMergeFilter(Filter):
    """Merge consecutive tokens of the same token type into a single token,
    concatenating their text.  Takes no options beyond the base Filter's.
    """
    def __init__(self, **options) -> None: ...
    def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...