Massive update — possibly broken; review before relying on these files.
[dotfiles/.git] / .config / coc / extensions / node_modules / coc-pyright / node_modules / pyright / dist / typeshed-fallback / stubs / Pygments / pygments / filters / __init__.pyi
1 from collections.abc import Iterable, Iterator
2 from typing import Any
3
4 from pygments.filter import Filter
5 from pygments.lexer import Lexer
6 from pygments.token import _TokenType
7
# Look up a filter class by name; returns None when the name is unknown.
def find_filter_class(filtername: str) -> type[Filter] | None: ...

# Instantiate the named filter with the given options.
# NOTE(review): presumably raises ClassNotFound for unknown names — verify against the pygments source.
def get_filter_by_name(filtername: str, **options: Any) -> Filter: ...

# Iterate over the names of all registered filters — presumably including plugin filters; TODO confirm.
def get_all_filters() -> Iterator[str]: ...
11
class CodeTagFilter(Filter):
    """Stub for the filter that highlights special code-tag words (e.g. XXX, TODO)."""
    # Pattern built from the ``codetags`` option — presumably re.Pattern[str]; TODO confirm before narrowing.
    tag_re: Any
    def __init__(self, **options: Any) -> None: ...
    def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
16
class SymbolFilter(Filter):
    """Stub for the filter that replaces ASCII markup with Unicode symbols."""
    # Symbol tables and language selection — presumably dict[str, str] mappings; TODO confirm before narrowing.
    latex_symbols: Any
    isabelle_symbols: Any
    lang_map: Any
    symbols: Any
    def __init__(self, **options: Any) -> None: ...
    def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
24
class KeywordCaseFilter(Filter):
    """Stub for the filter that changes the case of keyword tokens."""
    # Case-conversion callable chosen from the ``case`` option — presumably an unbound str method; TODO confirm.
    convert: Any
    def __init__(self, **options: Any) -> None: ...
    def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
29
class NameHighlightFilter(Filter):
    """Stub for the filter that highlights a configured set of names with a given token type."""
    # Names to highlight — presumably a set[str] from the ``names`` option; TODO confirm.
    names: Any
    # Token type to emit for matched names — presumably _TokenType; TODO confirm.
    tokentype: Any
    def __init__(self, **options: Any) -> None: ...
    def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
35
class ErrorToken(Exception):
    """Raised by RaiseOnErrorTokenFilter when an Error token is encountered."""
37
class RaiseOnErrorTokenFilter(Filter):
    """Stub for the filter that raises an exception when an Error token is seen."""
    # Exception class to raise — presumably type[Exception] defaulting to ErrorToken; TODO confirm.
    exception: Any
    def __init__(self, **options: Any) -> None: ...
    def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
42
class VisibleWhitespaceFilter(Filter):
    """Stub for the filter that renders whitespace with visible replacement characters."""
    # Whitespace-to-token mapping — can't tell the exact shape from here; TODO confirm before narrowing.
    wstt: Any
    def __init__(self, **options: Any) -> None: ...
    def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
47
class GobbleFilter(Filter):
    """Stub for the filter that removes a fixed number of characters from the start of each line."""
    # Number of characters to gobble — presumably int from the ``n`` option; TODO confirm.
    n: Any
    def __init__(self, **options: Any) -> None: ...
    # Returns the remaining text and the leftover gobble count — per the pygments implementation; verify signature.
    def gobble(self, value: str, left: int) -> tuple[str, int]: ...
    def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
53
class TokenMergeFilter(Filter):
    """Stub for the filter that merges consecutive tokens of the same type."""
    def __init__(self, **options: Any) -> None: ...
    def filter(self, lexer: Lexer, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
57
# Registry of builtin filters keyed by name — presumably mapping name -> filter class; verify against pygments source.
FILTERS: dict[str, type[Filter]]