|
1 | | -from _typeshed import Incomplete |
2 | | -from collections.abc import Generator, Iterable, Iterator |
| 1 | +from _typeshed import ConvertibleToInt |
| 2 | +from collections.abc import Callable, Generator, Iterable, Iterator |
| 3 | +from re import Pattern |
| 4 | +from typing import Any, ClassVar, Final, Literal |
3 | 5 |
|
4 | 6 | from pygments.filter import Filter |
5 | 7 | from pygments.lexer import Lexer |
6 | 8 | from pygments.token import _TokenType |
7 | 9 |
|
# Look up a filter class by its registered name; returns None for unknown names.
def find_filter_class(filtername: str) -> type[Filter] | None: ...

# Keyword arguments are forwarded to the filter class.
# NOTE(review): at runtime an unknown name raises (pygments ClassNotFound) — confirm before documenting as a guarantee.
def get_filter_by_name(filtername: str, **options: Any) -> Filter: ...

# Yields the names of all registered filters.
def get_all_filters() -> Generator[str]: ...
11 | 15 |
|
# Filter that highlights occurrences of the configured code tags (XXX, TODO, ...).
class CodeTagFilter(Filter):
    # Compiled pattern built from the `codetags` option; matches any configured tag.
    tag_re: Pattern[str]
    # Arbitrary additional keyword arguments are permitted and are stored in self.options.
    def __init__(
        self, *, codetags: str | list[str] | tuple[str, ...] = ["XXX", "TODO", "FIXME", "BUG", "NOTE"], **options: Any
    ) -> None: ...
    def filter(self, lexer: Lexer | None, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
16 | 23 |
|
# Filter that replaces ASCII control sequences with Unicode symbol characters.
class SymbolFilter(Filter):
    # Class-level translation tables: control sequence -> Unicode replacement.
    latex_symbols: ClassVar[dict[str, str]]
    isabelle_symbols: ClassVar[dict[str, str]]
    # Maps each supported `lang` value to its translation table above.
    lang_map: ClassVar[dict[Literal["isabelle", "latex"], dict[str, str]]]
    symbols: dict[str, str]  # One of latex_symbols or isabelle_symbols.
    # Arbitrary additional keyword arguments are permitted and are stored in self.options.
    def __init__(self, *, lang: Literal["isabelle", "latex"] = "isabelle", **options: Any) -> None: ...
    def filter(self, lexer: Lexer | None, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
24 | 32 |
|
# Filter that converts keyword tokens to a uniform case.
class KeywordCaseFilter(Filter):
    # The str method selected by the `case` option (str.lower, str.upper or str.capitalize).
    convert: Callable[[str], str]
    # Arbitrary additional keyword arguments are permitted and are stored in self.options.
    def __init__(self, *, case: Literal["lower", "upper", "capitalize"] = "lower", **options: Any) -> None: ...
    def filter(self, lexer: Lexer | None, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
29 | 38 |
|
# Filter that re-tags selected names with a different token type.
class NameHighlightFilter(Filter):
    # Names to highlight, normalized to a set from the `names` option.
    names: set[str]
    # Token type to emit for matched names; resolved from the `tokentype` option.
    tokentype: _TokenType
    # Arbitrary additional keyword arguments are permitted and are stored in self.options.
    def __init__(
        self, *, names: str | list[str] | tuple[str, ...] = [], tokentype: str | _TokenType | None = None, **options: Any
    ) -> None: ...
    def filter(self, lexer: Lexer | None, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
35 | 47 |
|
# Exception raised by RaiseOnErrorTokenFilter when an error token is found.
class ErrorToken(Exception): ...
37 | 49 |
|
# Filter that raises an exception when the stream contains an error token.
class RaiseOnErrorTokenFilter(Filter):
    # Exception class to raise, taken from the `excclass` option.
    exception: type[Exception]
    # Arbitrary additional keyword arguments are permitted and are stored in self.options.
    # The runtime default for `excclass` is ErrorToken (not expressible as a stub default).
    def __init__(self, *, excclass: type[Exception] = ..., **options: Any) -> None: ...
    def filter(self, lexer: Lexer | None, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
42 | 55 |
|
# Filter that renders whitespace with visible replacement characters.
class VisibleWhitespaceFilter(Filter):
    # Replacement strings for each whitespace kind, derived from the options below
    # (a bool option selects a default replacement character; a str gives it explicitly).
    spaces: str
    tabs: str
    newlines: str
    # Whether replaced whitespace is emitted with a dedicated token type (`wstokentype`).
    wstt: bool
    def __init__(
        self,
        *,
        spaces: str | bool = False,
        tabs: str | bool = False,
        newlines: str | bool = False,
        tabsize: ConvertibleToInt = 8,
        wstokentype: bool | int | str = True,  # Any value accepted by get_bool_opt.
        # Arbitrary additional keyword arguments are permitted and are stored in self.options.
        **options: Any,
    ) -> None: ...
    def filter(self, lexer: Lexer | None, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
47 | 73 |
|
# Filter that gobbles (removes) up to `n` characters from the start of each line.
class GobbleFilter(Filter):
    # Number of characters to remove per line, from the `n` option.
    n: int
    # Arbitrary additional keyword arguments are permitted and are stored in self.options.
    def __init__(self, *, n: ConvertibleToInt = 0, **options: Any) -> None: ...
    # Returns the remainder of `value` after dropping `left` characters, plus the
    # count still left to drop (nonzero when `value` was shorter than `left`).
    def gobble(self, value: str, left: int) -> tuple[str, int]: ...
    def filter(self, lexer: Lexer | None, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
53 | 80 |
|
# Filter that merges consecutive tokens of the same type into one token.
class TokenMergeFilter(Filter):
    # Arbitrary additional keyword arguments are permitted and are stored in self.options.
    def __init__(self, **options: Any) -> None: ...
    def filter(self, lexer: Lexer | None, stream: Iterable[tuple[_TokenType, str]]) -> Iterator[tuple[_TokenType, str]]: ...
57 | 85 |
|
# Registry mapping filter names (as accepted by get_filter_by_name) to filter classes.
FILTERS: Final[dict[str, type[Filter]]]