@@ -1,13 +1,17 @@
+import re
 from _typeshed import Incomplete
-from collections.abc import Iterable, Iterator, Sequence
+from collections.abc import Callable, Iterable, Iterator, Sequence
 from re import RegexFlag
-from typing import ClassVar
+from typing import Any, ClassVar
 
+from pygments.filter import Filter
 from pygments.token import _TokenType
 from pygments.util import Future
 
+line_re: re.Pattern[str]
+
 class LexerMeta(type):
-    def __new__(cls, name, bases, d): ...
+    def __new__(cls, name: str, bases: tuple[type, ...], d: dict[str, Any]): ...
     def analyse_text(self, text: str) -> float: ...  # actually defined in class Lexer
     # ClassVars of Lexer, but same situation as with StyleMeta and Style
     name: str
@@ -19,83 +23,129 @@ class LexerMeta(type):
     url: str | None
 
 class Lexer(metaclass=LexerMeta):
-    options: Incomplete
-    stripnl: Incomplete
-    stripall: Incomplete
-    ensurenl: Incomplete
-    tabsize: Incomplete
-    encoding: Incomplete
-    filters: Incomplete
-    def __init__(self, **options) -> None: ...
-    def add_filter(self, filter_, **options) -> None: ...
+    options: dict[str, Any]
+    stripnl: bool
+    stripall: bool
+    ensurenl: bool
+    tabsize: int
+    encoding: str
+    filters: list[Filter]
+    def __init__(self, **options: Any) -> None: ...
+    def add_filter(self, filter_: Filter, **options: Any) -> None: ...
     def get_tokens(self, text: str, unfiltered: bool = False) -> Iterator[tuple[_TokenType, str]]: ...
     def get_tokens_unprocessed(self, text: str) -> Iterator[tuple[int, _TokenType, str]]: ...
 
 class DelegatingLexer(Lexer):
-    root_lexer: Incomplete
-    language_lexer: Incomplete
+    root_lexer: Lexer
+    language_lexer: Lexer
     needle: Incomplete
-    def __init__(self, _root_lexer, _language_lexer, _needle=..., **options) -> None: ...
+    def __init__(
+        self, _root_lexer: type[Lexer], _language_lexer: type[Lexer], _needle: _TokenType = ..., **options: Any
+    ) -> None: ...
     def get_tokens_unprocessed(self, text: str) -> Iterator[tuple[int, _TokenType, str]]: ...
 
 class include(str): ...
 class _inherit: ...
 
-inherit: Incomplete
+inherit: _inherit
 
-class combined(tuple[Incomplete, ...]):
-    def __new__(cls, *args): ...
-    def __init__(self, *args) -> None: ...
+class combined(tuple[str, ...]):
+    def __new__(cls, *args: str): ...
+    def __init__(self, *args: str) -> None: ...
 
 class _PseudoMatch:
-    def __init__(self, start, text) -> None: ...
-    def start(self, arg=None): ...
-    def end(self, arg=None): ...
-    def group(self, arg=None): ...
-    def groups(self): ...
-    def groupdict(self): ...
+    def __init__(self, start: int, text: str) -> None: ...
+    def start(self, arg=None) -> int: ...
+    def end(self, arg=None) -> int: ...
+    def group(self, arg=None) -> str: ...
+    def groups(self) -> tuple[str]: ...
+    def groupdict(self) -> dict[str, Any]: ...
 
-def bygroups(*args): ...
+def bygroups(
+    *args: _TokenType | Callable[[Lexer, _PseudoMatch, LexerContext], Iterator[tuple[int, _TokenType, str]]]
+) -> Callable[[Lexer, _PseudoMatch, LexerContext], Iterator[tuple[int, _TokenType, str]]]: ...
 
 class _This: ...
 
-this: Incomplete
+this: _This
 
-def using(_other, **kwargs): ...
+def using(
+    _other: _This | Lexer, **kwargs: Any
+) -> Callable[[Lexer, _PseudoMatch, LexerContext], Iterator[tuple[int, _TokenType, str]]]: ...
 
 class default:
-    state: Incomplete
-    def __init__(self, state) -> None: ...
+    state: str
+    def __init__(self, state: str) -> None: ...
 
 class words(Future):
-    words: Incomplete
-    prefix: Incomplete
-    suffix: Incomplete
-    def __init__(self, words, prefix: str = "", suffix: str = "") -> None: ...
-    def get(self): ...
+    words: Sequence[str]
+    prefix: str
+    suffix: str
+    def __init__(self, words: Sequence[str], prefix: str = "", suffix: str = "") -> None: ...
+    def get(self) -> str: ...
 class RegexLexerMeta(LexerMeta):
-    def process_tokendef(cls, name, tokendefs=None): ...
-    def get_tokendefs(cls): ...
-    def __call__(cls, *args, **kwds): ...
+    def process_tokendef(
+        cls,
+        name: str,
+        tokendefs: (
+            dict[
+                str,
+                list[
+                    tuple[str, _TokenType | Iterator[tuple[int, _TokenType, str]]]
+                    | tuple[str, _TokenType | Iterator[tuple[int, _TokenType, str]], str]
+                ],
+            ]
+            | None
+        ) = None,
+    ): ...
+    def get_tokendefs(
+        cls,
+    ) -> dict[
+        str,
+        list[
+            tuple[str, _TokenType | Iterator[tuple[int, _TokenType, str]]]
+            | tuple[str, _TokenType | Iterator[tuple[int, _TokenType, str]], str]
+        ],
+    ]: ...
+    def __call__(cls, *args: Any, **kwds: Any) -> Any: ...
 
 class RegexLexer(Lexer, metaclass=RegexLexerMeta):
     flags: ClassVar[RegexFlag]
-    tokens: ClassVar[dict[str, list[Incomplete]]]
+    tokens: ClassVar[
+        dict[
+            str,
+            list[
+                tuple[str, _TokenType | Iterator[tuple[int, _TokenType, str]]]
+                | tuple[str, _TokenType | Iterator[tuple[int, _TokenType, str]], str]
+            ],
+        ]
+    ]
     def get_tokens_unprocessed(self, text: str, stack: Iterable[str] = ("root",)) -> Iterator[tuple[int, _TokenType, str]]: ...
 
 class LexerContext:
-    text: Incomplete
-    pos: Incomplete
-    end: Incomplete
-    stack: Incomplete
-    def __init__(self, text, pos, stack=None, end=None) -> None: ...
+    text: str
+    pos: int
+    end: int
+    stack: list[str]
+    def __init__(self, text: str, pos: int, stack: list[str] | None = None, end: int | None = None) -> None: ...
 
 class ExtendedRegexLexer(RegexLexer):
     def get_tokens_unprocessed(  # type: ignore[override]
         self, text: str | None = None, context: LexerContext | None = None
     ) -> Iterator[tuple[int, _TokenType, str]]: ...
 
+def do_insertions(
+    insertions: list[tuple[int, list[tuple[int, _TokenType, str]]]],
+    tokens: dict[
+        str,
+        list[
+            tuple[str, _TokenType | Iterator[tuple[int, _TokenType, str]]]
+            | tuple[str, _TokenType | Iterator[tuple[int, _TokenType, str]], str]
+        ],
+    ],
+) -> Iterator[tuple[int, _TokenType, str]]: ...
+
 class ProfilingRegexLexerMeta(RegexLexerMeta): ...
 
 class ProfilingRegexLexer(RegexLexer, metaclass=ProfilingRegexLexerMeta):
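
For reference, a minimal sketch of how the newly spelled-out `tokens` type maps onto real Pygments usage. The lexer below is hypothetical (not part of this stub or of Pygments): 2-tuples pair a regex with a token type, 3-tuples add a state transition, and `bygroups` supplies the callback form — exactly the two union members in the `tokens` ClassVar annotated above.

# Hypothetical example, not part of the stub: a tiny RegexLexer whose
# ``tokens`` table exercises both tuple shapes in the new annotation.
from pygments.lexer import RegexLexer, bygroups
from pygments.token import Keyword, Name, Punctuation, Text, Whitespace

class IniishLexer(RegexLexer):
    """Toy INI-like lexer (illustrative, not a real Pygments lexer)."""

    name = "Iniish"
    aliases = ["iniish"]

    tokens = {
        "root": [
            (r"\[", Punctuation, "section"),  # 3-tuple: pattern, token, new state
            (r"(\w+)(\s*=\s*)(.*)", bygroups(Name.Attribute, Punctuation, Text)),  # callback form
            (r"\s+", Whitespace),  # 2-tuple: pattern, token
            (r".", Text),
        ],
        "section": [
            (r"[^\]]+", Keyword.Namespace),
            (r"\]", Punctuation, "#pop"),  # pop back to "root"
        ],
    }

for tok, value in IniishLexer().get_tokens("[core]\nname = demo\n"):
    print(tok, repr(value))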