@@ -2,7 +2,7 @@ import { _Tokenizer } from './Tokenizer.ts';
 import { _defaults } from './defaults.ts';
 import { other, block, inline } from './rules.ts';
 import type { Token, TokensList, Tokens } from './Tokens.ts';
-import type { MarkedOptions, TokenizerExtension } from './MarkedOptions.ts';
+import type { MarkedOptions } from './MarkedOptions.ts';
 
 /**
  * Block Lexer
@@ -85,8 +85,7 @@ export class _Lexer {
   * Preprocessing
   */
  lex(src: string) {
-    src = src
-      .replace(other.carriageReturn, '\n');
+    src = src.replace(other.carriageReturn, '\n');
 
     this.blockTokens(src, this.tokens);
 
@@ -109,31 +108,28 @@ export class _Lexer {
       src = src.replace(other.tabCharGlobal, '    ').replace(other.spaceLine, '');
     }
 
-    let token: Tokens.Generic | undefined;
-    let lastToken;
-    let cutSrc;
-
     while (src) {
-      if (this.options.extensions
-        && this.options.extensions.block
-        && this.options.extensions.block.some((extTokenizer: TokenizerExtension['tokenizer']) => {
-          if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
-            src = src.substring(token.raw.length);
-            tokens.push(token);
-            return true;
-          }
-          return false;
-        })) {
+      let token: Tokens.Generic | undefined;
+
+      if (this.options.extensions?.block?.some((extTokenizer) => {
+        if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
+          src = src.substring(token.raw.length);
+          tokens.push(token);
+          return true;
+        }
+        return false;
+      })) {
         continue;
       }
 
       // newline
       if (token = this.tokenizer.space(src)) {
         src = src.substring(token.raw.length);
-        if (token.raw.length === 1 && tokens.length > 0) {
+        const lastToken = tokens.at(-1);
+        if (token.raw.length === 1 && lastToken !== undefined) {
           // if there's a single \n as a spacer, it's terminating the last line,
           // so move it there so that we don't get unnecessary paragraph tags
-          tokens[tokens.length - 1].raw += '\n';
+          lastToken.raw += '\n';
         } else {
           tokens.push(token);
         }
@@ -143,12 +139,12 @@ export class _Lexer {
       // code
       if (token = this.tokenizer.code(src)) {
         src = src.substring(token.raw.length);
-        lastToken = tokens[tokens.length - 1];
+        const lastToken = tokens.at(-1);
         // An indented code block cannot interrupt a paragraph.
-        if (lastToken && (lastToken.type === 'paragraph' || lastToken.type === 'text')) {
+        if (lastToken?.type === 'paragraph' || lastToken?.type === 'text') {
           lastToken.raw += '\n' + token.raw;
           lastToken.text += '\n' + token.text;
-          this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
+          this.inlineQueue.at(-1)!.src = lastToken.text;
         } else {
           tokens.push(token);
         }
@@ -200,11 +196,11 @@ export class _Lexer {
       // def
       if (token = this.tokenizer.def(src)) {
         src = src.substring(token.raw.length);
-        lastToken = tokens[tokens.length - 1];
-        if (lastToken && (lastToken.type === 'paragraph' || lastToken.type === 'text')) {
+        const lastToken = tokens.at(-1);
+        if (lastToken?.type === 'paragraph' || lastToken?.type === 'text') {
           lastToken.raw += '\n' + token.raw;
           lastToken.text += '\n' + token.raw;
-          this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
+          this.inlineQueue.at(-1)!.src = lastToken.text;
         } else if (!this.tokens.links[token.tag]) {
           this.tokens.links[token.tag] = {
             href: token.href,
@@ -230,43 +226,45 @@ export class _Lexer {
 
       // top-level paragraph
       // prevent paragraph consuming extensions by clipping 'src' to extension start
-      cutSrc = src;
-      if (this.options.extensions && this.options.extensions.startBlock) {
+      let cutSrc = src;
+      if (this.options.extensions?.startBlock) {
         let startIndex = Infinity;
         const tempSrc = src.slice(1);
         let tempStart;
         this.options.extensions.startBlock.forEach((getStartIndex) => {
           tempStart = getStartIndex.call({ lexer: this }, tempSrc);
-          if (typeof tempStart === 'number' && tempStart >= 0) { startIndex = Math.min(startIndex, tempStart); }
+          if (typeof tempStart === 'number' && tempStart >= 0) {
+            startIndex = Math.min(startIndex, tempStart);
+          }
         });
         if (startIndex < Infinity && startIndex >= 0) {
           cutSrc = src.substring(0, startIndex + 1);
         }
       }
       if (this.state.top && (token = this.tokenizer.paragraph(cutSrc))) {
-        lastToken = tokens[tokens.length - 1];
+        const lastToken = tokens.at(-1);
         if (lastParagraphClipped && lastToken?.type === 'paragraph') {
           lastToken.raw += '\n' + token.raw;
           lastToken.text += '\n' + token.text;
           this.inlineQueue.pop();
-          this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
+          this.inlineQueue.at(-1)!.src = lastToken.text;
         } else {
           tokens.push(token);
         }
-        lastParagraphClipped = (cutSrc.length !== src.length);
+        lastParagraphClipped = cutSrc.length !== src.length;
         src = src.substring(token.raw.length);
         continue;
       }
 
       // text
       if (token = this.tokenizer.text(src)) {
         src = src.substring(token.raw.length);
-        lastToken = tokens[tokens.length - 1];
-        if (lastToken && lastToken.type === 'text') {
+        const lastToken = tokens.at(-1);
+        if (lastToken?.type === 'text') {
           lastToken.raw += '\n' + token.raw;
           lastToken.text += '\n' + token.text;
           this.inlineQueue.pop();
-          this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
+          this.inlineQueue.at(-1)!.src = lastToken.text;
         } else {
           tokens.push(token);
         }
@@ -297,20 +295,19 @@ export class _Lexer {
   * Lexing/Compiling
   */
  inlineTokens(src: string, tokens: Token[] = []): Token[] {
-    let token, lastToken, cutSrc;
-
     // String with links masked to avoid interference with em and strong
     let maskedSrc = src;
-    let match;
-    let keepPrevChar, prevChar;
+    let match: RegExpExecArray | null = null;
 
     // Mask out reflinks
     if (this.tokens.links) {
       const links = Object.keys(this.tokens.links);
       if (links.length > 0) {
         while ((match = this.tokenizer.rules.inline.reflinkSearch.exec(maskedSrc)) != null) {
           if (links.includes(match[0].slice(match[0].lastIndexOf('[') + 1, -1))) {
-            maskedSrc = maskedSrc.slice(0, match.index) + '[' + 'a'.repeat(match[0].length - 2) + ']' + maskedSrc.slice(this.tokenizer.rules.inline.reflinkSearch.lastIndex);
+            maskedSrc = maskedSrc.slice(0, match.index)
+              + '[' + 'a'.repeat(match[0].length - 2) + ']'
+              + maskedSrc.slice(this.tokenizer.rules.inline.reflinkSearch.lastIndex);
           }
         }
       }
@@ -325,23 +322,25 @@ export class _Lexer {
       maskedSrc = maskedSrc.slice(0, match.index) + '++' + maskedSrc.slice(this.tokenizer.rules.inline.anyPunctuation.lastIndex);
     }
 
+    let keepPrevChar = false;
+    let prevChar = '';
     while (src) {
       if (!keepPrevChar) {
         prevChar = '';
       }
       keepPrevChar = false;
 
+      let token: Tokens.Generic | undefined;
+
       // extensions
-      if (this.options.extensions
-        && this.options.extensions.inline
-        && this.options.extensions.inline.some((extTokenizer) => {
-          if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
-            src = src.substring(token.raw.length);
-            tokens.push(token);
-            return true;
-          }
-          return false;
-        })) {
+      if (this.options.extensions?.inline?.some((extTokenizer) => {
+        if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
+          src = src.substring(token.raw.length);
+          tokens.push(token);
+          return true;
+        }
+        return false;
+      })) {
         continue;
       }
@@ -355,7 +354,6 @@ export class _Lexer {
       // tag
       if (token = this.tokenizer.tag(src)) {
         src = src.substring(token.raw.length);
-        lastToken = tokens[tokens.length - 1];
         tokens.push(token);
         continue;
       }
@@ -370,8 +368,8 @@ export class _Lexer {
       // reflink, nolink
       if (token = this.tokenizer.reflink(src, this.tokens.links)) {
         src = src.substring(token.raw.length);
-        lastToken = tokens[tokens.length - 1];
-        if (lastToken && token.type === 'text' && lastToken.type === 'text') {
+        const lastToken = tokens.at(-1);
+        if (token.type === 'text' && lastToken?.type === 'text') {
           lastToken.raw += token.raw;
           lastToken.text += token.text;
         } else {
@@ -424,14 +422,16 @@ export class _Lexer {
 
       // text
       // prevent inlineText consuming extensions by clipping 'src' to extension start
-      cutSrc = src;
-      if (this.options.extensions && this.options.extensions.startInline) {
+      let cutSrc = src;
+      if (this.options.extensions?.startInline) {
         let startIndex = Infinity;
         const tempSrc = src.slice(1);
         let tempStart;
         this.options.extensions.startInline.forEach((getStartIndex) => {
           tempStart = getStartIndex.call({ lexer: this }, tempSrc);
-          if (typeof tempStart === 'number' && tempStart >= 0) { startIndex = Math.min(startIndex, tempStart); }
+          if (typeof tempStart === 'number' && tempStart >= 0) {
+            startIndex = Math.min(startIndex, tempStart);
+          }
         });
         if (startIndex < Infinity && startIndex >= 0) {
           cutSrc = src.substring(0, startIndex + 1);
@@ -443,8 +443,8 @@ export class _Lexer {
           prevChar = token.raw.slice(-1);
         }
         keepPrevChar = true;
-        lastToken = tokens[tokens.length - 1];
-        if (lastToken && lastToken.type === 'text') {
+        const lastToken = tokens.at(-1);
+        if (lastToken?.type === 'text') {
           lastToken.raw += token.raw;
           lastToken.text += token.text;
         } else {
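For reference, a minimal standalone sketch of the two patterns this commit applies throughout: `Array.prototype.at(-1)` in place of manual `tokens[tokens.length - 1]` indexing, and optional chaining in place of chained `&&` existence checks. The `Tok` and `Opts` types below are simplified stand-ins, not the real `Token`/`MarkedOptions` types:

```ts
// Simplified stand-ins for the lexer's Token and options shapes.
interface Tok { type: string; raw: string; text: string; }
interface Opts { extensions?: { block?: Array<(src: string) => Tok | undefined> }; }

// .at(-1) returns the last element, or undefined for an empty array,
// so a separate `tokens.length > 0` guard is no longer needed.
function mergeText(tokens: Tok[], token: Tok): void {
  const lastToken = tokens.at(-1);
  if (lastToken?.type === 'text' && token.type === 'text') {
    lastToken.raw += token.raw;   // fold adjacent text tokens together
    lastToken.text += token.text;
  } else {
    tokens.push(token);
  }
}

// Optional chaining short-circuits to undefined when `extensions` or
// `extensions.block` is absent, replacing `a && a.b && a.b.some(...)`.
function runBlockExtensions(opts: Opts, src: string, tokens: Tok[]): boolean {
  return opts.extensions?.block?.some((ext) => {
    const token = ext(src);
    if (token) { tokens.push(token); return true; }
    return false;
  }) ?? false;
}
```

Since `.at(-1)` on an empty array and a short-circuited `?.` chain both yield `undefined`, the refactored guards keep the semantics of the code they replace.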