
Commit 1f579f7

fix: Remove unused plus typescript tightening (#3527)
* chore: remove unused build file
* chore: remove unused package
* chore: remove unused function
* chore: remove unnecessary | undefineds
* chore: replace unnecessary &&s with optional chaining
* chore: use .at(-x) instead of .length - x property access (gives stricter TS typing, is more concise)
* chore: tighten TS types
* chore: sort tokens alphabetically
* fix: typeof plus !== null check
* chore: type test for .parse, .use
* fix: if check
1 parent a46c0d8 commit 1f579f7

17 files changed, +223 −232 lines changed
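As context for the ".at(-x)" bullet above: a minimal TypeScript sketch (mine, not from the repo) of the typing difference the commit message refers to. It assumes a default tsconfig without noUncheckedIndexedAccess, where plain indexing never adds undefined to the type.

// Illustrative only; `tokens` stands in for any token array.
declare const tokens: { type: string; raw: string }[];

// Plain indexing is typed as the element type even for an empty array,
// so a runtime undefined can slip through unchecked:
const a = tokens[tokens.length - 1]; // type: { type: string; raw: string }

// .at(-1) is typed as element | undefined, forcing a guard:
const b = tokens.at(-1);             // type: { type: string; raw: string } | undefined
if (b !== undefined) {
  b.raw += '\n';                     // narrowed; safe to touch
}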

bower.json (−23)

This file was deleted.

docs/demo/demo.js (+1 −1)

@@ -185,7 +185,7 @@ function setOptions(opts) {
   $optionsElem.value = JSON.stringify(
     opts,
     (key, value) => {
-      if (value && typeof value === 'object' && Object.getPrototypeOf(value) !== Object.prototype) {
+      if (value !== null && typeof value === 'object' && Object.getPrototypeOf(value) !== Object.prototype) {
         return undefined;
       }
       return value;
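The tightened check works because typeof null === 'object' in JavaScript: the explicit value !== null spells out the case the old truthiness test was silently covering, and behavior is unchanged since other falsy values fail the typeof test anyway. A standalone sketch (not repo code) of the same replacer pattern:

// Non-plain objects (anything whose prototype isn't Object.prototype)
// are dropped from the serialized output, as in the demo's replacer.
const replacer = (key: string, value: any) =>
  value !== null && typeof value === 'object' && Object.getPrototypeOf(value) !== Object.prototype
    ? undefined
    : value;

class Point { x = 2; } // hypothetical non-plain object
console.log(JSON.stringify({ n: 1, p: new Point() }, replacer)); // {"n":1}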

package-lock.json (−7)
Some generated files are not rendered by default.

package.json (−1)

@@ -76,7 +76,6 @@
     "rollup": "^4.25.0",
     "semantic-release": "^24.2.0",
     "titleize": "^4.0.0",
-    "ts-expect": "^1.3.0",
     "tslib": "^2.8.1",
     "typescript": "5.6.3"
   },

src/Hooks.ts (+1 −1)

@@ -6,7 +6,7 @@ import type { Token, TokensList } from './Tokens.ts';

 export class _Hooks {
   options: MarkedOptions;
-  block: boolean | undefined;
+  block?: boolean;

   constructor(options?: MarkedOptions) {
     this.options = options || _defaults;
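A quick gloss (mine, not the commit's) on why block?: boolean is the tighter declaration: an optional property may be omitted altogether, while boolean | undefined requires the key to be present. Reads are typed the same either way, so call sites are unaffected:

interface WithUndefined { block: boolean | undefined; }
interface WithOptional  { block?: boolean; }

// const a: WithUndefined = {}; // error: property 'block' is missing
const b: WithOptional = {};     // ok: the key may be absent entirely

const v: boolean | undefined = b.block; // reading yields the same union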

src/Instance.ts (+2 −2)

@@ -265,11 +265,11 @@ export class Marked {
     type overloadedParse = {
       (src: string, options: MarkedOptions & { async: true }): Promise<string>;
       (src: string, options: MarkedOptions & { async: false }): string;
-      (src: string, options?: MarkedOptions | undefined | null): string | Promise<string>;
+      (src: string, options?: MarkedOptions | null): string | Promise<string>;
     };

     // eslint-disable-next-line @typescript-eslint/no-explicit-any
-    const parse: overloadedParse = (src: string, options?: MarkedOptions | undefined | null): any => {
+    const parse: overloadedParse = (src: string, options?: MarkedOptions | null): any => {
       const origOpt = { ...options };
       const opt = { ...this.defaults, ...origOpt };
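Why the | undefined could go (my note): a parameter marked with ? already accepts undefined, so the only member the union needs to add is null. A self-contained sketch with a hypothetical options shape:

// `options?: T | null` already admits undefined via the `?`.
function parse(src: string, options?: { gfm?: boolean } | null): string {
  // Inside the body, `options` is { gfm?: boolean } | null | undefined.
  const opt = { ...options }; // spreading null/undefined yields {}
  return JSON.stringify(opt) + ':' + src;
}
parse('x');            // ok
parse('x', undefined); // ok
parse('x', null);      // ok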

src/Lexer.ts (+57 −57)

@@ -2,7 +2,7 @@ import { _Tokenizer } from './Tokenizer.ts';
 import { _defaults } from './defaults.ts';
 import { other, block, inline } from './rules.ts';
 import type { Token, TokensList, Tokens } from './Tokens.ts';
-import type { MarkedOptions, TokenizerExtension } from './MarkedOptions.ts';
+import type { MarkedOptions } from './MarkedOptions.ts';

 /**
  * Block Lexer

@@ -85,8 +85,7 @@ export class _Lexer {
    * Preprocessing
    */
   lex(src: string) {
-    src = src
-      .replace(other.carriageReturn, '\n');
+    src = src.replace(other.carriageReturn, '\n');

     this.blockTokens(src, this.tokens);

@@ -109,31 +108,28 @@
       src = src.replace(other.tabCharGlobal, '    ').replace(other.spaceLine, '');
     }

-    let token: Tokens.Generic | undefined;
-    let lastToken;
-    let cutSrc;
-
     while (src) {
-      if (this.options.extensions
-        && this.options.extensions.block
-        && this.options.extensions.block.some((extTokenizer: TokenizerExtension['tokenizer']) => {
-          if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
-            src = src.substring(token.raw.length);
-            tokens.push(token);
-            return true;
-          }
-          return false;
-        })) {
+      let token: Tokens.Generic | undefined;
+
+      if (this.options.extensions?.block?.some((extTokenizer) => {
+        if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
+          src = src.substring(token.raw.length);
+          tokens.push(token);
+          return true;
+        }
+        return false;
+      })) {
         continue;
       }

       // newline
       if (token = this.tokenizer.space(src)) {
         src = src.substring(token.raw.length);
-        if (token.raw.length === 1 && tokens.length > 0) {
+        const lastToken = tokens.at(-1);
+        if (token.raw.length === 1 && lastToken !== undefined) {
           // if there's a single \n as a spacer, it's terminating the last line,
           // so move it there so that we don't get unnecessary paragraph tags
-          tokens[tokens.length - 1].raw += '\n';
+          lastToken.raw += '\n';
         } else {
           tokens.push(token);
         }

@@ -143,12 +139,12 @@
       // code
       if (token = this.tokenizer.code(src)) {
         src = src.substring(token.raw.length);
-        lastToken = tokens[tokens.length - 1];
+        const lastToken = tokens.at(-1);
         // An indented code block cannot interrupt a paragraph.
-        if (lastToken && (lastToken.type === 'paragraph' || lastToken.type === 'text')) {
+        if (lastToken?.type === 'paragraph' || lastToken?.type === 'text') {
           lastToken.raw += '\n' + token.raw;
           lastToken.text += '\n' + token.text;
-          this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
+          this.inlineQueue.at(-1)!.src = lastToken.text;
         } else {
           tokens.push(token);
         }

@@ -200,11 +196,11 @@
       // def
       if (token = this.tokenizer.def(src)) {
         src = src.substring(token.raw.length);
-        lastToken = tokens[tokens.length - 1];
-        if (lastToken && (lastToken.type === 'paragraph' || lastToken.type === 'text')) {
+        const lastToken = tokens.at(-1);
+        if (lastToken?.type === 'paragraph' || lastToken?.type === 'text') {
           lastToken.raw += '\n' + token.raw;
           lastToken.text += '\n' + token.raw;
-          this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
+          this.inlineQueue.at(-1)!.src = lastToken.text;
         } else if (!this.tokens.links[token.tag]) {
           this.tokens.links[token.tag] = {
             href: token.href,

@@ -230,43 +226,45 @@

       // top-level paragraph
       // prevent paragraph consuming extensions by clipping 'src' to extension start
-      cutSrc = src;
-      if (this.options.extensions && this.options.extensions.startBlock) {
+      let cutSrc = src;
+      if (this.options.extensions?.startBlock) {
         let startIndex = Infinity;
         const tempSrc = src.slice(1);
         let tempStart;
         this.options.extensions.startBlock.forEach((getStartIndex) => {
           tempStart = getStartIndex.call({ lexer: this }, tempSrc);
-          if (typeof tempStart === 'number' && tempStart >= 0) { startIndex = Math.min(startIndex, tempStart); }
+          if (typeof tempStart === 'number' && tempStart >= 0) {
+            startIndex = Math.min(startIndex, tempStart);
+          }
         });
         if (startIndex < Infinity && startIndex >= 0) {
           cutSrc = src.substring(0, startIndex + 1);
         }
       }
       if (this.state.top && (token = this.tokenizer.paragraph(cutSrc))) {
-        lastToken = tokens[tokens.length - 1];
+        const lastToken = tokens.at(-1);
         if (lastParagraphClipped && lastToken?.type === 'paragraph') {
           lastToken.raw += '\n' + token.raw;
           lastToken.text += '\n' + token.text;
           this.inlineQueue.pop();
-          this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
+          this.inlineQueue.at(-1)!.src = lastToken.text;
         } else {
           tokens.push(token);
         }
-        lastParagraphClipped = (cutSrc.length !== src.length);
+        lastParagraphClipped = cutSrc.length !== src.length;
         src = src.substring(token.raw.length);
         continue;
       }

       // text
       if (token = this.tokenizer.text(src)) {
         src = src.substring(token.raw.length);
-        lastToken = tokens[tokens.length - 1];
-        if (lastToken && lastToken.type === 'text') {
+        const lastToken = tokens.at(-1);
+        if (lastToken?.type === 'text') {
           lastToken.raw += '\n' + token.raw;
           lastToken.text += '\n' + token.text;
           this.inlineQueue.pop();
-          this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
+          this.inlineQueue.at(-1)!.src = lastToken.text;
         } else {
           tokens.push(token);
         }

@@ -297,20 +295,19 @@
    * Lexing/Compiling
    */
   inlineTokens(src: string, tokens: Token[] = []): Token[] {
-    let token, lastToken, cutSrc;
-
     // String with links masked to avoid interference with em and strong
     let maskedSrc = src;
-    let match;
-    let keepPrevChar, prevChar;
+    let match: RegExpExecArray | null = null;

     // Mask out reflinks
     if (this.tokens.links) {
       const links = Object.keys(this.tokens.links);
       if (links.length > 0) {
         while ((match = this.tokenizer.rules.inline.reflinkSearch.exec(maskedSrc)) != null) {
           if (links.includes(match[0].slice(match[0].lastIndexOf('[') + 1, -1))) {
-            maskedSrc = maskedSrc.slice(0, match.index) + '[' + 'a'.repeat(match[0].length - 2) + ']' + maskedSrc.slice(this.tokenizer.rules.inline.reflinkSearch.lastIndex);
+            maskedSrc = maskedSrc.slice(0, match.index)
+              + '[' + 'a'.repeat(match[0].length - 2) + ']'
+              + maskedSrc.slice(this.tokenizer.rules.inline.reflinkSearch.lastIndex);
           }
         }
       }
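An aside on the new match declaration above: RegExp.prototype.exec returns RegExpExecArray | null, so the explicit annotation replaces an implicitly-any let match; with a type the loop condition can narrow. A standalone sketch (the pattern below is a hypothetical stand-in, not the reflinkSearch rule):

let match: RegExpExecArray | null = null;
const reflinkLike = /\[([^\]]*)\]/g;
while ((match = reflinkLike.exec('[a] and [b]')) !== null) {
  // Inside the loop body, `match` is narrowed to RegExpExecArray.
  console.log(match.index, match[0]);
}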
@@ -325,23 +322,25 @@
         maskedSrc = maskedSrc.slice(0, match.index) + '++' + maskedSrc.slice(this.tokenizer.rules.inline.anyPunctuation.lastIndex);
       }

+    let keepPrevChar = false;
+    let prevChar = '';
     while (src) {
       if (!keepPrevChar) {
         prevChar = '';
       }
       keepPrevChar = false;

+      let token: Tokens.Generic | undefined;
+
       // extensions
-      if (this.options.extensions
-        && this.options.extensions.inline
-        && this.options.extensions.inline.some((extTokenizer) => {
-          if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
-            src = src.substring(token.raw.length);
-            tokens.push(token);
-            return true;
-          }
-          return false;
-        })) {
+      if (this.options.extensions?.inline?.some((extTokenizer) => {
+        if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
+          src = src.substring(token.raw.length);
+          tokens.push(token);
+          return true;
+        }
+        return false;
+      })) {
         continue;
       }

@@ -355,7 +354,6 @@
       // tag
       if (token = this.tokenizer.tag(src)) {
         src = src.substring(token.raw.length);
-        lastToken = tokens[tokens.length - 1];
         tokens.push(token);
         continue;
       }

@@ -370,8 +368,8 @@
       // reflink, nolink
       if (token = this.tokenizer.reflink(src, this.tokens.links)) {
         src = src.substring(token.raw.length);
-        lastToken = tokens[tokens.length - 1];
-        if (lastToken && token.type === 'text' && lastToken.type === 'text') {
+        const lastToken = tokens.at(-1);
+        if (token.type === 'text' && lastToken?.type === 'text') {
           lastToken.raw += token.raw;
           lastToken.text += token.text;
         } else {

@@ -424,14 +422,16 @@

       // text
       // prevent inlineText consuming extensions by clipping 'src' to extension start
-      cutSrc = src;
-      if (this.options.extensions && this.options.extensions.startInline) {
+      let cutSrc = src;
+      if (this.options.extensions?.startInline) {
         let startIndex = Infinity;
         const tempSrc = src.slice(1);
         let tempStart;
         this.options.extensions.startInline.forEach((getStartIndex) => {
           tempStart = getStartIndex.call({ lexer: this }, tempSrc);
-          if (typeof tempStart === 'number' && tempStart >= 0) { startIndex = Math.min(startIndex, tempStart); }
+          if (typeof tempStart === 'number' && tempStart >= 0) {
+            startIndex = Math.min(startIndex, tempStart);
+          }
         });
         if (startIndex < Infinity && startIndex >= 0) {
           cutSrc = src.substring(0, startIndex + 1);

@@ -443,8 +443,8 @@
           prevChar = token.raw.slice(-1);
         }
         keepPrevChar = true;
-        lastToken = tokens[tokens.length - 1];
-        if (lastToken && lastToken.type === 'text') {
+        const lastToken = tokens.at(-1);
+        if (lastToken?.type === 'text') {
           lastToken.raw += token.raw;
           lastToken.text += token.text;
         } else {
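Taken together, the Lexer edits swap manual guards for constructs the compiler can check: optional chaining instead of && chains, .at(-1) instead of index arithmetic, and a block-scoped const lastToken so narrowing applies per branch. A condensed before/after sketch (names echo the diff, but this is illustration, not repo code):

type Tok = { type: string; raw: string; text: string };
declare const tokens: Tok[];
declare const options: { extensions?: { block?: Array<(s: string) => boolean> } };

// Before: options.extensions && options.extensions.block && options.extensions.block.some(...)
const handled = options.extensions?.block?.some((ext) => ext('src')) ?? false;

// Before: lastToken = tokens[tokens.length - 1];
//         if (lastToken && lastToken.type === 'text') { ... }
const lastToken = tokens.at(-1);  // Tok | undefined
if (lastToken?.type === 'text') { // narrows lastToken to Tok in this branch
  lastToken.raw += '\n';
}
// Where the array is known non-empty by construction, the diff opts for a
// non-null assertion instead: this.inlineQueue.at(-1)!.src = ...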
