From 495e0985e01a5e1bedbde1e02aeaf1b2cf92f405 Mon Sep 17 00:00:00 2001
From: Spencer Whitehead
Date: Sat, 6 Jan 2024 17:54:19 -0500
Subject: [PATCH 01/11] chore: remove unused build file
---
bower.json | 23 -----------------------
1 file changed, 23 deletions(-)
delete mode 100644 bower.json
diff --git a/bower.json b/bower.json
deleted file mode 100644
index 6acb02fe14..0000000000
--- a/bower.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "name": "marked",
- "homepage": "https://github.com/markedjs/marked",
- "authors": [
- "Christopher Jeffrey "
- ],
- "description": "A markdown parser built for speed",
- "keywords": [
- "markdown",
- "markup",
- "html"
- ],
- "main": "lib/marked.cjs",
- "license": "MIT",
- "ignore": [
- "**/.*",
- "node_modules",
- "bower_components",
- "app/bower_components",
- "test",
- "tests"
- ]
-}
From 95d0cddb76f39ce63c4560d6f3fef341fe2c4d67 Mon Sep 17 00:00:00 2001
From: Spencer Whitehead
Date: Sat, 9 Nov 2024 09:57:03 -0500
Subject: [PATCH 02/11] chore: remove unused package
---
package-lock.json | 7 -------
package.json | 1 -
2 files changed, 8 deletions(-)
diff --git a/package-lock.json b/package-lock.json
index 6fc136ef0b..460a0470c5 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -35,7 +35,6 @@
"rollup": "^4.25.0",
"semantic-release": "^24.2.0",
"titleize": "^4.0.0",
- "ts-expect": "^1.3.0",
"tslib": "^2.8.1",
"typescript": "5.6.3"
},
@@ -8698,12 +8697,6 @@
"typescript": ">=4.2.0"
}
},
- "node_modules/ts-expect": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/ts-expect/-/ts-expect-1.3.0.tgz",
- "integrity": "sha512-e4g0EJtAjk64xgnFPD6kTBUtpnMVzDrMb12N1YZV0VvSlhnVT3SGxiYTLdGy8Q5cYHOIC/FAHmZ10eGrAguicQ==",
- "dev": true
- },
"node_modules/tslib": {
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
diff --git a/package.json b/package.json
index 2dffa5f1be..12f3b15613 100644
--- a/package.json
+++ b/package.json
@@ -76,7 +76,6 @@
"rollup": "^4.25.0",
"semantic-release": "^24.2.0",
"titleize": "^4.0.0",
- "ts-expect": "^1.3.0",
"tslib": "^2.8.1",
"typescript": "5.6.3"
},
From 69e0ab6ed5563b57fb7a0de5f7a20b89f53b5777 Mon Sep 17 00:00:00 2001
From: Spencer Whitehead
Date: Mon, 1 Jan 2024 15:02:01 -0500
Subject: [PATCH 03/11] chore: remove unused function
---
test/types/marked.ts | 5 -----
1 file changed, 5 deletions(-)
diff --git a/test/types/marked.ts b/test/types/marked.ts
index 7c799f926f..80a3b565f5 100644
--- a/test/types/marked.ts
+++ b/test/types/marked.ts
@@ -46,11 +46,6 @@ let options: MarkedOptions = {
options = marked.getDefaults();
options = marked.defaults;
-function callback(err: Error | null, markdown: string | undefined) {
- console.log('Callback called!');
- console.log(markdown);
-}
-
let myOldMarked: typeof marked = marked.options(options);
myOldMarked = marked.setOptions(options);
From 0e51be085210fa313bcd687450200b285a02d800 Mon Sep 17 00:00:00 2001
From: Spencer Whitehead
Date: Mon, 1 Jan 2024 15:02:31 -0500
Subject: [PATCH 04/11] chore: remove unnecessary | undefineds
---
src/Hooks.ts | 2 +-
src/Instance.ts | 4 ++--
src/MarkedOptions.ts | 28 ++++++++++++++--------------
src/Tokens.ts | 10 +++++-----
src/marked.ts | 4 ++--
5 files changed, 24 insertions(+), 24 deletions(-)
diff --git a/src/Hooks.ts b/src/Hooks.ts
index 8d84b89c0d..d64a342eb3 100644
--- a/src/Hooks.ts
+++ b/src/Hooks.ts
@@ -6,7 +6,7 @@ import type { Token, TokensList } from './Tokens.ts';
export class _Hooks {
options: MarkedOptions;
- block: boolean | undefined;
+ block?: boolean;
constructor(options?: MarkedOptions) {
this.options = options || _defaults;
diff --git a/src/Instance.ts b/src/Instance.ts
index abadb87230..23ac904624 100644
--- a/src/Instance.ts
+++ b/src/Instance.ts
@@ -265,11 +265,11 @@ export class Marked {
type overloadedParse = {
(src: string, options: MarkedOptions & { async: true }): Promise<string>;
(src: string, options: MarkedOptions & { async: false }): string;
- (src: string, options?: MarkedOptions | undefined | null): string | Promise<string>;
+ (src: string, options?: MarkedOptions | null): string | Promise<string>;
};
// eslint-disable-next-line @typescript-eslint/no-explicit-any
- const parse: overloadedParse = (src: string, options?: MarkedOptions | undefined | null): any => {
+ const parse: overloadedParse = (src: string, options?: MarkedOptions | null): any => {
const origOpt = { ...options };
const opt = { ...this.defaults, ...origOpt };
diff --git a/src/MarkedOptions.ts b/src/MarkedOptions.ts
index a1bf485d7a..1794fb9ad3 100644
--- a/src/MarkedOptions.ts
+++ b/src/MarkedOptions.ts
@@ -16,9 +16,9 @@ export type TokenizerStartFunction = (this: TokenizerThis, src: string) => numbe
export interface TokenizerExtension {
name: string;
level: 'block' | 'inline';
- start?: TokenizerStartFunction | undefined;
+ start?: TokenizerStartFunction;
tokenizer: TokenizerExtensionFunction;
- childTokens?: string[] | undefined;
+ childTokens?: string[];
}
export interface RendererThis {
@@ -58,19 +58,19 @@ export interface MarkedExtension {
/**
* Enable GFM line breaks. This option requires the gfm option to be true.
*/
- breaks?: boolean | undefined;
+ breaks?: boolean;
/**
* Add tokenizers and renderers to marked
*/
extensions?:
| TokenizerAndRendererExtension[]
- | undefined | null;
+ | null;
/**
* Enable GitHub flavored markdown.
*/
- gfm?: boolean | undefined;
+ gfm?: boolean;
/**
* Hooks are methods that hook into some part of marked.
@@ -80,29 +80,29 @@ export interface MarkedExtension {
* provideLexer is called to provide a function to tokenize markdown.
* provideParser is called to provide a function to parse tokens.
*/
- hooks?: HooksObject | undefined | null;
+ hooks?: HooksObject | null;
/**
* Conform to obscure parts of markdown.pl as much as possible. Don't fix any of the original markdown bugs or poor behavior.
*/
- pedantic?: boolean | undefined;
+ pedantic?: boolean;
/**
* Type: object Default: new Renderer()
*
* An object containing functions to render tokens to HTML.
*/
- renderer?: RendererObject | undefined | null;
+ renderer?: RendererObject | null;
/**
* Shows an HTML error message when rendering fails.
*/
- silent?: boolean | undefined;
+ silent?: boolean;
/**
* The tokenizer defines how to turn markdown text into tokens.
*/
- tokenizer?: TokenizerObject | undefined | null;
+ tokenizer?: TokenizerObject | null;
/**
* The walkTokens function gets called with every token.
@@ -110,26 +110,26 @@ export interface MarkedExtension {
* Each token is passed by reference so updates are persisted when passed to the parser.
* The return value of the function is ignored.
*/
- walkTokens?: ((token: Token) => void | Promise<void>) | undefined | null;
+ walkTokens?: ((token: Token) => void | Promise<void>) | null;
}
export interface MarkedOptions extends Omit {
/**
* Hooks are methods that hook into some part of marked.
*/
- hooks?: _Hooks | undefined | null;
+ hooks?: _Hooks | null;
/**
* Type: object Default: new Renderer()
*
* An object containing functions to render tokens to HTML.
*/
- renderer?: _Renderer | undefined | null;
+ renderer?: _Renderer | null;
/**
* The tokenizer defines how to turn markdown text into tokens.
*/
- tokenizer?: _Tokenizer | undefined | null;
+ tokenizer?: _Tokenizer | null;
/**
* Custom extensions
diff --git a/src/Tokens.ts b/src/Tokens.ts
index 0e8bf346f3..37aa7a11cc 100644
--- a/src/Tokens.ts
+++ b/src/Tokens.ts
@@ -36,8 +36,8 @@ export namespace Tokens {
export interface Code {
type: 'code';
raw: string;
- codeBlockStyle?: 'indented' | undefined;
- lang?: string | undefined;
+ codeBlockStyle?: 'indented';
+ lang?: string;
text: string;
escaped?: boolean;
}
@@ -94,7 +94,7 @@ export namespace Tokens {
type: 'list_item';
raw: string;
task: boolean;
- checked?: boolean | undefined;
+ checked?: boolean;
loose: boolean;
text: string;
tokens: Token[];
@@ -107,7 +107,7 @@ export namespace Tokens {
export interface Paragraph {
type: 'paragraph';
raw: string;
- pre?: boolean | undefined;
+ pre?: boolean;
text: string;
tokens: Token[];
}
@@ -206,7 +206,7 @@ export namespace Tokens {
type: string;
raw: string;
- tokens?: Token[] | undefined;
+ tokens?: Token[];
}
}
diff --git a/src/marked.ts b/src/marked.ts
index e6ce66b345..c9539db55f 100644
--- a/src/marked.ts
+++ b/src/marked.ts
@@ -34,8 +34,8 @@ export function marked(src: string, options: MarkedOptions & { async: true }): P
*/
export function marked(src: string, options: MarkedOptions & { async: false }): string;
export function marked(src: string, options: MarkedOptions & { async: true }): Promise<string>;
-export function marked(src: string, options?: MarkedOptions | undefined | null): string | Promise<string>;
-export function marked(src: string, opt?: MarkedOptions | undefined | null): string | Promise<string> {
+export function marked(src: string, options?: MarkedOptions | null): string | Promise<string>;
+export function marked(src: string, opt?: MarkedOptions | null): string | Promise<string> {
return markedInstance.parse(src, opt);
}
From 7261366e88522e322835ccf290c738545a48abf7 Mon Sep 17 00:00:00 2001
From: Spencer Whitehead
Date: Fri, 5 Jan 2024 00:57:09 -0500
Subject: [PATCH 05/11] chore: replace unnecessary &&s with optional chaining
---
docs/demo/demo.js | 2 +-
src/Lexer.ts | 52 +++++++++++++++++++---------------------
src/Parser.ts | 4 ++--
src/Renderer.ts | 2 +-
src/Tokenizer.ts | 2 +-
test/rules.js | 4 ++--
test/unit/marked.test.js | 2 +-
7 files changed, 32 insertions(+), 36 deletions(-)
diff --git a/docs/demo/demo.js b/docs/demo/demo.js
index 83d227b3f5..1f274cc29c 100644
--- a/docs/demo/demo.js
+++ b/docs/demo/demo.js
@@ -185,7 +185,7 @@ function setOptions(opts) {
$optionsElem.value = JSON.stringify(
opts,
(key, value) => {
- if (value && typeof value === 'object' && Object.getPrototypeOf(value) !== Object.prototype) {
+ if (value !== null && typeof value === 'object' && Object.getPrototypeOf(value) !== Object.prototype) {
return undefined;
}
return value;
diff --git a/src/Lexer.ts b/src/Lexer.ts
index a3389cacc0..c79751ad1a 100644
--- a/src/Lexer.ts
+++ b/src/Lexer.ts
@@ -2,7 +2,7 @@ import { _Tokenizer } from './Tokenizer.ts';
import { _defaults } from './defaults.ts';
import { other, block, inline } from './rules.ts';
import type { Token, TokensList, Tokens } from './Tokens.ts';
-import type { MarkedOptions, TokenizerExtension } from './MarkedOptions.ts';
+import type { MarkedOptions } from './MarkedOptions.ts';
/**
* Block Lexer
@@ -114,16 +114,14 @@ export class _Lexer {
let cutSrc;
while (src) {
- if (this.options.extensions
- && this.options.extensions.block
- && this.options.extensions.block.some((extTokenizer: TokenizerExtension['tokenizer']) => {
- if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
- src = src.substring(token.raw.length);
- tokens.push(token);
- return true;
- }
- return false;
- })) {
+ if (this.options.extensions?.block?.some((extTokenizer) => {
+ if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
+ src = src.substring(token.raw.length);
+ tokens.push(token);
+ return true;
+ }
+ return false;
+ })) {
continue;
}
@@ -145,7 +143,7 @@ export class _Lexer {
src = src.substring(token.raw.length);
lastToken = tokens[tokens.length - 1];
// An indented code block cannot interrupt a paragraph.
- if (lastToken && (lastToken.type === 'paragraph' || lastToken.type === 'text')) {
+ if (lastToken?.type === 'paragraph' || lastToken?.type === 'text') {
lastToken.raw += '\n' + token.raw;
lastToken.text += '\n' + token.text;
this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
@@ -201,7 +199,7 @@ export class _Lexer {
if (token = this.tokenizer.def(src)) {
src = src.substring(token.raw.length);
lastToken = tokens[tokens.length - 1];
- if (lastToken && (lastToken.type === 'paragraph' || lastToken.type === 'text')) {
+ if (lastToken?.type === 'paragraph' || lastToken?.type === 'text') {
lastToken.raw += '\n' + token.raw;
lastToken.text += '\n' + token.raw;
this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
@@ -231,7 +229,7 @@ export class _Lexer {
// top-level paragraph
// prevent paragraph consuming extensions by clipping 'src' to extension start
cutSrc = src;
- if (this.options.extensions && this.options.extensions.startBlock) {
+ if (this.options.extensions?.startBlock) {
let startIndex = Infinity;
const tempSrc = src.slice(1);
let tempStart;
@@ -262,7 +260,7 @@ export class _Lexer {
if (token = this.tokenizer.text(src)) {
src = src.substring(token.raw.length);
lastToken = tokens[tokens.length - 1];
- if (lastToken && lastToken.type === 'text') {
+ if (lastToken?.type === 'text') {
lastToken.raw += '\n' + token.raw;
lastToken.text += '\n' + token.text;
this.inlineQueue.pop();
@@ -332,16 +330,14 @@ export class _Lexer {
keepPrevChar = false;
// extensions
- if (this.options.extensions
- && this.options.extensions.inline
- && this.options.extensions.inline.some((extTokenizer) => {
- if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
- src = src.substring(token.raw.length);
- tokens.push(token);
- return true;
- }
- return false;
- })) {
+ if (this.options.extensions?.inline?.some((extTokenizer) => {
+ if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
+ src = src.substring(token.raw.length);
+ tokens.push(token);
+ return true;
+ }
+ return false;
+ })) {
continue;
}
@@ -371,7 +367,7 @@ export class _Lexer {
if (token = this.tokenizer.reflink(src, this.tokens.links)) {
src = src.substring(token.raw.length);
lastToken = tokens[tokens.length - 1];
- if (lastToken && token.type === 'text' && lastToken.type === 'text') {
+ if (token.type === 'text' && lastToken?.type === 'text') {
lastToken.raw += token.raw;
lastToken.text += token.text;
} else {
@@ -425,7 +421,7 @@ export class _Lexer {
// text
// prevent inlineText consuming extensions by clipping 'src' to extension start
cutSrc = src;
- if (this.options.extensions && this.options.extensions.startInline) {
+ if (this.options.extensions?.startInline) {
let startIndex = Infinity;
const tempSrc = src.slice(1);
let tempStart;
@@ -444,7 +440,7 @@ export class _Lexer {
}
keepPrevChar = true;
lastToken = tokens[tokens.length - 1];
- if (lastToken && lastToken.type === 'text') {
+ if (lastToken?.type === 'text') {
lastToken.raw += token.raw;
lastToken.text += token.text;
} else {
diff --git a/src/Parser.ts b/src/Parser.ts
index ebaaa2113c..d8840f0133 100644
--- a/src/Parser.ts
+++ b/src/Parser.ts
@@ -46,7 +46,7 @@ export class _Parser {
const anyToken = tokens[i];
// Run any renderer extensions
- if (this.options.extensions && this.options.extensions.renderers && this.options.extensions.renderers[anyToken.type]) {
+ if (this.options.extensions?.renderers?.[anyToken.type]) {
const genericToken = anyToken as Tokens.Generic;
const ret = this.options.extensions.renderers[genericToken.type].call({ parser: this }, genericToken);
if (ret !== false || !['space', 'hr', 'heading', 'code', 'table', 'blockquote', 'list', 'html', 'paragraph', 'text'].includes(genericToken.type)) {
@@ -140,7 +140,7 @@ export class _Parser {
const anyToken = tokens[i];
// Run any renderer extensions
- if (this.options.extensions && this.options.extensions.renderers && this.options.extensions.renderers[anyToken.type]) {
+ if (this.options.extensions?.renderers?.[anyToken.type]) {
const ret = this.options.extensions.renderers[anyToken.type].call({ parser: this }, anyToken);
if (ret !== false || !['escape', 'html', 'link', 'image', 'strong', 'em', 'codespan', 'br', 'del', 'text'].includes(anyToken.type)) {
out += ret || '';
diff --git a/src/Renderer.ts b/src/Renderer.ts
index 68dcada672..0f3496c307 100644
--- a/src/Renderer.ts
+++ b/src/Renderer.ts
@@ -77,7 +77,7 @@ export class _Renderer {
if (item.task) {
const checkbox = this.checkbox({ checked: !!item.checked });
if (item.loose) {
- if (item.tokens.length > 0 && item.tokens[0].type === 'paragraph') {
+ if (item.tokens[0]?.type === 'paragraph') {
item.tokens[0].text = checkbox + ' ' + item.tokens[0].text;
if (item.tokens[0].tokens && item.tokens[0].tokens.length > 0 && item.tokens[0].tokens[0].type === 'text') {
item.tokens[0].tokens[0].text = checkbox + ' ' + escape(item.tokens[0].tokens[0].text);
diff --git a/src/Tokenizer.ts b/src/Tokenizer.ts
index 0c2f9c705f..0b77cb6974 100644
--- a/src/Tokenizer.ts
+++ b/src/Tokenizer.ts
@@ -486,7 +486,7 @@ export class _Tokenizer {
const headers = splitCells(cap[1]);
const aligns = cap[2].replace(this.rules.other.tableAlignChars, '').split('|');
- const rows = cap[3] && cap[3].trim() ? cap[3].replace(this.rules.other.tableRowBlankLine, '').split('\n') : [];
+ const rows = cap[3]?.trim() ? cap[3].replace(this.rules.other.tableRowBlankLine, '').split('\n') : [];
const item: Tokens.Table = {
type: 'table',
diff --git a/test/rules.js b/test/rules.js
index 0072dcf92b..187bcfd745 100644
--- a/test/rules.js
+++ b/test/rules.js
@@ -34,7 +34,7 @@ function propsToString(obj) {
return null;
}
if (obj.constructor.name === 'Object') {
- if (obj.exec && obj.exec.name === 'noopTest') {
+ if (obj.exec?.name === 'noopTest') {
return null;
}
for (const prop in obj) {
@@ -61,7 +61,7 @@ if (process.argv.length > 2) {
rule = rule[prop];
}
}
- rulesList[rulePath[0]] = rule && rule[rulePath[0]] ? rule[rulePath[0]] : null;
+ rulesList[rulePath[0]] = rule?.[rulePath[0]] ?? null;
}
} else {
rulesObj = rules;
diff --git a/test/unit/marked.test.js b/test/unit/marked.test.js
index 87931c86a0..9635a88cfc 100644
--- a/test/unit/marked.test.js
+++ b/test/unit/marked.test.js
@@ -674,7 +674,7 @@ used extension2 walked
walkTokens(token) {
if (token.tokens) {
const finalChildToken = token.tokens[token.tokens.length - 1];
- if (finalChildToken && finalChildToken.type === 'inlineStyleTag') {
+ if (finalChildToken?.type === 'inlineStyleTag') {
token.originalType = token.type;
token.type = 'styled';
token.style = `style="color:${finalChildToken.text};"`;
From dd97338e6c04cbf38aeb37d4b298a556a9dc06d3 Mon Sep 17 00:00:00 2001
From: Spencer Whitehead
Date: Fri, 5 Jan 2024 00:57:58 -0500
Subject: [PATCH 06/11] chore: use .at(-x) instead of .length - x property
access
gives stricter TS typing, is more concise
---
src/Lexer.ts | 29 ++++++++++++++---------------
src/Tokenizer.ts | 11 +++++++----
src/helpers.ts | 2 +-
test/unit/marked.test.js | 2 +-
4 files changed, 23 insertions(+), 21 deletions(-)
diff --git a/src/Lexer.ts b/src/Lexer.ts
index c79751ad1a..dfd1ba1e4f 100644
--- a/src/Lexer.ts
+++ b/src/Lexer.ts
@@ -110,7 +110,6 @@ export class _Lexer {
}
let token: Tokens.Generic | undefined;
- let lastToken;
let cutSrc;
while (src) {
@@ -128,10 +127,11 @@ export class _Lexer {
// newline
if (token = this.tokenizer.space(src)) {
src = src.substring(token.raw.length);
- if (token.raw.length === 1 && tokens.length > 0) {
+ const lastToken = tokens.at(-1);
+ if (token.raw.length === 1 && lastToken !== undefined) {
// if there's a single \n as a spacer, it's terminating the last line,
// so move it there so that we don't get unnecessary paragraph tags
- tokens[tokens.length - 1].raw += '\n';
+ lastToken.raw += '\n';
} else {
tokens.push(token);
}
@@ -141,12 +141,12 @@ export class _Lexer {
// code
if (token = this.tokenizer.code(src)) {
src = src.substring(token.raw.length);
- lastToken = tokens[tokens.length - 1];
+ const lastToken = tokens.at(-1);
// An indented code block cannot interrupt a paragraph.
if (lastToken?.type === 'paragraph' || lastToken?.type === 'text') {
lastToken.raw += '\n' + token.raw;
lastToken.text += '\n' + token.text;
- this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
+ this.inlineQueue.at(-1)!.src = lastToken.text;
} else {
tokens.push(token);
}
@@ -198,11 +198,11 @@ export class _Lexer {
// def
if (token = this.tokenizer.def(src)) {
src = src.substring(token.raw.length);
- lastToken = tokens[tokens.length - 1];
+ const lastToken = tokens.at(-1);
if (lastToken?.type === 'paragraph' || lastToken?.type === 'text') {
lastToken.raw += '\n' + token.raw;
lastToken.text += '\n' + token.raw;
- this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
+ this.inlineQueue.at(-1)!.src = lastToken.text;
} else if (!this.tokens.links[token.tag]) {
this.tokens.links[token.tag] = {
href: token.href,
@@ -242,12 +242,12 @@ export class _Lexer {
}
}
if (this.state.top && (token = this.tokenizer.paragraph(cutSrc))) {
- lastToken = tokens[tokens.length - 1];
+ const lastToken = tokens.at(-1);
if (lastParagraphClipped && lastToken?.type === 'paragraph') {
lastToken.raw += '\n' + token.raw;
lastToken.text += '\n' + token.text;
this.inlineQueue.pop();
- this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
+ this.inlineQueue.at(-1)!.src = lastToken.text;
} else {
tokens.push(token);
}
@@ -259,12 +259,12 @@ export class _Lexer {
// text
if (token = this.tokenizer.text(src)) {
src = src.substring(token.raw.length);
- lastToken = tokens[tokens.length - 1];
+ const lastToken = tokens.at(-1);
if (lastToken?.type === 'text') {
lastToken.raw += '\n' + token.raw;
lastToken.text += '\n' + token.text;
this.inlineQueue.pop();
- this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
+ this.inlineQueue.at(-1)!.src = lastToken.text;
} else {
tokens.push(token);
}
@@ -295,7 +295,7 @@ export class _Lexer {
* Lexing/Compiling
*/
inlineTokens(src: string, tokens: Token[] = []): Token[] {
- let token, lastToken, cutSrc;
+ let token, cutSrc;
// String with links masked to avoid interference with em and strong
let maskedSrc = src;
@@ -351,7 +351,6 @@ export class _Lexer {
// tag
if (token = this.tokenizer.tag(src)) {
src = src.substring(token.raw.length);
- lastToken = tokens[tokens.length - 1];
tokens.push(token);
continue;
}
@@ -366,7 +365,7 @@ export class _Lexer {
// reflink, nolink
if (token = this.tokenizer.reflink(src, this.tokens.links)) {
src = src.substring(token.raw.length);
- lastToken = tokens[tokens.length - 1];
+ const lastToken = tokens.at(-1);
if (token.type === 'text' && lastToken?.type === 'text') {
lastToken.raw += token.raw;
lastToken.text += token.text;
@@ -439,7 +438,7 @@ export class _Lexer {
prevChar = token.raw.slice(-1);
}
keepPrevChar = true;
- lastToken = tokens[tokens.length - 1];
+ const lastToken = tokens.at(-1);
if (lastToken?.type === 'text') {
lastToken.raw += token.raw;
lastToken.text += token.text;
diff --git a/src/Tokenizer.ts b/src/Tokenizer.ts
index 0b77cb6974..a71879546d 100644
--- a/src/Tokenizer.ts
+++ b/src/Tokenizer.ts
@@ -198,7 +198,7 @@ export class _Tokenizer {
break;
}
- const lastToken = tokens[tokens.length - 1];
+ const lastToken = tokens.at(-1);
if (lastToken?.type === 'code') {
// blockquote continuation cannot be preceded by a code block
@@ -222,7 +222,7 @@ export class _Tokenizer {
raw = raw.substring(0, raw.length - lastToken.raw.length) + newToken.raw;
text = text.substring(0, text.length - oldToken.raw.length) + newToken.raw;
- lines = newText.substring(tokens[tokens.length - 1].raw.length).split('\n');
+ lines = newText.substring(tokens.at(-1)!.raw.length).split('\n');
continue;
}
}
@@ -414,8 +414,11 @@ export class _Tokenizer {
}
// Do not consume newlines at end of final item. Alternatively, make itemRegex *start* with any newlines to simplify/speed up endsWithBlankLine logic
- list.items[list.items.length - 1].raw = list.items[list.items.length - 1].raw.trimEnd();
- list.items[list.items.length - 1].text = list.items[list.items.length - 1].text.trimEnd();
+ const lastItem = list.items.at(-1);
+ if (lastItem) {
+ lastItem.raw = lastItem.raw.trimEnd();
+ lastItem.text = lastItem.text.trimEnd();
+ }
list.raw = list.raw.trimEnd();
// Item child tokens handled here at end because we needed to have the final item to trim it first
diff --git a/src/helpers.ts b/src/helpers.ts
index 723051b0b2..60a75fb458 100644
--- a/src/helpers.ts
+++ b/src/helpers.ts
@@ -72,7 +72,7 @@ export function splitCells(tableRow: string, count?: number) {
if (!cells[0].trim()) {
cells.shift();
}
- if (cells.length > 0 && !cells[cells.length - 1].trim()) {
+ if (!cells.at(-1)?.trim()) {
cells.pop();
}
diff --git a/test/unit/marked.test.js b/test/unit/marked.test.js
index 9635a88cfc..294bdf2930 100644
--- a/test/unit/marked.test.js
+++ b/test/unit/marked.test.js
@@ -673,7 +673,7 @@ used extension2 walked
}],
walkTokens(token) {
if (token.tokens) {
- const finalChildToken = token.tokens[token.tokens.length - 1];
+ const finalChildToken = token.tokens.at(-1);
if (finalChildToken?.type === 'inlineStyleTag') {
token.originalType = token.type;
token.type = 'styled';
From afc0b076114bfd1eead61499079d32de4761f204 Mon Sep 17 00:00:00 2001
From: Spencer Whitehead
Date: Fri, 5 Jan 2024 01:13:05 -0500
Subject: [PATCH 07/11] chore: tighten TS types
---
src/Lexer.ts | 35 ++++++++++++++++++++---------------
src/Parser.ts | 3 +--
2 files changed, 21 insertions(+), 17 deletions(-)
diff --git a/src/Lexer.ts b/src/Lexer.ts
index dfd1ba1e4f..d8094d6d0f 100644
--- a/src/Lexer.ts
+++ b/src/Lexer.ts
@@ -85,8 +85,7 @@ export class _Lexer {
* Preprocessing
*/
lex(src: string) {
- src = src
- .replace(other.carriageReturn, '\n');
+ src = src.replace(other.carriageReturn, '\n');
this.blockTokens(src, this.tokens);
@@ -109,10 +108,9 @@ export class _Lexer {
src = src.replace(other.tabCharGlobal, ' ').replace(other.spaceLine, '');
}
- let token: Tokens.Generic | undefined;
- let cutSrc;
-
while (src) {
+ let token: Tokens.Generic | undefined;
+
if (this.options.extensions?.block?.some((extTokenizer) => {
if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
src = src.substring(token.raw.length);
@@ -228,14 +226,16 @@ export class _Lexer {
// top-level paragraph
// prevent paragraph consuming extensions by clipping 'src' to extension start
- cutSrc = src;
+ let cutSrc = src;
if (this.options.extensions?.startBlock) {
let startIndex = Infinity;
const tempSrc = src.slice(1);
let tempStart;
this.options.extensions.startBlock.forEach((getStartIndex) => {
tempStart = getStartIndex.call({ lexer: this }, tempSrc);
- if (typeof tempStart === 'number' && tempStart >= 0) { startIndex = Math.min(startIndex, tempStart); }
+ if (typeof tempStart === 'number' && tempStart >= 0) {
+ startIndex = Math.min(startIndex, tempStart);
+ }
});
if (startIndex < Infinity && startIndex >= 0) {
cutSrc = src.substring(0, startIndex + 1);
@@ -251,7 +251,7 @@ export class _Lexer {
} else {
tokens.push(token);
}
- lastParagraphClipped = (cutSrc.length !== src.length);
+ lastParagraphClipped = cutSrc.length !== src.length;
src = src.substring(token.raw.length);
continue;
}
@@ -295,12 +295,9 @@ export class _Lexer {
* Lexing/Compiling
*/
inlineTokens(src: string, tokens: Token[] = []): Token[] {
- let token, cutSrc;
-
// String with links masked to avoid interference with em and strong
let maskedSrc = src;
- let match;
- let keepPrevChar, prevChar;
+ let match: RegExpExecArray | null = null;
// Mask out reflinks
if (this.tokens.links) {
@@ -308,7 +305,9 @@ export class _Lexer {
if (links.length > 0) {
while ((match = this.tokenizer.rules.inline.reflinkSearch.exec(maskedSrc)) != null) {
if (links.includes(match[0].slice(match[0].lastIndexOf('[') + 1, -1))) {
- maskedSrc = maskedSrc.slice(0, match.index) + '[' + 'a'.repeat(match[0].length - 2) + ']' + maskedSrc.slice(this.tokenizer.rules.inline.reflinkSearch.lastIndex);
+ maskedSrc = maskedSrc.slice(0, match.index)
+ + '[' + 'a'.repeat(match[0].length - 2) + ']'
+ + maskedSrc.slice(this.tokenizer.rules.inline.reflinkSearch.lastIndex);
}
}
}
@@ -323,12 +322,16 @@ export class _Lexer {
maskedSrc = maskedSrc.slice(0, match.index) + '++' + maskedSrc.slice(this.tokenizer.rules.inline.anyPunctuation.lastIndex);
}
+ let keepPrevChar = false;
+ let prevChar = '';
while (src) {
if (!keepPrevChar) {
prevChar = '';
}
keepPrevChar = false;
+ let token: Tokens.Generic | undefined;
+
// extensions
if (this.options.extensions?.inline?.some((extTokenizer) => {
if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
@@ -419,14 +422,16 @@ export class _Lexer {
// text
// prevent inlineText consuming extensions by clipping 'src' to extension start
- cutSrc = src;
+ let cutSrc = src;
if (this.options.extensions?.startInline) {
let startIndex = Infinity;
const tempSrc = src.slice(1);
let tempStart;
this.options.extensions.startInline.forEach((getStartIndex) => {
tempStart = getStartIndex.call({ lexer: this }, tempSrc);
- if (typeof tempStart === 'number' && tempStart >= 0) { startIndex = Math.min(startIndex, tempStart); }
+ if (typeof tempStart === 'number' && tempStart >= 0) {
+ startIndex = Math.min(startIndex, tempStart);
+ }
});
if (startIndex < Infinity && startIndex >= 0) {
cutSrc = src.substring(0, startIndex + 1);
diff --git a/src/Parser.ts b/src/Parser.ts
index d8840f0133..04173c3dc1 100644
--- a/src/Parser.ts
+++ b/src/Parser.ts
@@ -132,8 +132,7 @@ export class _Parser {
/**
* Parse Inline Tokens
*/
- parseInline(tokens: Token[], renderer?: _Renderer | _TextRenderer): string {
- renderer = renderer || this.renderer;
+ parseInline(tokens: Token[], renderer: _Renderer | _TextRenderer = this.renderer): string {
let out = '';
for (let i = 0; i < tokens.length; i++) {
From e65758204d7a53096a9c55b04b8a0c11b25b5554 Mon Sep 17 00:00:00 2001
From: Spencer Whitehead
Date: Fri, 5 Jan 2024 11:34:45 -0500
Subject: [PATCH 08/11] chore: sort tokens alphabetically
---
src/Tokens.ts | 202 +++++++++++++++++++++++++-------------------------
1 file changed, 101 insertions(+), 101 deletions(-)
diff --git a/src/Tokens.ts b/src/Tokens.ts
index 37aa7a11cc..a5dc142723 100644
--- a/src/Tokens.ts
+++ b/src/Tokens.ts
@@ -1,38 +1,50 @@
/* eslint-disable no-use-before-define */
export type MarkedToken = (
- Tokens.Space
+ Tokens.Blockquote
+ | Tokens.Br
| Tokens.Code
+ | Tokens.Codespan
+ | Tokens.Def
+ | Tokens.Del
+ | Tokens.Em
+ | Tokens.Escape
| Tokens.Heading
- | Tokens.Table
| Tokens.Hr
- | Tokens.Blockquote
- | Tokens.List
- | Tokens.ListItem
- | Tokens.Paragraph
| Tokens.HTML
- | Tokens.Text
- | Tokens.Def
- | Tokens.Escape
- | Tokens.Tag
| Tokens.Image
| Tokens.Link
+ | Tokens.List
+ | Tokens.ListItem
+ | Tokens.Paragraph
+ | Tokens.Space
| Tokens.Strong
- | Tokens.Em
- | Tokens.Codespan
- | Tokens.Br
- | Tokens.Del);
+ | Tokens.Table
+ | Tokens.Tag
+ | Tokens.Text
+);
export type Token = (
- MarkedToken
+ MarkedToken
| Tokens.Generic);
export namespace Tokens {
- export interface Space {
- type: 'space';
+ export interface Blockquote {
+ type: 'blockquote';
+ raw: string;
+ text: string;
+ tokens: Token[];
+ }
+
+ export interface Br {
+ type: 'br';
raw: string;
}
+ export interface Checkbox {
+ checked: boolean;
+ }
+
export interface Code {
type: 'code';
raw: string;
@@ -42,74 +54,59 @@ export namespace Tokens {
escaped?: boolean;
}
- export interface Heading {
- type: 'heading';
+ export interface Codespan {
+ type: 'codespan';
raw: string;
- depth: number;
text: string;
- tokens: Token[];
}
- export interface Table {
- type: 'table';
+ export interface Def {
+ type: 'def';
raw: string;
- align: Array<'center' | 'left' | 'right' | null>;
- header: TableCell[];
- rows: TableCell[][];
- }
-
- export interface TableRow {
- text: string;
+ tag: string;
+ href: string;
+ title: string;
}
- export interface TableCell {
+ export interface Del {
+ type: 'del';
+ raw: string;
text: string;
tokens: Token[];
- header: boolean;
- align: 'center' | 'left' | 'right' | null;
}
- export interface Hr {
- type: 'hr';
+ export interface Em {
+ type: 'em';
raw: string;
+ text: string;
+ tokens: Token[];
}
- export interface Blockquote {
- type: 'blockquote';
+ export interface Escape {
+ type: 'escape';
raw: string;
text: string;
- tokens: Token[];
}
- export interface List {
- type: 'list';
+ export interface Generic {
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ [index: string]: any;
+ type: string;
raw: string;
- ordered: boolean;
- start: number | '';
- loose: boolean;
- items: ListItem[];
+ tokens?: Token[];
}
- export interface ListItem {
- type: 'list_item';
+ export interface Heading {
+ type: 'heading';
raw: string;
- task: boolean;
- checked?: boolean;
- loose: boolean;
+ depth: number;
text: string;
tokens: Token[];
}
- export interface Checkbox {
- checked: boolean;
- }
-
- export interface Paragraph {
- type: 'paragraph';
+ export interface Hr {
+ type: 'hr';
raw: string;
- pre?: boolean;
- text: string;
- tokens: Token[];
}
export interface HTML {
@@ -120,52 +117,53 @@ export namespace Tokens {
block: boolean;
}
- export interface Text {
- type: 'text';
+ export interface Image {
+ type: 'image';
raw: string;
+ href: string;
+ title: string | null;
text: string;
- tokens?: Token[];
- escaped?: boolean;
}
- export interface Def {
- type: 'def';
+ export interface Link {
+ type: 'link';
raw: string;
- tag: string;
href: string;
- title: string;
+ title?: string | null;
+ text: string;
+ tokens: Token[];
}
- export interface Escape {
- type: 'escape';
+ export interface List {
+ type: 'list';
raw: string;
- text: string;
+ ordered: boolean;
+ start: number | '';
+ loose: boolean;
+ items: ListItem[];
}
- export interface Tag {
- type: 'html';
+ export interface ListItem {
+ type: 'list_item';
raw: string;
- inLink: boolean;
- inRawBlock: boolean;
+ task: boolean;
+ checked?: boolean;
+ loose: boolean;
text: string;
- block: boolean;
+ tokens: Token[];
}
- export interface Link {
- type: 'link';
+ export interface Paragraph {
+ type: 'paragraph';
raw: string;
- href: string;
- title?: string | null;
+ pre?: boolean;
text: string;
tokens: Token[];
}
- export interface Image {
- type: 'image';
+ export interface Space {
+ type: 'space';
raw: string;
- href: string;
- title: string | null;
- text: string;
}
export interface Strong {
@@ -175,38 +173,40 @@ export namespace Tokens {
tokens: Token[];
}
- export interface Em {
- type: 'em';
+ export interface Table {
+ type: 'table';
raw: string;
- text: string;
- tokens: Token[];
+ align: Array<'center' | 'left' | 'right' | null>;
+ header: TableCell[];
+ rows: TableCell[][];
}
- export interface Codespan {
- type: 'codespan';
- raw: string;
+ export interface TableCell {
text: string;
+ tokens: Token[];
+ header: boolean;
+ align: 'center' | 'left' | 'right' | null;
}
- export interface Br {
- type: 'br';
- raw: string;
+ export interface TableRow {
+ text: string;
}
- export interface Del {
- type: 'del';
+ export interface Tag {
+ type: 'html';
raw: string;
+ inLink: boolean;
+ inRawBlock: boolean;
text: string;
- tokens: Token[];
+ block: boolean;
}
- export interface Generic {
- // eslint-disable-next-line @typescript-eslint/no-explicit-any
- [index: string]: any;
-
- type: string;
+ export interface Text {
+ type: 'text';
raw: string;
+ text: string;
tokens?: Token[];
+ escaped?: boolean;
}
}
From 6cf6cb3a481414483fddcbe3f8054ed68ba65153 Mon Sep 17 00:00:00 2001
From: Spencer Whitehead
Date: Tue, 12 Nov 2024 19:47:28 -0500
Subject: [PATCH 09/11] fix: typeof plus !== null check
---
docs/demo/demo.js | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docs/demo/demo.js b/docs/demo/demo.js
index 1f274cc29c..48f84ad8ba 100644
--- a/docs/demo/demo.js
+++ b/docs/demo/demo.js
@@ -185,7 +185,7 @@ function setOptions(opts) {
$optionsElem.value = JSON.stringify(
opts,
(key, value) => {
- if (typeof value === 'object' && Object.getPrototypeOf(value) !== Object.prototype) {
+ if (value !== null && typeof value === 'object' && Object.getPrototypeOf(value) !== Object.prototype) {
return undefined;
}
return value;
From 3bc9c136423a5c22369f78a151572f11186bb219 Mon Sep 17 00:00:00 2001
From: Spencer Whitehead
Date: Tue, 12 Nov 2024 19:47:44 -0500
Subject: [PATCH 10/11] chore: type test for .parse, .use
---
test/types/marked.ts | 25 +++++++++++++++++++++++++
1 file changed, 25 insertions(+)
diff --git a/test/types/marked.ts b/test/types/marked.ts
index 80a3b565f5..64968caadf 100644
--- a/test/types/marked.ts
+++ b/test/types/marked.ts
@@ -373,3 +373,28 @@ import { inline } from 'marked';
// Rules is exported
import type { Rules } from 'marked';
+marked.parse('', {
+ async: undefined,
+ breaks: undefined,
+ extensions: undefined,
+ gfm: undefined,
+ hooks: undefined,
+ pedantic: undefined,
+ renderer: undefined,
+ silent: undefined,
+ tokenizer: undefined,
+ walkTokens: undefined,
+});
+
+marked.use({
+ async: undefined,
+ breaks: undefined,
+ extensions: undefined,
+ gfm: undefined,
+ hooks: undefined,
+ pedantic: undefined,
+ renderer: undefined,
+ silent: undefined,
+ tokenizer: undefined,
+ walkTokens: undefined,
+});
From a93bfe0f88084e05a16d36c862b712edfdeed5e3 Mon Sep 17 00:00:00 2001
From: Spencer Whitehead
Date: Sat, 16 Nov 2024 23:39:27 -0500
Subject: [PATCH 11/11] fix: if check
---
src/helpers.ts | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/src/helpers.ts b/src/helpers.ts
index 60a75fb458..eb0871e93b 100644
--- a/src/helpers.ts
+++ b/src/helpers.ts
@@ -72,7 +72,7 @@ export function splitCells(tableRow: string, count?: number) {
if (!cells[0].trim()) {
cells.shift();
}
- if (!cells.at(-1)?.trim()) {
+ if (cells.length > 0 && !cells.at(-1)?.trim()) {
cells.pop();
}