
Remove unused plus typescript tightening #3527

23 changes: 0 additions & 23 deletions bower.json

This file was deleted.

2 changes: 1 addition & 1 deletion docs/demo/demo.js
@@ -185,7 +185,7 @@ function setOptions(opts) {
$optionsElem.value = JSON.stringify(
opts,
(key, value) => {
- if (value && typeof value === 'object' && Object.getPrototypeOf(value) !== Object.prototype) {
+ if (value !== null && typeof value === 'object' && Object.getPrototypeOf(value) !== Object.prototype) {
return undefined;
}
return value;
7 changes: 0 additions & 7 deletions package-lock.json

Some generated files are not rendered by default.

1 change: 0 additions & 1 deletion package.json
@@ -76,7 +76,6 @@
"rollup": "^4.25.0",
"semantic-release": "^24.2.0",
"titleize": "^4.0.0",
"ts-expect": "^1.3.0",
"tslib": "^2.8.1",
"typescript": "5.6.3"
},
2 changes: 1 addition & 1 deletion src/Hooks.ts
@@ -6,7 +6,7 @@ import type { Token, TokensList } from './Tokens.ts';

export class _Hooks {
options: MarkedOptions;
- block: boolean | undefined;
+ block?: boolean;
Note that these do not have the same meaning.

https://x.com/DavidKPiano/status/1856701854102557026

Although I think in this case that's okay.
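
For illustration (not part of the diff), a minimal sketch of the distinction the comment refers to; interface names are hypothetical. The gap is widest under TypeScript's exactOptionalPropertyTypes flag (whether this project enables it isn't shown here):

interface WithOptional { block?: boolean; }             // property may be absent
interface WithUndefined { block: boolean | undefined; } // property must be present

const a: WithOptional = {};                    // OK: the property can be omitted entirely
// const b: WithUndefined = {};                // Error: 'block' is required, even if undefined
const c: WithUndefined = { block: undefined }; // OK: present but undefined
// Under exactOptionalPropertyTypes, { block: undefined } is additionally
// rejected for WithOptional, so the two shapes accept different values.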


constructor(options?: MarkedOptions) {
this.options = options || _defaults;
4 changes: 2 additions & 2 deletions src/Instance.ts
@@ -265,11 +265,11 @@ export class Marked {
type overloadedParse = {
(src: string, options: MarkedOptions & { async: true }): Promise<string>;
(src: string, options: MarkedOptions & { async: false }): string;
- (src: string, options?: MarkedOptions | undefined | null): string | Promise<string>;
+ (src: string, options?: MarkedOptions | null): string | Promise<string>;
};

// eslint-disable-next-line @typescript-eslint/no-explicit-any
- const parse: overloadedParse = (src: string, options?: MarkedOptions | undefined | null): any => {
+ const parse: overloadedParse = (src: string, options?: MarkedOptions | null): any => {
const origOpt = { ...options };
const opt = { ...this.defaults, ...origOpt };

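Dropping `| undefined` here is safe because optional parameters already admit undefined. A minimal sketch (illustrative; the options shape is a stand-in for MarkedOptions):

declare function parse(src: string, options?: { gfm?: boolean } | null): string;
parse('# hi');            // OK: parameter omitted
parse('# hi', undefined); // OK: optional parameters accept undefined implicitly
parse('# hi', null);      // OK: null remains explicit in the union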
114 changes: 57 additions & 57 deletions src/Lexer.ts
@@ -2,7 +2,7 @@ import { _Tokenizer } from './Tokenizer.ts';
import { _defaults } from './defaults.ts';
import { other, block, inline } from './rules.ts';
import type { Token, TokensList, Tokens } from './Tokens.ts';
- import type { MarkedOptions, TokenizerExtension } from './MarkedOptions.ts';
+ import type { MarkedOptions } from './MarkedOptions.ts';

/**
* Block Lexer
@@ -85,8 +85,7 @@ export class _Lexer {
* Preprocessing
*/
lex(src: string) {
- src = src
- .replace(other.carriageReturn, '\n');
+ src = src.replace(other.carriageReturn, '\n');

this.blockTokens(src, this.tokens);

@@ -109,31 +108,28 @@
src = src.replace(other.tabCharGlobal, ' ').replace(other.spaceLine, '');
}

- let token: Tokens.Generic | undefined;
- let lastToken;
- let cutSrc;

while (src) {
- if (this.options.extensions
- && this.options.extensions.block
- && this.options.extensions.block.some((extTokenizer: TokenizerExtension['tokenizer']) => {
- if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
- src = src.substring(token.raw.length);
- tokens.push(token);
- return true;
- }
- return false;
- })) {
+ let token: Tokens.Generic | undefined;
+
+ if (this.options.extensions?.block?.some((extTokenizer) => {
+ if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
+ src = src.substring(token.raw.length);
+ tokens.push(token);
+ return true;
+ }
+ return false;
+ })) {
continue;
}

// newline
if (token = this.tokenizer.space(src)) {
src = src.substring(token.raw.length);
- if (token.raw.length === 1 && tokens.length > 0) {
+ const lastToken = tokens.at(-1);
+ if (token.raw.length === 1 && lastToken !== undefined) {
// if there's a single \n as a spacer, it's terminating the last line,
// so move it there so that we don't get unnecessary paragraph tags
- tokens[tokens.length - 1].raw += '\n';
+ lastToken.raw += '\n';
} else {
tokens.push(token);
}
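
Side note on the extension-check rewrite above (illustrative, not from the diff): `a?.b?.some(fn)` evaluates to undefined when `a` or `b` is missing, and undefined is falsy, so the optional-chaining form is equivalent to the old explicit `&&` chain:

function hasBlockExt(extensions?: { block?: Array<() => boolean> }): boolean {
  // Equivalent to: extensions && extensions.block && extensions.block.some(f => f())
  return !!extensions?.block?.some(f => f());
}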
@@ -143,12 +139,12 @@
// code
if (token = this.tokenizer.code(src)) {
src = src.substring(token.raw.length);
- lastToken = tokens[tokens.length - 1];
+ const lastToken = tokens.at(-1);
// An indented code block cannot interrupt a paragraph.
- if (lastToken && (lastToken.type === 'paragraph' || lastToken.type === 'text')) {
+ if (lastToken?.type === 'paragraph' || lastToken?.type === 'text') {
lastToken.raw += '\n' + token.raw;
lastToken.text += '\n' + token.text;
- this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
+ this.inlineQueue.at(-1)!.src = lastToken.text;
Not a fan of using `!`, but it is logically the same as the previous code.

We should look at making this safer in a follow-up PR.
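
One possible safer shape for that follow-up (a hypothetical sketch in the context of this method, not part of this PR):

// Guard instead of asserting: at(-1) may return undefined on an empty queue.
const lastInline = this.inlineQueue.at(-1);
if (lastInline) {
  lastInline.src = lastToken.text;
}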

} else {
tokens.push(token);
}
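
The switch from `tokens[tokens.length - 1]` to `tokens.at(-1)` is what makes these guards type-checked: `.at()` returns `T | undefined`, whereas plain indexing is typed as `T` unless noUncheckedIndexedAccess is enabled. A minimal sketch (illustrative):

const toks: { type: string }[] = [];
const byIndex = toks[toks.length - 1]; // typed { type: string }, yet undefined at runtime here
const byAt = toks.at(-1);              // typed { type: string } | undefined
if (byAt?.type === 'text') { /* safe even on an empty array */ }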
@@ -200,11 +196,11 @@
// def
if (token = this.tokenizer.def(src)) {
src = src.substring(token.raw.length);
- lastToken = tokens[tokens.length - 1];
- if (lastToken && (lastToken.type === 'paragraph' || lastToken.type === 'text')) {
+ const lastToken = tokens.at(-1);
+ if (lastToken?.type === 'paragraph' || lastToken?.type === 'text') {
lastToken.raw += '\n' + token.raw;
lastToken.text += '\n' + token.raw;
- this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
+ this.inlineQueue.at(-1)!.src = lastToken.text;
} else if (!this.tokens.links[token.tag]) {
this.tokens.links[token.tag] = {
href: token.href,
@@ -230,43 +226,45 @@

// top-level paragraph
// prevent paragraph consuming extensions by clipping 'src' to extension start
- cutSrc = src;
- if (this.options.extensions && this.options.extensions.startBlock) {
+ let cutSrc = src;
+ if (this.options.extensions?.startBlock) {
let startIndex = Infinity;
const tempSrc = src.slice(1);
let tempStart;
this.options.extensions.startBlock.forEach((getStartIndex) => {
tempStart = getStartIndex.call({ lexer: this }, tempSrc);
- if (typeof tempStart === 'number' && tempStart >= 0) { startIndex = Math.min(startIndex, tempStart); }
+ if (typeof tempStart === 'number' && tempStart >= 0) {
+ startIndex = Math.min(startIndex, tempStart);
+ }
});
if (startIndex < Infinity && startIndex >= 0) {
cutSrc = src.substring(0, startIndex + 1);
}
}
if (this.state.top && (token = this.tokenizer.paragraph(cutSrc))) {
- lastToken = tokens[tokens.length - 1];
+ const lastToken = tokens.at(-1);
if (lastParagraphClipped && lastToken?.type === 'paragraph') {
lastToken.raw += '\n' + token.raw;
lastToken.text += '\n' + token.text;
this.inlineQueue.pop();
- this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
+ this.inlineQueue.at(-1)!.src = lastToken.text;
} else {
tokens.push(token);
}
- lastParagraphClipped = (cutSrc.length !== src.length);
+ lastParagraphClipped = cutSrc.length !== src.length;
src = src.substring(token.raw.length);
continue;
}

// text
if (token = this.tokenizer.text(src)) {
src = src.substring(token.raw.length);
- lastToken = tokens[tokens.length - 1];
- if (lastToken && lastToken.type === 'text') {
+ const lastToken = tokens.at(-1);
+ if (lastToken?.type === 'text') {
lastToken.raw += '\n' + token.raw;
lastToken.text += '\n' + token.text;
this.inlineQueue.pop();
- this.inlineQueue[this.inlineQueue.length - 1].src = lastToken.text;
+ this.inlineQueue.at(-1)!.src = lastToken.text;
} else {
tokens.push(token);
}
@@ -297,20 +295,19 @@
* Lexing/Compiling
*/
inlineTokens(src: string, tokens: Token[] = []): Token[] {
- let token, lastToken, cutSrc;

// String with links masked to avoid interference with em and strong
let maskedSrc = src;
- let match;
- let keepPrevChar, prevChar;
+ let match: RegExpExecArray | null = null;

// Mask out reflinks
if (this.tokens.links) {
const links = Object.keys(this.tokens.links);
if (links.length > 0) {
while ((match = this.tokenizer.rules.inline.reflinkSearch.exec(maskedSrc)) != null) {
if (links.includes(match[0].slice(match[0].lastIndexOf('[') + 1, -1))) {
- maskedSrc = maskedSrc.slice(0, match.index) + '[' + 'a'.repeat(match[0].length - 2) + ']' + maskedSrc.slice(this.tokenizer.rules.inline.reflinkSearch.lastIndex);
+ maskedSrc = maskedSrc.slice(0, match.index)
+ + '[' + 'a'.repeat(match[0].length - 2) + ']'
+ + maskedSrc.slice(this.tokenizer.rules.inline.reflinkSearch.lastIndex);
}
}
}
Expand All @@ -325,23 +322,25 @@ export class _Lexer {
maskedSrc = maskedSrc.slice(0, match.index) + '++' + maskedSrc.slice(this.tokenizer.rules.inline.anyPunctuation.lastIndex);
}

+ let keepPrevChar = false;
+ let prevChar = '';
while (src) {
if (!keepPrevChar) {
prevChar = '';
}
keepPrevChar = false;

+ let token: Tokens.Generic | undefined;
+
// extensions
- if (this.options.extensions
- && this.options.extensions.inline
- && this.options.extensions.inline.some((extTokenizer) => {
- if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
- src = src.substring(token.raw.length);
- tokens.push(token);
- return true;
- }
- return false;
- })) {
+ if (this.options.extensions?.inline?.some((extTokenizer) => {
+ if (token = extTokenizer.call({ lexer: this }, src, tokens)) {
+ src = src.substring(token.raw.length);
+ tokens.push(token);
+ return true;
+ }
+ return false;
+ })) {
continue;
}

@@ -355,7 +354,6 @@
// tag
if (token = this.tokenizer.tag(src)) {
src = src.substring(token.raw.length);
- lastToken = tokens[tokens.length - 1];
tokens.push(token);
continue;
}
@@ -370,8 +368,8 @@
// reflink, nolink
if (token = this.tokenizer.reflink(src, this.tokens.links)) {
src = src.substring(token.raw.length);
- lastToken = tokens[tokens.length - 1];
- if (lastToken && token.type === 'text' && lastToken.type === 'text') {
+ const lastToken = tokens.at(-1);
+ if (token.type === 'text' && lastToken?.type === 'text') {
lastToken.raw += token.raw;
lastToken.text += token.text;
} else {
@@ -424,14 +422,16 @@

// text
// prevent inlineText consuming extensions by clipping 'src' to extension start
- cutSrc = src;
- if (this.options.extensions && this.options.extensions.startInline) {
+ let cutSrc = src;
+ if (this.options.extensions?.startInline) {
let startIndex = Infinity;
const tempSrc = src.slice(1);
let tempStart;
this.options.extensions.startInline.forEach((getStartIndex) => {
tempStart = getStartIndex.call({ lexer: this }, tempSrc);
- if (typeof tempStart === 'number' && tempStart >= 0) { startIndex = Math.min(startIndex, tempStart); }
+ if (typeof tempStart === 'number' && tempStart >= 0) {
+ startIndex = Math.min(startIndex, tempStart);
+ }
});
if (startIndex < Infinity && startIndex >= 0) {
cutSrc = src.substring(0, startIndex + 1);
@@ -443,8 +443,8 @@
prevChar = token.raw.slice(-1);
}
keepPrevChar = true;
- lastToken = tokens[tokens.length - 1];
- if (lastToken && lastToken.type === 'text') {
+ const lastToken = tokens.at(-1);
+ if (lastToken?.type === 'text') {
lastToken.raw += token.raw;
lastToken.text += token.text;
} else {