mirror of https://github.com/DavidAnson/markdownlint.git
synced 2025-12-17 22:40:13 +01:00
Reimplement MD036/no-emphasis-as-heading using micromark tokens.
This commit is contained in:
parent 6f6348a63d
commit 82441faa91
3 changed files with 66 additions and 84 deletions
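
For context, MD036 reports a paragraph that consists of nothing but emphasized or strong text, on the theory that it is being used as a heading. Below is a minimal sketch of triggering the rule through the Node API; it is not part of this commit, and it assumes the markdownlint.sync entry point and an arbitrary string name "example":

// Sketch only: exercise MD036 against an in-memory document.
const markdownlint = require("markdownlint");

const results = markdownlint.sync({
  "strings": {
    // A standalone bold paragraph acting as a section title
    "example": "**Section title**\n\nBody text under the faux heading.\n"
  },
  "config": { "default": false, "MD036": true }
});

// Prints roughly: example: 1: MD036/no-emphasis-as-heading Emphasis used instead of a heading
console.log(results.toString());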

@@ -1559,8 +1559,8 @@ function inHtmlFlow(token) {
  * Determines a list of Micromark tokens matches and returns a subset.
  *
  * @param {Token[]} tokens Micromark tokens.
- * @param {string[]} matchTypes Types to match.
- * @param {string[]} [resultTypes] Types to return.
+ * @param {TokenType[]} matchTypes Types to match.
+ * @param {TokenType[]} [resultTypes] Types to return.
  * @returns {Token[] | null} Matching tokens.
  */
 function matchAndGetTokensByType(tokens, matchTypes, resultTypes) {
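
The change above only tightens the JSDoc from string[] to the TokenType type; the helper still takes a token list plus an ordered list of types and returns the matching tokens (or null). A hedged sketch of the call MD036 makes further down; the require path and type names come from the diff, while the wrapper function getEmphasisTextToken is hypothetical:

// Hypothetical wrapper: match an emphasis token's children against the
// sequence/text/sequence shape and return the middle (text) token, or null.
const { matchAndGetTokensByType } = require("../helpers/micromark.cjs");

function getEmphasisTextToken(emphasisToken) {
  const matched = matchAndGetTokensByType(
    emphasisToken.children,
    [ "emphasisSequence", "emphasisText", "emphasisSequence" ]
  );
  return matched && matched[1];
}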

@@ -5315,6 +5315,14 @@ module.exports = {
 
 
 const { addErrorContext, allPunctuation } = __webpack_require__(/*! ../helpers */ "../helpers/helpers.js");
+const { filterByTypes, matchAndGetTokensByType } = __webpack_require__(/*! ../helpers/micromark.cjs */ "../helpers/micromark.cjs");
+
+/** @typedef {import("../helpers/micromark.cjs").TokenType} TokenType */
+/** @type {Map<TokenType, TokenType[]>} */
+const emphasisAndChildrenTypes = new Map([
+  [ "emphasis", [ "emphasisSequence", "emphasisText", "emphasisSequence" ] ],
+  [ "strong", [ "strongSequence", "strongText", "strongSequence" ] ]
+]);
 
 // eslint-disable-next-line jsdoc/valid-types
 /** @type import("./markdownlint").Rule */
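
The new emphasisAndChildrenTypes map pairs each inline container type with the exact child sequence the micromark parser is expected to produce for it; roughly, *Heading* corresponds to emphasis and **Heading** to strong. A small sketch (assuming the map defined in the hunk above is in scope) of reading it the same way the rule does:

// Sketch: enumerate the expected child token types per emphasis kind.
for (const [ emphasisType, childTypes ] of emphasisAndChildrenTypes) {
  console.log(`${emphasisType} expects children: ${childTypes.join(", ")}`);
}
// emphasis expects children: emphasisSequence, emphasisText, emphasisSequence
// strong expects children: strongSequence, strongText, strongSequence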
@@ -5322,50 +5330,33 @@ module.exports = {
   "names": [ "MD036", "no-emphasis-as-heading" ],
   "description": "Emphasis used instead of a heading",
   "tags": [ "headings", "emphasis" ],
-  "parser": "markdownit",
+  "parser": "micromark",
   "function": function MD036(params, onError) {
     let punctuation = params.config.punctuation;
-    punctuation =
-      String((punctuation === undefined) ? allPunctuation : punctuation);
-    const re = new RegExp("[" + punctuation + "]$");
-    // eslint-disable-next-line jsdoc/require-jsdoc
-    function base(token) {
-      if (token.type === "paragraph_open") {
-        return function inParagraph(t) {
-          // Always paragraph_open/inline/paragraph_close,
-          const children = t.children.filter(function notEmptyText(child) {
-            return (child.type !== "text") || (child.content !== "");
-          });
-          if ((children.length === 3) &&
-            ((children[0].type === "strong_open") ||
-              (children[0].type === "em_open")) &&
-            (children[1].type === "text") &&
-            !re.test(children[1].content)) {
-            addErrorContext(onError, t.lineNumber,
-              children[1].content);
+    punctuation = String((punctuation === undefined) ? allPunctuation : punctuation);
+    const punctuationRe = new RegExp("[" + punctuation + "]$");
+    const paragraphTokens =
+      filterByTypes(params.parsers.micromark.tokens, [ "paragraph" ]).
+        filter((token) =>
+          (token.parent?.type === "content") && !token.parent?.parent && (token.children.length === 1)
+        );
+    for (const paragraphToken of paragraphTokens) {
+      const childToken = paragraphToken.children[0];
+      for (const [ emphasisType, emphasisChildrenTypes ] of emphasisAndChildrenTypes) {
+        if (childToken.type === emphasisType) {
+          const matchingTokens = matchAndGetTokensByType(childToken.children, emphasisChildrenTypes);
+          if (matchingTokens) {
+            const textToken = matchingTokens[1];
+            if (
+              (textToken.children.length === 1) &&
+              (textToken.children[0].type === "data") &&
+              !punctuationRe.test(textToken.text)
+            ) {
+              addErrorContext(onError, textToken.startLine, textToken.text);
+            }
           }
-          return base;
-        };
-      } else if (token.type === "blockquote_open") {
-        return function inBlockquote(t) {
-          if (t.type !== "blockquote_close") {
-            return inBlockquote;
-          }
-          return base;
-        };
-      } else if (token.type === "list_item_open") {
-        return function inListItem(t) {
-          if (t.type !== "list_item_close") {
-            return inListItem;
-          }
-          return base;
-        };
+        }
       }
-      return base;
-    }
-    let state = base;
-    for (const token of params.parsers.markdownit.tokens) {
-      state = state(token);
     }
   }
 };
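
The paragraph filter takes over the job of the old hand-rolled state machine: where base previously skipped everything between blockquote_open/blockquote_close and list_item_open/list_item_close, the new code keeps only paragraphs whose parent is the top-level content token with no grandparent, i.e. document-level paragraphs with a single child. A sketch of that condition against simplified, hypothetical token objects (not real micromark output):

// Hypothetical token shapes, simplified to the fields the filter reads.
const topLevelParagraph = {
  "type": "paragraph",
  "parent": { "type": "content", "parent": null },
  "children": [ { "type": "strong" } ]
};
const quotedParagraph = {
  "type": "paragraph",
  "parent": { "type": "content", "parent": { "type": "blockQuote" } },
  "children": [ { "type": "strong" } ]
};

// Same predicate as the filter in the diff above.
const isCandidate = (token) =>
  (token.parent?.type === "content") && !token.parent?.parent && (token.children.length === 1);

console.log(isCandidate(topLevelParagraph)); // true
console.log(isCandidate(quotedParagraph));   // false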

@@ -381,8 +381,8 @@ function inHtmlFlow(token) {
  * Determines a list of Micromark tokens matches and returns a subset.
  *
  * @param {Token[]} tokens Micromark tokens.
- * @param {string[]} matchTypes Types to match.
- * @param {string[]} [resultTypes] Types to return.
+ * @param {TokenType[]} matchTypes Types to match.
+ * @param {TokenType[]} [resultTypes] Types to return.
  * @returns {Token[] | null} Matching tokens.
  */
 function matchAndGetTokensByType(tokens, matchTypes, resultTypes) {

lib/md036.js: 71 changed lines
@@ -3,6 +3,14 @@
 "use strict";
 
 const { addErrorContext, allPunctuation } = require("../helpers");
+const { filterByTypes, matchAndGetTokensByType } = require("../helpers/micromark.cjs");
+
+/** @typedef {import("../helpers/micromark.cjs").TokenType} TokenType */
+/** @type {Map<TokenType, TokenType[]>} */
+const emphasisAndChildrenTypes = new Map([
+  [ "emphasis", [ "emphasisSequence", "emphasisText", "emphasisSequence" ] ],
+  [ "strong", [ "strongSequence", "strongText", "strongSequence" ] ]
+]);
 
 // eslint-disable-next-line jsdoc/valid-types
 /** @type import("./markdownlint").Rule */
@@ -10,50 +18,33 @@ module.exports = {
   "names": [ "MD036", "no-emphasis-as-heading" ],
   "description": "Emphasis used instead of a heading",
   "tags": [ "headings", "emphasis" ],
-  "parser": "markdownit",
+  "parser": "micromark",
   "function": function MD036(params, onError) {
     let punctuation = params.config.punctuation;
-    punctuation =
-      String((punctuation === undefined) ? allPunctuation : punctuation);
-    const re = new RegExp("[" + punctuation + "]$");
-    // eslint-disable-next-line jsdoc/require-jsdoc
-    function base(token) {
-      if (token.type === "paragraph_open") {
-        return function inParagraph(t) {
-          // Always paragraph_open/inline/paragraph_close,
-          const children = t.children.filter(function notEmptyText(child) {
-            return (child.type !== "text") || (child.content !== "");
-          });
-          if ((children.length === 3) &&
-            ((children[0].type === "strong_open") ||
-              (children[0].type === "em_open")) &&
-            (children[1].type === "text") &&
-            !re.test(children[1].content)) {
-            addErrorContext(onError, t.lineNumber,
-              children[1].content);
+    punctuation = String((punctuation === undefined) ? allPunctuation : punctuation);
+    const punctuationRe = new RegExp("[" + punctuation + "]$");
+    const paragraphTokens =
+      filterByTypes(params.parsers.micromark.tokens, [ "paragraph" ]).
+        filter((token) =>
+          (token.parent?.type === "content") && !token.parent?.parent && (token.children.length === 1)
+        );
+    for (const paragraphToken of paragraphTokens) {
+      const childToken = paragraphToken.children[0];
+      for (const [ emphasisType, emphasisChildrenTypes ] of emphasisAndChildrenTypes) {
+        if (childToken.type === emphasisType) {
+          const matchingTokens = matchAndGetTokensByType(childToken.children, emphasisChildrenTypes);
+          if (matchingTokens) {
+            const textToken = matchingTokens[1];
+            if (
+              (textToken.children.length === 1) &&
+              (textToken.children[0].type === "data") &&
+              !punctuationRe.test(textToken.text)
+            ) {
+              addErrorContext(onError, textToken.startLine, textToken.text);
+            }
           }
-          return base;
-        };
-      } else if (token.type === "blockquote_open") {
-        return function inBlockquote(t) {
-          if (t.type !== "blockquote_close") {
-            return inBlockquote;
-          }
-          return base;
-        };
-      } else if (token.type === "list_item_open") {
-        return function inListItem(t) {
-          if (t.type !== "list_item_close") {
-            return inListItem;
-          }
-          return base;
-        };
+        }
       }
-      return base;
-    }
-    let state = base;
-    for (const token of params.parsers.markdownit.tokens) {
-      state = state(token);
     }
   }
 };
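
The punctuation handling carries over from the markdown-it version: text that ends with one of the configured punctuation characters reads as a sentence rather than a heading, so it is not reported. A hedged configuration sketch, not part of this commit, assuming the markdownlint.sync entry point and the rule's existing punctuation option:

// Sketch: trailing punctuation from the configured set suppresses MD036.
const markdownlint = require("markdownlint");

const results = markdownlint.sync({
  "strings": {
    "flagged": "**Installation**\n\nSteps follow.\n",
    "notFlagged": "**Remember to install first.**\n\nSteps follow.\n"
  },
  "config": {
    "default": false,
    "MD036": { "punctuation": ".,;:!?" }
  }
});

// Expect one MD036 result for "flagged" and none for "notFlagged"
console.log(results.toString());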