Mirror of https://github.com/DavidAnson/markdownlint.git (synced 2025-09-22 05:40:48 +02:00)
Add tests for micromark helpers filterByPredicate, filterByTypes, and getMicromarkEvents.
parent a87f40efd8
commit 5302ee45de
3 changed files with 68 additions and 8 deletions
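
In outline, the change splits the existing micromarkParse helper in two: getMicromarkEvents runs micromark's preprocess/parse/postprocess pipeline and returns the raw events, and micromarkParse now builds its frozen Token objects on top of that. A minimal sketch of how the exported helpers fit together, assuming the module is required from the repository root the same way the new test imports it; the sample Markdown string is illustrative only:

// Sketch only; the exports shown are those used by the new test.
const { getMicromarkEvents, parse, filterByTypes } =
  require("./helpers/micromark.cjs");

const markdown = "# Heading\n\nSome `code` in a paragraph.\n";

// Raw micromark events from the new helper.
const events = getMicromarkEvents(markdown);

// Frozen Token objects via the parse export the tests use.
const tokens = parse(markdown);

// Filter the token tree down to specific text-bearing types.
const textTokens = filterByTypes(tokens, [ "atxHeadingText", "codeText" ]);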
@@ -1462,13 +1462,13 @@ var _require =
  */
 
 /**
- * Parses a Markdown document and returns (frozen) tokens.
+ * Parses a Markdown document and returns Micromark events.
  *
  * @param {string} markdown Markdown document.
  * @param {Object} [options] Options for micromark.
- * @returns {Token[]} Micromark tokens (frozen).
+ * @returns {Object[]} Micromark events.
  */
-function micromarkParse(markdown) {
+function getMicromarkEvents(markdown) {
   var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
   // Customize options object to add useful extensions
   options.extensions || (options.extensions = []);
@@ -1484,6 +1484,20 @@ function micromarkParse(markdown) {
   };
   var chunks = preprocess()(markdown, encoding, eol);
   var events = postprocess(parseContext.document().write(chunks));
+  return events;
+}
+
+/**
+ * Parses a Markdown document and returns (frozen) tokens.
+ *
+ * @param {string} markdown Markdown document.
+ * @param {Object} [options] Options for micromark.
+ * @returns {Token[]} Micromark tokens (frozen).
+ */
+function micromarkParse(markdown) {
+  var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+  // Use micromark to parse document into Events
+  var events = getMicromarkEvents(markdown, options);
 
   // Create Token objects
   var document = [];
@@ -1643,6 +1657,7 @@ module.exports = {
   filterByPredicate: filterByPredicate,
   filterByTypes: filterByTypes,
   getHtmlTagInfo: getHtmlTagInfo,
+  getMicromarkEvents: getMicromarkEvents,
   getTokenTextByType: getTokenTextByType,
   matchAndGetTokensByType: matchAndGetTokensByType
 };
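
Both copies of the change surface the events produced by postprocess(parseContext.document().write(chunks)). As the new test's event[0] and event[1].type accesses imply, each event is an array whose first element is "enter" or "exit" and whose second element is the token being opened or closed. A rough, illustrative sketch of consuming those events directly (the sample input and the tally are not part of the commit):

const { getMicromarkEvents } = require("./helpers/micromark.cjs");

// Count how many tokens of each type are opened while parsing.
const counts = {};
for (const event of getMicromarkEvents("Some *emphasis* here.\n")) {
  if (event[0] === "enter") {
    const type = event[1].type;
    counts[type] = (counts[type] || 0) + 1;
  }
}
// counts is expected to map types such as "paragraph" and "emphasis" to 1.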
@@ -23,13 +23,13 @@ const { gfmAutolinkLiteral, gfmFootnote, parse, postprocess, preprocess } =
  */
 
 /**
- * Parses a Markdown document and returns (frozen) tokens.
+ * Parses a Markdown document and returns Micromark events.
  *
  * @param {string} markdown Markdown document.
  * @param {Object} [options] Options for micromark.
- * @returns {Token[]} Micromark tokens (frozen).
+ * @returns {Object[]} Micromark events.
  */
-function micromarkParse(markdown, options = {}) {
+function getMicromarkEvents(markdown, options = {}) {
 
   // Customize options object to add useful extensions
   options.extensions ||= [];
@@ -43,6 +43,20 @@ function micromarkParse(markdown, options = {}) {
   parseContext.defined.includes = (searchElement) => searchElement.length > 0;
   const chunks = preprocess()(markdown, encoding, eol);
   const events = postprocess(parseContext.document().write(chunks));
+  return events;
+}
+
+/**
+ * Parses a Markdown document and returns (frozen) tokens.
+ *
+ * @param {string} markdown Markdown document.
+ * @param {Object} [options] Options for micromark.
+ * @returns {Token[]} Micromark tokens (frozen).
+ */
+function micromarkParse(markdown, options = {}) {
+
+  // Use micromark to parse document into Events
+  const events = getMicromarkEvents(markdown, options);
 
   // Create Token objects
   const document = [];
@@ -186,6 +200,7 @@ module.exports = {
   filterByPredicate,
   filterByTypes,
   getHtmlTagInfo,
+  getMicromarkEvents,
   getTokenTextByType,
   matchAndGetTokensByType
 };
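
The new getMicromarkEvents/filterByPredicate test below uses the catch-all predicate () => true to flatten the whole token tree; with a narrower predicate the same helper can pull out just the tokens of interest. A hedged example, assuming filterByPredicate walks the tree and returns every token the predicate accepts (which is the behavior the test relies on):

const { filterByPredicate, parse } = require("./helpers/micromark.cjs");

// Collect heading-related tokens from the frozen token tree.
const headingTokens = filterByPredicate(
  parse("# Title\n\nBody text.\n"),
  (token) => token.type.startsWith("atxHeading")
);
// Expected to include types like "atxHeading", "atxHeadingSequence",
// and "atxHeadingText".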
@@ -3,11 +3,41 @@
 import fs from "node:fs/promises";
 import test from "ava";
 import { newLineRe } from "../helpers/helpers.js";
-import { parse } from "../helpers/micromark.cjs";
+import { filterByPredicate, filterByTypes, getMicromarkEvents, parse }
+  from "../helpers/micromark.cjs";
 
 test("parse", async(t) => {
   t.plan(1);
   const content = await fs.readFile("./test/every-markdown-syntax.md", "utf8");
-  const document = parse(content.split(newLineRe).join("\n"));
+  const normalizedContent = content.split(newLineRe).join("\n");
+  const document = parse(normalizedContent);
   t.snapshot(document, "Unexpected tokens");
 });
+
+test("getMicromarkEvents/filterByPredicate", async(t) => {
+  t.plan(1);
+  const content = await fs.readFile("./test/every-markdown-syntax.md", "utf8");
+  const normalizedContent = content.split(newLineRe).join("\n");
+  const events = getMicromarkEvents(normalizedContent);
+  const eventTypes = events
+    .filter((event) => event[0] === "enter")
+    .map((event) => event[1].type);
+  const document = parse(normalizedContent);
+  const tokens = filterByPredicate(document, () => true);
+  const tokenTypes = tokens.map((token) => token.type);
+  t.deepEqual(tokenTypes, eventTypes);
+});
+
+test("filterByTypes", async(t) => {
+  t.plan(6);
+  const content = await fs.readFile("./test/every-markdown-syntax.md", "utf8");
+  const normalizedContent = content.split(newLineRe).join("\n");
+  const document = parse(normalizedContent);
+  const tokens = filterByTypes(
+    document,
+    [ "atxHeadingText", "codeText", "htmlText", "setextHeadingText" ]
+  );
+  for (const token of tokens) {
+    t.true(token.type.endsWith("Text"));
+  }
+});
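
The getMicromarkEvents/filterByPredicate test pins down the relationship the refactoring relies on: flattening the token tree with the catch-all predicate yields token types in the same order as the types of the "enter" events, so each Token corresponds to one event being entered. Because the file imports test from "ava", the new cases should run under the project's existing ava setup; assuming ava is installed as a dev dependency, npx ava (or the repository's own npm test script) should pick them up.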