Add tests for micromark helpers filterByPredicate, filterByTypes, and getMicromarkEvents.

David Anson 2023-02-10 20:37:32 -08:00
parent a87f40efd8
commit 5302ee45de
3 changed files with 68 additions and 8 deletions

View file

@@ -1462,13 +1462,13 @@ var _require =
  */

 /**
- * Parses a Markdown document and returns (frozen) tokens.
+ * Parses a Markdown document and returns Micromark events.
  *
  * @param {string} markdown Markdown document.
  * @param {Object} [options] Options for micromark.
- * @returns {Token[]} Micromark tokens (frozen).
+ * @returns {Object[]} Micromark events.
  */
-function micromarkParse(markdown) {
+function getMicromarkEvents(markdown) {
   var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
   // Customize options object to add useful extensions
   options.extensions || (options.extensions = []);
@@ -1484,6 +1484,20 @@ function micromarkParse(markdown) {
   };
   var chunks = preprocess()(markdown, encoding, eol);
   var events = postprocess(parseContext.document().write(chunks));
+  return events;
+}
+
+/**
+ * Parses a Markdown document and returns (frozen) tokens.
+ *
+ * @param {string} markdown Markdown document.
+ * @param {Object} [options] Options for micromark.
+ * @returns {Token[]} Micromark tokens (frozen).
+ */
+function micromarkParse(markdown) {
+  var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
+  // Use micromark to parse document into Events
+  var events = getMicromarkEvents(markdown, options);
   // Create Token objects
   var document = [];
@@ -1643,6 +1657,7 @@ module.exports = {
   filterByPredicate: filterByPredicate,
   filterByTypes: filterByTypes,
   getHtmlTagInfo: getHtmlTagInfo,
+  getMicromarkEvents: getMicromarkEvents,
   getTokenTextByType: getTokenTextByType,
   matchAndGetTokensByType: matchAndGetTokensByType
 };

View file

@@ -23,13 +23,13 @@ const { gfmAutolinkLiteral, gfmFootnote, parse, postprocess, preprocess } =
  */

 /**
- * Parses a Markdown document and returns (frozen) tokens.
+ * Parses a Markdown document and returns Micromark events.
  *
  * @param {string} markdown Markdown document.
  * @param {Object} [options] Options for micromark.
- * @returns {Token[]} Micromark tokens (frozen).
+ * @returns {Object[]} Micromark events.
  */
-function micromarkParse(markdown, options = {}) {
+function getMicromarkEvents(markdown, options = {}) {
   // Customize options object to add useful extensions
   options.extensions ||= [];
@@ -43,6 +43,20 @@ function micromarkParse(markdown, options = {}) {
   parseContext.defined.includes = (searchElement) => searchElement.length > 0;
   const chunks = preprocess()(markdown, encoding, eol);
   const events = postprocess(parseContext.document().write(chunks));
+  return events;
+}
+
+/**
+ * Parses a Markdown document and returns (frozen) tokens.
+ *
+ * @param {string} markdown Markdown document.
+ * @param {Object} [options] Options for micromark.
+ * @returns {Token[]} Micromark tokens (frozen).
+ */
+function micromarkParse(markdown, options = {}) {
+  // Use micromark to parse document into Events
+  const events = getMicromarkEvents(markdown, options);
   // Create Token objects
   const document = [];
@@ -186,6 +200,7 @@ module.exports = {
   filterByPredicate,
   filterByTypes,
   getHtmlTagInfo,
+  getMicromarkEvents,
   getTokenTextByType,
   matchAndGetTokensByType
 };
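
As orientation for the split above, here is a minimal usage sketch contrasting the two entry points. It is illustrative only: the require path and the sample Markdown string are placeholders, not part of the commit; the [ "enter" | "exit", token, context ] event shape is micromark's own.

// Illustrative sketch only; the relative path below is a placeholder.
const { getMicromarkEvents, parse } = require("./helpers/micromark.cjs");

const markdown = "# Heading\n\nSome *text*.\n";

// getMicromarkEvents returns micromark's raw event stream, where each
// event is an [ "enter" | "exit", token, context ] tuple.
const events = getMicromarkEvents(markdown);
for (const [ kind, token ] of events) {
  console.log(kind, token.type);
}

// parse (micromarkParse) runs the same events through the token builder
// and returns a tree of frozen Token objects instead.
const tokens = parse(markdown);
console.log(tokens.map((token) => token.type));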

View file

@@ -3,11 +3,41 @@
 import fs from "node:fs/promises";
 import test from "ava";
 import { newLineRe } from "../helpers/helpers.js";
-import { parse } from "../helpers/micromark.cjs";
+import { filterByPredicate, filterByTypes, getMicromarkEvents, parse }
+  from "../helpers/micromark.cjs";
 
 test("parse", async(t) => {
   t.plan(1);
   const content = await fs.readFile("./test/every-markdown-syntax.md", "utf8");
-  const document = parse(content.split(newLineRe).join("\n"));
+  const normalizedContent = content.split(newLineRe).join("\n");
+  const document = parse(normalizedContent);
   t.snapshot(document, "Unexpected tokens");
 });
+
+test("getMicromarkEvents/filterByPredicate", async(t) => {
+  t.plan(1);
+  const content = await fs.readFile("./test/every-markdown-syntax.md", "utf8");
+  const normalizedContent = content.split(newLineRe).join("\n");
+  const events = getMicromarkEvents(normalizedContent);
+  const eventTypes = events
+    .filter((event) => event[0] === "enter")
+    .map((event) => event[1].type);
+  const document = parse(normalizedContent);
+  const tokens = filterByPredicate(document, () => true);
+  const tokenTypes = tokens.map((token) => token.type);
+  t.deepEqual(tokenTypes, eventTypes);
+});
+
+test("filterByTypes", async(t) => {
+  t.plan(6);
+  const content = await fs.readFile("./test/every-markdown-syntax.md", "utf8");
+  const normalizedContent = content.split(newLineRe).join("\n");
+  const document = parse(normalizedContent);
+  const tokens = filterByTypes(
+    document,
+    [ "atxHeadingText", "codeText", "htmlText", "setextHeadingText" ]
+  );
+  for (const token of tokens) {
+    t.true(token.type.endsWith("Text"));
+  }
+});
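
The two new tests run the helpers against every-markdown-syntax.md; the same idea can be shown against a small inline sample. The sketch below is an assumption-laden illustration, not part of the commit: the inline Markdown string, the expected count of 2, and the relative import path are placeholders, while the token-type names are the ones already used in the test above.

import test from "ava";
import { filterByTypes, parse } from "../helpers/micromark.cjs";

test("filterByTypes on an inline sample (sketch)", (t) => {
  t.plan(2);
  // Placeholder document: one ATX heading and one setext heading.
  const document = parse("# First\n\nSecond\n------\n");
  const headingTexts = filterByTypes(
    document,
    [ "atxHeadingText", "setextHeadingText" ]
  );
  // Expect one text token per heading (assumption for this sample).
  t.is(headingTexts.length, 2);
  t.true(headingTexts.every((token) => token.type.endsWith("HeadingText")));
});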