Mirror of https://github.com/DavidAnson/markdownlint.git, synced 2025-09-21 21:30:47 +02:00
Address new violations of @stylistic/no-extra-parens.

parent 4f6b17934a
commit 61a9c2427e

5 changed files with 7 additions and 7 deletions
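
For context, @stylistic/no-extra-parens (the @stylistic/eslint-plugin successor to ESLint's core no-extra-parens rule) reports syntactically redundant parentheses, which here means the extra pair wrapped around arrow functions passed to .filter() and .map(). A minimal sketch of the pattern being fixed, using a hypothetical tokens array in place of params.parsers.micromark.tokens:

// Hypothetical stand-in for params.parsers.micromark.tokens.
const tokens = [ { "type": "blockQuote" }, { "type": "paragraph" } ];

// Before: the arrow callback is wrapped in a redundant pair of parentheses,
// which @stylistic/no-extra-parens now reports.
const before = tokens.filter(((token) => token.type === "blockQuote"));

// After: the arrow function is passed directly; behavior is unchanged.
const after = tokens.filter((token) => token.type === "blockQuote");

The rule is marked as fixable, so running ESLint with its --fix option can produce exactly the kind of one-pair removals shown in the hunks below.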
@@ -45,7 +45,7 @@ module.exports = {
   "parser": "micromark",
   "function": (params, onError) => {
     const blockquotes = params.parsers.micromark.tokens
-      .filter(((token) => token.type === "blockQuote"));
+      .filter((token) => token.type === "blockQuote");
     for (const blockquote of blockquotes) {
       const lines = blockquote.endLine - blockquote.startLine + 1;
       onError({
@@ -70,7 +70,7 @@ module.exports = {
   "parser": "markdownit",
   "function": (params, onError) => {
     const blockquotes = params.parsers.markdownit.tokens
-      .filter(((token) => token.type === "blockquote_open"));
+      .filter((token) => token.type === "blockquote_open");
     for (const blockquote of blockquotes) {
       const [ startIndex, endIndex ] = blockquote.map;
       const lines = endIndex - startIndex;
@@ -86,7 +86,7 @@ test("filterByPredicate/filterByTypes", async(t) => {
   t.plan(1);
   const tokens = await testTokens;
   const byPredicate = filterByPredicate(tokens, () => true);
-  const allTypes = new Set(byPredicate.map(((token) => token.type)));
+  const allTypes = new Set(byPredicate.map((token) => token.type));
   const byTypes = filterByTypes(tokens, [ ...allTypes.values() ], true);
   t.deepEqual(byPredicate, byTypes);
 });
@@ -16,7 +16,7 @@ module.exports = [
     "parser": "micromark",
     "function": (params, onError) => {
       const blockquotes = params.parsers.micromark.tokens
-        .filter(((token) => token.type === "blockQuote"));
+        .filter((token) => token.type === "blockQuote");
       for (const blockquote of blockquotes) {
         const lines = blockquote.endLine - blockquote.startLine + 1;
         onError({
@@ -39,7 +39,7 @@ module.exports = [
     "parser": "markdownit",
     "function": (params, onError) => {
       const blockquotes = params.parsers.markdownit.tokens
-        .filter(((token) => token.type === "blockquote_open"));
+        .filter((token) => token.type === "blockquote_open");
       for (const blockquote of blockquotes) {
         const [ startIndex, endIndex ] = blockquote.map;
         const lines = endIndex - startIndex;
@@ -15,7 +15,7 @@ module.exports = {
   "parser": "markdownit",
   "function": (params, onError) => {
     const fences = params.parsers.markdownit.tokens
-      .filter(((token) => token.type === "fence"));
+      .filter((token) => token.type === "fence");
     for (const fence of fences) {
       if (languageJavaScript.test(fence.info)) {
         const results = linter.verify(fence.content, js.configs.recommended);
@@ -13,7 +13,7 @@ module.exports = {
   "asynchronous": true,
   "function": (params, onError) => {
     const fences = params.parsers.markdownit.tokens
-      .filter(((token) => token.type === "fence"));
+      .filter((token) => token.type === "fence");
     for (const fence of fences) {
       if (/jsonc?/i.test(fence.info)) {
         const errors = [];