Address new violations of @stylistic/no-extra-parens.

David Anson 2024-10-06 21:03:07 -07:00
parent 4f6b17934a
commit 61a9c2427e
5 changed files with 7 additions and 7 deletions
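The changes below respond to the no-extra-parens rule from @stylistic/eslint-plugin. As a rough sketch of how such a rule is typically enabled in an ESLint flat config (the file name, import style, and severity here are assumptions, not this repository's actual configuration):

// eslint.config.js (illustrative sketch only; the real config may differ)
const stylistic = require("@stylistic/eslint-plugin");

module.exports = [
  {
    "plugins": { "@stylistic": stylistic },
    "rules": {
      // Reports redundant parentheses such as .filter(((token) => ...)),
      // which is why each call site below drops one pair of parens.
      "@stylistic/no-extra-parens": "error"
    }
  }
];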

View file

@@ -45,7 +45,7 @@ module.exports = {
   "parser": "micromark",
   "function": (params, onError) => {
     const blockquotes = params.parsers.micromark.tokens
-      .filter(((token) => token.type === "blockQuote"));
+      .filter((token) => token.type === "blockQuote");
     for (const blockquote of blockquotes) {
       const lines = blockquote.endLine - blockquote.startLine + 1;
       onError({
@@ -70,7 +70,7 @@ module.exports = {
   "parser": "markdownit",
   "function": (params, onError) => {
     const blockquotes = params.parsers.markdownit.tokens
-      .filter(((token) => token.type === "blockquote_open"));
+      .filter((token) => token.type === "blockquote_open");
     for (const blockquote of blockquotes) {
       const [ startIndex, endIndex ] = blockquote.map;
       const lines = endIndex - startIndex;

View file

@@ -86,7 +86,7 @@ test("filterByPredicate/filterByTypes", async(t) => {
   t.plan(1);
   const tokens = await testTokens;
   const byPredicate = filterByPredicate(tokens, () => true);
-  const allTypes = new Set(byPredicate.map(((token) => token.type)));
+  const allTypes = new Set(byPredicate.map((token) => token.type));
   const byTypes = filterByTypes(tokens, [ ...allTypes.values() ], true);
   t.deepEqual(byPredicate, byTypes);
 });

View file

@@ -16,7 +16,7 @@ module.exports = [
     "parser": "micromark",
     "function": (params, onError) => {
       const blockquotes = params.parsers.micromark.tokens
-        .filter(((token) => token.type === "blockQuote"));
+        .filter((token) => token.type === "blockQuote");
       for (const blockquote of blockquotes) {
         const lines = blockquote.endLine - blockquote.startLine + 1;
         onError({
@@ -39,7 +39,7 @@ module.exports = [
     "parser": "markdownit",
     "function": (params, onError) => {
       const blockquotes = params.parsers.markdownit.tokens
-        .filter(((token) => token.type === "blockquote_open"));
+        .filter((token) => token.type === "blockquote_open");
       for (const blockquote of blockquotes) {
         const [ startIndex, endIndex ] = blockquote.map;
         const lines = endIndex - startIndex;

View file

@@ -15,7 +15,7 @@ module.exports = {
   "parser": "markdownit",
   "function": (params, onError) => {
     const fences = params.parsers.markdownit.tokens
-      .filter(((token) => token.type === "fence"));
+      .filter((token) => token.type === "fence");
     for (const fence of fences) {
       if (languageJavaScript.test(fence.info)) {
         const results = linter.verify(fence.content, js.configs.recommended);

View file

@@ -13,7 +13,7 @@ module.exports = {
   "asynchronous": true,
   "function": (params, onError) => {
     const fences = params.parsers.markdownit.tokens
-      .filter(((token) => token.type === "fence"));
+      .filter((token) => token.type === "fence");
     for (const fence of fences) {
       if (/jsonc?/i.test(fence.info)) {
         const errors = [];