eslint-plugin-regexp
Comparing version 1.6.0 to 1.7.0
@@ -7,5 +7,3 @@ "use strict";
 const regexp_ast_analysis_1 = require("regexp-ast-analysis");
-const regexpp_1 = require("regexpp");
 const get_usage_of_pattern_1 = require("../utils/get-usage-of-pattern");
-const reorder_alternatives_1 = require("../utils/reorder-alternatives");
 const mention_1 = require("../utils/mention");
@@ -366,3 +364,3 @@ const partial_parser_1 = require("../utils/partial-parser");
 case 3: {
-const reorder = (0, reorder_alternatives_1.canReorder)([alternative, ...others], flags);
+const reorder = (0, regexp_ast_analysis_1.canReorder)([alternative, ...others], flags);
 if (reorder) {
@@ -542,9 +540,4 @@ for (const other of others) {
 function createVisitor(regexpContext) {
-const { patternAst, flagsString, flags, node, getRegexpLocation, getUsageOfPattern, } = regexpContext;
-const parser = refa_1.JS.Parser.fromAst({
-pattern: patternAst,
-flags: new regexpp_1.RegExpParser().parseFlags([
-...new Set((flagsString || "").replace(/[^gimsuy]/gu, "")),
-].join("")),
-});
+const { flags, node, getRegexpLocation, getUsageOfPattern } = regexpContext;
+const parser = (0, regexp_ast_1.getParser)(regexpContext);
 function getFilterInfo(parentNode) {
@@ -551,0 +544,0 @@ const usage = getUsageOfPattern();
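The hunks above drop this rule's dependency on the local `../utils/reorder-alternatives` helper and on a hand-built refa parser: the reorder check now goes through `canReorder` from regexp-ast-analysis, and the parser comes from the shared `getParser` utility introduced further down. As a hedged illustration of the `canReorder` call shape used here (the sample pattern and everything around the call are assumptions, not plugin code):

```ts
// Sketch only: asks regexp-ast-analysis whether reordering the alternatives
// of a pattern can change what it matches, mirroring the call in the diff.
import { parseRegExpLiteral } from "regexpp";
import { canReorder } from "regexp-ast-analysis";

const literal = parseRegExpLiteral(/cat|dog|bird/);
const alternatives = literal.pattern.alternatives;

if (canReorder(alternatives, literal.flags)) {
    // No alternative can "steal" a match from another when the order
    // changes, so a rule is free to propose a reordered pattern.
    console.log("alternatives can safely be reordered");
}
```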
@@ -7,2 +7,3 @@ "use strict";
 const mention_1 = require("../utils/mention");
+const regexp_ast_1 = require("../utils/regexp-ast");
 function unionLocations(a, b) {
@@ -23,23 +24,2 @@ function less(x, y) {
 }
-function getParsedLiteral(context) {
-var _a, _b, _c, _d, _e, _f, _g;
-const { flags, flagsString, patternAst } = context;
-return {
-pattern: patternAst,
-flags: {
-type: "Flags",
-raw: flagsString !== null && flagsString !== void 0 ? flagsString : "",
-parent: null,
-start: NaN,
-end: NaN,
-dotAll: (_a = flags.dotAll) !== null && _a !== void 0 ? _a : false,
-global: (_b = flags.global) !== null && _b !== void 0 ? _b : false,
-hasIndices: (_c = flags.hasIndices) !== null && _c !== void 0 ? _c : false,
-ignoreCase: (_d = flags.ignoreCase) !== null && _d !== void 0 ? _d : false,
-multiline: (_e = flags.multiline) !== null && _e !== void 0 ? _e : false,
-sticky: (_f = flags.sticky) !== null && _f !== void 0 ? _f : false,
-unicode: (_g = flags.unicode) !== null && _g !== void 0 ? _g : false,
-},
-};
-}
 exports.default = (0, utils_1.createRule)("no-super-linear-backtracking", {
@@ -79,3 +59,3 @@ meta: {
 const { node, patternAst, flags, getRegexpLocation, fixReplaceNode, getUsageOfPattern, } = regexpContext;
-const result = (0, scslre_1.analyse)(getParsedLiteral(regexpContext), {
+const result = (0, scslre_1.analyse)((0, regexp_ast_1.getJSRegexppAst)(regexpContext), {
 reportTypes: { Move: false },
@@ -82,0 +62,0 @@ assumeRejectingSuffix: reportUncertain &&
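Instead of building its own `{ pattern, flags }` literal, `no-super-linear-backtracking` now hands scslre the object returned by the shared `getJSRegexppAst` helper. Outside the plugin, the underlying scslre call looks roughly like the sketch below; the sample pattern and the handling of the result are assumptions, only the `analyse(literal, { reportTypes })` shape mirrors the code above:

```ts
// Sketch: run scslre's analysis on a regexpp AST literal, the same input
// shape that getJSRegexppAst produces ({ pattern, flags }).
import { parseRegExpLiteral } from "regexpp";
import { analyse } from "scslre";

const literal = parseRegExpLiteral(/^(?:a+)+$/);
const result = analyse(
    { pattern: literal.pattern, flags: literal.flags },
    { reportTypes: { Move: false } },
);

// Assumption: the result exposes a `reports` array; each report points at
// a quantifier that can cause super-linear backtracking.
console.log(result.reports.length);
```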
@@ -8,23 +8,3 @@ "use strict";
 const refa_1 = require("refa");
-function getParsedLiteral(context, ignoreSticky) {
-var _a, _b, _c, _d, _e, _f, _g;
-const { flags, flagsString, patternAst } = context;
-return {
-pattern: patternAst,
-flags: {
-type: "Flags",
-raw: flagsString !== null && flagsString !== void 0 ? flagsString : "",
-parent: null,
-start: NaN,
-end: NaN,
-dotAll: (_a = flags.dotAll) !== null && _a !== void 0 ? _a : false,
-global: (_b = flags.global) !== null && _b !== void 0 ? _b : false,
-hasIndices: (_c = flags.hasIndices) !== null && _c !== void 0 ? _c : false,
-ignoreCase: (_d = flags.ignoreCase) !== null && _d !== void 0 ? _d : false,
-multiline: (_e = flags.multiline) !== null && _e !== void 0 ? _e : false,
-sticky: !ignoreSticky && ((_f = flags.sticky) !== null && _f !== void 0 ? _f : false),
-unicode: (_g = flags.unicode) !== null && _g !== void 0 ? _g : false,
-},
-};
-}
+const regexp_ast_1 = require("../utils/regexp-ast");
 function dedupeReports(reports) {
@@ -133,3 +113,3 @@ const seen = new Set();
 const { flags } = regexpContext;
-const result = (0, scslre_1.analyse)(getParsedLiteral(regexpContext, true), {
+const result = (0, scslre_1.analyse)((0, regexp_ast_1.getJSRegexppAst)(regexpContext, true), {
 reportTypes: { Move: true, Self: false, Trade: false },
@@ -150,3 +130,3 @@ assumeRejectingSuffix,
 const { patternAst, flags } = regexpContext;
-const parser = refa_1.JS.Parser.fromAst(getParsedLiteral(regexpContext, true));
+const parser = refa_1.JS.Parser.fromAst((0, regexp_ast_1.getJSRegexppAst)(regexpContext, true));
 for (const q of findReachableQuantifiers(patternAst)) {
@@ -153,0 +133,0 @@ if (q.max !== Infinity) {
@@ -121,3 +121,3 @@ "use strict";
 const range = (0, regexp_ast_analysis_1.getLengthRange)(assertion.alternatives);
-if (range && range.max === 1) {
+if (range.max === 1) {
 if (firstOf.exact &&
@@ -124,0 +124,0 @@ after.char.isSubsetOf(firstOf.char)) {
@@ -80,3 +80,3 @@ "use strict";
 const range = (0, regexp_ast_analysis_1.getLengthRange)(target);
-return Boolean(range && range.min === range.max);
+return range.min === range.max;
 }
@@ -83,0 +83,0 @@ }
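The two hunks above drop the `range && …` guards around `getLengthRange`. Reading this together with the regexp-ast-analysis bump in this release (^0.3.0 to ^0.5.1), the apparent reason is that the newer API returns a `{ min, max }` range directly for the inputs these rules pass in; that interpretation is an inference from the diff, not something the diff states. A small sketch of the guard-free call pattern:

```ts
// Sketch (assumes regexp-ast-analysis ^0.5): getLengthRange yields { min, max }
// without a null check, so "fixed length" is simply min === max.
import { parseRegExpLiteral } from "regexpp";
import { getLengthRange } from "regexp-ast-analysis";

const literal = parseRegExpLiteral(/(?<=ab|cd)x/);
const first = literal.pattern.alternatives[0].elements[0];

if (first.type === "Assertion" && first.kind === "lookbehind") {
    const range = getLengthRange(first.alternatives);
    // Both lookbehind alternatives consume exactly two characters.
    console.log(range.min === range.max); // true
}
```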
@@ -6,5 +6,3 @@ "use strict";
 const refa_1 = require("refa");
-const reorder_alternatives_1 = require("../utils/reorder-alternatives");
 const regexp_ast_1 = require("../utils/regexp-ast");
-const alternative_prefix_1 = require("../utils/regexp-ast/alternative-prefix");
 const cache = new Map();
@@ -40,2 +38,103 @@ function getAllowedChars(flags) {
 }
+const lssCache = new WeakMap();
+function cachedApproximateLexicographicallySmallest(alternative, parser, flags) {
+let cached = lssCache.get(alternative);
+if (cached === undefined) {
+cached = approximateLexicographicallySmallest(alternative, parser, flags);
+lssCache.set(alternative, cached);
+}
+return cached;
+}
+const LONGEST_PREFIX_OPTIONS = {
+includeAfter: true,
+onlyInside: true,
+looseGroups: true,
+};
+function approximateLexicographicallySmallest(alternative, parser, flags) {
+const lss = getLexicographicallySmallestFromAlternative(alternative, parser, flags);
+if (lss !== undefined)
+return lss;
+const prefix = (0, regexp_ast_analysis_1.getLongestPrefix)(alternative, "ltr", flags, LONGEST_PREFIX_OPTIONS);
+return getLexicographicallySmallestFromCharSets(prefix);
+}
+function getLexicographicallySmallestFromAlternative(alternative, parser, flags) {
+const { elements } = alternative;
+if (isOnlyCharacters(elements)) {
+const smallest = [];
+for (const e of elements) {
+const cs = (0, regexp_ast_analysis_1.toCharSet)(e, flags);
+if (cs.isEmpty)
+return undefined;
+smallest.push(cs.ranges[0].min);
+}
+return smallest;
+}
+try {
+const result = parser.parseElement(alternative, {
+assertions: "unknown",
+backreferences: "disable",
+maxBackreferenceWords: 4,
+maxNodes: 1000,
+});
+const expression = (0, refa_1.transform)({
+onConcatenation(concat) {
+concat.elements = concat.elements.filter((e) => e.type !== "Unknown");
+},
+}, result.expression);
+const nfa = refa_1.NFA.fromRegex(expression, { maxCharacter: result.maxCharacter }, { maxNodes: 1000 });
+return getLexicographicallySmallestFromNfa(nfa.nodes.initial, nfa.nodes.finals);
+}
+catch (error) {
+return undefined;
+}
+}
+function isOnlyCharacters(nodes) {
+return nodes.every((e) => e.type === "Character" ||
+e.type === "CharacterClass" ||
+e.type === "CharacterSet");
+}
+function getLexicographicallySmallestFromNfa(initial, finals) {
+const smallest = [];
+let currentStates = [initial];
+const newStatesSet = new Set();
+const MAX_LENGTH = 1000;
+for (let i = 0; i < MAX_LENGTH; i++) {
+if (currentStates.some((n) => finals.has(n))) {
+return smallest;
+}
+let min = Infinity;
+for (const state of currentStates) {
+state.out.forEach((charSet) => {
+if (!charSet.isEmpty) {
+min = Math.min(min, charSet.ranges[0].min);
+}
+});
+}
+if (min === Infinity) {
+return undefined;
+}
+smallest.push(min);
+const newStates = [];
+newStatesSet.clear();
+for (const state of currentStates) {
+state.out.forEach((charSet, to) => {
+if (charSet.has(min) && !newStatesSet.has(to)) {
+newStates.push(to);
+newStatesSet.add(to);
+}
+});
+}
+currentStates = newStates;
+}
+return undefined;
+}
+function getLexicographicallySmallestFromCharSets(word) {
+const result = [];
+for (const set of word) {
+if (set.isEmpty)
+break;
+result.push(set.ranges[0].min);
+}
+return result;
+}
 function compareByteOrder(a, b) {
@@ -48,20 +147,19 @@ if (a === b) {
 function compareCharSets(a, b) {
-if (a.isEmpty) {
-return 1;
-}
-else if (b.isEmpty) {
-return -1;
-}
-if (a.ranges[0].min !== b.ranges[0].min) {
-return a.ranges[0].min - b.ranges[0].min;
-}
-const symDiff = a.union(b).without(a.intersect(b));
-if (symDiff.isEmpty) {
-return 0;
-}
-const min = symDiff.ranges[0].min;
-if (a.has(min)) {
-return -1;
-}
-return 1;
+const aRanges = a.ranges;
+const bRanges = b.ranges;
+for (let i = 0; i < aRanges.length && i < bRanges.length; i++) {
+const aR = aRanges[i];
+const bR = bRanges[i];
+if (aR.min !== bR.min)
+return aR.min - bR.min;
+if (aR.max !== bR.max) {
+if (aR.max < bR.max) {
+return i + 1 < aRanges.length ? +1 : -1;
+}
+else {
+return i + 1 < bRanges.length ? -1 : +1;
+}
+}
+}
+return aRanges.length - bRanges.length;
 }
@@ -78,13 +176,19 @@ function compareCharSetStrings(a, b) {
 }
-function sortAlternatives(alternatives, flags) {
-const firstChars = new Map();
-for (const a of alternatives) {
-const chars = (0, regexp_ast_analysis_1.getFirstConsumedChar)(a, "ltr", flags);
-const char = chars.empty || chars.char.isEmpty
-? Infinity
-: chars.char.ranges[0].min;
-firstChars.set(a, char);
-}
-alternatives.sort((a, b) => {
-const prefixDiff = compareCharSetStrings((0, alternative_prefix_1.getLongestPrefix)(a, "ltr", flags), (0, alternative_prefix_1.getLongestPrefix)(b, "ltr", flags));
+function compareWords(a, b) {
+const l = Math.min(a.length, b.length);
+for (let i = 0; i < l; i++) {
+const aI = a[i];
+const bI = b[i];
+if (aI !== bI)
+return aI - bI;
+}
+return a.length - b.length;
+}
+function sortAlternatives(alternatives, parser, flags) {
+alternatives.sort((a, b) => {
+const lssDiff = compareWords(cachedApproximateLexicographicallySmallest(a, parser, flags), cachedApproximateLexicographicallySmallest(b, parser, flags));
+if (lssDiff !== 0) {
+return lssDiff;
+}
+const prefixDiff = compareCharSetStrings((0, regexp_ast_analysis_1.getLongestPrefix)(a, "ltr", flags, LONGEST_PREFIX_OPTIONS), (0, regexp_ast_analysis_1.getLongestPrefix)(b, "ltr", flags, LONGEST_PREFIX_OPTIONS));
 if (prefixDiff !== 0) {
@@ -170,2 +274,3 @@ return prefixDiff;
 const possibleCharsCache = new Map();
+const parser = (0, regexp_ast_1.getParser)(regexpContext);
 function getPossibleChars(a) {
@@ -181,4 +286,4 @@ let chars = possibleCharsCache.get(a);
 const alternatives = run.elements;
-if ((0, reorder_alternatives_1.canReorder)(alternatives, flags)) {
-sortAlternatives(alternatives, flags);
+if ((0, regexp_ast_analysis_1.canReorder)(alternatives, flags)) {
+sortAlternatives(alternatives, parser, flags);
 trySortNumberAlternatives(alternatives);
@@ -192,3 +297,3 @@ }
 if (elements.length > 1 &&
-(0, reorder_alternatives_1.canReorder)(elements, flags)) {
+(0, regexp_ast_analysis_1.canReorder)(elements, flags)) {
 trySortNumberAlternatives(elements);
@@ -195,0 +300,0 @@ alternatives.splice(index, elements.length, ...elements);
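The sort-alternatives changes above replace the old first-character/longest-prefix sort key with an approximation of each alternative's lexicographically smallest accepted word: taken directly from the character elements when the alternative contains only characters, otherwise read off an NFA built with refa, with `getLongestPrefix` kept only as a tie-breaker. The hypothetical helper below re-implements just the characters-only fast path to show the idea; it is not the plugin's code:

```ts
// Hypothetical sketch of the "characters only" fast path: the smallest word
// is built by taking the lowest code point of each element's character set.
import { parseRegExpLiteral } from "regexpp";
import type { AST } from "regexpp";
import { toCharSet } from "regexp-ast-analysis";

function smallestWord(
    alternative: AST.Alternative,
    flags: AST.Flags,
): number[] | undefined {
    const word: number[] = [];
    for (const element of alternative.elements) {
        if (
            element.type !== "Character" &&
            element.type !== "CharacterClass" &&
            element.type !== "CharacterSet"
        ) {
            // The real rule falls back to an NFA walk here.
            return undefined;
        }
        const cs = toCharSet(element, flags);
        if (cs.isEmpty) return undefined;
        word.push(cs.ranges[0].min);
    }
    return word;
}

const literal = parseRegExpLiteral(/cat|[ab]x|dog/);
for (const alt of literal.pattern.alternatives) {
    const word = smallestWord(alt, literal.flags);
    console.log(alt.raw, word?.map((cp) => String.fromCodePoint(cp)).join(""));
}
// cat -> "cat", [ab]x -> "ax", dog -> "dog"; sorting by these words would
// put [ab]x first.
```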
@@ -17,5 +17,6 @@ "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.getPossiblyConsumedChar = exports.extractCaptures = exports.getRegExpNodeFromExpression = exports.getFirstConsumedCharPlusAfter = void 0;
+exports.getParser = exports.getJSRegexppAst = exports.getPossiblyConsumedChar = exports.extractCaptures = exports.getRegExpNodeFromExpression = exports.getFirstConsumedCharPlusAfter = void 0;
 const regexpp_1 = require("regexpp");
 const ast_utils_1 = require("../ast-utils");
+const refa_1 = require("refa");
 const regexp_ast_analysis_1 = require("regexp-ast-analysis");
@@ -96,1 +97,33 @@ var common_1 = require("./common");
 exports.getPossiblyConsumedChar = getPossiblyConsumedChar;
+function getJSRegexppAst(context, ignoreSticky = false) {
+var _a, _b, _c, _d, _e, _f, _g;
+const { flags, flagsString, patternAst } = context;
+return {
+pattern: patternAst,
+flags: {
+type: "Flags",
+raw: flagsString !== null && flagsString !== void 0 ? flagsString : "",
+parent: null,
+start: NaN,
+end: NaN,
+dotAll: (_a = flags.dotAll) !== null && _a !== void 0 ? _a : false,
+global: (_b = flags.global) !== null && _b !== void 0 ? _b : false,
+hasIndices: (_c = flags.hasIndices) !== null && _c !== void 0 ? _c : false,
+ignoreCase: (_d = flags.ignoreCase) !== null && _d !== void 0 ? _d : false,
+multiline: (_e = flags.multiline) !== null && _e !== void 0 ? _e : false,
+sticky: !ignoreSticky && ((_f = flags.sticky) !== null && _f !== void 0 ? _f : false),
+unicode: (_g = flags.unicode) !== null && _g !== void 0 ? _g : false,
+},
+};
+}
+exports.getJSRegexppAst = getJSRegexppAst;
+const parserCache = new WeakMap();
+function getParser(context) {
+let cached = parserCache.get(context);
+if (cached === undefined) {
+cached = refa_1.JS.Parser.fromAst(getJSRegexppAst(context));
+parserCache.set(context, cached);
+}
+return cached;
+}
+exports.getParser = getParser;
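`getJSRegexppAst` and `getParser` centralize what several rules previously did inline: build a regexpp-shaped `{ pattern, flags }` literal and wrap it in a refa `JS.Parser`, cached per context in a WeakMap. The standalone sketch below mimics that caching pattern outside the plugin; the `getParserFor` name and the example literal are made up, while `JS.Parser.fromAst` is the same refa entry point the code above uses:

```ts
// Hedged sketch of the caching pattern: one refa parser per parsed literal,
// keyed in a WeakMap so it can be garbage-collected together with its AST.
import { parseRegExpLiteral } from "regexpp";
import type { AST } from "regexpp";
import { JS } from "refa";

const parserCache = new WeakMap<AST.RegExpLiteral, JS.Parser>();

function getParserFor(literal: AST.RegExpLiteral): JS.Parser {
    let parser = parserCache.get(literal);
    if (parser === undefined) {
        parser = JS.Parser.fromAst({
            pattern: literal.pattern,
            flags: literal.flags,
        });
        parserCache.set(literal, parser);
    }
    return parser;
}

const literal = parseRegExpLiteral(/ab?c/u);
const parser = getParserFor(literal);
// Asking again for the same literal reuses the cached instance.
console.log(parser === getParserFor(literal)); // true
```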
 {
 "name": "eslint-plugin-regexp",
-"version": "1.6.0",
+"version": "1.7.0",
 "description": "ESLint plugin for finding RegExp mistakes and RegExp style guide violations.",
@@ -74,3 +74,3 @@ "engines": {
 "eslint-plugin-prettier": "^4.0.0",
-"eslint-plugin-regexp": "~1.5.0",
+"eslint-plugin-regexp": "~1.6.0",
 "eslint-plugin-vue": "^8.0.0",
@@ -98,3 +98,3 @@ "eslint-plugin-yml": "^0.14.0",
 "refa": "^0.9.0",
-"regexp-ast-analysis": "^0.3.0",
+"regexp-ast-analysis": "^0.5.1",
 "regexpp": "^3.2.0",
@@ -101,0 +101,0 @@ "scslre": "^0.1.6"