Comparing version 2.3.2 to 2.3.3
# Changelog | ||
## 2.3.3 | ||
* Fix bugs [#270](https://github.com/olivernn/lunr.js/issues/270) and [#368](https://github.com/olivernn/lunr.js/issues/368), some wildcard searches over long tokens could be extremely slow, potentially pinning the current thread indefinitely. Thanks [Kyle Spearrin](https://github.com/kspearrin) and [Mohamed Eltuhamy](https://github.com/meltuhamy) for reporting. | ||
## 2.3.2 | ||
@@ -4,0 +8,0 @@ |
@@ -245,4 +245,3 @@ /*! | ||
var node = new lunr.TokenSet, | ||
root = node, | ||
wildcardFound = false | ||
root = node | ||
@@ -253,3 +252,3 @@ /* | ||
* | ||
* As soon as a wildcard character is found then a self | ||
* When a wildcard character is found then a self | ||
* referencing edge is introduced to continually match | ||
@@ -263,3 +262,2 @@ * any number of any characters. | ||
if (char == "*") { | ||
wildcardFound = true | ||
node.edges[char] = node | ||
@@ -274,7 +272,2 @@ node.final = final | ||
node = next | ||
// TODO: is this needed anymore? | ||
if (wildcardFound) { | ||
node.edges["*"] = root | ||
} | ||
} | ||
@@ -281,0 +274,0 @@ } |
{ | ||
"name": "lunr", | ||
"description": "Simple full-text search in your browser.", | ||
"version": "2.3.2", | ||
"version": "2.3.3", | ||
"author": "Oliver Nightingale", | ||
@@ -6,0 +6,0 @@ "keywords": ["search"], |
@@ -147,2 +147,10 @@ suite('lunr.TokenSet', function () { | ||
test('leading wildcard backtracking intersection', function () { | ||
var x = lunr.TokenSet.fromString('aaacbab'), | ||
y = lunr.TokenSet.fromString('*ab'), | ||
z = x.intersect(y) | ||
assert.sameMembers(['aaacbab'], z.toArray()) | ||
}) | ||
test('leading wildcard no intersection', function () { | ||
@@ -156,2 +164,10 @@ var x = lunr.TokenSet.fromString('cat'), | ||
test('leading wildcard backtracking no intersection', function () { | ||
var x = lunr.TokenSet.fromString('aaabdcbc'), | ||
y = lunr.TokenSet.fromString('*abc'), | ||
z = x.intersect(y) | ||
assert.equal(0, z.toArray().length) | ||
}) | ||
test('contained wildcard intersection', function () { | ||
@@ -165,2 +181,10 @@ var x = lunr.TokenSet.fromString('foo'), | ||
test('contained wildcard backtracking intersection', function () { | ||
var x = lunr.TokenSet.fromString('ababc'), | ||
y = lunr.TokenSet.fromString('a*bc'), | ||
z = x.intersect(y) | ||
assert.sameMembers(['ababc'], z.toArray()) | ||
}) | ||
test('contained wildcard no intersection', function () { | ||
@@ -174,2 +198,10 @@ var x = lunr.TokenSet.fromString('foo'), | ||
test('contained wildcard backtracking no intersection', function () { | ||
var x = lunr.TokenSet.fromString('ababc'), | ||
y = lunr.TokenSet.fromString('a*ac'), | ||
z = x.intersect(y) | ||
assert.equal(0, z.toArray().length) | ||
}) | ||
test('wildcard matches zero or more characters', function () { | ||
@@ -183,2 +215,29 @@ var x = lunr.TokenSet.fromString('foo'), | ||
// This test is intended to prevent 'bugs' that have lead to these | ||
// kind of intersections taking a _very_ long time. The assertion | ||
// is not of interest, just that the test does not timeout. | ||
test('catastrophic backtracking with leading characters', function () { | ||
var x = lunr.TokenSet.fromString('fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'), | ||
y = lunr.TokenSet.fromString('*ff'), | ||
z = x.intersect(y) | ||
assert.equal(1, z.toArray().length) | ||
}) | ||
test('leading and trailing backtracking intersection', function () { | ||
var x = lunr.TokenSet.fromString('acbaabab'), | ||
y = lunr.TokenSet.fromString('*ab*'), | ||
z = x.intersect(y) | ||
assert.sameMembers(['acbaabab'], z.toArray()) | ||
}) | ||
test('multiple contained wildcard backtracking', function () { | ||
var x = lunr.TokenSet.fromString('acbaabab'), | ||
y = lunr.TokenSet.fromString('a*ba*b'), | ||
z = x.intersect(y) | ||
assert.sameMembers(['acbaabab'], z.toArray()) | ||
}) | ||
test('intersect with fuzzy string substitution', function () { | ||
@@ -185,0 +244,0 @@ var x1 = lunr.TokenSet.fromString('bar'), |
Sorry, the diff of this file is too big to display
Sorry, the diff of this file is not supported yet
942888
28596