synchronous-autocomplete
Comparing version 2.2.0 to 2.3.0
'use strict';
var Pbf = require('pbf');
var _require = require('./schema.proto.js'),
@@ -9,4 +7,3 @@ Index = _require.Index;
var decode = function decode(buf) {
var pbf = new Pbf(buf);
var output = Index.read(pbf);
var output = Index.decode(buf);
@@ -13,0 +10,0 @@ var index = {
'use strict';
var Pbf = require('pbf');
var _require = require('./schema.proto.js'),
@@ -32,7 +30,5 @@ Index = _require.Index;
var pbf = new Pbf();
Index.write(input, pbf);
return pbf.finish();
return Index.encode(input);
};
module.exports = encode;
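The two compiled hunks above (what appear to be decode.js and encode.js) show the gist of this release: the pbf runtime is dropped in favour of the standalone codec that protocol-buffers generates, so the modules call Index.decode/Index.encode directly instead of wiring up a Pbf instance and going through Index.read/Index.write. A minimal sketch of what that looks like at the call site, assuming the regenerated ./schema.proto.js exposes the encode/decode pair shown in the diff (the empty index literal is only a placeholder):

```js
'use strict'

const {Index} = require('./schema.proto.js')

// An empty index, just to exercise the codec.
const empty = {tokens: [], weights: [], nr_of_tokens: [], scores: [], original_ids: []}

// 2.2.0 style: const pbf = new Pbf(); Index.write(empty, pbf); const buf = pbf.finish()
// 2.3.0 style:
const buf = Index.encode(empty)  // returns a Buffer, no Pbf instance needed
const back = Index.decode(buf)   // reads straight from the Buffer
console.log(back.tokens.length)  // 0
```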
@@ -14,3 +14,3 @@ 'use strict';
if (tokens[fragment]) {
var relevance = 1 + scores[fragment] + Math.sqrt(fragment.length);
var relevance = 1 + scores[fragment] + Math.sqrt(l);
@@ -33,4 +33,4 @@ var ids = tokens[fragment];
// add-one smoothing
if (completion && t.length > fragment.length && fragment === t.slice(0, l)) {
_relevance = 1 + scores[t] + fragment.length / t.length;
if (completion && t.length > l && fragment === t.slice(0, l)) {
_relevance = 1 + scores[t] + l / t.length;
} else if (fuzzy && (distance = leven(fragment, t)) <= 3) {
@@ -37,0 +37,0 @@ _relevance = (1 + scores[t]) / (distance + 1);
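The scoring hunk above (from the compiled index.js) only swaps fragment.length for the precomputed fragment length l, but it is a convenient place to spell out the relevance formula. A rough illustration with invented numbers; the real scores values come from the prebuilt index:

```js
// Invented inputs, for illustration only.
const l = 'amst'.length  // length of the (normalized) search fragment: 4
const score = 0.5        // hypothetical scores[fragment] / scores[t] value

// exact token match: 1 + score + sqrt(l)
const exactRelevance = 1 + score + Math.sqrt(l)                 // 3.5

// prefix completion, e.g. 'amst' completing to 'amsterdam': 1 + score + l / t.length
const completionRelevance = 1 + score + l / 'amsterdam'.length  // ≈ 1.94
```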
@@ -1,2 +0,2 @@
Copyright (c) 2017, Jannis R
Copyright (c) 2019, Jannis R
@@ -3,0 +3,0 @@ Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies.
{
"name": "synchronous-autocomplete",
"description": "Fast, simple autocompletion.",
"version": "2.2.0",
"version": "2.3.0",
"main": "index.js",
@@ -29,4 +29,4 @@ "files": [
"hifo": "^1.0.0",
"leven": "^2.1.0",
"pbf": "^3.1.0"
"leven": "^3.0.0",
"protocol-buffers-encodings": "^1.1.0"
},
@@ -36,4 +36,6 @@ "devDependencies": {
"babel-preset-env": "^1.6.1",
"benchmark": "^2.1.4",
"lodash.sortby": "^4.7.0",
"normalize-for-search": "^2.0.1",
"protocol-buffers": "^4.1.0",
"tap-min": "^1.2.2",
@@ -44,3 +46,3 @@ "tape": "^4.8.0"
"transpile": "babel src --presets env --out-dir .",
"pbf": "pbf src/schema.proto >src/schema.proto.js",
"pbf": "protocol-buffers src/schema.proto -o schema.proto.js",
"build": "npm run pbf && npm run transpile",
@@ -47,0 +49,0 @@ "test": "node test.js | tap-min",
@@ -1,53 +0,352 @@
'use strict'; // code generated by pbf v3.1.0
// This file is auto generated by the protocol-buffers compiler
// Token ========================================
/* eslint-disable quotes */
/* eslint-disable indent */
/* eslint-disable no-redeclare */
/* eslint-disable camelcase */
var Token = exports.Token = {};
// Remember to `npm install --save protocol-buffers-encodings`
var encodings = require('protocol-buffers-encodings')
var varint = encodings.varint
var skip = encodings.skip
Token.read = function (pbf, end) {
return pbf.readFields(Token._readField, { name: "", ids: [] }, end);
};
Token._readField = function (tag, obj, pbf) {
if (tag === 1) obj.name = pbf.readString();else if (tag === 2) pbf.readPackedVarint(obj.ids, true);
};
Token.write = function (obj, pbf) {
if (obj.name) pbf.writeStringField(1, obj.name);
if (obj.ids) pbf.writePackedVarint(2, obj.ids);
};
var Token = exports.Token = {
buffer: true,
encodingLength: null,
encode: null,
decode: null
}
// Score ========================================
var Score = exports.Score = {
buffer: true,
encodingLength: null,
encode: null,
decode: null
}
var Score = exports.Score = {};
var Index = exports.Index = {
buffer: true,
encodingLength: null,
encode: null,
decode: null
}
Score.read = function (pbf, end) {
return pbf.readFields(Score._readField, { token: "", score: 0 }, end);
};
Score._readField = function (tag, obj, pbf) {
if (tag === 1) obj.token = pbf.readString();else if (tag === 2) obj.score = pbf.readFloat();
};
Score.write = function (obj, pbf) {
if (obj.token) pbf.writeStringField(1, obj.token);
if (obj.score) pbf.writeFloatField(2, obj.score);
};
defineToken()
defineScore()
defineIndex()
// Index ========================================
function defineToken () {
var enc = [
encodings.string,
encodings.int32
]
var Index = exports.Index = {};
Token.encodingLength = encodingLength
Token.encode = encode
Token.decode = decode
Index.read = function (pbf, end) {
return pbf.readFields(Index._readField, { tokens: [], weights: [], nr_of_tokens: [], scores: [], original_ids: [] }, end);
};
Index._readField = function (tag, obj, pbf) {
if (tag === 1) obj.tokens.push(Token.read(pbf, pbf.readVarint() + pbf.pos));else if (tag === 2) pbf.readPackedVarint(obj.weights, true);else if (tag === 3) pbf.readPackedVarint(obj.nr_of_tokens, true);else if (tag === 4) obj.scores.push(Score.read(pbf, pbf.readVarint() + pbf.pos));else if (tag === 5) obj.original_ids.push(pbf.readString());
};
Index.write = function (obj, pbf) {
if (obj.tokens) for (var i = 0; i < obj.tokens.length; i++) {
pbf.writeMessage(1, Token.write, obj.tokens[i]);
}if (obj.weights) pbf.writePackedVarint(2, obj.weights);
if (obj.nr_of_tokens) pbf.writePackedVarint(3, obj.nr_of_tokens);
if (obj.scores) for (i = 0; i < obj.scores.length; i++) {
pbf.writeMessage(4, Score.write, obj.scores[i]);
}if (obj.original_ids) for (i = 0; i < obj.original_ids.length; i++) {
pbf.writeStringField(5, obj.original_ids[i]);
function encodingLength (obj) {
var length = 0
if (defined(obj.name)) {
var len = enc[0].encodingLength(obj.name)
length += 1 + len
}
};
if (defined(obj.ids)) {
for (var i = 0; i < obj.ids.length; i++) {
if (!defined(obj.ids[i])) continue
var len = enc[1].encodingLength(obj.ids[i])
length += 1 + len
}
}
return length
}
function encode (obj, buf, offset) {
if (!offset) offset = 0
if (!buf) buf = Buffer.allocUnsafe(encodingLength(obj))
var oldOffset = offset
if (defined(obj.name)) {
buf[offset++] = 10
enc[0].encode(obj.name, buf, offset)
offset += enc[0].encode.bytes
}
if (defined(obj.ids)) {
for (var i = 0; i < obj.ids.length; i++) {
if (!defined(obj.ids[i])) continue
buf[offset++] = 16
enc[1].encode(obj.ids[i], buf, offset)
offset += enc[1].encode.bytes
}
}
encode.bytes = offset - oldOffset
return buf
}
function decode (buf, offset, end) {
if (!offset) offset = 0
if (!end) end = buf.length
if (!(end <= buf.length && offset <= buf.length)) throw new Error("Decoded message is not valid")
var oldOffset = offset
var obj = {
name: "",
ids: []
}
while (true) {
if (end <= offset) {
decode.bytes = offset - oldOffset
return obj
}
var prefix = varint.decode(buf, offset)
offset += varint.decode.bytes
var tag = prefix >> 3
switch (tag) {
case 1:
obj.name = enc[0].decode(buf, offset)
offset += enc[0].decode.bytes
break
case 2:
obj.ids.push(enc[1].decode(buf, offset))
offset += enc[1].decode.bytes
break
default:
offset = skip(prefix & 7, buf, offset)
}
}
}
}
function defineScore () {
var enc = [
encodings.string,
encodings.float
]
Score.encodingLength = encodingLength
Score.encode = encode
Score.decode = decode
function encodingLength (obj) {
var length = 0
if (defined(obj.token)) {
var len = enc[0].encodingLength(obj.token)
length += 1 + len
}
if (defined(obj.score)) {
var len = enc[1].encodingLength(obj.score)
length += 1 + len
}
return length
}
function encode (obj, buf, offset) {
if (!offset) offset = 0
if (!buf) buf = Buffer.allocUnsafe(encodingLength(obj))
var oldOffset = offset
if (defined(obj.token)) {
buf[offset++] = 10
enc[0].encode(obj.token, buf, offset)
offset += enc[0].encode.bytes
}
if (defined(obj.score)) {
buf[offset++] = 21
enc[1].encode(obj.score, buf, offset)
offset += enc[1].encode.bytes
}
encode.bytes = offset - oldOffset
return buf
}
function decode (buf, offset, end) {
if (!offset) offset = 0
if (!end) end = buf.length
if (!(end <= buf.length && offset <= buf.length)) throw new Error("Decoded message is not valid")
var oldOffset = offset
var obj = {
token: "",
score: 0
}
while (true) {
if (end <= offset) {
decode.bytes = offset - oldOffset
return obj
}
var prefix = varint.decode(buf, offset)
offset += varint.decode.bytes
var tag = prefix >> 3
switch (tag) {
case 1:
obj.token = enc[0].decode(buf, offset)
offset += enc[0].decode.bytes
break
case 2:
obj.score = enc[1].decode(buf, offset)
offset += enc[1].decode.bytes
break
default:
offset = skip(prefix & 7, buf, offset)
}
}
}
}
function defineIndex () {
var enc = [
Token,
encodings.int32,
Score,
encodings.string
]
Index.encodingLength = encodingLength
Index.encode = encode
Index.decode = decode
function encodingLength (obj) {
var length = 0
if (defined(obj.tokens)) {
for (var i = 0; i < obj.tokens.length; i++) {
if (!defined(obj.tokens[i])) continue
var len = enc[0].encodingLength(obj.tokens[i])
length += varint.encodingLength(len)
length += 1 + len
}
}
if (defined(obj.weights)) {
for (var i = 0; i < obj.weights.length; i++) {
if (!defined(obj.weights[i])) continue
var len = enc[1].encodingLength(obj.weights[i])
length += 1 + len
}
}
if (defined(obj.nr_of_tokens)) {
for (var i = 0; i < obj.nr_of_tokens.length; i++) {
if (!defined(obj.nr_of_tokens[i])) continue
var len = enc[1].encodingLength(obj.nr_of_tokens[i])
length += 1 + len
}
}
if (defined(obj.scores)) {
for (var i = 0; i < obj.scores.length; i++) {
if (!defined(obj.scores[i])) continue
var len = enc[2].encodingLength(obj.scores[i])
length += varint.encodingLength(len)
length += 1 + len
}
}
if (defined(obj.original_ids)) {
for (var i = 0; i < obj.original_ids.length; i++) {
if (!defined(obj.original_ids[i])) continue
var len = enc[3].encodingLength(obj.original_ids[i])
length += 1 + len
}
}
return length
}
function encode (obj, buf, offset) {
if (!offset) offset = 0
if (!buf) buf = Buffer.allocUnsafe(encodingLength(obj))
var oldOffset = offset
if (defined(obj.tokens)) {
for (var i = 0; i < obj.tokens.length; i++) {
if (!defined(obj.tokens[i])) continue
buf[offset++] = 10
varint.encode(enc[0].encodingLength(obj.tokens[i]), buf, offset)
offset += varint.encode.bytes
enc[0].encode(obj.tokens[i], buf, offset)
offset += enc[0].encode.bytes
}
}
if (defined(obj.weights)) {
for (var i = 0; i < obj.weights.length; i++) {
if (!defined(obj.weights[i])) continue
buf[offset++] = 16
enc[1].encode(obj.weights[i], buf, offset)
offset += enc[1].encode.bytes
}
}
if (defined(obj.nr_of_tokens)) {
for (var i = 0; i < obj.nr_of_tokens.length; i++) {
if (!defined(obj.nr_of_tokens[i])) continue
buf[offset++] = 24
enc[1].encode(obj.nr_of_tokens[i], buf, offset)
offset += enc[1].encode.bytes
}
}
if (defined(obj.scores)) {
for (var i = 0; i < obj.scores.length; i++) {
if (!defined(obj.scores[i])) continue
buf[offset++] = 34
varint.encode(enc[2].encodingLength(obj.scores[i]), buf, offset)
offset += varint.encode.bytes
enc[2].encode(obj.scores[i], buf, offset)
offset += enc[2].encode.bytes
}
}
if (defined(obj.original_ids)) {
for (var i = 0; i < obj.original_ids.length; i++) {
if (!defined(obj.original_ids[i])) continue
buf[offset++] = 42
enc[3].encode(obj.original_ids[i], buf, offset)
offset += enc[3].encode.bytes
}
}
encode.bytes = offset - oldOffset
return buf
}
function decode (buf, offset, end) {
if (!offset) offset = 0
if (!end) end = buf.length
if (!(end <= buf.length && offset <= buf.length)) throw new Error("Decoded message is not valid")
var oldOffset = offset
var obj = {
tokens: [],
weights: [],
nr_of_tokens: [],
scores: [],
original_ids: []
}
while (true) {
if (end <= offset) {
decode.bytes = offset - oldOffset
return obj
}
var prefix = varint.decode(buf, offset)
offset += varint.decode.bytes
var tag = prefix >> 3
switch (tag) {
case 1:
var len = varint.decode(buf, offset)
offset += varint.decode.bytes
obj.tokens.push(enc[0].decode(buf, offset, offset + len))
offset += enc[0].decode.bytes
break
case 2:
obj.weights.push(enc[1].decode(buf, offset))
offset += enc[1].decode.bytes
break
case 3:
obj.nr_of_tokens.push(enc[1].decode(buf, offset))
offset += enc[1].decode.bytes
break
case 4:
var len = varint.decode(buf, offset)
offset += varint.decode.bytes
obj.scores.push(enc[2].decode(buf, offset, offset + len))
offset += enc[2].decode.bytes
break
case 5:
obj.original_ids.push(enc[3].decode(buf, offset))
offset += enc[3].decode.bytes
break
default:
offset = skip(prefix & 7, buf, offset)
}
}
}
}
function defined (val) {
return val !== null && val !== undefined && (typeof val !== 'number' || !isNaN(val))
}
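The regenerated schema.proto.js above is long, but the message shapes it handles can be read off the defaults in its decode functions. The summary below is reconstructed from that generated code, not copied from src/schema.proto:

```js
// Shapes inferred from the generated decode() defaults (reconstruction, not the schema file).
const emptyToken = {name: '', ids: []}    // string name + repeated int32 ids
const emptyScore = {token: '', score: 0}  // string token + float score
const emptyIndex = {
  tokens: [],        // repeated Token
  weights: [],       // repeated int32
  nr_of_tokens: [],  // repeated int32
  scores: [],        // repeated Score
  original_ids: []   // repeated string
}
```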
'use strict'
const Pbf = require('pbf')
const {Index} = require('./schema.proto.js')
const decode = (buf) => {
const pbf = new Pbf(buf)
const output = Index.read(pbf)
const output = Index.decode(buf)
@@ -11,0 +8,0 @@ const index = {
'use strict'
const Pbf = require('pbf')
const {Index} = require('./schema.proto.js')
@@ -32,7 +30,5 @@
const pbf = new Pbf()
Index.write(input, pbf)
return pbf.finish()
return Index.encode(input)
}
module.exports = encode
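For context, the new codec also makes it easy to persist a packed index to disk and load it back without touching pbf. A sketch under the assumption that ./schema.proto.js is the module regenerated in this release; the file name and the empty index are placeholders, and a real index would come from the library's build step:

```js
'use strict'

const {writeFileSync, readFileSync} = require('fs')
const {Index} = require('./schema.proto.js')

// Placeholder payload, just to show the round trip through the filesystem.
const packed = Index.encode({tokens: [], weights: [], nr_of_tokens: [], scores: [], original_ids: []})

writeFileSync('index.pbf', packed)                        // store the packed index
const restored = Index.decode(readFileSync('index.pbf'))  // load it back
```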
@@ -14,3 +14,3 @@ 'use strict'
if (tokens[fragment]) {
const relevance = 1 + scores[fragment] + Math.sqrt(fragment.length)
const relevance = 1 + scores[fragment] + Math.sqrt(l)
@@ -33,4 +33,4 @@ const ids = tokens[fragment]
// add-one smoothing
if (completion && t.length > fragment.length && fragment === t.slice(0, l)) {
relevance = 1 + scores[t] + fragment.length / t.length
if (completion && t.length > l && fragment === t.slice(0, l)) {
relevance = 1 + scores[t] + l / t.length
} else if (fuzzy && (distance = leven(fragment, t)) <= 3) {
@@ -37,0 +37,0 @@ relevance = (1 + scores[t]) / (distance + 1)
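The fuzzy branch above relies on the leven dependency, which this release bumps from ^2.1.0 to ^3.0.0; the call signature is assumed unchanged. A small self-contained sketch of how the distance cutoff and the fuzzy relevance interact, with an invented score of 0.5:

```js
'use strict'

const leven = require('leven')  // ^3.0.0 after this release

const farDistance = leven('amstr', 'amsterdam')      // 4 -> above the <= 3 cutoff, skipped
const nearDistance = leven('amsterdm', 'amsterdam')  // 1 -> within the cutoff

// fuzzy relevance: (1 + scores[t]) / (distance + 1), with a hypothetical score of 0.5
const fuzzyRelevance = (1 + 0.5) / (nearDistance + 1)  // 0.75
```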
+ Added b4a@1.6.7 (transitive)
+ Added leven@3.1.0 (transitive)
+ Added protocol-buffers-encodings@1.2.0 (transitive)
+ Added signed-varint@2.0.1 (transitive)
+ Added varint@5.0.0 (transitive)
- Removed pbf@^3.1.0
- Removed ieee754@1.2.1 (transitive)
- Removed leven@2.1.0 (transitive)
- Removed pbf@3.3.0 (transitive)
- Removed protocol-buffers-schema@3.6.0 (transitive)
- Removed resolve-protobuf-schema@2.1.0 (transitive)
Updated leven@^3.0.0