fergies-inverted-index - npm Package Compare versions

Comparing version 1.0.2 to 2.0.0


dist/fergies-inverted-index.cjs.js

@@ -11,6 +11,45 @@ 'use strict';
+// key might be object or string like this
+// <fieldname>:<value>. Turn key into json object that is of the
+// format {field: ..., value: {gte: ..., lte ...}}
+const parseKey = key => {
+  if (isString(key)) {
+    if (key.indexOf(':') > -1) {
+      // string is expressing a specified field to search in
+      key = {
+        field: [ key.split(':')[0] ],
+        value: {
+          gte: key.split(':')[1],
+          lte: key.split(':')[1]
+        }
+      };
+    } else {
+      // string is not specifying a field (search in ALL fields)
+      key = {
+        value: {
+          gte: key,
+          lte: key
+        }
+      };
+    }
+  } else {
+    // key is object, but key.value is string
+    if (isString(key.value)) {
+      key.value = {
+        gte: key.value,
+        lte: key.value
+      };
+    }
+  }
+  return key
+};
 const GET = key => new Promise((resolve, reject) => {
   if (key instanceof Promise) return resolve(key) // MAGIC! Enables nested promises
-  if (isString(key)) key = { gte: key, lte: key + '○' };
-  return RANGE(key).then(resolve)
+  // takes objects in the form of
+  // {
+  //   field: ...,
+  //   value: ... (either a string or gte/lte)
+  // }
+  return RANGE(parseKey(key)).then(resolve)
 });
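The effect of the new key normalisation is easiest to see with concrete inputs. A minimal, self-contained sketch (behaviour inferred from the diff above; `parseKey` itself is not exported, and `isString` stands in for the library's internal helper):

```javascript
// stand-in for the library's internal isString helper
const isString = s => typeof s === 'string'

// re-creation of the parseKey logic shown in the hunk above
const parseKey = key => {
  if (isString(key)) {
    return key.indexOf(':') > -1
      ? { field: [key.split(':')[0]], value: { gte: key.split(':')[1], lte: key.split(':')[1] } }
      : { value: { gte: key, lte: key } }
  }
  if (isString(key.value)) key.value = { gte: key.value, lte: key.value }
  return key
}

console.log(parseKey('make:Tesla'))
// { field: [ 'make' ], value: { gte: 'Tesla', lte: 'Tesla' } }
console.log(parseKey('Tesla'))
// { value: { gte: 'Tesla', lte: 'Tesla' } }   (no field: search in ALL fields)
console.log(parseKey({ field: 'make', value: 'Tesla' }))
// { field: 'make', value: { gte: 'Tesla', lte: 'Tesla' } }
```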

@@ -49,21 +88,43 @@
-// Accepts a range of tokens (gte, lte) and returns an array of
-// document ids together with the tokens that they have matched (a
-// document can match more than one token in a range)
+// Accepts a range of tokens (field, value {gte, lte}) and returns
+// an array of document ids together with the tokens that they have
+// matched (a document can match more than one token in a range)
 const RANGE = ops => new Promise(resolve => {
   const rs = {}; // resultset
-  db.createReadStream(ops)
-    .on('data', token => token.value.forEach(docId => {
-      rs[docId] = [...(rs[docId] || []), token.key];
-      return rs
+  new Promise(
+    resolve => ops.field // is a field specified?
+      ? resolve(isString(ops.field) ? [ ops.field ] : ops.field) // use specified field (if String push to Array)
+      : AVAILABLE_FIELDS() // else get ALL available fields from store
+        .then(resolve)).then(
+    fields => Promise.all(
+      fields.map(
+        fieldName => new Promise(resolve => db.createReadStream({
+          gte: fieldName + ':' + ops.value.gte,
+          lte: fieldName + ':' + ops.value.lte + '○'
+        }).on('data', token => token.value.forEach(docId => {
+          rs[docId] = [...(rs[docId] || []), token.key];
+          return rs
+        })).on('end', resolve))
+      )
+    )
+  ).then(() => resolve(
+    // convert map into array
+    Object.keys(rs).map(id => ({
+      _id: id,
+      _match: rs[id].sort()
+    }))
-    .on('end', () => resolve(
-      // convert map into array
-      Object.keys(rs).map(id => ({
-        _id: id,
-        _match: rs[id]
-      }))
-    ));
+  )
+  );
 });
+const AVAILABLE_FIELDS = () => new Promise(resolve => {
+  const fieldNames = [];
+  db.createReadStream({
+    gte: '○FIELD○',
+    lte: '○FIELD○○'
+  })
+    .on('data', d => fieldNames.push(d.value))
+    .on('end', () => resolve(fieldNames));
+});
 // TODO: put in some validation here
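In practice this means a field-less query now fans out across every field recorded in the store, reading the range `<field>:<gte>` to `<field>:<lte>○` per field. A hypothetical result shape (ids and values invented for illustration):

```javascript
db.GET('Tesla').then(console.log)
// [
//   { _id: '3', _match: [ 'make:Tesla' ] },
//   { _id: '7', _match: [ 'make:Tesla', 'model:Tesla' ] }  // a doc can match in more than one field
// ]
```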

@@ -87,11 +148,16 @@ // arg 1: an aggregration
   // value) a single string can be used as shorthand
-  if (isString(key)) {
-    key = {
-      gte: key,
-      lte: key
-    };
-  }
+  // if (isString(key)) {
+  //   key = {
+  //     gte: key,
+  //     lte: key
+  //   }
+  // }
+  // TODO: some kind of verification of key object
+  key = parseKey(key);
   return Object.assign(key, {
-    _id: [...result.reduce((acc, cur) => acc.add(cur._id), new Set())].sort()
+    _id: [...result.reduce((acc, cur) => acc.add(cur._id), new Set())].sort(),
+    value: {
+      gte: key.value.gte.split(':').pop(),
+      lte: key.value.lte.split(':').pop().replace(/○/g, '')
+    }
   })
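Assuming `BUCKET` is called with the same key shapes as `GET`, the returned bucket now carries the parsed field and a de-prefixed value range alongside the sorted ids (ids invented for illustration):

```javascript
db.BUCKET('make:Volvo').then(console.log)
// {
//   field: [ 'make' ],
//   value: { gte: 'Volvo', lte: 'Volvo' },
//   _id: [ '1', '4' ]
// }
```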

@@ -102,2 +168,3 @@ });
     AGGREGATE: AGGREGATE,
+    AVAILABLE_FIELDS: AVAILABLE_FIELDS,
     BUCKET: BUCKET,

@@ -122,39 +189,31 @@ GET: GET,
 function init$2 (db) {
-  const MIN = key => {
-    var ops = {
+  const getRange = ops => new Promise((resolve, reject) => {
+    const keys = [];
+    db.createKeyStream(ops)
+      .on('data', data => { keys.push(data); })
+      .on('end', () => resolve(keys));
+  });
+  const MIN = key => new Promise((resolve, reject) => {
+    db.createKeyStream({
       limit: 1,
       gte: key + '!'
-    };
-    return new Promise((resolve, reject) => {
-      db.createKeyStream(ops)
-        .on('data', resolve);
-    })
-  };
+    }).on('data', resolve);
+  });
-  const MAX = key => {
-    var ops = {
+  const MAX = key => new Promise((resolve, reject) => {
+    db.createKeyStream({
       limit: 1,
       lte: key + '○',
       reverse: true
-    };
-    return new Promise((resolve, reject) => {
-      db.createKeyStream(ops)
-        .on('data', resolve);
-    })
-  };
+    }).on('data', resolve);
+  });
-  const DIST = ops => {
-    if (typeof ops === 'string') {
-      ops = {
-        gte: ops,
-        lte: ops + '○'
-      };
-    }
-    const keys = [];
-    return new Promise((resolve, reject) => {
-      db.createKeyStream(ops)
-        .on('data', data => { keys.push(data); })
-        .on('end', () => resolve(keys));
-    })
-  };
+  const DIST = ops => getRange({
+    gte: ops.field + ':' + ((ops.value && ops.value.gte) || ''),
+    lte: ops.field + ':' + ((ops.value && ops.value.lte) || '') + '○'
+  }).then(items => items.map(item => ({
+    field: item.split(/:(.+)/)[0],
+    value: item.split(/:(.+)/)[1]
+  })));
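The reworked `DIST` helper now expects a `{ field, value: { gte, lte } }` object and resolves to `{ field, value }` pairs split out of the stored `<field>:<value>` keys. A hypothetical call (shown standalone since it is an internal helper; field name and values invented):

```javascript
DIST({ field: 'make' }).then(console.log)
// with no value given, the range defaults to 'make:' .. 'make:○',
// i.e. every distinct value stored under 'make':
// [
//   { field: 'make', value: 'BMW' },
//   { field: 'make', value: 'Tesla' }
// ]
```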

@@ -190,4 +249,6 @@ return {
   });
+  // Bump all _ids to strings. Prevents _id='0' causing problems amongst other things
+  if (!isNaN(obj._id)) obj._id = obj._id + '';
   return {
-    _id: obj._id || ++incrementalId, // generate _id if not present
+    _id: obj._id || incrementalId + '', // generate _id if not present
     keys: keys
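The string bump matters because a numeric `_id` of `0` is falsy and would otherwise be replaced by a fresh incremental id. A short illustration of the logic from the diff above:

```javascript
const obj = { _id: 0 }
// new in 2.0.0: numeric ids are converted to strings before the fallback check
if (!isNaN(obj._id)) obj._id = obj._id + ''
console.log(obj._id) // '0' (truthy, so the incremental fallback id is not used)
```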

@@ -233,9 +294,7 @@ }
-const objectIndex = (docs, mode) => docs.map(doc => {
-  return {
-    key: '○DOC○' + doc._id + '○',
-    type: mode,
-    value: doc
-  }
-});
+const objectIndex = (docs, mode) => docs.map(doc => ({
+  key: '○DOC○' + doc._id + '○',
+  type: mode,
+  value: doc
+}));

@@ -258,16 +317,28 @@ const reverseIndex = (acc, cur) => {
   // else
-  doc._id = incrementalId++;
+  doc._id = ++incrementalId;
   return doc
 };
-const writer = (docs, db, mode) => {
+const availableFields = reverseIndex => [
+  ...new Set(
+    reverseIndex.map(item => item.key.split(':')[0])
+  )
+].map(f => ({
+  type: 'put',
+  key: '○FIELD○' + f + '○',
+  value: f
+}));
+const writer = (docs, db, mode) => new Promise((resolve, reject) => {
   // check for _id field, autogenerate if necessary
   docs = docs.map(checkID);
-  return new Promise((resolve, reject) => {
-    createMergedReverseIndex(createDeltaReverseIndex(docs), db, mode)
-      .then(mergedReverseIndex => {
-        db.batch(mergedReverseIndex.concat(objectIndex(docs, mode)), e => resolve(docs));
-      });
-  })
-};
+  createMergedReverseIndex(
+    createDeltaReverseIndex(docs), db, mode
+  ).then(mergedReverseIndex => db.batch(
+    mergedReverseIndex
+      .concat(objectIndex(docs, mode))
+      .concat(availableFields(mergedReverseIndex))
+    , e => resolve(docs)
+  ));
+});
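A minimal re-creation of `availableFields`, showing how reverse-index keys of the form `<field>:<value>` are reduced to one `○FIELD○` entry per distinct field and written in the same batch as the index and the documents (sample keys invented):

```javascript
const availableFields = reverseIndex => [
  ...new Set(reverseIndex.map(item => item.key.split(':')[0]))
].map(f => ({ type: 'put', key: '○FIELD○' + f + '○', value: f }))

console.log(availableFields([
  { key: 'make:Tesla', value: ['1'] },
  { key: 'colour:red', value: ['1'] },
  { key: 'make:BMW', value: ['2'] }
]))
// [ { type: 'put', key: '○FIELD○make○', value: 'make' },
//   { type: 'put', key: '○FIELD○colour○', value: 'colour' } ]
```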

@@ -278,10 +349,5 @@ function init$3 (db) {
   // deleted
-  const DELETE = _ids =>
-    init$1(db).OBJECT(
-      _ids.map(_id => {
-        return {
-          _id: _id
-        }
-      })
-    ).then(docs => writer(docs, db, 'del'));
+  const DELETE = _ids => init$1(db).OBJECT(
+    _ids.map(_id => ({ _id: _id }))
+  ).then(docs => writer(docs, db, 'del'));

@@ -299,2 +365,3 @@ const PUT = docs => writer(docs, db, 'put');
     AGGREGATE: init(db).AGGREGATE,
+    AVAILABLE_FIELDS: init(db).AVAILABLE_FIELDS,
     AND: init(db).INTERSECTION,

@@ -301,0 +368,0 @@ BUCKET: init(db).BUCKET,

The ES module bundle in dist/ receives the same set of changes as the CommonJS bundle shown above.

package.json

 {
   "name": "fergies-inverted-index",
-  "version": "1.0.2",
+  "version": "2.0.0",
   "description": "An inverted index that allows javascript objects to be easily serialised and retrieved using promises and map-reduce",

@@ -16,3 +16,3 @@ "main": "dist/fergies-inverted-index.cjs.js",
     "JSONStream": "^1.3.5",
-    "level": "^5.0.1",
+    "level": "^6.0.0",
     "traverse": "^0.6.6"

@@ -19,0 +19,0 @@ },

README.md

@@ -30,2 +30,11 @@ # Fergie's Inverted Index
+// the query strings above can alternatively be expressed using JSON objects
+db.AND({
+  field: 'land',
+  value: 'SCOTLAND'
+}, {
+  field: 'colour',
+  value: 'GREEN'
+}).then(result)
 // as above, but return whole objects

@@ -162,15 +171,35 @@ db.AND('land:SCOTLAND', 'colour:GREEN').then(db.OBJECT).then(result)
-For example get all names between `h` and `l`:
+For example, to get all Teslas do:
 ```javascript
-db.GET({ gte: 'h', lte: 'l' }).then(result)
+db.GET('Tesla').then(result) // get all documents that contain Tesla, somewhere in their structure
 ```
-Or to get all objects that have a `name` property that begins with 'h'
+Perhaps you want to be more specific and only return documents that contain `Tesla` in the `make` field:
 ```javascript
-db.GET('h').then(result)
+db.GET('make:Tesla').then(result)
 ```
+which is equivalent to:
+```javascript
+db.GET({
+  field: 'make',
+  value: 'Tesla'
+}).then(result)
+```
+Or perhaps you want all makes from `O` to `V`, in which case you could do:
+```javascript
+db.GET({
+  field: 'make',
+  value: {
+    gte: 'O', // gte == greater than or equal to
+    lte: 'V'  // lte == less than or equal to
+  }
+}).then(result)
+```
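As with `AND` earlier in the README, the matched ids from `GET` can be expanded into whole documents by chaining `OBJECT`:

```javascript
db.GET('make:Tesla').then(db.OBJECT).then(result) // whole documents rather than just ids
```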
<a name="MAX"></a>

@@ -177,0 +206,0 @@
