New Case Study: See how Anthropic automated 95% of dependency reviews with Socket. Learn More
Socket
Sign in | Demo | Install
Socket

pico-engine-core

Package Overview
Dependencies
Maintainers
1
Versions
145
Alerts
File Explorer

Advanced tools

Socket logo

Install Socket

Detect and block malicious and high-risk dependencies

Install

pico-engine-core - npm Package Compare versions

Comparing version 0.46.0 to 0.46.1

15

package.json
{
"name": "pico-engine-core",
"version": "0.46.0",
"version": "0.46.1",
"description": "The core javascript api for the pico-engine. (no http, logging, process management etc...)",

@@ -30,6 +30,6 @@ "main": "src/index.js",

"faucet": "0.0.1",
"krl-compiler": "^0.46.0",
"krl-compiler": "^0.46.1",
"memdown": "^1.4.1",
"onchange": "^3.2.1",
"standard": "^11.0.1",
"onchange": "^4.1.0",
"standard": "^12.0.1",
"tape": "^4.6.0"

@@ -42,6 +42,6 @@ },

"comment-regex": "^1.0.0",
"cuid": "^1.3.8",
"cuid": "^2.1.4",
"dependency-graph": "^0.7.0",
"encoding-down": "^3.0.0",
"krl-stdlib": "^0.46.0",
"krl-stdlib": "^0.46.1",
"level-json-coerce-null": "^1.0.1",

@@ -58,3 +58,4 @@ "levelup": "^2.0.0",

"symbol-table": "^1.1.0"
}
},
"gitHead": "0a25ac43e5b62403c3b971db30306f358d6d43a4"
}
var _ = require('lodash')
var ktypes = require('krl-stdlib/types')
// Coerce an arbitrary KRL value to a finite number, defaulting to 0.
// ktypes.toNumberOrNull(v) yields null for non-numeric input, so the
// `|| 0` fallback maps null (and numeric 0/NaN) to 0.
// NOTE: the marker-stripped diff had left the superseded
// `return parseFloat(v) || 0` line in place ahead of this one,
// making the intended new return unreachable; only the new
// implementation from the +0.46.1 side is kept here.
function toFloat (v) {
  return ktypes.toNumberOrNull(v) || 0
}

@@ -6,0 +7,0 @@

@@ -20,3 +20,3 @@ var test = require('tape')

try {
cleanIt({name: ' '})
cleanIt({ name: ' ' })
t.fail('should throw')

@@ -30,3 +30,3 @@ } catch (e) {

name: 'foo',
event: {allow: '*'}
event: { allow: '*' }
})

@@ -40,3 +40,3 @@ t.fail('should throw')

name: 'foo',
query: {allow: 'ALL'}
query: { allow: 'ALL' }
})

@@ -51,3 +51,3 @@ t.fail('should throw')

name: 'foo',
event: {allow: ['wat']}
event: { allow: ['wat'] }
})

@@ -61,3 +61,3 @@ t.fail('should throw..')

name: 'foo',
query: {allow: ['wat']}
query: { allow: ['wat'] }
})

@@ -71,3 +71,3 @@ t.fail('should throw..')

name: 'foo',
bar: {allow: ['wat']},
bar: { allow: ['wat'] },
baz: true

@@ -82,3 +82,3 @@ })

name: 'foo',
event: {allow: [{}], wat: []}
event: { allow: [{}], wat: [] }
})

@@ -92,3 +92,3 @@ t.fail('should throw..')

name: 'foo',
query: {allow: [{}], wat: []}
query: { allow: [{}], wat: [] }
})

@@ -102,3 +102,3 @@ t.fail('should throw..')

name: 'foo',
event: {allow: [{wat: 1}]}
event: { allow: [{ wat: 1 }] }
})

@@ -112,3 +112,3 @@ t.fail('should throw..')

name: 'foo',
query: {allow: [{wat: 1}]}
query: { allow: [{ wat: 1 }] }
})

@@ -136,3 +136,3 @@ t.fail('should throw..')

name: 'foo',
event: {allow: [{}]}
event: { allow: [{}] }
}), {

@@ -154,5 +154,5 @@ name: 'foo',

allow: [
{domain: 'one ', type: 'thrEE'},
{domain: ' fIVe '},
{type: '\tsix '}
{ domain: 'one ', type: 'thrEE' },
{ domain: ' fIVe ' },
{ type: '\tsix ' }
]

@@ -165,5 +165,5 @@ }

allow: [
{domain: 'one', type: 'thrEE'},
{domain: 'fIVe'},
{type: 'six'}
{ domain: 'one', type: 'thrEE' },
{ domain: 'fIVe' },
{ type: 'six' }
]

@@ -181,5 +181,5 @@ },

allow: [
{rid: 'one ', name: 'thrEE'},
{rid: ' fIVe '},
{name: '\tsix '}
{ rid: 'one ', name: 'thrEE' },
{ rid: ' fIVe ' },
{ name: '\tsix ' }
]

@@ -196,5 +196,5 @@ }

allow: [
{rid: 'one', name: 'thrEE'},
{rid: 'fIVe'},
{name: 'six'}
{ rid: 'one', name: 'thrEE' },
{ rid: 'fIVe' },
{ name: 'six' }
]

@@ -201,0 +201,0 @@ }

@@ -12,3 +12,3 @@ var test = require('tape')

try {
cleanEvent({eci: 0})
cleanEvent({ eci: 0 })
t.fail('should throw')

@@ -19,3 +19,3 @@ } catch (e) {

try {
cleanEvent({eci: ''})
cleanEvent({ eci: '' })
t.fail('should throw')

@@ -26,3 +26,3 @@ } catch (e) {

try {
cleanEvent({eci: ' '})
cleanEvent({ eci: ' ' })
t.fail('should throw')

@@ -33,3 +33,3 @@ } catch (e) {

try {
cleanEvent({eci: 'eci-1', domain: ''})
cleanEvent({ eci: 'eci-1', domain: '' })
t.fail('should throw')

@@ -40,3 +40,3 @@ } catch (e) {

try {
cleanEvent({eci: 'eci-1', domain: 'foo'})
cleanEvent({ eci: 'eci-1', domain: 'foo' })
t.fail('should throw')

@@ -47,3 +47,3 @@ } catch (e) {

try {
cleanEvent({eci: 'eci-1', domain: 'foo', type: ' '})
cleanEvent({ eci: 'eci-1', domain: 'foo', type: ' ' })
t.fail('should throw')

@@ -68,3 +68,3 @@ } catch (e) {

// attrs - should not be mutable
var attrs = {what: {is: ['this']}}
var attrs = { what: { is: ['this'] } }
var event = cleanEvent({

@@ -94,3 +94,3 @@ eci: 'eci123',

type: ' \t bar ',
attrs: {' foo ': " don't trim these "}
attrs: { ' foo ': " don't trim these " }
}), {

@@ -101,3 +101,3 @@ eci: 'eci123',

type: 'bar',
attrs: {' foo ': " don't trim these "}
attrs: { ' foo ': " don't trim these " }
})

@@ -158,4 +158,4 @@

testAttrs(
{a: null, b: void 0, c: NaN},
{a: null, b: null, c: null},
{ a: null, b: void 0, c: NaN },
{ a: null, b: null, c: null },
"attrs normalize to JSON null's"

@@ -167,3 +167,3 @@ );

arguments,
{'0': 'foo', '1': 'bar'},
{ '0': 'foo', '1': 'bar' },
'non "plain" objects should work as Maps'

@@ -191,3 +191,3 @@ )

testEid([1, 2], '[Array]')
testEid({foo: 'bar'}, '[Map]')
testEid({ foo: 'bar' }, '[Map]')

@@ -194,0 +194,0 @@ testEid(123, '123')

@@ -12,3 +12,3 @@ var test = require('tape')

try {
cleanQuery({eci: 0})
cleanQuery({ eci: 0 })
t.fail('should throw')

@@ -19,3 +19,3 @@ } catch (e) {

try {
cleanQuery({eci: ''})
cleanQuery({ eci: '' })
t.fail('should throw')

@@ -26,3 +26,3 @@ } catch (e) {

try {
cleanQuery({eci: ' '})
cleanQuery({ eci: ' ' })
t.fail('should throw')

@@ -33,3 +33,3 @@ } catch (e) {

try {
cleanQuery({eci: 'eci-1', rid: ''})
cleanQuery({ eci: 'eci-1', rid: '' })
t.fail('should throw')

@@ -40,3 +40,3 @@ } catch (e) {

try {
cleanQuery({eci: 'eci-1', rid: 'foo'})
cleanQuery({ eci: 'eci-1', rid: 'foo' })
t.fail('should throw')

@@ -47,3 +47,3 @@ } catch (e) {

try {
cleanQuery({eci: 'eci-1', rid: 'foo', name: ' '})
cleanQuery({ eci: 'eci-1', rid: 'foo', name: ' ' })
t.fail('should throw')

@@ -67,3 +67,3 @@ } catch (e) {

// args - should not be mutable
var args = {what: {is: ['this']}}
var args = { what: { is: ['this'] } }
var query = cleanQuery({

@@ -90,3 +90,3 @@ eci: 'eci123',

name: ' \t bar ',
args: {' foo ': " don't trim these "}
args: { ' foo ': " don't trim these " }
}), {

@@ -96,3 +96,3 @@ eci: 'eci123',

name: 'bar',
args: {' foo ': " don't trim these "}
args: { ' foo ': " don't trim these " }
})

@@ -150,4 +150,4 @@

testAttrs(
{a: null, b: void 0, c: NaN},
{a: null, b: null, c: null},
{ a: null, b: void 0, c: NaN },
{ a: null, b: null, c: null },
"args normalize to JSON null's"

@@ -159,3 +159,3 @@ );

arguments,
{'0': 'foo', '1': 'bar'},
{ '0': 'foo', '1': 'bar' },
'non "plain" objects should work as Maps'

@@ -162,0 +162,0 @@ )

@@ -64,6 +64,6 @@ var _ = require('lodash')

}
ops.push({type: 'put', key: keyPrefix, value: root})
ops.push({ type: 'put', key: keyPrefix, value: root })
if (_.isEmpty(subPath)) {
ops.push({type: 'put', key: subkeyPrefix, value: val})
ops.push({ type: 'put', key: subkeyPrefix, value: val })
ldb.batch(ops, callback)

@@ -80,3 +80,3 @@ return

data = _.set(data, subPath, val)
ops.push({type: 'put', key: subkeyPrefix, value: data})
ops.push({ type: 'put', key: subkeyPrefix, value: data })
ldb.batch(ops, callback)

@@ -97,3 +97,3 @@ })

}, function (key) {
ops.push({type: 'del', key: key})
ops.push({ type: 'del', key: key })
}, function (err) {

@@ -207,3 +207,3 @@ if (err) return callback(err)

}, function (key) {
dbOps.push({type: 'del', key: key})
dbOps.push({ type: 'del', key: key })
}, function (err) {

@@ -517,18 +517,18 @@ if (err) return callback(err)

keyRange(['pico', id], function (key) {
dbOps.push({type: 'del', key: key})
dbOps.push({ type: 'del', key: key })
}),
keyRange(['pico-eci-list', id], function (key) {
var eci = key[2]
dbOps.push({type: 'del', key: key})
dbOps.push({type: 'del', key: ['channel', eci]})
dbOps.push({ type: 'del', key: key })
dbOps.push({ type: 'del', key: ['channel', eci] })
}),
keyRange(['entvars', id], function (key) {
dbOps.push({type: 'del', key: key})
dbOps.push({ type: 'del', key: key })
}),
keyRange(['pico-ruleset', id], function (key) {
dbOps.push({type: 'del', key: key})
dbOps.push({type: 'del', key: ['ruleset-pico', key[2], key[1]]})
dbOps.push({ type: 'del', key: key })
dbOps.push({ type: 'del', key: ['ruleset-pico', key[2], key[1]] })
}),
keyRange(['pico-children', id], function (key) {
dbOps.push({type: 'del', key: key})
dbOps.push({ type: 'del', key: key })
}),

@@ -544,3 +544,3 @@ function (next) {

keyRange(['pico-children', pico.parent_id, id], function (key) {
dbOps.push({type: 'del', key: key})
dbOps.push({ type: 'del', key: key })
})(next)

@@ -552,3 +552,3 @@ return

if (data.id === id) {
dbOps.push({type: 'del', key: ['root_pico']})
dbOps.push({ type: 'del', key: ['root_pico'] })
}

@@ -601,4 +601,4 @@ next()

var dbOps = [
{type: 'del', key: ['pico-ruleset', picoId, rid]},
{type: 'del', key: ['ruleset-pico', rid, picoId]}
{ type: 'del', key: ['pico-ruleset', picoId, rid] },
{ type: 'del', key: ['ruleset-pico', rid, picoId] }
]

@@ -609,3 +609,3 @@ dbRange(ldb, {

}, function (key) {
dbOps.push({type: 'del', key: key})
dbOps.push({ type: 'del', key: key })
}, function (err) {

@@ -656,4 +656,4 @@ if (err) return callback(err)

var dbOps = [
{type: 'del', key: ['channel', eci]},
{type: 'del', key: ['pico-eci-list', pico.id, eci]}
{ type: 'del', key: ['channel', eci] },
{ type: 'del', key: ['pico-eci-list', pico.id, eci] }
]

@@ -781,5 +781,14 @@ ldb.batch(dbOps, callback)

}
callback(null, _.has(rule.select.state_machine, data && data.state)
? data
: {state: 'start'})
data = data || {}
var states = _.filter(_.flattenDeep([data.state]), function (state) {
return _.has(rule.select.state_machine, state)
})
if (states.length === 0) {
data.state = 'start'
} else if (states.length === 1) {
data.state = states[0]
} else {
data.state = states
}
callback(null, data)
})

@@ -997,3 +1006,3 @@ },

ldb.batch(_.map(toDel, function (key) {
return {type: 'del', key: key}
return { type: 'del', key: key }
}), callback)

@@ -1019,4 +1028,4 @@ })

ldb.batch([
{type: 'put', key: ['scheduled', id], value: val},
{type: 'put', key: ['scheduled_by_at', at, id], value: val}
{ type: 'put', key: ['scheduled', id], value: val },
{ type: 'put', key: ['scheduled_by_at', at, id], value: val }
], function (err) {

@@ -1045,4 +1054,4 @@ if (err) return callback(err)

ldb.batch([
{type: 'del', key: ['scheduled', id]},
{type: 'del', key: ['scheduled_by_at', at, id]}
{ type: 'del', key: ['scheduled', id] },
{ type: 'del', key: ['scheduled_by_at', at, id] }
], callback)

@@ -1060,3 +1069,3 @@ },

ldb.batch([
{type: 'put', key: ['scheduled', id], value: val}
{ type: 'put', key: ['scheduled', id], value: val }
], function (err) {

@@ -1084,7 +1093,7 @@ if (err) return callback(err)

var dbOps = [
{type: 'del', key: ['scheduled', id]}
{ type: 'del', key: ['scheduled', id] }
]
if (_.has(info, 'at')) {
// also remove the `at` index
dbOps.push({type: 'del', key: ['scheduled_by_at', new Date(info.at), id]})
dbOps.push({ type: 'del', key: ['scheduled_by_at', new Date(info.at), id] })
}

@@ -1091,0 +1100,0 @@

@@ -32,4 +32,4 @@ var _ = require('lodash')

rule0: async.apply(db.addRulesetToPico, 'id0', 'rs0'),
chan2: async.apply(db.newChannel, {pico_id: 'id0', name: 'two', type: 't', policy_id: ADMIN_POLICY_ID}),
pico1: async.apply(db.newPico, {parent_id: 'id0'}),
chan2: async.apply(db.newChannel, { pico_id: 'id0', name: 'two', type: 't', policy_id: ADMIN_POLICY_ID }),
pico1: async.apply(db.newPico, { parent_id: 'id0' }),
end_db: async.apply(db.toObj),

@@ -104,5 +104,5 @@ rmpico0: async.apply(db.removePico, 'id0'),

},
'pico-ruleset': {'id0': {'rs0': {on: true}}},
'ruleset-pico': {'rs0': {'id0': {on: true}}},
'pico-children': {'id0': {'id3': true}},
'pico-ruleset': { 'id0': { 'rs0': { on: true } } },
'ruleset-pico': { 'rs0': { 'id0': { on: true } } },
'pico-children': { 'id0': { 'id3': true } },
'pico-eci-list': {

@@ -161,3 +161,3 @@ 'id0': {

t.deepEquals(data.start_db, {})
t.deepEquals(data.store, {rid: rid, hash: hash})
t.deepEquals(data.store, { rid: rid, hash: hash })
t.deepEquals(data.findRulesetsByURL, [{

@@ -247,17 +247,17 @@ rid: rid,

}),
async.apply(db.newChannel, {pico_id: 'foo', name: 'bar', type: 'baz'}),
async.apply(db.newChannel, { pico_id: 'foo', name: 'bar', type: 'baz' }),
async.apply(db.newPico, {}),
tstRoot(function (err, rPico) {
t.notOk(err)
t.deepEquals(rPico, {id: 'id1', parent_id: null, admin_eci: 'id2'})
t.deepEquals(rPico, { id: 'id1', parent_id: null, admin_eci: 'id2' })
}),
async.apply(db.newPico, {parent_id: 'id1'}),
async.apply(db.newPico, { parent_id: 'id1' }),
tstRoot(function (err, rPico) {
t.notOk(err)
t.deepEquals(rPico, {id: 'id1', parent_id: null, admin_eci: 'id2'})
t.deepEquals(rPico, { id: 'id1', parent_id: null, admin_eci: 'id2' })
}),
async.apply(db.newPico, {parent_id: null}),
async.apply(db.newPico, { parent_id: null }),
tstRoot(function (err, rPico) {
t.notOk(err)
t.deepEquals(rPico, {id: 'id5', parent_id: null, admin_eci: 'id6'})
t.deepEquals(rPico, { id: 'id5', parent_id: null, admin_eci: 'id6' })
})

@@ -359,3 +359,3 @@ ], t.end)

type: type,
attributes: {some: 'attr'}
attributes: { some: 'attr' }
}, callback)

@@ -400,3 +400,3 @@ }

at: new Date('Feb 22, 2222'),
event: {domain: 'foobar', type: 'foo', attributes: {some: 'attr'}}
event: { domain: 'foobar', type: 'foo', attributes: { some: 'attr' } }
})

@@ -406,3 +406,3 @@ t.deepEquals(data.at1, {

at: new Date('Feb 23, 2222'),
event: {domain: 'foobar', type: 'bar', attributes: {some: 'attr'}}
event: { domain: 'foobar', type: 'bar', attributes: { some: 'attr' } }
})

@@ -412,3 +412,3 @@ t.deepEquals(data.at2, {

at: new Date('Feb 2, 2222'),
event: {domain: 'foobar', type: 'baz', attributes: {some: 'attr'}}
event: { domain: 'foobar', type: 'baz', attributes: { some: 'attr' } }
})

@@ -450,3 +450,3 @@

type: type,
attributes: {some: 'attr'}
attributes: { some: 'attr' }
}, callback)

@@ -477,3 +477,3 @@ }

timespec: '*/5 * * * * *',
event: {domain: 'foobar', type: 'foo', attributes: {some: 'attr'}}
event: { domain: 'foobar', type: 'foo', attributes: { some: 'attr' } }
})

@@ -483,9 +483,9 @@ t.deepEquals(data.rep1, {

timespec: '* */5 * * * *',
event: {domain: 'foobar', type: 'bar', attributes: {some: 'attr'}}
event: { domain: 'foobar', type: 'bar', attributes: { some: 'attr' } }
})
t.deepEquals(data.mid_db, {scheduled: {
t.deepEquals(data.mid_db, { scheduled: {
id0: data.rep0,
id1: data.rep1
}})
} })

@@ -519,8 +519,8 @@ t.deepEquals(data.list, [

t.deepEquals(data.db_before, {
entvars: {pico0: {rid0: {
foo: {type: 'String', value: 'val0'},
bar: {type: 'String', value: 'val1'}
}}},
'pico-ruleset': {'pico0': {'rid0': {on: true}}},
'ruleset-pico': {'rid0': {'pico0': {on: true}}}
entvars: { pico0: { rid0: {
foo: { type: 'String', value: 'val0' },
bar: { type: 'String', value: 'val1' }
} } },
'pico-ruleset': { 'pico0': { 'rid0': { on: true } } },
'ruleset-pico': { 'rid0': { 'pico0': { on: true } } }
})

@@ -540,4 +540,4 @@

c4_p0: async.apply(db.newChannel, {pico_id: 'id0', name: 'four', type: 't'}),
c5_p1: async.apply(db.newChannel, {pico_id: 'id2', name: 'five', type: 't'}),
c4_p0: async.apply(db.newChannel, { pico_id: 'id0', name: 'four', type: 't' }),
c5_p1: async.apply(db.newChannel, { pico_id: 'id2', name: 'five', type: 't' }),

@@ -572,4 +572,4 @@ get_c2: async.apply(db.getPicoIDByECI, 'id1'),

c4_p0: async.apply(db.newChannel, {pico_id: 'id0', name: 'four', type: 't4', policy_id: ADMIN_POLICY_ID}),
c5_p1: async.apply(db.newChannel, {pico_id: 'id2', name: 'five', type: 't5', policy_id: ADMIN_POLICY_ID}),
c4_p0: async.apply(db.newChannel, { pico_id: 'id0', name: 'four', type: 't4', policy_id: ADMIN_POLICY_ID }),
c5_p1: async.apply(db.newChannel, { pico_id: 'id2', name: 'five', type: 't5', policy_id: ADMIN_POLICY_ID }),

@@ -745,8 +745,8 @@ list0: async.apply(db.listChannels, 'id0'),

async.apply(db.newPico, {}), // id0 and channel id1
async.apply(db.newPico, {parent_id: 'id0'}), // id2 + id3
async.apply(db.newPico, {parent_id: 'id0'}), // id4 + id5
async.apply(db.newPico, {parent_id: 'id0'}), // id6 + id7
async.apply(db.newPico, { parent_id: 'id0' }), // id2 + id3
async.apply(db.newPico, { parent_id: 'id0' }), // id4 + id5
async.apply(db.newPico, { parent_id: 'id0' }), // id6 + id7
async.apply(db.newPico, {parent_id: 'id6'}), // id8 + id9
async.apply(db.newPico, {parent_id: 'id6'}), // id10 + id11
async.apply(db.newPico, { parent_id: 'id6' }), // id8 + id9
async.apply(db.newPico, { parent_id: 'id6' }), // id10 + id11

@@ -831,3 +831,3 @@ assertParent('id0', null),

await db.newChannelYieldable({pico_id: 'id0', name: 'two', type: 't'})
await db.newChannelYieldable({ pico_id: 'id0', name: 'two', type: 't' })
await assertECIs('id0', ['id1', 'id2'])

@@ -844,3 +844,3 @@

await db.newPicoYieldable({parent_id: 'id0'})
await db.newPicoYieldable({ parent_id: 'id0' })
await assertECIs('id3', ['id4'])

@@ -867,5 +867,5 @@

await put('foo', null, {a: 3, b: 4})
await put('foo', null, { a: 3, b: 4 })
data = await get('foo', null)
t.deepEquals(data, {a: 3, b: 4})
t.deepEquals(data, { a: 3, b: 4 })
t.ok(ktypes.isMap(data))

@@ -877,8 +877,8 @@

await put('foo', null, {one: 11, two: 22})
await put('foo', null, { one: 11, two: 22 })
data = await get('foo', null)
t.deepEquals(data, {one: 11, two: 22})
await put('foo', null, {one: 11})
t.deepEquals(data, { one: 11, two: 22 })
await put('foo', null, { one: 11 })
data = await get('foo', null)
t.deepEquals(data, {one: 11})
t.deepEquals(data, { one: 11 })

@@ -888,21 +888,21 @@ data = await get('foo', 'one')

await put('foo', ['bar', 'baz'], {qux: 1})
await put('foo', ['bar', 'baz'], { qux: 1 })
data = await get('foo', null)
t.deepEquals(data, {one: 11, bar: {baz: {qux: 1}}})
t.deepEquals(data, { one: 11, bar: { baz: { qux: 1 } } })
await put('foo', ['bar', 'asdf'], true)
data = await get('foo', null)
t.deepEquals(data, {one: 11,
t.deepEquals(data, { one: 11,
bar: {
baz: {qux: 1},
baz: { qux: 1 },
asdf: true
}})
} })
await put('foo', ['bar', 'baz', 'qux'], 'wat?')
data = await get('foo', null)
t.deepEquals(data, {one: 11,
t.deepEquals(data, { one: 11,
bar: {
baz: {qux: 'wat?'},
baz: { qux: 'wat?' },
asdf: true
}})
} })
data = await get('foo', ['bar', 'baz', 'qux'])

@@ -913,7 +913,7 @@ t.deepEquals(data, 'wat?')

data = await get('foo', null)
t.deepEquals(data, {bar: {baz: {qux: 'wat?'}, asdf: true}})
t.deepEquals(data, { bar: { baz: { qux: 'wat?' }, asdf: true } })
await del('foo', ['bar', 'asdf'])
data = await get('foo', null)
t.deepEquals(data, {bar: {baz: {qux: 'wat?'}}})
t.deepEquals(data, { bar: { baz: { qux: 'wat?' } } })

@@ -973,13 +973,13 @@ await del('foo', ['bar', 'baz', 'qux'])

await put('foo', ['wat'], 'da')
await tst('foo', 'Map', {0: 'aaa', 1: 'bbb', wat: 'da'}, '`foo` is now a map')
await tst('foo', 'Map', { 0: 'aaa', 1: 'bbb', wat: 'da' }, '`foo` is now a map')
// once a map, always a map
await del('foo', ['wat'])
await tst('foo', 'Map', {0: 'aaa', 1: 'bbb'}, '`foo` is still a map')
await tst('foo', 'Map', { 0: 'aaa', 1: 'bbb' }, '`foo` is still a map')
await put('foo', [2], 'ccc')
await tst('foo', 'Map', {0: 'aaa', 1: 'bbb', 2: 'ccc'}, '`foo` is still a map')
await tst('foo', 'Map', { 0: 'aaa', 1: 'bbb', 2: 'ccc' }, '`foo` is still a map')
// inferred as map if it's a string
await put('bar', ['0'], 'aaa')
await tst('bar', 'Map', {0: 'aaa'}, '`bar` is a map since the first key was a string')
await tst('bar', 'Map', { 0: 'aaa' }, '`bar` is a map since the first key was a string')

@@ -992,3 +992,3 @@ // inferred as an Array b/c the key is a positive integer

await put('baz', ['1'], 'bbb')
await tst('baz', 'Map', {1: 'bbb', 2: 'ccc'}, '`baz` is now a Map')
await tst('baz', 'Map', { 1: 'bbb', 2: 'ccc' }, '`baz` is now a Map')

@@ -995,0 +995,0 @@ // initialized as array should db dump as an array

@@ -416,3 +416,3 @@ var _ = require('lodash')

err2.orig_error = err
emitter.emit('error', err2, {rid: rid})
emitter.emit('error', err2, { rid: rid })
// disable the ruleset since it's broken

@@ -462,3 +462,3 @@ db.disableRuleset(rid, next)

err2.orig_error = err
emitter.emit('error', err2, {rid: rid})
emitter.emit('error', err2, { rid: rid })
})

@@ -484,3 +484,3 @@ return getRidOrder()

err2.orig_error = err
emitter.emit('error', err2, {rid: rid})
emitter.emit('error', err2, { rid: rid })
// disable the ruleset since it's broken

@@ -522,3 +522,3 @@ db.disableRuleset(rid, next)

onError: function (err) {
var info = {scheduler: true}
var info = { scheduler: true }
emitter.emit('error', err, info)

@@ -535,3 +535,3 @@ },

if (err) return callback(err)
core.registerRuleset(src, {url: url}, callback)
core.registerRuleset(src, { url: url }, callback)
})

@@ -650,3 +650,3 @@ }

db.enableRuleset(data.hash, function (err) {
next(err, {rs: rs, hash: data.hash})
next(err, { rs: rs, hash: data.hash })
})

@@ -653,0 +653,0 @@ })

@@ -24,3 +24,3 @@ var dbRange = require('../dbRange')

dbOps.push({type: 'del', key: data.key})
dbOps.push({ type: 'del', key: data.key })
}, function (err) {

@@ -27,0 +27,0 @@ if (err) return callback(err)

@@ -25,3 +25,3 @@ var dbRange = require('../dbRange')

dbOps.push({type: 'del', key: data.key})
dbOps.push({ type: 'del', key: data.key })
}, function (err) {

@@ -28,0 +28,0 @@ if (err) return callback(err)

@@ -23,3 +23,3 @@ var dbRange = require('../dbRange')

dbOps.push({type: 'del', key: data.key})
dbOps.push({ type: 'del', key: data.key })
}, function (err) {

@@ -26,0 +26,0 @@ if (err) return callback(err)

@@ -33,4 +33,4 @@ var _ = require('lodash')

dbOps.push({type: 'del', key: data.key})
dbOps.push({type: 'del', key: ['eci-to-pico_id', eci]})
dbOps.push({ type: 'del', key: data.key })
dbOps.push({ type: 'del', key: ['eci-to-pico_id', eci] })
}, function (err) {

@@ -37,0 +37,0 @@ if (err) return callback(err)

@@ -31,3 +31,3 @@ var dbRange = require('../dbRange')

dbOps.push({type: 'del', key: data.key})
dbOps.push({ type: 'del', key: data.key })
}, function (err) {

@@ -34,0 +34,0 @@ if (err) return callback(err)

@@ -16,3 +16,3 @@ var _ = require('lodash')

var indexType = ktypes.typeOf(val)
var rootValue = {type: indexType}
var rootValue = { type: indexType }
switch (indexType) {

@@ -19,0 +19,0 @@ case 'Null':

@@ -31,3 +31,3 @@ var _ = require('lodash')

put('v0', {foo: 'bar', baz: 1})
put('v0', { foo: 'bar', baz: 1 })
put('v1', [1, 2, 3, 'ok'])

@@ -60,3 +60,3 @@ put('v2', 'hi')

type: 'Map',
value: {foo: 'bar', baz: 1}
value: { foo: 'bar', baz: 1 }
})

@@ -63,0 +63,0 @@ t.deepEquals(entvars.v1, {

@@ -20,4 +20,4 @@ var _ = require('lodash')

name: 'admin channel policy',
event: {allow: [{}]},
query: {allow: [{}]}
event: { allow: [{}] },
query: { allow: [{}] }
}

@@ -24,0 +24,0 @@ })

@@ -30,3 +30,3 @@ var _ = require('lodash')

dbOps.push({type: 'del', key: data.key})
dbOps.push({ type: 'del', key: data.key })
}, function (err) {

@@ -33,0 +33,0 @@ if (err) return callback(err)

@@ -29,4 +29,4 @@ var _ = require('lodash')

}
dbOps.push({type: 'put', key: newKey, value: data.value})
dbOps.push({type: 'del', key: data.key})
dbOps.push({ type: 'put', key: newKey, value: data.value })
dbOps.push({ type: 'del', key: data.key })
}, next)

@@ -33,0 +33,0 @@ }, function (err) {

@@ -25,3 +25,3 @@ var _ = require('lodash')

src: 'ruleset ' + rid + '{}',
meta: {url: testRulesets[rid].url}
meta: { url: testRulesets[rid].url }
}

@@ -28,0 +28,0 @@ })

@@ -130,5 +130,5 @@ var _ = require('lodash')

if (url === 'http://foo.bar/baz/qux.krl') {
return callback(null, [{rid: 'found'}])
return callback(null, [{ rid: 'found' }])
} else if (url === 'file:///too/many.krl') {
return callback(null, [{rid: 'a'}, {rid: 'b'}, {rid: 'c'}])
return callback(null, [{ rid: 'a' }, { rid: 'b' }, { rid: 'c' }])
}

@@ -241,3 +241,3 @@ callback(null, [])

await tstErr(
engine.def.unregisterRuleset({}, {rid: {}}),
engine.def.unregisterRuleset({}, { rid: {} }),
'TypeError: engine:unregisterRuleset was given [Map] instead of a rid string or array'

@@ -267,3 +267,3 @@ )

var desc = await descRID(ctx, {rid: 'io.picolabs.hello_world'})
var desc = await descRID(ctx, { rid: 'io.picolabs.hello_world' })

@@ -306,3 +306,3 @@ var isIsoString = function (str) {

t.equals(await descRID(ctx, {rid: 'not.found'}), void 0)
t.equals(await descRID(ctx, { rid: 'not.found' }), void 0)
})

@@ -357,5 +357,5 @@

await newPico({pico_id: 'id0'}, [])// id2
await newPico({ pico_id: 'id0' }, [])// id2
await newPico({}, ['id0'])// id4
await newPico({pico_id: 'id2'}, [])// id6
await newPico({ pico_id: 'id2' }, [])// id6

@@ -378,7 +378,7 @@ t.equals(await getParent({}, ['id0']), null)

// fallback on ctx.pico_id
t.equals(await getParent({pico_id: 'id6'}, []), 'id2')
t.equals(await getAdminECI({pico_id: 'id6'}, []), 'id7')
t.deepEquals(await listChildren({pico_id: 'id2'}, []), ['id6'])
t.equals(await removePico({pico_id: 'id6'}, []), true)
t.equals(await removePico({pico_id: 'id6'}, []), false)
t.equals(await getParent({ pico_id: 'id6' }, []), 'id2')
t.equals(await getAdminECI({ pico_id: 'id6' }, []), 'id7')
t.deepEquals(await listChildren({ pico_id: 'id2' }, []), ['id6'])
t.equals(await removePico({ pico_id: 'id6' }, []), true)
t.equals(await removePico({ pico_id: 'id6' }, []), false)
strictDeepEquals(t, await listChildren({}, ['id2']), [])

@@ -388,3 +388,3 @@

var assertInvalidPicoID = function (genfn, id, expected) {
return tstErr(genfn({pico_id: id}, []), expected)
return tstErr(genfn({ pico_id: id }, []), expected)
}

@@ -399,4 +399,4 @@

t.equals(await getAdminECI({}, ['id404']), void 0)
t.equals(await getParent({pico_id: 'id404'}, []), void 0)
t.equals(await listChildren({pico_id: 'id404'}, []), void 0)
t.equals(await getParent({ pico_id: 'id404' }, []), void 0)
t.equals(await listChildren({ pico_id: 'id404' }, []), void 0)
await assertInvalidPicoID(newPico, 'id404', 'NotFoundError: Pico not found: id404')

@@ -425,3 +425,3 @@ t.equals(await removePico({}, ['id404']), false)

await tstErr(newPolicy(), 'TypeError: Policy definition should be a Map, but was Null')
await tstErr(newPolicy({name: 1}), 'Error: missing `policy.name`')
await tstErr(newPolicy({ name: 1 }), 'Error: missing `policy.name`')

@@ -431,4 +431,4 @@ var pAdmin = {

name: 'admin channel policy',
event: {allow: [{}]},
query: {allow: [{}]}
event: { allow: [{}] },
query: { allow: [{}] }
}

@@ -438,8 +438,8 @@

var pFoo = await newPolicy({name: 'foo'})
var pFoo = await newPolicy({ name: 'foo' })
t.deepEquals(pFoo, {
id: 'id2',
name: 'foo',
event: {deny: [], allow: []},
query: {deny: [], allow: []}
event: { deny: [], allow: [] },
query: { deny: [], allow: [] }
})

@@ -451,3 +451,3 @@

name: 'bar',
event: {allow: [{domain: 'system'}]}
event: { allow: [{ domain: 'system' }] }
})

@@ -457,4 +457,4 @@ t.deepEquals(pBar, {

name: 'bar',
event: {deny: [], allow: [{domain: 'system'}]},
query: {deny: [], allow: []}
event: { deny: [], allow: [{ domain: 'system' }] },
query: { deny: [], allow: [] }
})

@@ -550,10 +550,10 @@

// fallback on ctx.pico_id
t.deepEquals(await listChannels({pico_id: 'id0'}, []), [
t.deepEquals(await listChannels({ pico_id: 'id0' }, []), [
mkChan('id0', 'id1', 'admin', 'secret')
])
t.deepEquals(await newChannel({pico_id: 'id0'}, {'name': 'a', 'type': 'b'}), mkChan('id0', 'id3', 'a', 'b'))
t.deepEquals(await newChannel({ pico_id: 'id0' }, { 'name': 'a', 'type': 'b' }), mkChan('id0', 'id3', 'a', 'b'))
// report error on invalid pico_id
var assertInvalidPicoID = function (genfn, id, expected) {
return tstErr(genfn({pico_id: id}, {'name': 'a', 'type': 'b'}), expected)
return tstErr(genfn({ pico_id: id }, { 'name': 'a', 'type': 'b' }), expected)
}

@@ -571,3 +571,3 @@

var pFoo = await newPolicy({name: 'foo'})
var pFoo = await newPolicy({ name: 'foo' })
t.deepEquals(await newChannel({}, ['id0', 'a', 'b', pFoo.id]), mkChan('id0', 'id5', 'a', 'b', pFoo.id))

@@ -587,3 +587,3 @@ })

t.deepEquals(await listRIDs({pico_id: 'id0'}, []), [
t.deepEquals(await listRIDs({ pico_id: 'id0' }, []), [
'io.picolabs.engine'

@@ -609,7 +609,7 @@ ])

await tstErr(
installRS({'pico_id': 'id0'}, {'url': {}}),
installRS({ 'pico_id': 'id0' }, { 'url': {} }),
'TypeError: engine:installRuleset was given [Map] instead of a url string',
'wrong url type'
)
t.deepEquals(await listRIDs({pico_id: 'id0'}, []), [
t.deepEquals(await listRIDs({ pico_id: 'id0' }, []), [
'io.picolabs.engine',

@@ -635,3 +635,3 @@ 'io.picolabs.hello_world'

)
t.deepEquals(await listRIDs({pico_id: 'id0'}, []), [
t.deepEquals(await listRIDs({ pico_id: 'id0' }, []), [
'io.picolabs.hello_world'

@@ -641,9 +641,9 @@ ])

// fallback on ctx.pico_id
t.equals(await uninstallRID({pico_id: 'id0'}, {rid: 'io.picolabs.hello_world'}), void 0)
strictDeepEquals(t, await listRIDs({pico_id: 'id0'}, []), [])
t.equals(await installRS({pico_id: 'id0'}, {rid: 'io.picolabs.hello_world'}), 'io.picolabs.hello_world')
t.equals(await uninstallRID({ pico_id: 'id0' }, { rid: 'io.picolabs.hello_world' }), void 0)
strictDeepEquals(t, await listRIDs({ pico_id: 'id0' }, []), [])
t.equals(await installRS({ pico_id: 'id0' }, { rid: 'io.picolabs.hello_world' }), 'io.picolabs.hello_world')
// report error on invalid pico_id
var assertInvalidPicoID = function (genfn, id, expected) {
return tstErr(genfn({pico_id: id}, {rid: 'io.picolabs.hello_world'}), expected)
return tstErr(genfn({ pico_id: id }, { rid: 'io.picolabs.hello_world' }), expected)
}

@@ -655,3 +655,3 @@

await assertInvalidPicoID(uninstallRID, 'id404', 'NotFoundError: Pico not found: id404')
t.deepEquals(await listRIDs({pico_id: 'id404'}, []), void 0)
t.deepEquals(await listRIDs({ pico_id: 'id404' }, []), void 0)
})

@@ -658,0 +658,0 @@

@@ -36,3 +36,3 @@ var _ = require('lodash')

url: url,
headers: {'content-type': 'application/json'},
headers: { 'content-type': 'application/json' },
body: ktypes.encode(event.attrs)

@@ -39,0 +39,0 @@ }, function (err, res, body) {

@@ -11,3 +11,3 @@ // var _ = require("lodash");

t.equals(
await kevent.def.attr({event: {attrs: {foo: 'bar'}}}, ['foo']),
await kevent.def.attr({ event: { attrs: { foo: 'bar' } } }, ['foo']),
'bar'

@@ -19,3 +19,3 @@ )

t.equals(
await kevent.def.attr({event: {attrs: {foo: 'bar'}}}, ['baz']),
await kevent.def.attr({ event: { attrs: { foo: 'bar' } } }, ['baz']),
null

@@ -55,3 +55,3 @@ )

type: 'some-t',
attrs: {foo: {}, bar: [], baz: {'q': function () {}}}
attrs: { foo: {}, bar: [], baz: { 'q': function () {} } }
},

@@ -58,0 +58,0 @@ host: host

@@ -58,3 +58,3 @@ var _ = require('lodash')

resp = await doHttp('get', [url, {a: 1}])
resp = await doHttp('get', [url, { a: 1 }])
resp.content = JSON.parse(resp.content)

@@ -83,7 +83,7 @@ t.deepEquals(resp, {

url: url,
qs: {'baz': 'qux'},
headers: {'some': 'header'},
qs: { 'baz': 'qux' },
headers: { 'some': 'header' },
body: 'some post data',
json: {'json': "get's overriden by raw body"},
form: {'form': "get's overriden by raw body"},
json: { 'json': "get's overriden by raw body" },
form: { 'form': "get's overriden by raw body" },
auth: {

@@ -120,5 +120,5 @@ username: 'bob',

url: url,
qs: {'baz': 'qux'},
headers: {'some': 'header'},
form: {formkey: 'formval', foo: ['bar', 'baz']}
qs: { 'baz': 'qux' },
headers: { 'some': 'header' },
form: { formkey: 'formval', foo: ['bar', 'baz'] }
})

@@ -151,5 +151,5 @@ resp.content = JSON.parse(resp.content)

url: url,
qs: {'baz': 'qux'},
headers: {'some': 'header'},
json: {formkey: 'formval', foo: ['bar', 'baz']}
qs: { 'baz': 'qux' },
headers: { 'some': 'header' },
json: { formkey: 'formval', foo: ['bar', 'baz'] }
})

@@ -221,4 +221,4 @@ resp.content = JSON.parse(resp.content)

var numMethods = _.size(methods)
var errArg = {parseJSON: true}
var typeErrArg = {url: NaN}
var errArg = { parseJSON: true }
var typeErrArg = { url: NaN }

@@ -225,0 +225,0 @@ var i

@@ -31,3 +31,3 @@ var _ = require('lodash')

}
return {var_name: ktypes.toString(id)}
return { var_name: ktypes.toString(id) }
}

@@ -34,0 +34,0 @@

@@ -6,3 +6,3 @@ var _ = require('lodash')

var supportedHashFns = crypto.getHashes()
var hashAlgorithms = Object.freeze(crypto.getHashes())

@@ -35,29 +35,73 @@ module.exports = function (core) {

hashFunctions: mkKRLfn([
hashAlgorithms: hashAlgorithms,
hashFunctions: mkKRLfn([// DEPRECATED
], function (ctx, args, callback) {
callback(null, supportedHashFns)
ctx.log('warn', 'math:hashFunctions() is DEPRECATED use math:hashAlgorithms instead')
callback(null, hashAlgorithms)
}),
hash: mkKRLfn([
'hashFn',
'toHash'
'algorithm',
'str',
'encoding'
], function (ctx, args, callback) {
if (!_.has(args, 'hashFn')) {
return callback(new Error('math:hash needs a hashFn string'))
if (!_.has(args, 'algorithm')) {
return callback(new Error('math:hash needs a algorithm string'))
}
if (!_.has(args, 'toHash')) {
return callback(new Error('math:hash needs a toHash string'))
if (!_.has(args, 'str')) {
return callback(new Error('math:hash needs a str string'))
}
if (!_.includes(supportedHashFns, args.hashFn)) {
if (ktypes.isString(args.hashFn)) {
callback(new Error("math:hash doesn't recognize the hash algorithm " + args.hashFn))
if (!_.includes(hashAlgorithms, args.algorithm)) {
if (ktypes.isString(args.algorithm)) {
return callback(new Error("math:hash doesn't recognize the hash algorithm " + args.algorithm))
} else {
callback(new TypeError('math:hash was given ' + ktypes.toString(args.hashFn) + ' instead of a hashFn string'))
return callback(new TypeError('math:hash was given ' + ktypes.toString(args.algorithm) + ' instead of a algorithm string'))
}
}
var encoding = args.encoding || 'hex'
if (!_.includes(['hex', 'base64'], encoding)) {
return callback(new Error('math:hash encoding must be "hex" or "base64" but was ' + encoding))
}
var str = ktypes.toString(args.toHash)
var hash = crypto.createHash(args.hashFn).update(str)
var str = ktypes.toString(args.str)
var hash = crypto.createHash(args.algorithm)
hash.update(str)
callback(null, hash.digest('hex'))
callback(null, hash.digest(encoding))
}),
hmac: mkKRLfn([
'algorithm',
'key',
'message',
'encoding'
], function (ctx, args, callback) {
if (!_.has(args, 'algorithm')) {
return callback(new Error('math:hmac needs a algorithm string'))
}
if (!_.has(args, 'key')) {
return callback(new Error('math:hmac needs a key string'))
}
if (!_.has(args, 'message')) {
return callback(new Error('math:hmac needs a message string'))
}
if (!_.includes(hashAlgorithms, args.algorithm)) {
if (ktypes.isString(args.algorithm)) {
return callback(new Error("math:hmac doesn't recognize the hash algorithm " + args.algorithm))
} else {
return callback(new TypeError('math:hmac was given ' + ktypes.toString(args.algorithm) + ' instead of a algorithm string'))
}
}
var encoding = args.encoding || 'hex'
if (!_.includes(['hex', 'base64'], encoding)) {
return callback(new Error('math:hmac encoding must be "hex" or "base64" but was ' + encoding))
}
var key = ktypes.toString(args.key)
var message = ktypes.toString(args.message)
var hmac = crypto.createHmac(args.algorithm, key)
hmac.update(message)
callback(null, hmac.digest(encoding))
})

@@ -64,0 +108,0 @@

@@ -19,3 +19,3 @@ var test = require('tape')

t.ok(await kmath.hashFunctions({}, []), 'hashFunctions should return something')
t.ok(Array.isArray(kmath.hashAlgorithms))

@@ -28,2 +28,7 @@ t.equals(

t.equals(
await kmath.hash({}, ['sha256', 'hello', 'base64']),
'LPJNul+wow4m6DsqxbninhsWHlwfp0JecwQzYpOLmCQ=',
'sha256 "hello" base64'
)
t.equals(
await kmath.hash({}, ['sha256', null]),

@@ -39,7 +44,18 @@ await kmath.hash({}, ['sha256', 'null']),

await terr('hash', {}, [], 'Error: math:hash needs a hashFn string')
await terr('hash', {}, [0], 'Error: math:hash needs a toHash string')
await terr('hash', {}, [0, null], 'TypeError: math:hash was given 0 instead of a hashFn string')
await terr('hash', {}, [], 'Error: math:hash needs a algorithm string')
await terr('hash', {}, [0], 'Error: math:hash needs a str string')
await terr('hash', {}, [0, null], 'TypeError: math:hash was given 0 instead of a algorithm string')
await terr('hash', {}, ['0', null], "Error: math:hash doesn't recognize the hash algorithm 0")
t.equals(
await kmath.hmac({}, ['sha256', 'a secret', 'some message']),
'86de43245b44531ac38b6a6a691996287d932a8dea03bc69b193b90caf48ff53'
)
t.equals(
await kmath.hmac({}, ['sha256', 'a secret', 'some message', 'base64']),
'ht5DJFtEUxrDi2pqaRmWKH2TKo3qA7xpsZO5DK9I/1M='
)
await terr('hmac', {}, [], 'Error: math:hmac needs a algorithm string')
await terr('hmac', {}, ['foo', '', ''], 'Error: math:hmac doesn\'t recognize the hash algorithm foo')
}()).then(t.end).catch(t.end)
})

@@ -13,3 +13,3 @@ var test = require('tape')

var now1 = await time.now(ctx, [
{tz: 'Australia/Sydney'}
{ tz: 'Australia/Sydney' }
])

@@ -56,23 +56,23 @@ t.ok(/^\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\d\.\d\d\dZ$/.test(now0))

t.equals(
await time['add'](ctx, ['2017-01-01', {years: -2017}]),
await time['add'](ctx, ['2017-01-01', { years: -2017 }]),
'0000-01-01T00:00:00.000Z'
)
t.equals(
await time['add'](ctx, ['2017-01-01', {months: -22}]),
await time['add'](ctx, ['2017-01-01', { months: -22 }]),
'2015-03-01T00:00:00.000Z'
)
t.equals(
await time['add'](ctx, ['2010-08-08', {weeks: 5}]),
await time['add'](ctx, ['2010-08-08', { weeks: 5 }]),
'2010-09-12T00:00:00.000Z'
)
t.equals(
await time['add'](ctx, ['2010-08-08T05:00:00', {hours: 3}]),
await time['add'](ctx, ['2010-08-08T05:00:00', { hours: 3 }]),
'2010-08-08T08:00:00.000Z'
)
t.equals(
await time['add'](ctx, ['2017-01-01', {days: -10}]),
await time['add'](ctx, ['2017-01-01', { days: -10 }]),
'2016-12-22T00:00:00.000Z'
)
t.equals(
await time['add'](ctx, ['2017-01-01', {minutes: 2, seconds: 90}]),
await time['add'](ctx, ['2017-01-01', { minutes: 2, seconds: 90 }]),
'2017-01-01T00:03:30.000Z'

@@ -82,11 +82,11 @@ )

t.equals(
await time['add'](ctx, [1967342, {'seconds': 'five'}]),
await time['add'](ctx, [1967342, { 'seconds': 'five' }]),
'1967-12-08T00:00:00.000Z'
)
t.equals(
await time['add'](ctx, [1967342, {'secondz': 5}]),
await time['add'](ctx, [1967342, { 'secondz': 5 }]),
'1967-12-08T00:00:00.000Z'
)
await terr('add', ctx, {'spec': {}}, 'Error: time:add needs a date string')
await terr('add', ctx, { 'spec': {} }, 'Error: time:add needs a date string')
await terr('add', ctx, [67342], 'Error: time:add needs a spec map')

@@ -93,0 +93,0 @@ await terr('add', ctx, [67342, 5], 'TypeError: time:add was given 67342 instead of a date string')

@@ -40,3 +40,3 @@ var _ = require('lodash')

var str = 'post'
var map = {"don't": 'mutate'}
var map = { "don't": 'mutate' }

@@ -51,3 +51,3 @@ var errMsg1 = 'Error: send_directive needs a name string'

await testErr([], errMsg1)
await testErr({'options': null}, errMsg1)
await testErr({ 'options': null }, errMsg1)
await testErr([map], errMsg2)

@@ -54,0 +54,0 @@ await testErr([map, map], errMsg2)

@@ -92,3 +92,3 @@ var _ = require('lodash')

job: conf.is_test_mode
? {handler: handler, cancel: _.noop}
? { handler: handler, cancel: _.noop }
: schedule.scheduleJob(timespec, handler)

@@ -95,0 +95,0 @@ }

@@ -28,20 +28,42 @@ var _ = require('lodash')

// only run the function if the domain and type match
var domain = ctx.event.domain
var type = ctx.event.type
if (_.get(rule, ['select', 'graph', domain, type, exp]) !== true) {
return false
var ee = _.get(rule, ['select', 'graph', ctx.event.domain, ctx.event.type, exp])
if (ee === true) {
return true
}
return runKRL(rule.select.eventexprs[exp], ctx, aggregator, getAttrString, setting)
if (_.isFunction(ee)) {
return runKRL(ee, ctx, aggregator, getAttrString, setting)
}
return false
}
async function getNextState (ctx, rule, currState, aggregator, setting) {
var stm = rule.select.state_machine[currState]
var i
for (i = 0; i < stm.length; i++) {
if (await evalExpr(ctx, rule, aggregator, stm[i][0], setting)) {
// found a match
return stm[i][1]
currState = _.flattenDeep([currState])
let matches = []
// run every event expression that can match, and collect a unique list of next states
for (let cstate of currState) {
let transitions = rule.select.state_machine[cstate]
for (let transition of transitions) {
let expr = transition[0]
let state = transition[1]
if (await evalExpr(ctx, rule, aggregator, expr, setting)) {
// found a match
if (matches.indexOf(state) < 0) {
matches.push(state)
}
}
}
}
if (_.includes(matches, 'end')) {
return 'end'
}
if (matches.length === 1) {
return matches[0]
}
if (matches.length > 1) {
// This can happen when two or more expressions match and reach different states. We want to "join" them at runtime
return matches
}
if (currState.length === 1) {
currState = currState[0]
}
if (currState === 'end') {

@@ -48,0 +70,0 @@ return 'start'

Sorry, the diff of this file is too big to display

SocketSocket SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap
  • Changelog

Packages

npm

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc